🐛 Bug fixes and optimization

2024-08-20 22:55:58 +08:00
parent 37c47f9839
commit 7a8fa116d3
5 changed files with 59 additions and 36 deletions

View File

@@ -1,10 +1,12 @@
package api

import (
+	"encoding/json"
+	"fmt"
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/database"
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/gap"
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/server/exts"
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/services"
	"github.com/gofiber/fiber/v2"
	"github.com/spf13/viper"
@@ -19,12 +21,17 @@ func createAttachmentMultipartPlaceholder(c *fiber.Ctx) error {
	var data struct {
		Pool        string         `json:"pool" validate:"required"`
		Size        int64          `json:"size" validate:"required"`
		FileName    string         `json:"name" validate:"required"`
		Alternative string         `json:"alt"`
		MimeType    string         `json:"mimetype"`
		Metadata    map[string]any `json:"metadata"`
		IsMature    bool           `json:"is_mature"`
	}

	if err := exts.BindAndValidate(c, &data); err != nil {
		return err
	}

	aliasingMap := viper.GetStringMapString("pools.aliases")
	if val, ok := aliasingMap[data.Pool]; ok {
		data.Pool = val
@@ -42,6 +49,8 @@ func createAttachmentMultipartPlaceholder(c *fiber.Ctx) error {
	}

	metadata, err := services.NewAttachmentPlaceholder(database.C, user, models.Attachment{
		Name:        data.FileName,
		Size:        data.Size,
		Alternative: data.Alternative,
		MimeType:    data.MimeType,
		Metadata:    data.Metadata,
@@ -56,8 +65,9 @@ func createAttachmentMultipartPlaceholder(c *fiber.Ctx) error {
	}

	return c.JSON(fiber.Map{
-		"chunk_size": viper.GetInt64("performance.file_chunk_size"),
-		"meta":       metadata,
+		"chunk_size":  viper.GetInt64("performance.file_chunk_size"),
+		"chunk_count": len(metadata.FileChunks),
+		"meta":        metadata,
	})
}
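The placeholder response now reports chunk_count alongside chunk_size, so an uploading client no longer has to re-derive the number of chunks from the file size. A hedged client-side sketch of consuming it (only the JSON keys come from the handler above; the struct and helper names are hypothetical):

```go
package client // hypothetical uploader-side code

// placeholderResp mirrors the fiber.Map returned above; "meta" stays
// loosely typed because the attachment model isn't part of this diff.
type placeholderResp struct {
	ChunkSize  int64          `json:"chunk_size"`
	ChunkCount int            `json:"chunk_count"`
	Meta       map[string]any `json:"meta"`
}

// chunkRanges splits a file of fileSize bytes into [start, end) byte
// ranges, one per chunk; the final chunk may be shorter than ChunkSize.
func chunkRanges(fileSize int64, resp placeholderResp) [][2]int64 {
	ranges := make([][2]int64, 0, resp.ChunkCount)
	for off := int64(0); off < fileSize; off += resp.ChunkSize {
		end := off + resp.ChunkSize
		if end > fileSize {
			end = fileSize
		}
		ranges = append(ranges, [2]int64{off, end})
	}
	return ranges
}
```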
@@ -100,8 +110,9 @@ func uploadAttachmentMultipart(c *fiber.Ctx) error {
		if !services.CheckChunkExistsInTemporary(meta, cid) {
			isAllUploaded = false
			break
-		} else if val, ok := idx.(int); ok {
-			chunkArrange[val] = cid
+		} else if val, ok := idx.(json.Number); ok {
+			data, _ := val.Int64()
+			chunkArrange[data] = cid
		}
	}
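The idx.(int) assertion was the actual bug in this handler: JSON numbers never decode into Go's int when the target is map[string]any, so the assertion always failed and chunkArrange stayed empty. The switch to json.Number implies the decoder runs with UseNumber() (an inference; the decoder setup isn't shown in this diff). A minimal, runnable illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	raw := `{"chunk-a": 0, "chunk-b": 1}`

	// Default decoding turns JSON numbers into float64, so an int
	// type assertion can never succeed.
	var plain map[string]any
	_ = json.Unmarshal([]byte(raw), &plain)
	_, ok := plain["chunk-a"].(int)
	fmt.Println("int assertion:", ok) // false

	// With UseNumber, values arrive as json.Number and are converted
	// explicitly, which is what the fixed handler does.
	dec := json.NewDecoder(strings.NewReader(raw))
	dec.UseNumber()
	var numbered map[string]any
	_ = dec.Decode(&numbered)
	if n, ok := numbered["chunk-a"].(json.Number); ok {
		idx, _ := n.Int64()
		fmt.Println("chunk index:", idx) // 0
	}
}
```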

View File

@@ -53,7 +53,10 @@ func ScanUnanalyzedFileFromDatabase() {
	}

	var attachments []models.Attachment
-	if err := database.C.Where("destination = ? OR is_analyzed = ?", models.AttachmentDstTemporary, false).Find(&attachments).Error; err != nil {
+	if err := database.C.
+		Where("is_uploaded = ?", true).
+		Where("destination = ? OR is_analyzed = ?", models.AttachmentDstTemporary, false).
+		Find(&attachments).Error; err != nil {
		log.Error().Err(err).Msg("Failed to scan unanalyzed files from database...")
		return
	}
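Splitting the filter across two Where calls is deliberate: GORM ANDs chained conditions together and parenthesizes each one, so is_uploaded = true constrains both branches of the OR. For comparison, the single-call spelling needs explicit parentheses; a sketch against the same model (not code from this commit):

```go
package services // hypothetical placement beside the scanner above

import (
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/database"
	"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
)

// scanStaleEquivalent is the single-Where spelling of the query above;
// the explicit parentheses around the OR are what the chained form
// gets for free from GORM's condition grouping.
func scanStaleEquivalent() ([]models.Attachment, error) {
	var attachments []models.Attachment
	err := database.C.
		Where("is_uploaded = ? AND (destination = ? OR is_analyzed = ?)",
			true, models.AttachmentDstTemporary, false).
		Find(&attachments).Error
	return attachments, err
}
```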
@@ -93,7 +96,9 @@ func ScanUnanalyzedFileFromDatabase() {
}

func AnalyzeAttachment(file models.Attachment) error {
-	if file.Destination != models.AttachmentDstTemporary {
+	if !file.IsUploaded {
+		return fmt.Errorf("file hasn't finished multipart upload")
+	} else if file.Destination != models.AttachmentDstTemporary {
		return fmt.Errorf("attachment isn't in temporary storage, unable to analyze")
	}
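Both guards rely on an IsUploaded flag, and the handler earlier counts metadata.FileChunks; neither field's definition appears in this diff. The model presumably gained something like the following (an assumed sketch, with names taken from their usages here and tags guessed):

```go
package models // assumed location, per the import paths above

// Assumed shape of the fields this commit leans on; the repository's
// actual definition may differ.
type Attachment struct {
	// ...existing fields elided...
	IsUploaded bool           `json:"is_uploaded"`
	FileChunks map[string]any `json:"file_chunks"` // chunk id -> chunk index
}
```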

View File

@@ -25,6 +25,7 @@ func MergeFileChunks(meta models.Attachment, arrange []string) (models.Attachment, error) {
	}
	defer destFile.Close()

	// Merge files
	for _, chunk := range arrange {
		chunkPath := filepath.Join(dest.Path, fmt.Sprintf("%s.%s", meta.Uuid, chunk))
		chunkFile, err := os.Open(chunkPath)
@@ -41,10 +42,17 @@ func MergeFileChunks(meta models.Attachment, arrange []string) (models.Attachment, error) {
		_ = chunkFile.Close()
	}

+	// Do post-upload tasks
+	meta.IsUploaded = true
+	database.C.Save(&meta)
	PublishAnalyzeTask(meta)

+	// Clean up
+	for _, chunk := range arrange {
+		chunkPath := filepath.Join(dest.Path, fmt.Sprintf("%s.%s", meta.Uuid, chunk))
+		_ = os.Remove(chunkPath)
+	}

	return meta, nil
}
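Two notes on this function. First, ordering: IsUploaded is saved before PublishAnalyzeTask fires, so the new guard in AnalyzeAttachment sees a completed upload, and the chunk files are removed only after the merged file has been written. Second, the hunk elides the copy between opening and closing each chunk; a representative merge loop under the same naming scheme might read (a sketch, not the project's exact code):

```go
package services // hypothetical placement beside MergeFileChunks

import (
	"fmt"
	"io"
	"os"
	"path/filepath"
)

// mergeChunks appends each chunk, in the client-supplied order, onto
// the destination file. Chunk paths follow the "<uuid>.<chunk>" scheme
// used in the diff above; the rest is illustrative.
func mergeChunks(dest *os.File, dir, uuid string, arrange []string) error {
	for _, chunk := range arrange {
		src, err := os.Open(filepath.Join(dir, fmt.Sprintf("%s.%s", uuid, chunk)))
		if err != nil {
			return err
		}
		if _, err := io.Copy(dest, src); err != nil {
			src.Close()
			return err
		}
		src.Close()
	}
	return nil
}
```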

View File

@@ -64,7 +64,7 @@ func RunMarkLifecycleDeletionTask() {
}

func RunMarkMultipartDeletionTask() {
-	lifecycle := time.Now().Add(-24 * time.Hour)
+	lifecycle := time.Now().Add(-60 * time.Minute)
	tx := database.C.
		Where("created_at < ?", lifecycle).
		Where("is_uploaded = ?", false).