🔀 Merge pull request 'Compute metadata on the server' (#2) from features/calc-in-backend into master

Reviewed-on: Hydrogen/Paperclip#2
This commit is contained in:
LittleSheep 2024-07-29 06:47:21 +00:00
commit 36c814e1dc
17 changed files with 547 additions and 737 deletions

View File

@ -5,15 +5,6 @@
</component> </component>
<component name="ChangeListManager"> <component name="ChangeListManager">
<list default="true" id="18dd0d68-b4b8-40db-9734-9119b5c848bd" name="更改" comment=":recycle: Moved onto dealer"> <list default="true" id="18dd0d68-b4b8-40db-9734-9119b5c848bd" name="更改" comment=":recycle: Moved onto dealer">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/go.mod" beforeDir="false" afterPath="$PROJECT_DIR$/go.mod" afterDir="false" />
<change beforePath="$PROJECT_DIR$/go.sum" beforeDir="false" afterPath="$PROJECT_DIR$/go.sum" afterDir="false" />
<change beforePath="$PROJECT_DIR$/pkg/internal/gap/client.go" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/pkg/internal/gap/server.go" beforeDir="false" afterPath="$PROJECT_DIR$/pkg/internal/gap/server.go" afterDir="false" />
<change beforePath="$PROJECT_DIR$/pkg/internal/server/exts/auth.go" beforeDir="false" afterPath="$PROJECT_DIR$/pkg/internal/server/exts/auth.go" afterDir="false" />
<change beforePath="$PROJECT_DIR$/pkg/internal/services/auth.go" beforeDir="false" afterPath="$PROJECT_DIR$/pkg/internal/services/auth.go" afterDir="false" />
<change beforePath="$PROJECT_DIR$/pkg/internal/services/jwt.go" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/pkg/main.go" beforeDir="false" afterPath="$PROJECT_DIR$/pkg/main.go" afterDir="false" />
<change beforePath="$PROJECT_DIR$/settings.toml" beforeDir="false" afterPath="$PROJECT_DIR$/settings.toml" afterDir="false" /> <change beforePath="$PROJECT_DIR$/settings.toml" beforeDir="false" afterPath="$PROJECT_DIR$/settings.toml" afterDir="false" />
</list> </list>
<option name="SHOW_DIALOG" value="false" /> <option name="SHOW_DIALOG" value="false" />
@ -54,7 +45,7 @@
"RunOnceActivity.go.migrated.go.modules.settings": "true", "RunOnceActivity.go.migrated.go.modules.settings": "true",
"RunOnceActivity.go.modules.automatic.dependencies.download": "true", "RunOnceActivity.go.modules.automatic.dependencies.download": "true",
"RunOnceActivity.go.modules.go.list.on.any.changes.was.set": "true", "RunOnceActivity.go.modules.go.list.on.any.changes.was.set": "true",
"git-widget-placeholder": "refactor/dealer", "git-widget-placeholder": "features/calc-in-backend",
"go.import.settings.migrated": "true", "go.import.settings.migrated": "true",
"go.sdk.automatically.set": "true", "go.sdk.automatically.set": "true",
"last_opened_file_path": "/Users/littlesheep/Documents/Projects/Hydrogen/Paperclip/pkg/internal/grpc", "last_opened_file_path": "/Users/littlesheep/Documents/Projects/Hydrogen/Paperclip/pkg/internal/grpc",

View File

@ -5,7 +5,20 @@ It contains file metadata compute, instant upload, calculating hashing, multi de
## Features
Paperclip stores and processes uploaded files through a pipeline flow.
When a user uploads a file, it is first written to local storage for media processing.
The server then publishes a message onto an in-process queue, and a background consumer picks up the uploaded file.
The consumer hashes the file and merges records that share the same hash code.
For images, it also decodes the file to compute the aspect ratio and extract additional metadata.
Once processing is finished, the consumer uploads the file to permanent storage (for example an S3 bucket) and removes the local cache.
While processing is in progress, the database record is marked as temporary and the file is served from temporary storage.
When processing completes, the record is updated to point at the permanent destination.
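
Below is a minimal, self-contained sketch of this queue-and-worker flow. It mirrors the `PublishAnalyzeTask` / `StartConsumeAnalyzeTask` pattern added in this change, but the struct, helper names, and the `uploads` directory are simplified stand-ins for illustration rather than the real service API.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sync"
)

// Attachment is a trimmed-down stand-in for models.Attachment.
type Attachment struct {
	ID       uint
	Uuid     string
	HashCode string
	Analyzed bool
}

// analyzeQueue mirrors the buffered in-process channel used to publish analyze tasks.
var analyzeQueue = make(chan Attachment, 256)

// publish enqueues a freshly uploaded file for background analysis.
func publish(a Attachment) { analyzeQueue <- a }

// consume runs in a worker goroutine: it hashes the temporary file; the real
// service would then dedupe by hash, move the blob to permanent storage, and
// update the database record.
func consume(tempDir string) {
	for a := range analyzeQueue {
		f, err := os.Open(filepath.Join(tempDir, a.Uuid))
		if err != nil {
			fmt.Println("analyze failed:", err)
			continue
		}
		h := sha256.New()
		if _, err := io.Copy(h, f); err == nil {
			a.HashCode = hex.EncodeToString(h.Sum(nil))
			a.Analyzed = true
			fmt.Printf("analyzed %s -> %s\n", a.Uuid, a.HashCode)
		}
		f.Close()
	}
}

func main() {
	var wg sync.WaitGroup
	wg.Add(1)
	go func() { defer wg.Done(); consume("uploads") }()

	publish(Attachment{ID: 1, Uuid: "example-uuid"})
	close(analyzeQueue) // no more uploads in this demo
	wg.Wait()
}
```

In the real service the consumer additionally links duplicate records by hash, re-uploads the file to the permanent destination, and schedules deletion of the temporary copy.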
### Supported Destinations
- Local filesystem
- S3-compatible bucket
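
As a rough illustration of how a destination is resolved at runtime (assuming the viper + jsoniter round-trip used in the services package of this change; the struct here is a trimmed stand-in for `models.BaseDestination`):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/spf13/viper"
)

// BaseDestination carries only the discriminator field; the concrete
// local / S3 destination structs add path, bucket, credentials, and so on.
type BaseDestination struct {
	Type string `json:"type"`
}

// resolveDestination reads one [destinations.*] table from the config and
// decodes it through a JSON round-trip into the destination struct.
func resolveDestination(key string) (BaseDestination, error) {
	var dest BaseDestination
	raw, err := jsoniter.Marshal(viper.GetStringMap(key))
	if err != nil {
		return dest, err
	}
	err = jsoniter.Unmarshal(raw, &dest)
	return dest, err
}

func main() {
	viper.SetConfigFile("settings.toml")
	if err := viper.ReadInConfig(); err != nil {
		fmt.Println("cannot read config:", err)
		return
	}
	dest, err := resolveDestination("destinations.permanent")
	fmt.Println(dest.Type, err) // prints "s3" with the sample settings.toml in this change
}
```

Note that the temporary destination is expected to be a local filesystem path, while the permanent destination may be either local or an S3-compatible bucket.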

8
go.mod
View File

@ -14,7 +14,6 @@ require (
github.com/samber/lo v1.39.0 github.com/samber/lo v1.39.0
github.com/spf13/viper v1.18.2 github.com/spf13/viper v1.18.2
google.golang.org/grpc v1.64.0 google.golang.org/grpc v1.64.0
google.golang.org/protobuf v1.34.2
gorm.io/datatypes v1.2.0 gorm.io/datatypes v1.2.0
gorm.io/driver/postgres v1.5.4 gorm.io/driver/postgres v1.5.4
gorm.io/gorm v1.25.6 gorm.io/gorm v1.25.6
@ -49,6 +48,7 @@ require (
github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect github.com/jinzhu/now v1.1.5 // indirect
github.com/jpillora/backoff v1.0.0 // indirect github.com/jpillora/backoff v1.0.0 // indirect
github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 // indirect
github.com/klauspost/compress v1.17.8 // indirect github.com/klauspost/compress v1.17.8 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/leodido/go-urn v1.2.4 // indirect github.com/leodido/go-urn v1.2.4 // indirect
@ -58,6 +58,7 @@ require (
github.com/mattn/go-runewidth v0.0.15 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mbobakov/grpc-consul-resolver v1.5.3 // indirect github.com/mbobakov/grpc-consul-resolver v1.5.3 // indirect
github.com/minio/md5-simd v1.1.2 // indirect github.com/minio/md5-simd v1.1.2 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@ -69,6 +70,7 @@ require (
github.com/rs/xid v1.5.0 // indirect github.com/rs/xid v1.5.0 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/schollz/progressbar/v3 v3.14.4 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect github.com/spf13/afero v1.11.0 // indirect
github.com/spf13/cast v1.6.0 // indirect github.com/spf13/cast v1.6.0 // indirect
@ -83,9 +85,11 @@ require (
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 // indirect golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 // indirect
golang.org/x/net v0.26.0 // indirect golang.org/x/net v0.26.0 // indirect
golang.org/x/sync v0.7.0 // indirect golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.21.0 // indirect golang.org/x/sys v0.22.0 // indirect
golang.org/x/term v0.22.0 // indirect
golang.org/x/text v0.16.0 // indirect golang.org/x/text v0.16.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117 // indirect
google.golang.org/protobuf v1.34.2 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/driver/mysql v1.5.2 // indirect gorm.io/driver/mysql v1.5.2 // indirect

12
go.sum
View File

@ -146,6 +146,8 @@ github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 h1:qGQQKEcAR99REcMpsXCp3lJ03zYT1PkRd3kQGPn9GVg=
github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw=
github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
@ -197,6 +199,8 @@ github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEp
github.com/minio/minio-go/v7 v7.0.70 h1:1u9NtMgfK1U42kUxcsl5v0yj6TEOPR497OAQxpJnn2g= github.com/minio/minio-go/v7 v7.0.70 h1:1u9NtMgfK1U42kUxcsl5v0yj6TEOPR497OAQxpJnn2g=
github.com/minio/minio-go/v7 v7.0.70/go.mod h1:4yBA8v80xGA30cfM3fz0DKYMXunWl/AV/6tWEs9ryzo= github.com/minio/minio-go/v7 v7.0.70/go.mod h1:4yBA8v80xGA30cfM3fz0DKYMXunWl/AV/6tWEs9ryzo=
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
@ -255,6 +259,8 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA=
github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
github.com/schollz/progressbar/v3 v3.14.4 h1:W9ZrDSJk7eqmQhd3uxFNNcTr0QL+xuGNI9dEMrw0r74=
github.com/schollz/progressbar/v3 v3.14.4/go.mod h1:aT3UQ7yGm+2ZjeXPqsjTenwL3ddUiuZ0kfQ/2tHlyNI=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
@ -355,11 +361,17 @@ golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=

View File

@ -1,76 +0,0 @@
package grpc
import (
"context"
"fmt"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/database"
"git.solsynth.dev/hydrogen/paperclip/pkg/proto"
"google.golang.org/protobuf/types/known/emptypb"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
jsoniter "github.com/json-iterator/go"
"github.com/samber/lo"
)
func (v *Server) GetAttachment(ctx context.Context, request *proto.AttachmentLookupRequest) (*proto.Attachment, error) {
var attachment models.Attachment
tx := database.C.Model(&models.Attachment{})
if request.Id != nil {
tx = tx.Where("id = ?", request.GetId())
}
if request.Uuid != nil {
tx = tx.Where("uuid = ?", request.GetUuid())
}
if request.Usage != nil {
tx = tx.Where("usage = ?", request.GetUsage())
}
if err := tx.First(&attachment).Error; err != nil {
return nil, err
}
rawMetadata, _ := jsoniter.Marshal(attachment.Metadata)
if attachment.AccountID == nil {
attachment.AccountID = lo.ToPtr[uint](0)
}
return &proto.Attachment{
Id: uint64(attachment.ID),
Uuid: attachment.Uuid,
Size: attachment.Size,
Name: attachment.Name,
Alt: attachment.Alternative,
Usage: attachment.Usage,
Mimetype: attachment.MimeType,
Hash: attachment.HashCode,
Destination: attachment.Destination,
Metadata: rawMetadata,
IsMature: attachment.IsMature,
AccountId: uint64(*attachment.AccountID),
}, nil
}
func (v *Server) CheckAttachmentExists(ctx context.Context, request *proto.AttachmentLookupRequest) (*emptypb.Empty, error) {
tx := database.C.Model(&models.Attachment{})
if request.Id != nil {
tx = tx.Where("id = ?", request.GetId())
}
if request.Uuid != nil {
tx = tx.Where("uuid = ?", request.GetUuid())
}
if request.Usage != nil {
tx = tx.Where("usage = ?", request.GetUsage())
}
var count int64
if err := tx.Model(&models.Attachment{}).Count(&count).Error; err != nil {
return nil, err
} else if count == 0 {
return nil, fmt.Errorf("record not found")
}
return &emptypb.Empty{}, nil
}

View File

@ -1,16 +1,15 @@
package grpc package grpc
import ( import (
"git.solsynth.dev/hydrogen/paperclip/pkg/proto" "net"
"github.com/spf13/viper" "github.com/spf13/viper"
"google.golang.org/grpc" "google.golang.org/grpc"
health "google.golang.org/grpc/health/grpc_health_v1" health "google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/reflection" "google.golang.org/grpc/reflection"
"net"
) )
type Server struct { type Server struct {
proto.UnimplementedAttachmentsServer
} }
var S *grpc.Server var S *grpc.Server
@ -18,7 +17,6 @@ var S *grpc.Server
func NewGRPC() { func NewGRPC() {
S = grpc.NewServer() S = grpc.NewServer()
proto.RegisterAttachmentsServer(S, &Server{})
health.RegisterHealthServer(S, &Server{}) health.RegisterHealthServer(S, &Server{})
reflection.Register(S) reflection.Register(S)

View File

@ -2,21 +2,33 @@ package models
import "gorm.io/datatypes" import "gorm.io/datatypes"
type AttachmentDst = int8
const (
AttachmentDstTemporary = AttachmentDst(iota)
AttachmentDstPermanent
)
type Attachment struct { type Attachment struct {
BaseModel BaseModel
Uuid string `json:"uuid"` Uuid string `json:"uuid"`
Size int64 `json:"size"` Size int64 `json:"size"`
Name string `json:"name"` Name string `json:"name"`
Alternative string `json:"alt"` Alternative string `json:"alt"`
Usage string `json:"usage"` Usage string `json:"usage"`
MimeType string `json:"mimetype"` MimeType string `json:"mimetype"`
HashCode string `json:"hash"` HashCode string `json:"hash"`
Destination string `json:"destination"` Destination AttachmentDst `json:"destination"`
RefCount int `json:"ref_count"`
Metadata datatypes.JSONMap `json:"metadata"` Metadata datatypes.JSONMap `json:"metadata"`
IsMature bool `json:"is_mature"` IsMature bool `json:"is_mature"`
IsAnalyzed bool `json:"is_analyzed"`
Account *Account `json:"account"` Ref *Attachment `json:"ref"`
AccountID *uint `json:"account_id"` RefID *uint `json:"ref_id"`
Account Account `json:"account"`
AccountID uint `json:"account_id"`
} }

View File

@ -18,10 +18,11 @@ type LocalDestination struct {
type S3Destination struct { type S3Destination struct {
BaseDestination BaseDestination
Path string `json:"path"` Path string `json:"path"`
Bucket string `json:"bucket"` Bucket string `json:"bucket"`
Endpoint string `json:"endpoint"` Endpoint string `json:"endpoint"`
SecretID string `json:"secret_id"` SecretID string `json:"secret_id"`
SecretKey string `json:"secret_key"` SecretKey string `json:"secret_key"`
EnableSSL bool `json:"enable_ssl"` AccessBaseURL string `json:"access_baseurl"`
EnableSSL bool `json:"enable_ssl"`
} }

View File

@ -25,17 +25,18 @@ func openAttachment(c *fiber.Ctx) error {
return fiber.NewError(fiber.StatusNotFound) return fiber.NewError(fiber.StatusNotFound)
} }
destMap := viper.GetStringMap("destinations") var destMap map[string]any
dest, destOk := destMap[metadata.Destination] if metadata.Destination == models.AttachmentDstTemporary {
if !destOk { destMap = viper.GetStringMap("destinations.temporary")
return fiber.NewError(fiber.StatusInternalServerError, "invalid destination: destination configuration was not found") } else {
destMap = viper.GetStringMap("destinations.permanent")
} }
var destParsed models.BaseDestination var dest models.BaseDestination
rawDest, _ := jsoniter.Marshal(dest) rawDest, _ := jsoniter.Marshal(destMap)
_ = jsoniter.Unmarshal(rawDest, &destParsed) _ = jsoniter.Unmarshal(rawDest, &dest)
switch destParsed.Type { switch dest.Type {
case models.DestinationTypeLocal: case models.DestinationTypeLocal:
var destConfigured models.LocalDestination var destConfigured models.LocalDestination
_ = jsoniter.Unmarshal(rawDest, &destConfigured) _ = jsoniter.Unmarshal(rawDest, &destConfigured)
@ -43,21 +44,27 @@ func openAttachment(c *fiber.Ctx) error {
c.Set(fiber.HeaderContentType, metadata.MimeType) c.Set(fiber.HeaderContentType, metadata.MimeType)
} }
return c.SendFile(filepath.Join(destConfigured.Path, metadata.Uuid), false) return c.SendFile(filepath.Join(destConfigured.Path, metadata.Uuid), false)
case models.DestinationTypeS3: case models.DestinationTypeS3:
var destConfigured models.S3Destination var destConfigured models.S3Destination
_ = jsoniter.Unmarshal(rawDest, &destConfigured) _ = jsoniter.Unmarshal(rawDest, &destConfigured)
protocol := lo.Ternary(destConfigured.EnableSSL, "https", "http") if len(destConfigured.AccessBaseURL) > 0 {
return c.Redirect(fmt.Sprintf( return c.Redirect(fmt.Sprintf(
"%s://%s.%s/%s", "%s/%s",
protocol, destConfigured.AccessBaseURL,
destConfigured.Bucket, url.QueryEscape(filepath.Join(destConfigured.Path, metadata.Uuid)),
destConfigured.Endpoint, ), fiber.StatusMovedPermanently)
url.QueryEscape(filepath.Join(destConfigured.Path, metadata.Uuid)), } else {
)) protocol := lo.Ternary(destConfigured.EnableSSL, "https", "http")
return c.Redirect(fmt.Sprintf(
"%s://%s.%s/%s",
protocol,
destConfigured.Bucket,
destConfigured.Endpoint,
url.QueryEscape(filepath.Join(destConfigured.Path, metadata.Uuid)),
), fiber.StatusMovedPermanently)
}
default: default:
return fmt.Errorf("invalid destination: unsupported protocol %s", destParsed.Type) return fmt.Errorf("invalid destination: unsupported protocol %s", dest.Type)
} }
} }
@ -79,12 +86,6 @@ func createAttachment(c *fiber.Ctx) error {
} }
user = lo.ToPtr(c.Locals("user").(models.Account)) user = lo.ToPtr(c.Locals("user").(models.Account))
destName := c.Query("destination", viper.GetString("preferred_destination"))
hash := c.FormValue("hash")
if len(hash) != 64 {
return fiber.NewError(fiber.StatusBadRequest, "please provide a sha-256 hash code, length should be 64 characters")
}
usage := c.FormValue("usage") usage := c.FormValue("usage")
if !lo.Contains(viper.GetStringSlice("accepts_usage"), usage) { if !lo.Contains(viper.GetStringSlice("accepts_usage"), usage) {
return fiber.NewError(fiber.StatusBadRequest, fmt.Sprintf("disallowed usage: %s", usage)) return fiber.NewError(fiber.StatusBadRequest, fmt.Sprintf("disallowed usage: %s", usage))
@ -103,29 +104,30 @@ func createAttachment(c *fiber.Ctx) error {
_ = jsoniter.UnmarshalFromString(c.FormValue("metadata"), &usermeta) _ = jsoniter.UnmarshalFromString(c.FormValue("metadata"), &usermeta)
tx := database.C.Begin() tx := database.C.Begin()
metadata, linked, err := services.NewAttachmentMetadata(tx, user, file, models.Attachment{
metadata, err := services.NewAttachmentMetadata(tx, user, file, models.Attachment{
Usage: usage, Usage: usage,
HashCode: hash,
Alternative: c.FormValue("alt"), Alternative: c.FormValue("alt"),
MimeType: c.FormValue("mimetype"), MimeType: c.FormValue("mimetype"),
Metadata: usermeta, Metadata: usermeta,
IsMature: len(c.FormValue("mature")) > 0, IsMature: len(c.FormValue("mature")) > 0,
Destination: destName, IsAnalyzed: false,
Destination: models.AttachmentDstTemporary,
}) })
if err != nil { if err != nil {
tx.Rollback() tx.Rollback()
return fiber.NewError(fiber.StatusBadRequest, err.Error()) return fiber.NewError(fiber.StatusBadRequest, err.Error())
} }
if !linked { if err := services.UploadFileToTemporary(c, file, metadata); err != nil {
if err := services.UploadFile(destName, c, file, metadata); err != nil { tx.Rollback()
tx.Rollback() return fiber.NewError(fiber.StatusBadRequest, err.Error())
return fiber.NewError(fiber.StatusBadRequest, err.Error())
}
} }
tx.Commit() tx.Commit()
services.PublishAnalyzeTask(metadata)
return c.JSON(metadata) return c.JSON(metadata)
} }
@ -176,7 +178,7 @@ func deleteAttachment(c *fiber.Ctx) error {
attachment, err := services.GetAttachmentByID(uint(id)) attachment, err := services.GetAttachmentByID(uint(id))
if err != nil { if err != nil {
return fiber.NewError(fiber.StatusNotFound, err.Error()) return fiber.NewError(fiber.StatusNotFound, err.Error())
} else if attachment.AccountID == nil || *attachment.AccountID != user.ID { } else if attachment.AccountID != user.ID {
return fiber.NewError(fiber.StatusNotFound, "record not created by you") return fiber.NewError(fiber.StatusNotFound, "record not created by you")
} }

View File

@ -0,0 +1,212 @@
package services
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"image"
"io"
"os"
"path/filepath"
"strings"
"time"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/database"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
jsoniter "github.com/json-iterator/go"
"github.com/k0kubun/go-ansi"
"github.com/rs/zerolog/log"
"github.com/schollz/progressbar/v3"
"github.com/spf13/viper"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
)
var fileAnalyzeQueue = make(chan models.Attachment, 256)
func PublishAnalyzeTask(file models.Attachment) {
fileAnalyzeQueue <- file
}
func StartConsumeAnalyzeTask() {
for {
task := <-fileAnalyzeQueue
start := time.Now()
if err := AnalyzeAttachment(task); err != nil {
log.Error().Err(err).Any("task", task).Msg("A file analyze task failed...")
} else {
log.Info().Dur("elapsed", time.Since(start)).Uint("id", task.ID).Msg("A file analyze task was completed.")
}
}
}
func ScanUnanalyzedFileFromDatabase() {
workers := viper.GetInt("workers.files_analyze")
if workers < 2 {
log.Warn().Int("val", workers).Int("min", 2).Msg("The file analyzer does not have enough computing power, and the scan of unanalyzed files will not start...")
}
var attachments []models.Attachment
if err := database.C.Where("destination = ? OR is_analyzed = ?", models.AttachmentDstTemporary, false).Find(&attachments).Error; err != nil {
log.Error().Err(err).Msg("Scan unanalyzed files from database failed...")
return
}
if len(attachments) == 0 {
return
}
go func() {
var deletionIdSet []uint
bar := progressbar.NewOptions(len(attachments),
progressbar.OptionSetWriter(ansi.NewAnsiStdout()),
progressbar.OptionEnableColorCodes(true),
progressbar.OptionShowBytes(true),
progressbar.OptionSetWidth(15),
progressbar.OptionSetDescription("Analyzing the unanalyzed files..."),
progressbar.OptionSetTheme(progressbar.Theme{
Saucer: "[green]=[reset]",
SaucerHead: "[green]>[reset]",
SaucerPadding: " ",
BarStart: "[",
BarEnd: "]",
}))
for _, task := range attachments {
if err := AnalyzeAttachment(task); err != nil {
log.Error().Err(err).Any("task", task).Msg("A background file analyze task failed...")
deletionIdSet = append(deletionIdSet, task.ID)
}
bar.Add(1)
}
log.Info().Int("count", len(attachments)).Int("fails", len(deletionIdSet)).Msg("All unanalyzed files has been analyzed!")
if len(deletionIdSet) > 0 {
database.C.Delete(&models.Attachment{}, deletionIdSet)
}
}()
}
func AnalyzeAttachment(file models.Attachment) error {
if file.Destination != models.AttachmentDstTemporary {
return fmt.Errorf("attachment isn't in temporary storage, unable to analyze")
}
var start time.Time
if !file.IsAnalyzed || len(file.HashCode) == 0 {
destMap := viper.GetStringMap("destinations.temporary")
var dest models.LocalDestination
rawDest, _ := jsoniter.Marshal(destMap)
_ = jsoniter.Unmarshal(rawDest, &dest)
start = time.Now()
dst := filepath.Join(dest.Path, file.Uuid)
if _, err := os.Stat(dst); os.IsNotExist(err) {
return fmt.Errorf("attachment doesn't exists in temporary storage: %v", err)
}
if t := strings.SplitN(file.MimeType, "/", 2)[0]; t == "image" {
// Dealing with image
reader, err := os.Open(dst)
if err != nil {
return fmt.Errorf("unable to open file: %v", err)
}
defer reader.Close()
im, _, err := image.Decode(reader)
if err != nil {
return fmt.Errorf("unable to decode file as an image: %v", err)
}
width := im.Bounds().Dx()
height := im.Bounds().Dy()
ratio := float64(width) / float64(height)
file.Metadata = map[string]any{
"width": width,
"height": height,
"ratio": ratio,
}
}
if hash, err := HashAttachment(file); err != nil {
return err
} else {
file.HashCode = hash
}
}
tx := database.C.Begin()
file.IsAnalyzed = true
linked, err := TryLinkAttachment(tx, file, file.HashCode)
if linked && err != nil {
return fmt.Errorf("unable to link file record: %v", err)
} else if !linked {
metadataCache.Store(file.ID, file)
if err := tx.Save(&file).Error; err != nil {
tx.Rollback()
return fmt.Errorf("unable to save file record: %v", err)
}
}
tx.Commit()
log.Info().Dur("elapsed", time.Since(start)).Uint("id", file.ID).Msg("A file analyze task was finished, starting uploading...")
start = time.Now()
// Move the file from temporary to permanent storage
if !linked {
if err := ReUploadFileToPermanent(file); err != nil {
return fmt.Errorf("unable to move file to permanet storage: %v", err)
}
}
// Recycle the temporary file
file.Destination = models.AttachmentDstTemporary
PublishDeleteFileTask(file)
// Finish
log.Info().Dur("elapsed", time.Since(start)).Uint("id", file.ID).Bool("linked", linked).Msg("A file post-analyze upload task was finished.")
return nil
}
func HashAttachment(file models.Attachment) (hash string, err error) {
if file.Destination != models.AttachmentDstTemporary {
err = fmt.Errorf("attachment isn't in temporary storage, unable to hash")
return
}
destMap := viper.GetStringMap("destinations.temporary")
var dest models.LocalDestination
rawDest, _ := jsoniter.Marshal(destMap)
_ = jsoniter.Unmarshal(rawDest, &dest)
dst := filepath.Join(dest.Path, file.Uuid)
if _, err = os.Stat(dst); os.IsNotExist(err) {
err = fmt.Errorf("attachment doesn't exists in temporary storage: %v", err)
return
}
var in *os.File
in, err = os.Open(dst)
if err != nil {
err = fmt.Errorf("unable to open file: %v", err)
return
}
defer in.Close()
hasher := sha256.New()
if _, err = io.Copy(hasher, in); err != nil {
err = fmt.Errorf("unable to hash: %v", err)
return
}
hash = hex.EncodeToString(hasher.Sum(nil))
return
}

View File

@ -6,21 +6,23 @@ import (
"mime/multipart" "mime/multipart"
"net/http" "net/http"
"path/filepath" "path/filepath"
"sync"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/database" "git.solsynth.dev/hydrogen/paperclip/pkg/internal/database"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models" "git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/rs/zerolog/log"
"gorm.io/gorm" "gorm.io/gorm"
) )
const metadataCacheLimit = 512 const metadataCacheLimit = 512
var metadataCache = make(map[uint]models.Attachment) var metadataCache sync.Map
func GetAttachmentByID(id uint) (models.Attachment, error) { func GetAttachmentByID(id uint) (models.Attachment, error) {
if val, ok := metadataCache[id]; ok { if val, ok := metadataCache.Load(id); ok {
return val, nil return val.(models.Attachment), nil
} }
var attachment models.Attachment var attachment models.Attachment
@ -29,10 +31,8 @@ func GetAttachmentByID(id uint) (models.Attachment, error) {
}).Preload("Account").First(&attachment).Error; err != nil { }).Preload("Account").First(&attachment).Error; err != nil {
return attachment, err return attachment, err
} else { } else {
if len(metadataCache) > metadataCacheLimit { MaintainAttachmentCache()
clear(metadataCache) metadataCache.Store(id, attachment)
}
metadataCache[id] = attachment
} }
return attachment, nil return attachment, nil
@ -48,97 +48,126 @@ func GetAttachmentByHash(hash string) (models.Attachment, error) {
return attachment, nil return attachment, nil
} }
func NewAttachmentMetadata(tx *gorm.DB, user *models.Account, file *multipart.FileHeader, attachment models.Attachment) (models.Attachment, bool, error) { func NewAttachmentMetadata(tx *gorm.DB, user *models.Account, file *multipart.FileHeader, attachment models.Attachment) (models.Attachment, error) {
linked := false attachment.Uuid = uuid.NewString()
exists, pickupErr := GetAttachmentByHash(attachment.HashCode) attachment.Size = file.Size
if pickupErr == nil { attachment.Name = file.Filename
linked = true attachment.AccountID = user.ID
exists.Alternative = attachment.Alternative
exists.Usage = attachment.Usage
exists.Metadata = attachment.Metadata
attachment = exists
attachment.ID = 0
if user != nil { // If the user didn't provide file mimetype manually, we have to detect it
attachment.AccountID = &user.ID if len(attachment.MimeType) == 0 {
} if ext := filepath.Ext(attachment.Name); len(ext) > 0 {
} else { // Detect mimetype by file extensions
// Upload the new file attachment.MimeType = mime.TypeByExtension(ext)
attachment.Uuid = uuid.NewString() } else {
attachment.Size = file.Size // Detect mimetype by file header
attachment.Name = file.Filename // This method as a fallback method, because this isn't pretty accurate
header, err := file.Open()
if user != nil { if err != nil {
attachment.AccountID = &user.ID return attachment, fmt.Errorf("failed to read file header: %v", err)
}
// If the user didn't provide file mimetype manually, we have to detect it
if len(attachment.MimeType) == 0 {
if ext := filepath.Ext(attachment.Name); len(ext) > 0 {
// Detect mimetype by file extensions
attachment.MimeType = mime.TypeByExtension(ext)
} else {
// Detect mimetype by file header
// This method as a fallback method, because this isn't pretty accurate
header, err := file.Open()
if err != nil {
return attachment, false, fmt.Errorf("failed to read file header: %v", err)
}
defer header.Close()
fileHeader := make([]byte, 512)
_, err = header.Read(fileHeader)
if err != nil {
return attachment, false, err
}
attachment.MimeType = http.DetectContentType(fileHeader)
} }
defer header.Close()
fileHeader := make([]byte, 512)
_, err = header.Read(fileHeader)
if err != nil {
return attachment, err
}
attachment.MimeType = http.DetectContentType(fileHeader)
} }
} }
if err := tx.Save(&attachment).Error; err != nil { if err := tx.Save(&attachment).Error; err != nil {
return attachment, linked, fmt.Errorf("failed to save attachment record: %v", err) return attachment, fmt.Errorf("failed to save attachment record: %v", err)
} else { } else {
if len(metadataCache) > metadataCacheLimit { MaintainAttachmentCache()
clear(metadataCache) metadataCache.Store(attachment.ID, attachment)
}
metadataCache[attachment.ID] = attachment
} }
return attachment, linked, nil return attachment, nil
}
func TryLinkAttachment(tx *gorm.DB, og models.Attachment, hash string) (bool, error) {
prev, err := GetAttachmentByHash(hash)
if err != nil {
return false, err
}
prev.RefCount++
og.RefID = &prev.ID
og.Uuid = prev.Uuid
og.Destination = prev.Destination
if err := tx.Save(&og).Error; err != nil {
tx.Rollback()
return true, err
} else if err = tx.Save(&prev).Error; err != nil {
tx.Rollback()
return true, err
}
metadataCache.Store(prev.ID, prev)
metadataCache.Store(og.ID, og)
return true, nil
} }
func UpdateAttachment(item models.Attachment) (models.Attachment, error) { func UpdateAttachment(item models.Attachment) (models.Attachment, error) {
if err := database.C.Save(&item).Error; err != nil { if err := database.C.Save(&item).Error; err != nil {
return item, err return item, err
} else { } else {
if len(metadataCache) > metadataCacheLimit { MaintainAttachmentCache()
clear(metadataCache) metadataCache.Store(item.ID, item)
}
metadataCache[item.ID] = item
} }
return item, nil return item, nil
} }
func DeleteAttachment(item models.Attachment) error { func DeleteAttachment(item models.Attachment) error {
var dupeCount int64 dat := item
if err := database.C.
Where(&models.Attachment{HashCode: item.HashCode}).
Model(&models.Attachment{}).
Count(&dupeCount).Error; err != nil {
dupeCount = -1
}
tx := database.C.Begin()
if item.RefID != nil {
var refTarget models.Attachment
if err := database.C.Where(models.Attachment{
BaseModel: models.BaseModel{ID: *item.RefID},
}).First(&refTarget).Error; err == nil {
refTarget.RefCount--
if err := tx.Save(&refTarget).Error; err != nil {
tx.Rollback()
return fmt.Errorf("unable to update ref count: %v", err)
}
}
}
if err := database.C.Delete(&item).Error; err != nil { if err := database.C.Delete(&item).Error; err != nil {
tx.Rollback()
return err return err
} else { } else {
delete(metadataCache, item.ID) metadataCache.Delete(item.ID)
} }
if dupeCount != -1 && dupeCount <= 1 { tx.Commit()
return DeleteFile(item)
if dat.RefCount == 0 {
PublishDeleteFileTask(dat)
} }
return nil return nil
} }
func MaintainAttachmentCache() {
var keySet []uint
metadataCache.Range(func(k any, v any) bool {
keySet = append(keySet, k.(uint))
return true
})
if len(keySet) > metadataCacheLimit {
go func() {
log.Debug().Int("count", len(keySet)).Msg("Cleaning attachment metadata cache...")
for _, k := range keySet {
metadataCache.Delete(k)
}
}()
}
}

View File

@ -5,26 +5,47 @@ import (
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
"time"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models" "git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
jsoniter "github.com/json-iterator/go" jsoniter "github.com/json-iterator/go"
"github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7"
"github.com/minio/minio-go/v7/pkg/credentials" "github.com/minio/minio-go/v7/pkg/credentials"
"github.com/rs/zerolog/log"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
var fileDeletionQueue = make(chan models.Attachment, 256)
func PublishDeleteFileTask(file models.Attachment) {
fileDeletionQueue <- file
}
func StartConsumeDeletionTask() {
for {
task := <-fileDeletionQueue
start := time.Now()
if err := DeleteFile(task); err != nil {
log.Error().Err(err).Any("task", task).Msg("A file deletion task failed...")
} else {
log.Info().Dur("elapsed", time.Since(start)).Uint("id", task.ID).Msg("A file deletion task was completed.")
}
}
}
func DeleteFile(meta models.Attachment) error { func DeleteFile(meta models.Attachment) error {
destMap := viper.GetStringMap("destinations") var destMap map[string]any
dest, destOk := destMap[meta.Destination] if meta.Destination == models.AttachmentDstTemporary {
if !destOk { destMap = viper.GetStringMap("destinations.temporary")
return fmt.Errorf("invalid destination: destination configuration was not found") } else {
destMap = viper.GetStringMap("destinations.permanent")
} }
var destParsed models.BaseDestination var dest models.BaseDestination
rawDest, _ := jsoniter.Marshal(dest) rawDest, _ := jsoniter.Marshal(destMap)
_ = jsoniter.Unmarshal(rawDest, &destParsed) _ = jsoniter.Unmarshal(rawDest, &dest)
switch destParsed.Type { switch dest.Type {
case models.DestinationTypeLocal: case models.DestinationTypeLocal:
var destConfigured models.LocalDestination var destConfigured models.LocalDestination
_ = jsoniter.Unmarshal(rawDest, &destConfigured) _ = jsoniter.Unmarshal(rawDest, &destConfigured)
@ -34,7 +55,7 @@ func DeleteFile(meta models.Attachment) error {
_ = jsoniter.Unmarshal(rawDest, &destConfigured) _ = jsoniter.Unmarshal(rawDest, &destConfigured)
return DeleteFileFromS3(destConfigured, meta) return DeleteFileFromS3(destConfigured, meta)
default: default:
return fmt.Errorf("invalid destination: unsupported protocol %s", destParsed.Type) return fmt.Errorf("invalid destination: unsupported protocol %s", dest.Type)
} }
} }

View File

@ -6,8 +6,10 @@ import (
"fmt" "fmt"
"io" "io"
"mime/multipart" "mime/multipart"
"os"
"path/filepath" "path/filepath"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/database"
"git.solsynth.dev/hydrogen/paperclip/pkg/internal/models" "git.solsynth.dev/hydrogen/paperclip/pkg/internal/models"
"github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2"
jsoniter "github.com/json-iterator/go" jsoniter "github.com/json-iterator/go"
@ -16,28 +18,97 @@ import (
"github.com/spf13/viper" "github.com/spf13/viper"
) )
func UploadFile(destName string, ctx *fiber.Ctx, file *multipart.FileHeader, meta models.Attachment) error { func UploadFileToTemporary(ctx *fiber.Ctx, file *multipart.FileHeader, meta models.Attachment) error {
destMap := viper.GetStringMap("destinations") destMap := viper.GetStringMap("destinations.temporary")
dest, destOk := destMap[destName]
if !destOk {
return fmt.Errorf("invalid destination: destination configuration was not found")
}
var destParsed models.BaseDestination var dest models.BaseDestination
rawDest, _ := jsoniter.Marshal(dest) rawDest, _ := jsoniter.Marshal(destMap)
_ = jsoniter.Unmarshal(rawDest, &destParsed) _ = jsoniter.Unmarshal(rawDest, &dest)
switch destParsed.Type { switch dest.Type {
case models.DestinationTypeLocal: case models.DestinationTypeLocal:
var destConfigured models.LocalDestination var destConfigured models.LocalDestination
_ = jsoniter.Unmarshal(rawDest, &destConfigured) _ = jsoniter.Unmarshal(rawDest, &destConfigured)
return UploadFileToLocal(destConfigured, ctx, file, meta) return UploadFileToLocal(destConfigured, ctx, file, meta)
default:
return fmt.Errorf("invalid destination: unsupported protocol %s", dest.Type)
}
}
func ReUploadFileToPermanent(meta models.Attachment) error {
if meta.Destination != models.AttachmentDstTemporary {
return fmt.Errorf("attachment isn't in temporary storage, unable to process")
}
meta.Destination = models.AttachmentDstPermanent
destMap := viper.GetStringMap("destinations.permanent")
var dest models.BaseDestination
rawDest, _ := jsoniter.Marshal(destMap)
_ = jsoniter.Unmarshal(rawDest, &dest)
prevDestMap := viper.GetStringMap("destinations.temporary")
// Currently the temporary destination only supports local storage,
// so we can read the source file directly from its local path
var prevDest models.LocalDestination
prevRawDest, _ := jsoniter.Marshal(prevDestMap)
_ = jsoniter.Unmarshal(prevRawDest, &prevDest)
inDst := filepath.Join(prevDest.Path, meta.Uuid)
switch dest.Type {
case models.DestinationTypeLocal:
var destConfigured models.LocalDestination
_ = jsoniter.Unmarshal(rawDest, &destConfigured)
in, err := os.Open(inDst)
if err != nil {
return fmt.Errorf("unable to open file in temporary storage: %v", err)
}
defer in.Close()
out, err := os.Create(filepath.Join(destConfigured.Path, meta.Uuid))
if err != nil {
return fmt.Errorf("unable to open dest file: %v", err)
}
defer out.Close()
_, err = io.Copy(out, in)
if err != nil {
return fmt.Errorf("unable to copy data to dest file: %v", err)
}
database.C.Save(&meta)
metadataCache.Store(meta.ID, meta)
return nil
case models.DestinationTypeS3: case models.DestinationTypeS3:
var destConfigured models.S3Destination var destConfigured models.S3Destination
_ = jsoniter.Unmarshal(rawDest, &destConfigured) _ = jsoniter.Unmarshal(rawDest, &destConfigured)
return UploadFileToS3(destConfigured, file, meta)
client, err := minio.New(destConfigured.Endpoint, &minio.Options{
Creds: credentials.NewStaticV4(destConfigured.SecretID, destConfigured.SecretKey, ""),
Secure: destConfigured.EnableSSL,
})
if err != nil {
return fmt.Errorf("unable to configure s3 client: %v", err)
}
_, err = client.FPutObject(context.Background(), destConfigured.Bucket, filepath.Join(destConfigured.Path, meta.Uuid), inDst, minio.PutObjectOptions{
ContentType: meta.MimeType,
SendContentMd5: false,
DisableContentSha256: true,
})
if err != nil {
return fmt.Errorf("unable to upload file to s3: %v", err)
}
database.C.Save(&meta)
metadataCache.Store(meta.ID, meta)
return nil
default: default:
return fmt.Errorf("invalid destination: unsupported protocol %s", destParsed.Type) return fmt.Errorf("invalid destination: unsupported protocol %s", dest.Type)
} }
} }
@ -65,8 +136,10 @@ func UploadFileToS3(config models.S3Destination, file *multipart.FileHeader, met
return fmt.Errorf("unable to configure s3 client: %v", err) return fmt.Errorf("unable to configure s3 client: %v", err)
} }
_, err = client.PutObject(context.Background(), config.Bucket, filepath.Join(config.Path, meta.Uuid), buffer, -1, minio.PutObjectOptions{ _, err = client.PutObject(context.Background(), config.Bucket, filepath.Join(config.Path, meta.Uuid), buffer, file.Size, minio.PutObjectOptions{
ContentType: meta.MimeType, ContentType: meta.MimeType,
SendContentMd5: false,
DisableContentSha256: true,
}) })
if err != nil { if err != nil {
return fmt.Errorf("unable to upload file to s3: %v", err) return fmt.Errorf("unable to upload file to s3: %v", err)

View File

@ -48,6 +48,14 @@ func main() {
log.Error().Err(err).Msg("An error occurred when registering service to dealer...") log.Error().Err(err).Msg("An error occurred when registering service to dealer...")
} }
// Setup some workers
for idx := 0; idx < viper.GetInt("workers.files_deletion"); idx++ {
go services.StartConsumeDeletionTask()
}
for idx := 0; idx < viper.GetInt("workers.files_analyze"); idx++ {
go services.StartConsumeAnalyzeTask()
}
// Configure timed tasks // Configure timed tasks
quartz := cron.New(cron.WithLogger(cron.VerbosePrintfLogger(&log.Logger))) quartz := cron.New(cron.WithLogger(cron.VerbosePrintfLogger(&log.Logger)))
quartz.AddFunc("@every 60m", services.DoAutoDatabaseCleanup) quartz.AddFunc("@every 60m", services.DoAutoDatabaseCleanup)
@ -64,6 +72,8 @@ func main() {
// Messages // Messages
log.Info().Msgf("Paperclip v%s is started...", pkg.AppVersion) log.Info().Msgf("Paperclip v%s is started...", pkg.AppVersion)
services.ScanUnanalyzedFileFromDatabase()
quit := make(chan os.Signal, 1) quit := make(chan os.Signal, 1)
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit <-quit

View File

@ -1,349 +0,0 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v5.26.1
// source: attachments.proto
package proto
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
emptypb "google.golang.org/protobuf/types/known/emptypb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type Attachment struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
Uuid string `protobuf:"bytes,2,opt,name=uuid,proto3" json:"uuid,omitempty"`
Size int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
Alt string `protobuf:"bytes,5,opt,name=alt,proto3" json:"alt,omitempty"`
Usage string `protobuf:"bytes,6,opt,name=usage,proto3" json:"usage,omitempty"`
Mimetype string `protobuf:"bytes,7,opt,name=mimetype,proto3" json:"mimetype,omitempty"`
Hash string `protobuf:"bytes,8,opt,name=hash,proto3" json:"hash,omitempty"`
Destination string `protobuf:"bytes,9,opt,name=destination,proto3" json:"destination,omitempty"`
Metadata []byte `protobuf:"bytes,10,opt,name=metadata,proto3" json:"metadata,omitempty"`
IsMature bool `protobuf:"varint,11,opt,name=is_mature,json=isMature,proto3" json:"is_mature,omitempty"`
AccountId uint64 `protobuf:"varint,12,opt,name=account_id,json=accountId,proto3" json:"account_id,omitempty"`
}
func (x *Attachment) Reset() {
*x = Attachment{}
if protoimpl.UnsafeEnabled {
mi := &file_attachments_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Attachment) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Attachment) ProtoMessage() {}
func (x *Attachment) ProtoReflect() protoreflect.Message {
mi := &file_attachments_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Attachment.ProtoReflect.Descriptor instead.
func (*Attachment) Descriptor() ([]byte, []int) {
return file_attachments_proto_rawDescGZIP(), []int{0}
}
func (x *Attachment) GetId() uint64 {
if x != nil {
return x.Id
}
return 0
}
func (x *Attachment) GetUuid() string {
if x != nil {
return x.Uuid
}
return ""
}
func (x *Attachment) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
func (x *Attachment) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Attachment) GetAlt() string {
if x != nil {
return x.Alt
}
return ""
}
func (x *Attachment) GetUsage() string {
if x != nil {
return x.Usage
}
return ""
}
func (x *Attachment) GetMimetype() string {
if x != nil {
return x.Mimetype
}
return ""
}
func (x *Attachment) GetHash() string {
if x != nil {
return x.Hash
}
return ""
}
func (x *Attachment) GetDestination() string {
if x != nil {
return x.Destination
}
return ""
}
func (x *Attachment) GetMetadata() []byte {
if x != nil {
return x.Metadata
}
return nil
}
func (x *Attachment) GetIsMature() bool {
if x != nil {
return x.IsMature
}
return false
}
func (x *Attachment) GetAccountId() uint64 {
if x != nil {
return x.AccountId
}
return 0
}
type AttachmentLookupRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id *uint64 `protobuf:"varint,1,opt,name=id,proto3,oneof" json:"id,omitempty"`
Uuid *string `protobuf:"bytes,2,opt,name=uuid,proto3,oneof" json:"uuid,omitempty"`
Usage *string `protobuf:"bytes,3,opt,name=usage,proto3,oneof" json:"usage,omitempty"`
}
func (x *AttachmentLookupRequest) Reset() {
*x = AttachmentLookupRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_attachments_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AttachmentLookupRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AttachmentLookupRequest) ProtoMessage() {}
func (x *AttachmentLookupRequest) ProtoReflect() protoreflect.Message {
mi := &file_attachments_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AttachmentLookupRequest.ProtoReflect.Descriptor instead.
func (*AttachmentLookupRequest) Descriptor() ([]byte, []int) {
return file_attachments_proto_rawDescGZIP(), []int{1}
}
func (x *AttachmentLookupRequest) GetId() uint64 {
if x != nil && x.Id != nil {
return *x.Id
}
return 0
}
func (x *AttachmentLookupRequest) GetUuid() string {
if x != nil && x.Uuid != nil {
return *x.Uuid
}
return ""
}
func (x *AttachmentLookupRequest) GetUsage() string {
if x != nil && x.Usage != nil {
return *x.Usage
}
return ""
}
var File_attachments_proto protoreflect.FileDescriptor
var file_attachments_proto_rawDesc = []byte{
0x0a, 0x11, 0x61, 0x74, 0x74, 0x61, 0x63, 0x68, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x12, 0x05, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74,
0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xaa, 0x02, 0x0a, 0x0a, 0x41, 0x74, 0x74, 0x61,
0x63, 0x68, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01,
0x28, 0x04, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x75, 0x69, 0x64, 0x18, 0x02,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x75, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69,
0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x12,
0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x61, 0x6c, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52,
0x03, 0x61, 0x6c, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x18, 0x06, 0x20,
0x01, 0x28, 0x09, 0x52, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x69,
0x6d, 0x65, 0x74, 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x69,
0x6d, 0x65, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x08,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65,
0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52,
0x0b, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x6d,
0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, 0x4d,
0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74,
0x5f, 0x69, 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x61, 0x63, 0x63, 0x6f, 0x75,
0x6e, 0x74, 0x49, 0x64, 0x22, 0x7c, 0x0a, 0x17, 0x41, 0x74, 0x74, 0x61, 0x63, 0x68, 0x6d, 0x65,
0x6e, 0x74, 0x4c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, 0x02, 0x69,
0x64, 0x88, 0x01, 0x01, 0x12, 0x17, 0x0a, 0x04, 0x75, 0x75, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x48, 0x01, 0x52, 0x04, 0x75, 0x75, 0x69, 0x64, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a,
0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x05,
0x75, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x42, 0x05, 0x0a, 0x03, 0x5f, 0x69, 0x64, 0x42,
0x07, 0x0a, 0x05, 0x5f, 0x75, 0x75, 0x69, 0x64, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x75, 0x73, 0x61,
0x67, 0x65, 0x32, 0xa6, 0x01, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x61, 0x63, 0x68, 0x6d, 0x65, 0x6e,
0x74, 0x73, 0x12, 0x44, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x61, 0x63, 0x68, 0x6d,
0x65, 0x6e, 0x74, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x74, 0x74, 0x61,
0x63, 0x68, 0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x74, 0x74, 0x61,
0x63, 0x68, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x51, 0x0a, 0x15, 0x43, 0x68, 0x65, 0x63,
0x6b, 0x41, 0x74, 0x74, 0x61, 0x63, 0x68, 0x6d, 0x65, 0x6e, 0x74, 0x45, 0x78, 0x69, 0x73, 0x74,
0x73, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x74, 0x74, 0x61, 0x63, 0x68,
0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x09, 0x5a, 0x07, 0x2e,
0x3b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_attachments_proto_rawDescOnce sync.Once
file_attachments_proto_rawDescData = file_attachments_proto_rawDesc
)
func file_attachments_proto_rawDescGZIP() []byte {
file_attachments_proto_rawDescOnce.Do(func() {
file_attachments_proto_rawDescData = protoimpl.X.CompressGZIP(file_attachments_proto_rawDescData)
})
return file_attachments_proto_rawDescData
}
var file_attachments_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_attachments_proto_goTypes = []interface{}{
(*Attachment)(nil), // 0: proto.Attachment
(*AttachmentLookupRequest)(nil), // 1: proto.AttachmentLookupRequest
(*emptypb.Empty)(nil), // 2: google.protobuf.Empty
}
var file_attachments_proto_depIdxs = []int32{
1, // 0: proto.Attachments.GetAttachment:input_type -> proto.AttachmentLookupRequest
1, // 1: proto.Attachments.CheckAttachmentExists:input_type -> proto.AttachmentLookupRequest
0, // 2: proto.Attachments.GetAttachment:output_type -> proto.Attachment
2, // 3: proto.Attachments.CheckAttachmentExists:output_type -> google.protobuf.Empty
2, // [2:4] is the sub-list for method output_type
0, // [0:2] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_attachments_proto_init() }
func file_attachments_proto_init() {
if File_attachments_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_attachments_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Attachment); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_attachments_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*AttachmentLookupRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_attachments_proto_msgTypes[1].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_attachments_proto_rawDesc,
NumEnums: 0,
NumMessages: 2,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_attachments_proto_goTypes,
DependencyIndexes: file_attachments_proto_depIdxs,
MessageInfos: file_attachments_proto_msgTypes,
}.Build()
File_attachments_proto = out.File
file_attachments_proto_rawDesc = nil
file_attachments_proto_goTypes = nil
file_attachments_proto_depIdxs = nil
}

View File

@ -1,147 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.3.0
// - protoc v5.26.1
// source: attachments.proto
package proto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
emptypb "google.golang.org/protobuf/types/known/emptypb"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
const (
Attachments_GetAttachment_FullMethodName = "/proto.Attachments/GetAttachment"
Attachments_CheckAttachmentExists_FullMethodName = "/proto.Attachments/CheckAttachmentExists"
)
// AttachmentsClient is the client API for Attachments service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type AttachmentsClient interface {
GetAttachment(ctx context.Context, in *AttachmentLookupRequest, opts ...grpc.CallOption) (*Attachment, error)
CheckAttachmentExists(ctx context.Context, in *AttachmentLookupRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
}
type attachmentsClient struct {
cc grpc.ClientConnInterface
}
func NewAttachmentsClient(cc grpc.ClientConnInterface) AttachmentsClient {
return &attachmentsClient{cc}
}
func (c *attachmentsClient) GetAttachment(ctx context.Context, in *AttachmentLookupRequest, opts ...grpc.CallOption) (*Attachment, error) {
out := new(Attachment)
err := c.cc.Invoke(ctx, Attachments_GetAttachment_FullMethodName, in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *attachmentsClient) CheckAttachmentExists(ctx context.Context, in *AttachmentLookupRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
out := new(emptypb.Empty)
err := c.cc.Invoke(ctx, Attachments_CheckAttachmentExists_FullMethodName, in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// AttachmentsServer is the server API for Attachments service.
// All implementations must embed UnimplementedAttachmentsServer
// for forward compatibility
type AttachmentsServer interface {
GetAttachment(context.Context, *AttachmentLookupRequest) (*Attachment, error)
CheckAttachmentExists(context.Context, *AttachmentLookupRequest) (*emptypb.Empty, error)
mustEmbedUnimplementedAttachmentsServer()
}
// UnimplementedAttachmentsServer must be embedded to have forward compatible implementations.
type UnimplementedAttachmentsServer struct {
}
func (UnimplementedAttachmentsServer) GetAttachment(context.Context, *AttachmentLookupRequest) (*Attachment, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetAttachment not implemented")
}
func (UnimplementedAttachmentsServer) CheckAttachmentExists(context.Context, *AttachmentLookupRequest) (*emptypb.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method CheckAttachmentExists not implemented")
}
func (UnimplementedAttachmentsServer) mustEmbedUnimplementedAttachmentsServer() {}
// UnsafeAttachmentsServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to AttachmentsServer will
// result in compilation errors.
type UnsafeAttachmentsServer interface {
mustEmbedUnimplementedAttachmentsServer()
}
func RegisterAttachmentsServer(s grpc.ServiceRegistrar, srv AttachmentsServer) {
s.RegisterService(&Attachments_ServiceDesc, srv)
}
func _Attachments_GetAttachment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AttachmentLookupRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(AttachmentsServer).GetAttachment(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: Attachments_GetAttachment_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(AttachmentsServer).GetAttachment(ctx, req.(*AttachmentLookupRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Attachments_CheckAttachmentExists_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AttachmentLookupRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(AttachmentsServer).CheckAttachmentExists(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: Attachments_CheckAttachmentExists_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(AttachmentsServer).CheckAttachmentExists(ctx, req.(*AttachmentLookupRequest))
}
return interceptor(ctx, in, info, handler)
}
// Attachments_ServiceDesc is the grpc.ServiceDesc for Attachments service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Attachments_ServiceDesc = grpc.ServiceDesc{
ServiceName: "proto.Attachments",
HandlerType: (*AttachmentsServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetAttachment",
Handler: _Attachments_GetAttachment_Handler,
},
{
MethodName: "CheckAttachmentExists",
Handler: _Attachments_CheckAttachmentExists_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "attachments.proto",
}

View File

@ -5,9 +5,12 @@ grpc_bind = "0.0.0.0:7443"
domain = "usercontent.solsynth.dev" domain = "usercontent.solsynth.dev"
secret = "LtTjzAGFLshwXhN4ZD4nG5KlMv1MWcsvfv03TSZYnT1VhiAnLIZFTnHUwR0XhGgi" secret = "LtTjzAGFLshwXhN4ZD4nG5KlMv1MWcsvfv03TSZYnT1VhiAnLIZFTnHUwR0XhGgi"
preferred_destination = "local"
accepts_usage = ["p.avatar", "p.banner", "i.attachment", "m.attachment"] accepts_usage = ["p.avatar", "p.banner", "i.attachment", "m.attachment"]
[workers]
files_deletion = 4
files_analyze = 4
[debug] [debug]
database = false database = false
print_routes = false print_routes = false
@ -25,14 +28,15 @@ refresh_token_duration = 2592000
dsn = "host=localhost user=postgres password=password dbname=hy_paperclip port=5432 sslmode=disable" dsn = "host=localhost user=postgres password=password dbname=hy_paperclip port=5432 sslmode=disable"
prefix = "paperclip_" prefix = "paperclip_"
[destinations.local] [destinations.temporary]
type = "local" type = "local"
path = "uploads" path = "uploads"
[destinations.s3] [destinations.permanent]
type = "s3" type = "s3"
bucket = "bucket" bucket = "bucket"
endpoint = "s3.ap-east-1.amazonaws.com" endpoint = "s3.ap-east-1.amazonaws.com"
secret_id = "secret" secret_id = "secret"
secret_key = "secret" secret_key = "secret"
access_baseurl = "https://raw.sn.solsynth.dev"
enable_ssl = true enable_ssl = true