🐛 Fix crash caused by hashing small files

LittleSheep 2024-10-18 23:36:54 +08:00
parent cbe034a049
commit a9156992f4
2 changed files with 35 additions and 26 deletions

.idea/workspace.xml (generated)

@@ -4,7 +4,7 @@
     <option name="autoReloadType" value="ALL" />
   </component>
   <component name="ChangeListManager">
-    <list default="true" id="18dd0d68-b4b8-40db-9734-9119b5c848bd" name="更改" comment=":zap: Use exif whitelist to prevent produce garbage data">
+    <list default="true" id="18dd0d68-b4b8-40db-9734-9119b5c848bd" name="更改" comment=":bug: Trying to prevent exiftool causing analyze failed">
       <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
       <change beforePath="$PROJECT_DIR$/pkg/internal/services/analyzer.go" beforeDir="false" afterPath="$PROJECT_DIR$/pkg/internal/services/analyzer.go" afterDir="false" />
     </list>
@@ -117,7 +117,6 @@
     </option>
   </component>
   <component name="VcsManagerConfiguration">
-    <MESSAGE value=":sparkles: Pool clean by lifecycle config" />
     <MESSAGE value=":recycle: Split mark and delete file" />
     <MESSAGE value=":bug: Fix migration issue on pools" />
     <MESSAGE value=":bug: Fix schedule deletion will delete referenced file" />
@@ -142,7 +141,8 @@
     <MESSAGE value=":lock: Fix Attachment will contains GPS information" />
     <MESSAGE value=":sparkles: Save EXIF into file metadata" />
     <MESSAGE value=":zap: Use exif whitelist to prevent produce garbage data" />
-    <option name="LAST_COMMIT_MESSAGE" value=":zap: Use exif whitelist to prevent produce garbage data" />
+    <MESSAGE value=":bug: Trying to prevent exiftool causing analyze failed" />
+    <option name="LAST_COMMIT_MESSAGE" value=":bug: Trying to prevent exiftool causing analyze failed" />
   </component>
   <component name="VgoProject">
     <settings-migrated>true</settings-migrated>

pkg/internal/services/analyzer.go

@@ -277,32 +277,41 @@ func HashAttachment(file models.Attachment) (hash string, err error) {
 	hasher := sha256.New()
-	// Hash the first 32KB
-	buf := make([]byte, chunkSize)
-	if _, err := inFile.Read(buf); err != nil && err != io.EOF {
-		return "", fmt.Errorf("error reading file: %v", err)
-	}
-	hasher.Write(buf)
-	// Hash the middle 32KB
-	middleOffset := fileInfo.Size() / 2
-	if _, err := inFile.Seek(middleOffset, io.SeekStart); err != nil {
-		return "", fmt.Errorf("error seeking to middle: %v", err)
-	}
-	if _, err := inFile.Read(buf); err != nil && err != io.EOF {
-		return "", fmt.Errorf("error reading middle: %v", err)
-	}
-	hasher.Write(buf)
-	// Hash the last 32KB
-	endOffset := fileInfo.Size() - chunkSize
-	if _, err := inFile.Seek(endOffset, io.SeekStart); err != nil {
-		return "", fmt.Errorf("error seeking to end: %v", err)
-	}
-	if _, err := inFile.Read(buf); err != nil && err != io.EOF {
-		return "", fmt.Errorf("error reading end: %v", err)
-	}
-	hasher.Write(buf)
+	if fileInfo.Size() <= chunkSize*3 {
+		// If the total size is smaller than three chunks, then hash the whole file
+		buf := make([]byte, fileInfo.Size())
+		if _, err := inFile.Read(buf); err != nil && err != io.EOF {
+			return "", fmt.Errorf("error reading whole file: %v", err)
+		}
+		hasher.Write(buf)
+	} else {
+		// Hash the first 32KB
+		buf := make([]byte, chunkSize)
+		if _, err := inFile.Read(buf); err != nil && err != io.EOF {
+			return "", fmt.Errorf("error reading file: %v", err)
+		}
+		hasher.Write(buf)
+		// Hash the middle 32KB
+		middleOffset := fileInfo.Size() / 2
+		if _, err := inFile.Seek(middleOffset, io.SeekStart); err != nil {
+			return "", fmt.Errorf("error seeking to middle: %v", err)
+		}
+		if _, err := inFile.Read(buf); err != nil && err != io.EOF {
+			return "", fmt.Errorf("error reading middle: %v", err)
+		}
+		hasher.Write(buf)
+		// Hash the last 32KB
+		endOffset := fileInfo.Size() - chunkSize
+		if _, err := inFile.Seek(endOffset, io.SeekStart); err != nil {
+			return "", fmt.Errorf("error seeking to end: %v", err)
+		}
+		if _, err := inFile.Read(buf); err != nil && err != io.EOF {
+			return "", fmt.Errorf("error reading end: %v", err)
+		}
+		hasher.Write(buf)
+	}
 	// Hash with the file metadata
 	hasher.Write([]byte(fmt.Sprintf("%d", file.Size)))
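For reference, below is a minimal, self-contained sketch of the sampling strategy this diff settles on: hash the whole file when it is smaller than three chunks, otherwise hash the first, middle, and last 32 KB and mix in the size. The names sampleHash and chunkSize, the 32 KB value, the temp-file demo, and the use of io.Copy / io.ReadFull are illustrative assumptions, not the project's actual HashAttachment API, which also folds in fields from models.Attachment.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// chunkSize is assumed from the "32KB" comments in the diff.
const chunkSize = 32 * 1024

// sampleHash is an illustrative stand-in for HashAttachment: it hashes small
// files whole and samples three 32KB chunks plus the size for larger files.
func sampleHash(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", fmt.Errorf("error opening file: %v", err)
	}
	defer f.Close()

	info, err := f.Stat()
	if err != nil {
		return "", fmt.Errorf("error stating file: %v", err)
	}

	hasher := sha256.New()
	if info.Size() <= chunkSize*3 {
		// Small file: hash it whole instead of seeking past its end.
		if _, err := io.Copy(hasher, f); err != nil {
			return "", fmt.Errorf("error reading whole file: %v", err)
		}
	} else {
		// Large file: hash the first, middle, and last 32KB only.
		buf := make([]byte, chunkSize)
		for _, off := range []int64{0, info.Size() / 2, info.Size() - chunkSize} {
			if _, err := f.Seek(off, io.SeekStart); err != nil {
				return "", fmt.Errorf("error seeking to %d: %v", off, err)
			}
			if _, err := io.ReadFull(f, buf); err != nil && err != io.ErrUnexpectedEOF {
				return "", fmt.Errorf("error reading at %d: %v", off, err)
			}
			hasher.Write(buf)
		}
	}

	// Mix in the file size, mirroring how the diff hashes file.Size as metadata.
	hasher.Write([]byte(fmt.Sprintf("%d", info.Size())))
	return hex.EncodeToString(hasher.Sum(nil)), nil
}

func main() {
	// Demo of the small-file path: a five-byte file is hashed whole rather
	// than being sampled at offsets that do not exist.
	tmp, err := os.CreateTemp("", "small-*.txt")
	if err != nil {
		panic(err)
	}
	defer os.Remove(tmp.Name())
	tmp.WriteString("hello")
	tmp.Close()

	sum, err := sampleHash(tmp.Name())
	fmt.Println(sum, err)
}
```

The sketch simplifies the diff's lenient single-Read error handling into io.Copy and io.ReadFull; the branching on three chunk sizes is the part that corresponds to this commit.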