diff --git a/DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.Designer.cs b/DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.Designer.cs
new file mode 100644
index 0000000..a962603
--- /dev/null
+++ b/DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.Designer.cs
@@ -0,0 +1,194 @@
+// <auto-generated />
+using System;
+using System.Collections.Generic;
+using DysonNetwork.Drive;
+using DysonNetwork.Shared.Data;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using NodaTime;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+
+#nullable disable
+
+namespace DysonNetwork.Drive.Migrations
+{
+ [DbContext(typeof(AppDatabase))]
+ [Migration("20250725051034_UpdateCloudFileThumbnail")]
+ partial class UpdateCloudFileThumbnail
+ {
+ /// <inheritdoc />
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "9.0.7")
+ .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+ NpgsqlModelBuilderExtensions.HasPostgresExtension(modelBuilder, "postgis");
+ NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+ modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
+ {
+ b.Property<string>("Id")
+ .HasMaxLength(32)
+ .HasColumnType("character varying(32)")
+ .HasColumnName("id");
+
+ b.Property<Guid>("AccountId")
+ .HasColumnType("uuid")
+ .HasColumnName("account_id");
+
+ b.Property<Instant>("CreatedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("created_at");
+
+ b.Property<Instant?>("DeletedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("deleted_at");
+
+ b.Property<string>("Description")
+ .HasMaxLength(4096)
+ .HasColumnType("character varying(4096)")
+ .HasColumnName("description");
+
+ b.Property<Dictionary<string, object?>>("FileMeta")
+ .IsRequired()
+ .HasColumnType("jsonb")
+ .HasColumnName("file_meta");
+
+ b.Property<bool>("HasCompression")
+ .HasColumnType("boolean")
+ .HasColumnName("has_compression");
+
+ b.Property<bool>("HasThumbnail")
+ .HasColumnType("boolean")
+ .HasColumnName("has_thumbnail");
+
+ b.Property<string>("Hash")
+ .HasMaxLength(256)
+ .HasColumnType("character varying(256)")
+ .HasColumnName("hash");
+
+ b.Property<bool>("IsMarkedRecycle")
+ .HasColumnType("boolean")
+ .HasColumnName("is_marked_recycle");
+
+ b.Property<string>("MimeType")
+ .HasMaxLength(256)
+ .HasColumnType("character varying(256)")
+ .HasColumnName("mime_type");
+
+ b.Property<string>("Name")
+ .IsRequired()
+ .HasMaxLength(1024)
+ .HasColumnType("character varying(1024)")
+ .HasColumnName("name");
+
+ b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
+ .HasColumnType("jsonb")
+ .HasColumnName("sensitive_marks");
+
+ b.Property<long>("Size")
+ .HasColumnType("bigint")
+ .HasColumnName("size");
+
+ b.Property<string>("StorageId")
+ .HasMaxLength(32)
+ .HasColumnType("character varying(32)")
+ .HasColumnName("storage_id");
+
+ b.Property<string>("StorageUrl")
+ .HasMaxLength(4096)
+ .HasColumnType("character varying(4096)")
+ .HasColumnName("storage_url");
+
+ b.Property<Instant>("UpdatedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("updated_at");
+
+ b.Property<Instant?>("UploadedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("uploaded_at");
+
+ b.Property<string>("UploadedTo")
+ .HasMaxLength(128)
+ .HasColumnType("character varying(128)")
+ .HasColumnName("uploaded_to");
+
+ b.Property<Dictionary<string, object?>>("UserMeta")
+ .HasColumnType("jsonb")
+ .HasColumnName("user_meta");
+
+ b.HasKey("Id")
+ .HasName("pk_files");
+
+ b.ToTable("files", (string)null);
+ });
+
+ modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
+ {
+ b.Property<Guid>("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid")
+ .HasColumnName("id");
+
+ b.Property<Instant>("CreatedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("created_at");
+
+ b.Property<Instant?>("DeletedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("deleted_at");
+
+ b.Property<Instant?>("ExpiredAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("expired_at");
+
+ b.Property<string>("FileId")
+ .IsRequired()
+ .HasMaxLength(32)
+ .HasColumnType("character varying(32)")
+ .HasColumnName("file_id");
+
+ b.Property<string>("ResourceId")
+ .IsRequired()
+ .HasMaxLength(1024)
+ .HasColumnType("character varying(1024)")
+ .HasColumnName("resource_id");
+
+ b.Property<Instant>("UpdatedAt")
+ .HasColumnType("timestamp with time zone")
+ .HasColumnName("updated_at");
+
+ b.Property<string>("Usage")
+ .IsRequired()
+ .HasMaxLength(1024)
+ .HasColumnType("character varying(1024)")
+ .HasColumnName("usage");
+
+ b.HasKey("Id")
+ .HasName("pk_file_references");
+
+ b.HasIndex("FileId")
+ .HasDatabaseName("ix_file_references_file_id");
+
+ b.ToTable("file_references", (string)null);
+ });
+
+ modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
+ {
+ b.HasOne("DysonNetwork.Drive.Storage.CloudFile", "File")
+ .WithMany()
+ .HasForeignKey("FileId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired()
+ .HasConstraintName("fk_file_references_files_file_id");
+
+ b.Navigation("File");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.cs b/DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.cs
new file mode 100644
index 0000000..369dcd9
--- /dev/null
+++ b/DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.cs
@@ -0,0 +1,29 @@
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace DysonNetwork.Drive.Migrations
+{
+ /// <inheritdoc />
+ public partial class UpdateCloudFileThumbnail : Migration
+ {
+ /// <inheritdoc />
+ protected override void Up(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.AddColumn<bool>(
+ name: "has_thumbnail",
+ table: "files",
+ type: "boolean",
+ nullable: false,
+ defaultValue: false);
+ }
+
+ /// <inheritdoc />
+ protected override void Down(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropColumn(
+ name: "has_thumbnail",
+ table: "files");
+ }
+ }
+}
diff --git a/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs b/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs
index 039bdfc..f80ba69 100644
--- a/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs
+++ b/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs
@@ -59,6 +59,10 @@ namespace DysonNetwork.Drive.Migrations
.HasColumnType("boolean")
.HasColumnName("has_compression");
+ b.Property<bool>("HasThumbnail")
+ .HasColumnType("boolean")
+ .HasColumnName("has_thumbnail");
+
b.Property<string>("Hash")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
diff --git a/DysonNetwork.Drive/Storage/CloudFile.cs b/DysonNetwork.Drive/Storage/CloudFile.cs
index 4ba9803..a4b1948 100644
--- a/DysonNetwork.Drive/Storage/CloudFile.cs
+++ b/DysonNetwork.Drive/Storage/CloudFile.cs
@@ -56,6 +56,7 @@ public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource
public Instant? UploadedAt { get; set; }
[MaxLength(128)] public string? UploadedTo { get; set; }
public bool HasCompression { get; set; } = false;
+ public bool HasThumbnail { get; set; } = false;
/// <summary>
/// The field is set to true if the recycling job plans to delete the file.
diff --git a/DysonNetwork.Drive/Storage/FileService.cs b/DysonNetwork.Drive/Storage/FileService.cs
index 5f2f535..14de1f2 100644
--- a/DysonNetwork.Drive/Storage/FileService.cs
+++ b/DysonNetwork.Drive/Storage/FileService.cs
@@ -10,6 +10,8 @@ using Minio.DataModel.Args;
using NetVips;
using NodaTime;
using tusdotnet.Stores;
+using System.Linq.Expressions;
+using Microsoft.EntityFrameworkCore.Query;
namespace DysonNetwork.Drive.Storage;
@@ -92,13 +94,11 @@ public class FileService(
.ToList();
}
- private static readonly string TempFilePrefix = "dyn-cloudfile";
+ private const string TempFilePrefix = "dyn-cloudfile";
private static readonly string[] AnimatedImageTypes =
["image/gif", "image/apng", "image/webp", "image/avif"];
- // The analysis file method no longer will remove the GPS EXIF data
- // It should be handled on the client side, and for some specific cases it should be keep
public async Task ProcessNewFileAsync(
Account account,
string fileId,
@@ -107,8 +107,6 @@ public class FileService(
string? contentType
)
{
- var result = new List<(string filePath, string suffix)>();
-
var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId));
var fileSize = stream.Length;
var hash = await HashFileAsync(stream, fileSize: fileSize);
@@ -124,82 +122,95 @@ public class FileService(
AccountId = Guid.Parse(account.Id)
};
- var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == hash);
- file.StorageId = existingFile is not null ? existingFile.StorageId : file.Id;
+ var existingFile = await db.Files.AsNoTracking().FirstOrDefaultAsync(f => f.Hash == hash);
+ file.StorageId = existingFile?.StorageId ?? file.Id;
if (existingFile is not null)
{
file.FileMeta = existingFile.FileMeta;
file.HasCompression = existingFile.HasCompression;
file.SensitiveMarks = existingFile.SensitiveMarks;
+ file.MimeType = existingFile.MimeType;
+ file.UploadedAt = existingFile.UploadedAt;
+ file.UploadedTo = existingFile.UploadedTo;
db.Files.Add(file);
await db.SaveChangesAsync();
+ // Since the file content is a duplicate, we can delete the new upload and we are done.
+ await stream.DisposeAsync();
+ await store.DeleteFileAsync(file.Id, CancellationToken.None);
return file;
}
- switch (contentType.Split('/')[0])
+ // Extract metadata on the current thread for a faster initial response
+ await ExtractMetadataAsync(file, ogFilePath, stream);
+
+ db.Files.Add(file);
+ await db.SaveChangesAsync();
+
+ // Offload optimization (image conversion, thumbnailing) and uploading to a background task
+ _ = Task.Run(() =>
+ ProcessAndUploadInBackgroundAsync(file.Id, file.StorageId, contentType, ogFilePath, stream));
+
+ return file;
+ }
+
+ /// <summary>
+ /// Extracts metadata from the file based on its content type.
+ /// This runs synchronously to ensure the initial database record has basic metadata.
+ /// </summary>
+ private async Task ExtractMetadataAsync(CloudFile file, string filePath, Stream stream)
+ {
+ switch (file.MimeType.Split('/')[0])
{
case "image":
- var blurhash =
- BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(
- xComponent: 3,
- yComponent: 3,
- filename: ogFilePath
- );
-
- // Rewind stream
- stream.Position = 0;
-
- // Use NetVips for the rest
- using (var vipsImage = NetVips.Image.NewFromStream(stream))
+ try
{
+ var blurhash = BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(3, 3, filePath);
+ stream.Position = 0;
+
+ using var vipsImage = Image.NewFromStream(stream);
var width = vipsImage.Width;
var height = vipsImage.Height;
- var format = vipsImage.Get("vips-loader") ?? "unknown";
+ var orientation = vipsImage.Get("orientation") as int? ?? 1;
- // Try to get orientation from exif data
- var orientation = 1;
var meta = new Dictionary<string, object?>
{
["blur"] = blurhash,
- ["format"] = format,
+ ["format"] = vipsImage.Get("vips-loader") ?? "unknown",
["width"] = width,
["height"] = height,
["orientation"] = orientation,
};
- Dictionary<string, object> exif = [];
+ var exif = new Dictionary<string, object>();
foreach (var field in vipsImage.GetFields())
{
+ if (IsIgnoredField(field)) continue;
var value = vipsImage.Get(field);
-
- // Skip GPS-related EXIF fields to remove location data
- if (IsIgnoredField(field))
- continue;
-
- if (field.StartsWith("exif-")) exif[field.Replace("exif-", "")] = value;
- else meta[field] = value;
-
- if (field == "orientation") orientation = (int)value;
+ if (field.StartsWith("exif-"))
+ exif[field.Replace("exif-", "")] = value;
+ else
+ meta[field] = value;
}
- if (orientation is 6 or 8)
- (width, height) = (height, width);
-
- var aspectRatio = height != 0 ? (double)width / height : 0;
-
+ if (orientation is 6 or 8) (width, height) = (height, width);
meta["exif"] = exif;
- meta["ratio"] = aspectRatio;
+ meta["ratio"] = height != 0 ? (double)width / height : 0;
file.FileMeta = meta;
}
+ catch (Exception ex)
+ {
+ logger.LogError(ex, "Failed to analyze image file {FileId}", file.Id);
+ }
break;
+
case "video":
case "audio":
try
{
- var mediaInfo = await FFProbe.AnalyseAsync(ogFilePath);
+ var mediaInfo = await FFProbe.AnalyseAsync(filePath);
file.FileMeta = new Dictionary<string, object?>
{
["duration"] = mediaInfo.Duration.TotalSeconds,
@@ -207,116 +218,152 @@ public class FileService(
["format_long_name"] = mediaInfo.Format.FormatLongName,
["start_time"] = mediaInfo.Format.StartTime.ToString(),
["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture),
- ["tags"] = mediaInfo.Format.Tags ?? [],
+ ["tags"] = mediaInfo.Format.Tags ?? new Dictionary<string, string>(),
["chapters"] = mediaInfo.Chapters,
+ // Add detailed stream information
+ ["video_streams"] = mediaInfo.VideoStreams.Select(s => new
+ {
+ s.AvgFrameRate, s.BitRate, s.CodecName, s.Duration, s.Height, s.Width, s.Language,
+ s.PixelFormat, s.Rotation
+ }).ToList(),
+ ["audio_streams"] = mediaInfo.AudioStreams.Select(s => new
+ {
+ s.BitRate, s.Channels, s.ChannelLayout, s.CodecName, s.Duration, s.Language,
+ s.SampleRateHz
+ })
+ .ToList(),
};
if (mediaInfo.PrimaryVideoStream is not null)
- file.FileMeta["ratio"] =
- mediaInfo.PrimaryVideoStream.Width / mediaInfo.PrimaryVideoStream.Height;
+ file.FileMeta["ratio"] = (double)mediaInfo.PrimaryVideoStream.Width /
+ mediaInfo.PrimaryVideoStream.Height;
}
catch (Exception ex)
{
- logger.LogError("File analyzed failed, unable collect video / audio information: {Message}",
- ex.Message);
+ logger.LogError(ex, "Failed to analyze media file {FileId}", file.Id);
}
break;
}
+ }
- db.Files.Add(file);
- await db.SaveChangesAsync();
+ /// <summary>
+ /// Handles file optimization (image compression, video thumbnailing) and uploads to remote storage in the background.
+ /// </summary>
+ private async Task ProcessAndUploadInBackgroundAsync(string fileId, string storageId, string contentType,
+ string originalFilePath, Stream stream)
+ {
+ await using var bgStream = stream; // Ensure stream is disposed at the end of this task
+ using var scope = scopeFactory.CreateScope();
+ var nfs = scope.ServiceProvider.GetRequiredService<FileService>();
+ var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
- _ = Task.Run(async () =>
+ var uploads = new List<(string FilePath, string Suffix, string ContentType, bool SelfDestruct)>();
+ var newMimeType = contentType;
+ var hasCompression = false;
+ var hasThumbnail = false;
+
+ try
{
- using var scope = scopeFactory.CreateScope();
- var nfs = scope.ServiceProvider.GetRequiredService<FileService>();
+ logger.LogInformation("Processing file {FileId} in background...", fileId);
- try
+ switch (contentType.Split('/')[0])
{
- logger.LogInformation("Processed file {fileId}, now trying optimizing if possible...", fileId);
-
- if (contentType.Split('/')[0] == "image")
- {
- // Skip compression for animated image types
- var animatedMimeTypes = AnimatedImageTypes;
- if (Enumerable.Contains(animatedMimeTypes, contentType))
+ case "image" when !AnimatedImageTypes.Contains(contentType):
+ newMimeType = "image/webp";
+ using (var vipsImage = Image.NewFromFile(originalFilePath, access: Enums.Access.Sequential))
{
- logger.LogInformation(
- "File {fileId} is an animated image (MIME: {mime}), skipping WebP conversion.", fileId,
- contentType
- );
- var tempFilePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
- result.Add((tempFilePath, string.Empty));
- return;
+ var imageToWrite = vipsImage;
+
+ if (vipsImage.Interpretation is Enums.Interpretation.Scrgb or Enums.Interpretation.Xyz)
+ {
+ imageToWrite = vipsImage.Colourspace(Enums.Interpretation.Srgb);
+ }
+
+ var webpPath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}.webp");
+ imageToWrite.Autorot().WriteToFile(webpPath,
+ new VOption { { "lossless", true }, { "strip", true } });
+ uploads.Add((webpPath, string.Empty, newMimeType, true));
+
+ if (imageToWrite.Width * imageToWrite.Height >= 1024 * 1024)
+ {
+ var scale = 1024.0 / Math.Max(imageToWrite.Width, imageToWrite.Height);
+ var compressedPath =
+ Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}-compressed.webp");
+ using var compressedImage = imageToWrite.Resize(scale);
+ compressedImage.Autorot().WriteToFile(compressedPath,
+ new VOption { { "Q", 80 }, { "strip", true } });
+ uploads.Add((compressedPath, ".compressed", newMimeType, true));
+ hasCompression = true;
+ }
+
+ if (!ReferenceEquals(imageToWrite, vipsImage))
+ {
+ imageToWrite.Dispose(); // Clean up manually created colourspace-converted image
+ }
}
- file.MimeType = "image/webp";
+ break;
- using var vipsImage = Image.NewFromFile(ogFilePath);
- var imagePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
- vipsImage.Autorot().WriteToFile(imagePath + ".webp",
- new VOption { { "lossless", true }, { "strip", true } });
- result.Add((imagePath + ".webp", string.Empty));
-
- if (vipsImage.Width * vipsImage.Height >= 1024 * 1024)
+ case "video":
+ uploads.Add((originalFilePath, string.Empty, contentType, false));
+ var thumbnailPath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}.thumbnail.webp");
+ try
{
- var scale = 1024.0 / Math.Max(vipsImage.Width, vipsImage.Height);
- var imageCompressedPath =
- Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}-compressed");
-
- // Create and save image within the same synchronous block to avoid disposal issues
- using var compressedImage = vipsImage.Resize(scale);
- compressedImage.Autorot().WriteToFile(imageCompressedPath + ".webp",
- new VOption { { "Q", 80 }, { "strip", true } });
-
- result.Add((imageCompressedPath + ".webp", ".compressed"));
- file.HasCompression = true;
+ var mediaInfo = await FFProbe.AnalyseAsync(originalFilePath);
+ var snapshotTime = mediaInfo.Duration > TimeSpan.FromSeconds(5)
+ ? TimeSpan.FromSeconds(5)
+ : TimeSpan.FromSeconds(1);
+ await FFMpeg.SnapshotAsync(originalFilePath, thumbnailPath, captureTime: snapshotTime);
+ uploads.Add((thumbnailPath, ".thumbnail.webp", "image/webp", true));
+ hasThumbnail = true;
+ }
+ catch (Exception ex)
+ {
+ logger.LogError(ex, "Failed to generate thumbnail for video {FileId}", fileId);
}
- }
- else
- {
- // No extra process for video, add it to the upload queue.
- result.Add((ogFilePath, string.Empty));
- }
- logger.LogInformation("Optimized file {fileId}, now uploading...", fileId);
+ break;
- if (result.Count > 0)
- {
- List<Task<CloudFile>> tasks = [];
- tasks.AddRange(result.Select(item =>
- nfs.UploadFileToRemoteAsync(file, item.filePath, null, item.suffix, true))
- );
+ default:
+ uploads.Add((originalFilePath, string.Empty, contentType, false));
+ break;
+ }
- await Task.WhenAll(tasks);
- file = await tasks.First();
- }
- else
- {
- file = await nfs.UploadFileToRemoteAsync(file, stream, null);
- }
+ logger.LogInformation("Optimized file {FileId}, now uploading...", fileId);
- logger.LogInformation("Uploaded file {fileId} done!", fileId);
+ if (uploads.Count > 0)
+ {
+ var uploadedTo = configuration.GetValue<string>("Storage:PreferredRemote")!;
+ var uploadTasks = uploads.Select(item =>
+ nfs.UploadFileToRemoteAsync(storageId, uploadedTo, item.FilePath, item.Suffix, item.ContentType,
+ item.SelfDestruct)
+ ).ToList();
- var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
- await scopedDb.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(setter => setter
- .SetProperty(f => f.UploadedAt, file.UploadedAt)
- .SetProperty(f => f.UploadedTo, file.UploadedTo)
- .SetProperty(f => f.MimeType, file.MimeType)
- .SetProperty(f => f.HasCompression, file.HasCompression)
+ await Task.WhenAll(uploadTasks);
+
+ logger.LogInformation("Uploaded file {FileId} done!", fileId);
+
+ var fileToUpdate = await scopedDb.Files.FirstAsync(f => f.Id == fileId);
+ if (hasThumbnail) fileToUpdate.HasThumbnail = true;
+
+ await scopedDb.Files.Where(f => f.Id == fileId).ExecuteUpdateAsync(setter => setter
+ .SetProperty(f => f.UploadedAt, Instant.FromDateTimeUtc(DateTime.UtcNow))
+ .SetProperty(f => f.UploadedTo, uploadedTo)
+ .SetProperty(f => f.MimeType, newMimeType)
+ .SetProperty(f => f.HasCompression, hasCompression)
+ .SetProperty(f => f.HasThumbnail, hasThumbnail)
);
}
- catch (Exception err)
- {
- logger.LogError(err, "Failed to process {fileId}", fileId);
- }
-
- await stream.DisposeAsync();
- await store.DeleteFileAsync(file.Id, CancellationToken.None);
- await nfs._PurgeCacheAsync(file.Id);
- });
-
- return file;
+ }
+ catch (Exception err)
+ {
+ logger.LogError(err, "Failed to process and upload {FileId}", fileId);
+ }
+ finally
+ {
+ await store.DeleteFileAsync(fileId, CancellationToken.None);
+ await nfs._PurgeCacheAsync(fileId);
+ }
}
private static async Task<string> HashFileAsync(Stream stream, int chunkSize = 1024 * 1024, long? fileSize = null)
@@ -327,6 +374,7 @@ public class FileService(
using var md5 = MD5.Create();
var hashBytes = await md5.ComputeHashAsync(stream);
+ stream.Position = 0; // Reset stream position after reading
return Convert.ToHexString(hashBytes).ToLowerInvariant();
}
@@ -349,45 +397,38 @@ public class FileService(
}
var hash = md5.ComputeHash(buffer, 0, bytesRead);
+ stream.Position = 0; // Reset stream position
return Convert.ToHexString(hash).ToLowerInvariant();
}
- public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, string filePath, string? targetRemote,
- string? suffix = null, bool selfDestruct = false)
+ public async Task UploadFileToRemoteAsync(string storageId, string targetRemote, string filePath,
+ string? suffix = null, string? contentType = null, bool selfDestruct = false)
{
- var fileStream = File.OpenRead(filePath);
- var result = await UploadFileToRemoteAsync(file, fileStream, targetRemote, suffix);
+ await using var fileStream = File.OpenRead(filePath);
+ await UploadFileToRemoteAsync(storageId, targetRemote, fileStream, suffix, contentType);
if (selfDestruct) File.Delete(filePath);
- return result;
}
- public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, Stream stream, string? targetRemote,
- string? suffix = null)
+ public async Task UploadFileToRemoteAsync(string storageId, string targetRemote, Stream stream,
+ string? suffix = null, string? contentType = null)
{
- if (file.UploadedAt.HasValue) return file;
-
- file.UploadedTo = targetRemote ?? configuration.GetValue<string>("Storage:PreferredRemote")!;
-
- var dest = GetRemoteStorageConfig(file.UploadedTo);
+ var dest = GetRemoteStorageConfig(targetRemote);
var client = CreateMinioClient(dest);
if (client is null)
throw new InvalidOperationException(
- $"Failed to configure client for remote destination '{file.UploadedTo}'"
+ $"Failed to configure client for remote destination '{targetRemote}'"
);
var bucket = dest.Bucket;
- var contentType = file.MimeType ?? "application/octet-stream";
+ contentType ??= "application/octet-stream";
await client.PutObjectAsync(new PutObjectArgs()
.WithBucket(bucket)
- .WithObject(string.IsNullOrWhiteSpace(suffix) ? file.Id : file.Id + suffix)
- .WithStreamData(stream) // Fix this disposed
+ .WithObject(string.IsNullOrWhiteSpace(suffix) ? storageId : storageId + suffix)
+ .WithStreamData(stream)
.WithObjectSize(stream.Length)
.WithContentType(contentType)
);
-
- file.UploadedAt = Instant.FromDateTimeUtc(DateTime.UtcNow);
- return file;
}
public async Task<CloudFile> UpdateFileAsync(CloudFile file, FieldMask updateMask)
@@ -398,58 +439,38 @@ public class FileService(
throw new InvalidOperationException($"File with ID {file.Id} not found.");
}
+ var updatable = new UpdatableCloudFile(existingFile);
+
foreach (var path in updateMask.Paths)
{
switch (path)
{
case "name":
- existingFile.Name = file.Name;
+ updatable.Name = file.Name;
break;
case "description":
- existingFile.Description = file.Description;
+ updatable.Description = file.Description;
break;
case "file_meta":
- existingFile.FileMeta = file.FileMeta;
+ updatable.FileMeta = file.FileMeta;
break;
case "user_meta":
- existingFile.UserMeta = file.UserMeta;
- break;
- case "mime_type":
- existingFile.MimeType = file.MimeType;
- break;
- case "hash":
- existingFile.Hash = file.Hash;
- break;
- case "size":
- existingFile.Size = file.Size;
- break;
- case "uploaded_at":
- existingFile.UploadedAt = file.UploadedAt;
- break;
- case "uploaded_to":
- existingFile.UploadedTo = file.UploadedTo;
- break;
- case "has_compression":
- existingFile.HasCompression = file.HasCompression;
+ updatable.UserMeta = file.UserMeta;
break;
case "is_marked_recycle":
- existingFile.IsMarkedRecycle = file.IsMarkedRecycle;
- break;
- case "storage_id":
- existingFile.StorageId = file.StorageId;
- break;
- case "storage_url":
- existingFile.StorageUrl = file.StorageUrl;
+ updatable.IsMarkedRecycle = file.IsMarkedRecycle;
break;
default:
- logger.LogWarning("Attempted to update unknown field: {Field}", path);
+ logger.LogWarning("Attempted to update unmodifiable field: {Field}", path);
break;
}
}
- await db.SaveChangesAsync();
+ await db.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(updatable.ToSetPropertyCalls());
+
await _PurgeCacheAsync(file.Id);
- return existingFile;
+ // Re-fetch the file to return the updated state
+ return await db.Files.AsNoTracking().FirstAsync(f => f.Id == file.Id);
}
public async Task DeleteFileAsync(CloudFile file)
@@ -618,46 +639,46 @@ public class FileService(
}
/// <summary>
- /// Checks if an EXIF field contains GPS location data
+ /// Checks if an EXIF field should be ignored (e.g., GPS data).
/// </summary>
- /// <param name="fieldName">The EXIF field name</param>
- /// <returns>True if the field contains GPS data, false otherwise</returns>
- private static bool IsGpsExifField(string fieldName)
+ private static bool IsIgnoredField(string fieldName)
{
// Common GPS EXIF field names
var gpsFields = new[]
{
- "gps-latitude",
- "gps-longitude",
- "gps-altitude",
- "gps-latitude-ref",
- "gps-longitude-ref",
- "gps-altitude-ref",
- "gps-timestamp",
- "gps-datestamp",
- "gps-speed",
- "gps-speed-ref",
- "gps-track",
- "gps-track-ref",
- "gps-img-direction",
- "gps-img-direction-ref",
- "gps-dest-latitude",
- "gps-dest-longitude",
- "gps-dest-latitude-ref",
- "gps-dest-longitude-ref",
- "gps-processing-method",
+ "gps-latitude", "gps-longitude", "gps-altitude", "gps-latitude-ref", "gps-longitude-ref",
+ "gps-altitude-ref", "gps-timestamp", "gps-datestamp", "gps-speed", "gps-speed-ref", "gps-track",
+ "gps-track-ref", "gps-img-direction", "gps-img-direction-ref", "gps-dest-latitude",
+ "gps-dest-longitude", "gps-dest-latitude-ref", "gps-dest-longitude-ref", "gps-processing-method",
"gps-area-information"
};
- return gpsFields.Any(gpsField =>
- fieldName.Equals(gpsField, StringComparison.OrdinalIgnoreCase) ||
- fieldName.StartsWith("gps", StringComparison.OrdinalIgnoreCase));
- }
-
- private static bool IsIgnoredField(string fieldName)
- {
- if (IsGpsExifField(fieldName)) return true;
+ if (fieldName.StartsWith("exif-GPS")) return true;
+ if (fieldName.StartsWith("ifd3-GPS")) return true;
if (fieldName.EndsWith("-data")) return true;
- return false;
+ return gpsFields.Any(gpsField => fieldName.StartsWith(gpsField, StringComparison.OrdinalIgnoreCase));
+ }
+}
+
+/// <summary>
+/// A helper class to build an ExecuteUpdateAsync call for CloudFile.
+/// </summary>
+file class UpdatableCloudFile(CloudFile file)
+{
+ public string Name { get; set; } = file.Name;
+ public string? Description { get; set; } = file.Description;
+ public Dictionary<string, object?>? FileMeta { get; set; } = file.FileMeta;
+ public Dictionary<string, object?>? UserMeta { get; set; } = file.UserMeta;
+ public bool IsMarkedRecycle { get; set; } = file.IsMarkedRecycle;
+
+ public Expression<Func<SetPropertyCalls<CloudFile>, SetPropertyCalls<CloudFile>>> ToSetPropertyCalls()
+ {
+ var userMeta = UserMeta ?? new Dictionary<string, object?>();
+ return setter => setter
+ .SetProperty(f => f.Name, Name)
+ .SetProperty(f => f.Description, Description)
+ .SetProperty(f => f.FileMeta, FileMeta)
+ .SetProperty(f => f.UserMeta, userMeta!)
+ .SetProperty(f => f.IsMarkedRecycle, IsMarkedRecycle);
}
}
\ No newline at end of file
diff --git a/DysonNetwork.Sphere/Chat/ChatRoom.cs b/DysonNetwork.Sphere/Chat/ChatRoom.cs
index 44f0fd7..1d32ad6 100644
--- a/DysonNetwork.Sphere/Chat/ChatRoom.cs
+++ b/DysonNetwork.Sphere/Chat/ChatRoom.cs
@@ -74,7 +74,7 @@ public class ChatMember : ModelBase
public Guid ChatRoomId { get; set; }
public ChatRoom ChatRoom { get; set; } = null!;
public Guid AccountId { get; set; }
- [NotMapped] public Account Account { get; set; } = null!;
+ [NotMapped] public Account? Account { get; set; }
[MaxLength(1024)] public string? Nick { get; set; }
diff --git a/DysonNetwork.Sphere/Chat/ChatService.cs b/DysonNetwork.Sphere/Chat/ChatService.cs
index 173969f..7c3bc31 100644
--- a/DysonNetwork.Sphere/Chat/ChatService.cs
+++ b/DysonNetwork.Sphere/Chat/ChatService.cs
@@ -260,7 +260,8 @@ public partial class ChatService(
}
else if (member.Notify == ChatMemberNotify.Mentions) continue;
- accountsToNotify.Add(member.Account.ToProtoValue());
+ if (member.Account is not null)
+ accountsToNotify.Add(member.Account.ToProtoValue());
}
logger.LogInformation($"Trying to deliver message to {accountsToNotify.Count} accounts...");
@@ -596,4 +597,4 @@ public class SyncResponse
{
public List Changes { get; set; } = [];
public Instant CurrentTimestamp { get; set; }
-}
+}
\ No newline at end of file
diff --git a/DysonNetwork.sln.DotSettings.user b/DysonNetwork.sln.DotSettings.user
index 98f1329..017ec22 100644
--- a/DysonNetwork.sln.DotSettings.user
+++ b/DysonNetwork.sln.DotSettings.user
@@ -38,6 +38,7 @@
ForceIncluded
ForceIncluded
ForceIncluded
+ ForceIncluded
ForceIncluded
ForceIncluded
ForceIncluded