using System.Globalization;
using FFMpegCore;
using System.Security.Cryptography;
using Blurhash.ImageSharp;
using Microsoft.EntityFrameworkCore;
using Minio;
using Minio.DataModel;
using Minio.DataModel.Args;
using NodaTime;
using Quartz;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
using SixLabors.ImageSharp.Metadata.Profiles.Exif;
using ExifTag = SixLabors.ImageSharp.Metadata.Profiles.Exif.ExifTag;

namespace DysonNetwork.Sphere.Storage;

/// <summary>
/// Handles ingestion, analysis, optimization, remote (S3/Minio) upload,
/// deletion and usage accounting of cloud files.
/// </summary>
public class FileService(
    AppDatabase db,
    IConfiguration configuration,
    ILogger<FileService> logger,
    IServiceScopeFactory scopeFactory
)
{
    private const string TempFilePrefix = "dyn-cloudfile";

    // The analysis file method no longer will remove the GPS EXIF data
    // It should be handled on the client side, and for some specific cases it should be kept
    public async Task<CloudFile> ProcessNewFileAsync(
        Account.Account account,
        string fileId,
        Stream stream,
        string fileName,
        string? contentType
    )
    {
        // If this variable presents a value it means the processor modified the uploaded file;
        // upload those files to the remote instead of the original stream.
        var modifiedResult = new List<(string filePath, string suffix)>();

        var fileSize = stream.Length;
        var hash = await HashFileAsync(stream, fileSize: fileSize);
        contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName);

        // Deduplicate by content hash: reuse the existing record if we already store this file.
        var existingFile = await db.Files.Where(f => f.Hash == hash).FirstOrDefaultAsync();
        if (existingFile is not null) return existingFile;

        var file = new CloudFile
        {
            Id = fileId,
            Name = fileName,
            MimeType = contentType,
            Size = fileSize,
            Hash = hash,
            AccountId = account.Id
        };

        switch (contentType.Split('/')[0])
        {
            case "image":
                stream.Position = 0;
                using (var imageSharp = await Image.LoadAsync<Rgba32>(stream))
                {
                    var width = imageSharp.Width;
                    var height = imageSharp.Height;
                    var blurhash = Blurhasher.Encode(imageSharp, 3, 3);
                    var format = imageSharp.Metadata.DecodedImageFormat?.Name ?? "unknown";

                    var exifProfile = imageSharp.Metadata.ExifProfile;
                    ushort orientation = 1;
                    // NOTE(review): collected EXIF values are currently never populated — confirm intent.
                    List<object> exif = [];
                    if (exifProfile?.Values.FirstOrDefault(e => e.Tag == ExifTag.Orientation)
                            ?.GetValue() is ushort o)
                        orientation = o;
                    // EXIF orientations 6 and 8 are 90-degree rotations; swap reported dimensions.
                    if (orientation is 6 or 8)
                        (width, height) = (height, width);
                    var aspectRatio = height != 0 ? (double)width / height : 0;

                    file.FileMeta = new Dictionary<string, object>
                    {
                        ["blur"] = blurhash,
                        ["format"] = format,
                        ["width"] = width,
                        ["height"] = height,
                        ["orientation"] = orientation,
                        ["ratio"] = aspectRatio,
                        ["exif"] = exif
                    };
                }

                break;
            case "video":
            case "audio":
                // Rewind: hashing above consumed the stream, FFProbe reads from the current position.
                stream.Position = 0;
                try
                {
                    var mediaInfo = await FFProbe.AnalyseAsync(stream);
                    file.FileMeta = new Dictionary<string, object>
                    {
                        ["duration"] = mediaInfo.Duration.TotalSeconds,
                        ["format_name"] = mediaInfo.Format.FormatName,
                        ["format_long_name"] = mediaInfo.Format.FormatLongName,
                        ["start_time"] = mediaInfo.Format.StartTime.ToString(),
                        ["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture),
                        ["tags"] = mediaInfo.Format.Tags ?? [],
                        ["chapters"] = mediaInfo.Chapters,
                    };
                }
                catch (Exception ex)
                {
                    // Media analysis is best-effort; keep the upload even when probing fails.
                    logger.LogWarning(ex, "Failed to analyse media metadata for {fileId}", fileId);
                }

                break;
        }

        db.Files.Add(file);
        await db.SaveChangesAsync();

        // Optimize and upload in the background; the caller gets the DB record immediately.
        _ = Task.Run(async () =>
        {
            using var scope = scopeFactory.CreateScope();
            var nfs = scope.ServiceProvider.GetRequiredService<FileService>();

            try
            {
                logger.LogInformation("Processed file {fileId}, now trying optimizing if possible...", fileId);

                if (contentType.Split('/')[0] == "image")
                {
                    file.MimeType = "image/webp";

                    var ogFilePath = Path.Join(configuration.GetValue<string>("Tus:StorePath"), file.Id);
                    using var imageSharp = await Image.LoadAsync(ogFilePath);

                    var imagePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
                    await imageSharp.SaveAsWebpAsync(imagePath);
                    modifiedResult.Add((imagePath, string.Empty));

                    // Produce an additional size-capped variant for images >= 1 megapixel.
                    if (imageSharp.Size.Width * imageSharp.Size.Height >= 1024 * 1024)
                    {
                        using var compressedClone = imageSharp.Clone(i =>
                            i.Resize(new ResizeOptions
                            {
                                Mode = ResizeMode.Max,
                                Size = new Size(1024, 1024),
                            })
                        );
                        var imageCompressedPath =
                            Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}-compressed");
                        await compressedClone.SaveAsWebpAsync(imageCompressedPath);
                        modifiedResult.Add((imageCompressedPath, ".compressed"));
                        file.HasCompression = true;
                    }
                }

                logger.LogInformation("Optimized file {fileId}, now uploading...", fileId);

                if (modifiedResult.Count > 0)
                {
                    List<Task<CloudFile>> tasks = [];
                    tasks.AddRange(modifiedResult.Select(result =>
                        nfs.UploadFileToRemoteAsync(file, result.filePath, null, result.suffix, true)));
                    await Task.WhenAll(tasks);
                    file = await tasks.First();
                }
                else
                {
                    // Rewind before uploading: the stream was consumed by hashing/probing above.
                    // NOTE(review): `stream` is owned by the caller and may already be disposed
                    // when this background task runs — confirm lifetime with the upload pipeline.
                    stream.Position = 0;
                    file = await nfs.UploadFileToRemoteAsync(file, stream, null);
                }

                logger.LogInformation("Uploaded file {fileId} done!", fileId);

                // Persist the fields mutated during upload via a scoped context
                // (the original request-scoped `db` may be gone by now).
                var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
                await scopedDb.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(setter => setter
                    .SetProperty(f => f.UploadedAt, file.UploadedAt)
                    .SetProperty(f => f.UploadedTo, file.UploadedTo)
                    .SetProperty(f => f.MimeType, file.MimeType)
                    .SetProperty(f => f.HasCompression, file.HasCompression)
                );
            }
            catch (Exception err)
            {
                logger.LogError(err, "Failed to process {fileId}", fileId);
            }
        });

        return file;
    }

    /// <summary>
    /// Computes a lowercase hex MD5 digest of the stream (content dedup key, not a
    /// security hash). Very large files fall back to a fast approximate hash.
    /// </summary>
    private static async Task<string> HashFileAsync(Stream stream, int chunkSize = 1024 * 1024,
        long? fileSize = null)
    {
        fileSize ??= stream.Length;
        // 5 GiB threshold. Use long arithmetic: `chunkSize * 1024 * 5` overflows Int32
        // and previously evaluated to a 1 GiB threshold.
        if (fileSize > (long)chunkSize * 1024 * 5)
            return await HashFastApproximateAsync(stream, chunkSize);

        // Hash from the beginning regardless of the caller's current position.
        stream.Position = 0;
        using var md5 = MD5.Create();
        var hashBytes = await md5.ComputeHashAsync(stream);
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }

    /// <summary>
    /// Approximate hash for huge files: digests only the first and last
    /// <paramref name="chunkSize"/> bytes instead of the whole content.
    /// </summary>
    private static async Task<string> HashFastApproximateAsync(Stream stream, int chunkSize = 1024 * 1024)
    {
        // NOTE(review): the previous `chunkSize *= 1024` scaling made the sample
        // buffer 2 GiB (which wraps negative and throws on allocation); chunkSize
        // is already specified in bytes, so no scaling is applied.
        using var md5 = MD5.Create();
        var buffer = new byte[chunkSize * 2];
        var fileLength = stream.Length;

        stream.Position = 0;
        var bytesRead = await stream.ReadAsync(buffer.AsMemory(0, chunkSize));
        if (fileLength > chunkSize)
        {
            // Sample the tail as well so edits near the end change the hash.
            stream.Seek(-chunkSize, SeekOrigin.End);
            bytesRead += await stream.ReadAsync(buffer.AsMemory(chunkSize, chunkSize));
        }

        var hash = md5.ComputeHash(buffer, 0, bytesRead);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Uploads a local file to remote storage; optionally deletes the local
    /// copy afterwards (<paramref name="selfDestruct"/>).
    /// </summary>
    public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, string filePath, string? targetRemote,
        string? suffix = null, bool selfDestruct = false)
    {
        CloudFile result;
        await using (var fileStream = File.OpenRead(filePath))
        {
            result = await UploadFileToRemoteAsync(file, fileStream, targetRemote, suffix);
        }

        // Delete only after the handle is closed (deleting an open file fails on Windows).
        if (selfDestruct) File.Delete(filePath);
        return result;
    }

    /// <summary>
    /// Streams the file content to the configured remote (Minio/S3) bucket.
    /// No-op when the file was already uploaded. Sets UploadedTo/UploadedAt.
    /// </summary>
    public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, Stream stream, string? targetRemote,
        string? suffix = null)
    {
        if (file.UploadedAt.HasValue) return file;

        file.UploadedTo = targetRemote ?? configuration.GetValue<string>("Storage:PreferredRemote")!;

        var dest = GetRemoteStorageConfig(file.UploadedTo);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
                $"Failed to configure client for remote destination '{file.UploadedTo}'"
            );

        var bucket = dest.Bucket;
        var contentType = file.MimeType ?? "application/octet-stream";

        await client.PutObjectAsync(new PutObjectArgs()
            .WithBucket(bucket)
            .WithObject(string.IsNullOrWhiteSpace(suffix) ? file.Id : file.Id + suffix)
            .WithStreamData(stream)
            .WithObjectSize(stream.Length)
            .WithContentType(contentType)
        );

        file.UploadedAt = Instant.FromDateTimeUtc(DateTime.UtcNow);
        return file;
    }

    /// <summary>Deletes both the remote blob and the database record.</summary>
    public async Task DeleteFileAsync(CloudFile file)
    {
        await DeleteFileDataAsync(file);

        db.Remove(file);
        await db.SaveChangesAsync();
    }

    /// <summary>
    /// Removes the file's blob from remote storage. Does NOT touch the database
    /// row — previously it did, which made <see cref="DeleteFileAsync"/> delete
    /// the same entity twice; callers now own the row's lifecycle.
    /// </summary>
    public async Task DeleteFileDataAsync(CloudFile file)
    {
        if (file.UploadedTo is null) return;
        var dest = GetRemoteStorageConfig(file.UploadedTo);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
                $"Failed to configure client for remote destination '{file.UploadedTo}'"
            );

        var bucket = dest.Bucket;
        await client.RemoveObjectAsync(
            new RemoveObjectArgs().WithBucket(bucket).WithObject(file.Id)
        );
    }

    /// <summary>Looks up a remote destination from `Storage:Remote` configuration by id.</summary>
    public RemoteStorageConfig GetRemoteStorageConfig(string destination)
    {
        var destinations = configuration.GetSection("Storage:Remote").Get<List<RemoteStorageConfig>>()!;
        var dest = destinations.FirstOrDefault(d => d.Id == destination);
        if (dest is null) throw new InvalidOperationException($"Remote destination '{destination}' not found");
        return dest;
    }

    /// <summary>Builds a Minio client for the given remote destination.</summary>
    public IMinioClient? CreateMinioClient(RemoteStorageConfig dest)
    {
        var client = new MinioClient()
            .WithEndpoint(dest.Endpoint)
            .WithRegion(dest.Region)
            .WithCredentials(dest.SecretId, dest.SecretKey);
        if (dest.EnableSsl) client = client.WithSSL();

        return client.Build();
    }

    /// <summary>Adjusts the file's usage counter by <paramref name="delta"/> (may be negative).</summary>
    public async Task MarkUsageAsync(CloudFile file, int delta)
    {
        await db.Files.Where(o => o.Id == file.Id)
            .ExecuteUpdateAsync(setter => setter.SetProperty(
                    b => b.UsedCount,
                    b => b.UsedCount + delta
                )
            );
    }

    /// <summary>Adjusts the usage counter of many files in a single statement.</summary>
    public async Task MarkUsageRangeAsync(ICollection<CloudFile> files, int delta)
    {
        var ids = files.Select(f => f.Id).ToArray();
        await db.Files.Where(o => ids.Contains(o.Id))
            .ExecuteUpdateAsync(setter => setter.SetProperty(
                    b => b.UsedCount,
                    b => b.UsedCount + delta
                )
            );
    }
}

/// <summary>
/// Quartz job that purges unused (never referenced, older than one hour)
/// and expired cloud files: remote blobs first, then database rows.
/// </summary>
public class CloudFileUnusedRecyclingJob(
    AppDatabase db,
    FileService fs,
    ILogger<CloudFileUnusedRecyclingJob> logger
) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        logger.LogInformation("Deleting unused cloud files...");

        var now = SystemClock.Instance.GetCurrentInstant();
        var cutoff = now - Duration.FromHours(1);
        var files = await db.Files
            .Where(f =>
                (f.ExpiredAt == null && f.UsedCount == 0 && f.CreatedAt < cutoff) ||
                // NOTE(review): was `ExpiredAt >= now`, which selected files NOT yet
                // expired for deletion; expired means the expiry lies in the past.
                (f.ExpiredAt != null && f.ExpiredAt <= now)
            )
            .ToListAsync();

        logger.LogInformation("Deleting {Count} unused cloud files...", files.Count);

        // DbContext is not thread-safe, so delete remote blobs sequentially
        // instead of Task.WhenAll over methods sharing `db`.
        foreach (var file in files)
            await fs.DeleteFileDataAsync(file);

        // Remove the rows for everything whose blob was just deleted.
        await db.Files
            .Where(f =>
                (f.ExpiredAt == null && f.UsedCount == 0 && f.CreatedAt < cutoff) ||
                (f.ExpiredAt != null && f.ExpiredAt <= now)
            )
            .ExecuteDeleteAsync();
    }
}