Make the new upload method functional

This commit is contained in:
2025-09-21 18:32:08 +08:00
parent e1459951c4
commit b2a0d25ffa
7 changed files with 110 additions and 144 deletions

View File

@@ -1,5 +1,3 @@
using System.Net;
using System.Net.Sockets;
using Aspire.Hosting.Yarp.Transforms; using Aspire.Hosting.Yarp.Transforms;
using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Hosting;
@@ -62,7 +60,7 @@ for (var idx = 0; idx < services.Count; idx++)
// Extra double-ended references // Extra double-ended references
ringService.WithReference(passService); ringService.WithReference(passService);
builder.AddYarp("gateway") var gateway = builder.AddYarp("gateway")
.WithConfiguration(yarp => .WithConfiguration(yarp =>
{ {
var ringCluster = yarp.AddCluster(ringService.GetEndpoint("http")); var ringCluster = yarp.AddCluster(ringService.GetEndpoint("http"));
@@ -91,6 +89,8 @@ builder.AddYarp("gateway")
.WithTransformPathPrefix("/api"); .WithTransformPathPrefix("/api");
}); });
if (isDev) gateway.WithHostPort(5001);
builder.AddDockerComposeEnvironment("docker-compose"); builder.AddDockerComposeEnvironment("docker-compose");
builder.Build().Run(); builder.Build().Run();

View File

@@ -46,12 +46,36 @@ public class FileController(
if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl); if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl);
if (file.UploadedAt is null)
{
// File is not yet uploaded to remote storage. Try to serve from local temp storage.
var tempFilePath = Path.Combine(Path.GetTempPath(), file.Id);
if (System.IO.File.Exists(tempFilePath))
{
if (file.IsEncrypted)
{
return StatusCode(StatusCodes.Status403Forbidden, "Encrypted files cannot be accessed before they are processed and stored.");
}
return PhysicalFile(tempFilePath, file.MimeType ?? "application/octet-stream", file.Name, enableRangeProcessing: true);
}
// Fallback for tus uploads that are not processed yet.
var tusStorePath = configuration.GetValue<string>("Tus:StorePath");
if (!string.IsNullOrEmpty(tusStorePath))
{
var tusFilePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
if (System.IO.File.Exists(tusFilePath))
{
return PhysicalFile(tusFilePath, file.MimeType ?? "application/octet-stream", file.Name, enableRangeProcessing: true);
}
}
return StatusCode(StatusCodes.Status503ServiceUnavailable, "File is being processed. Please try again later.");
}
if (!file.PoolId.HasValue) if (!file.PoolId.HasValue)
{ {
var tusStorePath = configuration.GetValue<string>("Tus:StorePath")!; return StatusCode(StatusCodes.Status500InternalServerError, "File is in an inconsistent state: uploaded but no pool ID.");
var filePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
if (!System.IO.File.Exists(filePath)) return new NotFoundResult();
return PhysicalFile(filePath, file.MimeType ?? "application/octet-stream", file.Name);
} }
var pool = await fs.GetPoolAsync(file.PoolId.Value); var pool = await fs.GetPoolAsync(file.PoolId.Value);

View File

@@ -19,7 +19,6 @@ namespace DysonNetwork.Drive.Storage;
public class FileService( public class FileService(
AppDatabase db, AppDatabase db,
IConfiguration configuration,
ILogger<FileService> logger, ILogger<FileService> logger,
IServiceScopeFactory scopeFactory, IServiceScopeFactory scopeFactory,
ICacheService cache ICacheService cache
@@ -28,14 +27,6 @@ public class FileService(
private const string CacheKeyPrefix = "file:"; private const string CacheKeyPrefix = "file:";
private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15); private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15);
/// <summary>
/// The api for getting file meta with cache,
/// the best use case is for accessing the file data.
///
/// <b>This function won't load uploader's information, only keep minimal file meta</b>
/// </summary>
/// <param name="fileId">The id of the cloud file requested</param>
/// <returns>The minimal file meta</returns>
public async Task<CloudFile?> GetFileAsync(string fileId) public async Task<CloudFile?> GetFileAsync(string fileId)
{ {
var cacheKey = $"{CacheKeyPrefix}{fileId}"; var cacheKey = $"{CacheKeyPrefix}{fileId}";
@@ -61,7 +52,6 @@ public class FileService(
var cachedFiles = new Dictionary<string, CloudFile>(); var cachedFiles = new Dictionary<string, CloudFile>();
var uncachedIds = new List<string>(); var uncachedIds = new List<string>();
// Check cache first
foreach (var fileId in fileIds) foreach (var fileId in fileIds)
{ {
var cacheKey = $"{CacheKeyPrefix}{fileId}"; var cacheKey = $"{CacheKeyPrefix}{fileId}";
@@ -73,7 +63,6 @@ public class FileService(
uncachedIds.Add(fileId); uncachedIds.Add(fileId);
} }
// Load uncached files from database
if (uncachedIds.Count > 0) if (uncachedIds.Count > 0)
{ {
var dbFiles = await db.Files var dbFiles = await db.Files
@@ -81,7 +70,6 @@ public class FileService(
.Include(f => f.Pool) .Include(f => f.Pool)
.ToListAsync(); .ToListAsync();
// Add to cache
foreach (var file in dbFiles) foreach (var file in dbFiles)
{ {
var cacheKey = $"{CacheKeyPrefix}{file.Id}"; var cacheKey = $"{CacheKeyPrefix}{file.Id}";
@@ -90,7 +78,6 @@ public class FileService(
} }
} }
// Preserve original order
return fileIds return fileIds
.Select(f => cachedFiles.GetValueOrDefault(f)) .Select(f => cachedFiles.GetValueOrDefault(f))
.Where(f => f != null) .Where(f => f != null)
@@ -111,7 +98,7 @@ public class FileService(
string fileId, string fileId,
string filePool, string filePool,
string? fileBundleId, string? fileBundleId,
Stream stream, string filePath,
string fileName, string fileName,
string? contentType, string? contentType,
string? encryptPassword, string? encryptPassword,
@@ -142,58 +129,64 @@ public class FileService(
if (bundle?.ExpiredAt != null) if (bundle?.ExpiredAt != null)
expiredAt = bundle.ExpiredAt.Value; expiredAt = bundle.ExpiredAt.Value;
var managedTempPath = Path.Combine(Path.GetTempPath(), fileId);
File.Copy(filePath, managedTempPath, true);
var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId)); var fileInfo = new FileInfo(managedTempPath);
var fileSize = stream.Length; var fileSize = fileInfo.Length;
contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName); var finalContentType = contentType ?? (!fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName));
if (!string.IsNullOrWhiteSpace(encryptPassword))
{
if (!pool.PolicyConfig.AllowEncryption)
throw new InvalidOperationException("Encryption is not allowed in this pool");
var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted");
FileEncryptor.EncryptFile(ogFilePath, encryptedPath, encryptPassword);
File.Delete(ogFilePath); // Delete original unencrypted
File.Move(encryptedPath, ogFilePath); // Replace the original one with encrypted
contentType = "application/octet-stream";
}
var hash = await HashFileAsync(ogFilePath);
var file = new CloudFile var file = new CloudFile
{ {
Id = fileId, Id = fileId,
Name = fileName, Name = fileName,
MimeType = contentType, MimeType = finalContentType,
Size = fileSize, Size = fileSize,
Hash = hash,
ExpiredAt = expiredAt, ExpiredAt = expiredAt,
BundleId = bundle?.Id, BundleId = bundle?.Id,
AccountId = Guid.Parse(account.Id), AccountId = Guid.Parse(account.Id),
IsEncrypted = !string.IsNullOrWhiteSpace(encryptPassword) && pool.PolicyConfig.AllowEncryption
}; };
// Extract metadata on the current thread for a faster initial response
if (!pool.PolicyConfig.NoMetadata) if (!pool.PolicyConfig.NoMetadata)
await ExtractMetadataAsync(file, ogFilePath, stream); {
await ExtractMetadataAsync(file, managedTempPath);
}
string processingPath = managedTempPath;
bool isTempFile = true;
if (!string.IsNullOrWhiteSpace(encryptPassword))
{
if (!pool.PolicyConfig.AllowEncryption)
throw new InvalidOperationException("Encryption is not allowed in this pool");
var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted");
FileEncryptor.EncryptFile(managedTempPath, encryptedPath, encryptPassword);
File.Delete(managedTempPath);
processingPath = encryptedPath;
file.IsEncrypted = true;
file.MimeType = "application/octet-stream";
file.Size = new FileInfo(processingPath).Length;
}
file.Hash = await HashFileAsync(processingPath);
db.Files.Add(file); db.Files.Add(file);
await db.SaveChangesAsync(); await db.SaveChangesAsync();
file.StorageId ??= file.Id; file.StorageId ??= file.Id;
// Offload optimization (image conversion, thumbnailing) and uploading to a background task
_ = Task.Run(() => _ = Task.Run(() =>
ProcessAndUploadInBackgroundAsync(file.Id, filePool, file.StorageId, contentType, ogFilePath, stream)); ProcessAndUploadInBackgroundAsync(file.Id, filePool, file.StorageId, file.MimeType, processingPath, isTempFile));
return file; return file;
} }
/// <summary> private async Task ExtractMetadataAsync(CloudFile file, string filePath)
/// Extracts metadata from the file based on its content type.
/// This runs synchronously to ensure the initial database record has basic metadata.
/// </summary>
private async Task ExtractMetadataAsync(CloudFile file, string filePath, Stream stream)
{ {
switch (file.MimeType?.Split('/')[0]) switch (file.MimeType?.Split('/')[0])
{ {
@@ -201,6 +194,7 @@ public class FileService(
try try
{ {
var blurhash = BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(3, 3, filePath); var blurhash = BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(3, 3, filePath);
await using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
stream.Position = 0; stream.Position = 0;
using var vipsImage = Image.NewFromStream(stream); using var vipsImage = Image.NewFromStream(stream);
@@ -265,7 +259,6 @@ public class FileService(
["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture), ["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture),
["tags"] = mediaInfo.Format.Tags ?? new Dictionary<string, string>(), ["tags"] = mediaInfo.Format.Tags ?? new Dictionary<string, string>(),
["chapters"] = mediaInfo.Chapters, ["chapters"] = mediaInfo.Chapters,
// Add detailed stream information
["video_streams"] = mediaInfo.VideoStreams.Select(s => new ["video_streams"] = mediaInfo.VideoStreams.Select(s => new
{ {
s.AvgFrameRate, s.AvgFrameRate,
@@ -303,22 +296,18 @@ public class FileService(
} }
} }
/// <summary>
/// Handles file optimization (image compression, video thumbnail) and uploads to remote storage in the background.
/// </summary>
private async Task ProcessAndUploadInBackgroundAsync( private async Task ProcessAndUploadInBackgroundAsync(
string fileId, string fileId,
string remoteId, string remoteId,
string storageId, string storageId,
string contentType, string contentType,
string originalFilePath, string processingFilePath,
Stream stream bool isTempFile
) )
{ {
var pool = await GetPoolAsync(Guid.Parse(remoteId)); var pool = await GetPoolAsync(Guid.Parse(remoteId));
if (pool is null) return; if (pool is null) return;
await using var bgStream = stream; // Ensure stream is disposed at the end of this task
using var scope = scopeFactory.CreateScope(); using var scope = scopeFactory.CreateScope();
var nfs = scope.ServiceProvider.GetRequiredService<FileService>(); var nfs = scope.ServiceProvider.GetRequiredService<FileService>();
var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>(); var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
@@ -332,21 +321,27 @@ public class FileService(
{ {
logger.LogInformation("Processing file {FileId} in background...", fileId); logger.LogInformation("Processing file {FileId} in background...", fileId);
var fileExtension = Path.GetExtension(originalFilePath); var fileToUpdate = await scopedDb.Files.AsNoTracking().FirstAsync(f => f.Id == fileId);
if (!pool.PolicyConfig.NoOptimization) if (fileToUpdate.IsEncrypted)
{
uploads.Add((processingFilePath, string.Empty, contentType, false));
}
else if (!pool.PolicyConfig.NoOptimization)
{
var fileExtension = Path.GetExtension(processingFilePath);
switch (contentType.Split('/')[0]) switch (contentType.Split('/')[0])
{ {
case "image": case "image":
if (AnimatedImageTypes.Contains(contentType) || AnimatedImageExtensions.Contains(fileExtension)) if (AnimatedImageTypes.Contains(contentType) || AnimatedImageExtensions.Contains(fileExtension))
{ {
logger.LogInformation("Skip optimize file {FileId} due to it is animated...", fileId); logger.LogInformation("Skip optimize file {FileId} due to it is animated...", fileId);
uploads.Add((originalFilePath, string.Empty, contentType, false)); uploads.Add((processingFilePath, string.Empty, contentType, false));
break; break;
} }
newMimeType = "image/webp"; newMimeType = "image/webp";
using (var vipsImage = Image.NewFromFile(originalFilePath)) using (var vipsImage = Image.NewFromFile(processingFilePath))
{ {
var imageToWrite = vipsImage; var imageToWrite = vipsImage;
@@ -374,20 +369,20 @@ public class FileService(
if (!ReferenceEquals(imageToWrite, vipsImage)) if (!ReferenceEquals(imageToWrite, vipsImage))
{ {
imageToWrite.Dispose(); // Clean up manually created colourspace-converted image imageToWrite.Dispose();
} }
} }
break; break;
case "video": case "video":
uploads.Add((originalFilePath, string.Empty, contentType, false)); uploads.Add((processingFilePath, string.Empty, contentType, false));
var thumbnailPath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}.thumbnail.jpg"); var thumbnailPath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}.thumbnail.jpg");
try try
{ {
await FFMpegArguments await FFMpegArguments
.FromFileInput(originalFilePath, verifyExists: true) .FromFileInput(processingFilePath, verifyExists: true)
.OutputToFile(thumbnailPath, overwrite: true, options => options .OutputToFile(thumbnailPath, overwrite: true, options => options
.Seek(TimeSpan.FromSeconds(0)) .Seek(TimeSpan.FromSeconds(0))
.WithFrameOutputCount(1) .WithFrameOutputCount(1)
@@ -415,10 +410,11 @@ public class FileService(
break; break;
default: default:
uploads.Add((originalFilePath, string.Empty, contentType, false)); uploads.Add((processingFilePath, string.Empty, contentType, false));
break; break;
} }
else uploads.Add((originalFilePath, string.Empty, contentType, false)); }
else uploads.Add((processingFilePath, string.Empty, contentType, false));
logger.LogInformation("Optimized file {FileId}, now uploading...", fileId); logger.LogInformation("Optimized file {FileId}, now uploading...", fileId);
@@ -440,9 +436,6 @@ public class FileService(
logger.LogInformation("Uploaded file {FileId} done!", fileId); logger.LogInformation("Uploaded file {FileId} done!", fileId);
var fileToUpdate = await scopedDb.Files.FirstAsync(f => f.Id == fileId);
if (hasThumbnail) fileToUpdate.HasThumbnail = true;
var now = SystemClock.Instance.GetCurrentInstant(); var now = SystemClock.Instance.GetCurrentInstant();
await scopedDb.Files.Where(f => f.Id == fileId).ExecuteUpdateAsync(setter => setter await scopedDb.Files.Where(f => f.Id == fileId).ExecuteUpdateAsync(setter => setter
.SetProperty(f => f.UploadedAt, now) .SetProperty(f => f.UploadedAt, now)
@@ -459,6 +452,10 @@ public class FileService(
} }
finally finally
{ {
if (isTempFile)
{
File.Delete(processingFilePath);
}
await nfs._PurgeCacheAsync(fileId); await nfs._PurgeCacheAsync(fileId);
} }
} }
@@ -491,7 +488,7 @@ public class FileService(
} }
var hash = MD5.HashData(buffer.AsSpan(0, bytesRead)); var hash = MD5.HashData(buffer.AsSpan(0, bytesRead));
stream.Position = 0; // Reset stream position stream.Position = 0;
return Convert.ToHexString(hash).ToLowerInvariant(); return Convert.ToHexString(hash).ToLowerInvariant();
} }
@@ -574,7 +571,6 @@ public class FileService(
await db.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(updatable.ToSetPropertyCalls()); await db.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(updatable.ToSetPropertyCalls());
await _PurgeCacheAsync(file.Id); await _PurgeCacheAsync(file.Id);
// Re-fetch the file to return the updated state
return await db.Files.AsNoTracking().FirstAsync(f => f.Id == file.Id); return await db.Files.AsNoTracking().FirstAsync(f => f.Id == file.Id);
} }
@@ -593,18 +589,15 @@ public class FileService(
if (!force) if (!force)
{ {
// Check if any other file with the same storage ID is referenced
var sameOriginFiles = await db.Files var sameOriginFiles = await db.Files
.Where(f => f.StorageId == file.StorageId && f.Id != file.Id) .Where(f => f.StorageId == file.StorageId && f.Id != file.Id)
.Select(f => f.Id) .Select(f => f.Id)
.ToListAsync(); .ToListAsync();
// Check if any of these files are referenced
if (sameOriginFiles.Count != 0) if (sameOriginFiles.Count != 0)
return; return;
} }
// If any other file with the same storage ID is referenced, don't delete the actual file data
var dest = await GetRemoteStorageConfig(file.PoolId.Value); var dest = await GetRemoteStorageConfig(file.PoolId.Value);
if (dest is null) throw new InvalidOperationException($"No remote storage configured for pool {file.PoolId}"); if (dest is null) throw new InvalidOperationException($"No remote storage configured for pool {file.PoolId}");
var client = CreateMinioClient(dest); var client = CreateMinioClient(dest);
@@ -614,7 +607,7 @@ public class FileService(
); );
var bucket = dest.Bucket; var bucket = dest.Bucket;
var objectId = file.StorageId ?? file.Id; // Use StorageId if available, otherwise fall back to Id var objectId = file.StorageId ?? file.Id;
await client.RemoveObjectAsync( await client.RemoveObjectAsync(
new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId) new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId)
@@ -630,7 +623,6 @@ public class FileService(
} }
catch catch
{ {
// Ignore errors when deleting compressed version
logger.LogWarning("Failed to delete compressed version of file {fileId}", file.Id); logger.LogWarning("Failed to delete compressed version of file {fileId}", file.Id);
} }
} }
@@ -645,25 +637,17 @@ public class FileService(
} }
catch catch
{ {
// Ignore errors when deleting thumbnail
logger.LogWarning("Failed to delete thumbnail of file {fileId}", file.Id); logger.LogWarning("Failed to delete thumbnail of file {fileId}", file.Id);
} }
} }
} }
/// <summary>
/// The most efficent way to delete file data (stored files) in batch.
/// But this DO NOT check the storage id, so use with caution!
/// </summary>
/// <param name="files">Files to delete</param>
/// <exception cref="InvalidOperationException">Something went wrong</exception>
public async Task DeleteFileDataBatchAsync(List<CloudFile> files) public async Task DeleteFileDataBatchAsync(List<CloudFile> files)
{ {
files = files.Where(f => f.PoolId.HasValue).ToList(); files = files.Where(f => f.PoolId.HasValue).ToList();
foreach (var fileGroup in files.GroupBy(f => f.PoolId!.Value)) foreach (var fileGroup in files.GroupBy(f => f.PoolId!.Value))
{ {
// If any other file with the same storage ID is referenced, don't delete the actual file data
var dest = await GetRemoteStorageConfig(fileGroup.Key); var dest = await GetRemoteStorageConfig(fileGroup.Key);
if (dest is null) if (dest is null)
throw new InvalidOperationException($"No remote storage configured for pool {fileGroup.Key}"); throw new InvalidOperationException($"No remote storage configured for pool {fileGroup.Key}");
@@ -733,15 +717,12 @@ public class FileService(
return client.Build(); return client.Build();
} }
// Helper method to purge the cache for a specific file
// Made internal to allow FileReferenceService to use it
internal async Task _PurgeCacheAsync(string fileId) internal async Task _PurgeCacheAsync(string fileId)
{ {
var cacheKey = $"{CacheKeyPrefix}{fileId}"; var cacheKey = $"{CacheKeyPrefix}{fileId}";
await cache.RemoveAsync(cacheKey); await cache.RemoveAsync(cacheKey);
} }
// Helper method to purge cache for multiple files
internal async Task _PurgeCacheRangeAsync(IEnumerable<string> fileIds) internal async Task _PurgeCacheRangeAsync(IEnumerable<string> fileIds)
{ {
var tasks = fileIds.Select(_PurgeCacheAsync); var tasks = fileIds.Select(_PurgeCacheAsync);
@@ -753,7 +734,6 @@ public class FileService(
var cachedFiles = new Dictionary<string, CloudFile>(); var cachedFiles = new Dictionary<string, CloudFile>();
var uncachedIds = new List<string>(); var uncachedIds = new List<string>();
// Check cache first
foreach (var reference in references) foreach (var reference in references)
{ {
var cacheKey = $"{CacheKeyPrefix}{reference.Id}"; var cacheKey = $"{CacheKeyPrefix}{reference.Id}";
@@ -769,14 +749,12 @@ public class FileService(
} }
} }
// Load uncached files from database
if (uncachedIds.Count > 0) if (uncachedIds.Count > 0)
{ {
var dbFiles = await db.Files var dbFiles = await db.Files
.Where(f => uncachedIds.Contains(f.Id)) .Where(f => uncachedIds.Contains(f.Id))
.ToListAsync(); .ToListAsync();
// Add to cache
foreach (var file in dbFiles) foreach (var file in dbFiles)
{ {
var cacheKey = $"{CacheKeyPrefix}{file.Id}"; var cacheKey = $"{CacheKeyPrefix}{file.Id}";
@@ -785,18 +763,12 @@ public class FileService(
} }
} }
// Preserve original order
return references return references
.Select(r => cachedFiles.GetValueOrDefault(r.Id)) .Select(r => cachedFiles.GetValueOrDefault(r.Id))
.Where(f => f != null) .Where(f => f != null)
.ToList(); .ToList();
} }
/// <summary>
/// Gets the number of references to a file based on CloudFileReference records
/// </summary>
/// <param name="fileId">The ID of the file</param>
/// <returns>The number of references to the file</returns>
public async Task<int> GetReferenceCountAsync(string fileId) public async Task<int> GetReferenceCountAsync(string fileId)
{ {
return await db.FileReferences return await db.FileReferences
@@ -804,11 +776,6 @@ public class FileService(
.CountAsync(); .CountAsync();
} }
/// <summary>
/// Checks if a file is referenced by any resource
/// </summary>
/// <param name="fileId">The ID of the file to check</param>
/// <returns>True if the file is referenced, false otherwise</returns>
public async Task<bool> IsReferencedAsync(string fileId) public async Task<bool> IsReferencedAsync(string fileId)
{ {
return await db.FileReferences return await db.FileReferences
@@ -816,12 +783,8 @@ public class FileService(
.AnyAsync(); .AnyAsync();
} }
/// <summary>
/// Checks if an EXIF field should be ignored (e.g., GPS data).
/// </summary>
private static bool IsIgnoredField(string fieldName) private static bool IsIgnoredField(string fieldName)
{ {
// Common GPS EXIF field names
var gpsFields = new[] var gpsFields = new[]
{ {
"gps-latitude", "gps-longitude", "gps-altitude", "gps-latitude-ref", "gps-longitude-ref", "gps-latitude", "gps-longitude", "gps-altitude", "gps-latitude-ref", "gps-longitude-ref",
@@ -904,9 +867,6 @@ public class FileService(
} }
} }
/// <summary>
/// A helper class to build an ExecuteUpdateAsync call for CloudFile.
/// </summary>
file class UpdatableCloudFile(CloudFile file) file class UpdatableCloudFile(CloudFile file)
{ {
public string Name { get; set; } = file.Name; public string Name { get; set; } = file.Name;
@@ -925,4 +885,4 @@ file class UpdatableCloudFile(CloudFile file)
.SetProperty(f => f.UserMeta, userMeta!) .SetProperty(f => f.UserMeta, userMeta!)
.SetProperty(f => f.IsMarkedRecycle, IsMarkedRecycle); .SetProperty(f => f.IsMarkedRecycle, IsMarkedRecycle);
} }
} }

View File

@@ -23,7 +23,7 @@ public class FileUploadController(
: ControllerBase : ControllerBase
{ {
private readonly string _tempPath = private readonly string _tempPath =
Path.Combine(configuration.GetValue<string>("Storage:Uploads") ?? Path.GetTempPath(), "multipart-uploads"); configuration.GetValue<string>("Storage:Uploads") ?? Path.Combine(Path.GetTempPath(), "multipart-uploads");
private const long DefaultChunkSize = 1024 * 1024 * 5; // 5MB private const long DefaultChunkSize = 1024 * 1024 * 5; // 5MB
@@ -42,12 +42,9 @@ public class FileUploadController(
} }
} }
if (!Guid.TryParse(request.PoolId, out var poolGuid)) request.PoolId ??= Guid.Parse(configuration["Storage:PreferredRemote"]!);
{
return BadRequest("Invalid file pool id");
}
var pool = await fileService.GetPoolAsync(poolGuid); var pool = await fileService.GetPoolAsync(request.PoolId.Value);
if (pool is null) if (pool is null)
{ {
return BadRequest("Pool not found"); return BadRequest("Pool not found");
@@ -73,11 +70,6 @@ public class FileUploadController(
} }
} }
if (!string.IsNullOrEmpty(request.BundleId) && !Guid.TryParse(request.BundleId, out _))
{
return BadRequest("Invalid file bundle id");
}
var policy = pool.PolicyConfig; var policy = pool.PolicyConfig;
if (!policy.AllowEncryption && !string.IsNullOrEmpty(request.EncryptPassword)) if (!policy.AllowEncryption && !string.IsNullOrEmpty(request.EncryptPassword))
{ {
@@ -160,7 +152,7 @@ public class FileUploadController(
ContentType = request.ContentType, ContentType = request.ContentType,
ChunkSize = chunkSize, ChunkSize = chunkSize,
ChunksCount = chunksCount, ChunksCount = chunksCount,
PoolId = request.PoolId, PoolId = request.PoolId.Value,
BundleId = request.BundleId, BundleId = request.BundleId,
EncryptPassword = request.EncryptPassword, EncryptPassword = request.EncryptPassword,
ExpiredAt = request.ExpiredAt, ExpiredAt = request.ExpiredAt,
@@ -241,26 +233,22 @@ public class FileUploadController(
var fileId = await Nanoid.GenerateAsync(); var fileId = await Nanoid.GenerateAsync();
await using (var fileStream = var cloudFile = await fileService.ProcessNewFileAsync(
new FileStream(mergedFilePath, FileMode.Open, FileAccess.Read, FileShare.Read))
{
var cloudFile = await fileService.ProcessNewFileAsync(
currentUser, currentUser,
fileId, fileId,
task.PoolId, task.PoolId.ToString(),
task.BundleId, task.BundleId?.ToString(),
fileStream, mergedFilePath,
task.FileName, task.FileName,
task.ContentType, task.ContentType,
task.EncryptPassword, task.EncryptPassword,
task.ExpiredAt task.ExpiredAt
); );
// Clean up // Clean up
Directory.Delete(taskPath, true); Directory.Delete(taskPath, true);
System.IO.File.Delete(mergedFilePath); System.IO.File.Delete(mergedFilePath);
return Ok(cloudFile); return Ok(cloudFile);
}
} }
} }

View File

@@ -9,8 +9,8 @@ namespace DysonNetwork.Drive.Storage.Model
public string FileName { get; set; } = null!; public string FileName { get; set; } = null!;
public long FileSize { get; set; } public long FileSize { get; set; }
public string ContentType { get; set; } = null!; public string ContentType { get; set; } = null!;
public string PoolId { get; set; } = null!; public Guid? PoolId { get; set; } = null!;
public string? BundleId { get; set; } public Guid? BundleId { get; set; }
public string? EncryptPassword { get; set; } public string? EncryptPassword { get; set; }
public Instant? ExpiredAt { get; set; } public Instant? ExpiredAt { get; set; }
public long? ChunkSize { get; set; } public long? ChunkSize { get; set; }
@@ -33,8 +33,8 @@ namespace DysonNetwork.Drive.Storage.Model
public string ContentType { get; set; } = null!; public string ContentType { get; set; } = null!;
public long ChunkSize { get; set; } public long ChunkSize { get; set; }
public int ChunksCount { get; set; } public int ChunksCount { get; set; }
public string PoolId { get; set; } = null!; public Guid PoolId { get; set; }
public string? BundleId { get; set; } public Guid? BundleId { get; set; }
public string? EncryptPassword { get; set; } public string? EncryptPassword { get; set; }
public Instant? ExpiredAt { get; set; } public Instant? ExpiredAt { get; set; }
public string Hash { get; set; } = null!; public string Hash { get; set; } = null!;

View File

@@ -113,7 +113,7 @@ public abstract class TusService
: "uploaded_file"; : "uploaded_file";
var contentType = metadata.TryGetValue("content-type", out var ct) ? ct.GetString(Encoding.UTF8) : null; var contentType = metadata.TryGetValue("content-type", out var ct) ? ct.GetString(Encoding.UTF8) : null;
var fileStream = await file.GetContentAsync(eventContext.CancellationToken); var filePath = Path.Combine(configuration.GetValue<string>("Tus:StorePath")!, file.Id);
var filePool = httpContext.Request.Headers["X-FilePool"].FirstOrDefault(); var filePool = httpContext.Request.Headers["X-FilePool"].FirstOrDefault();
var bundleId = eventContext.HttpContext.Request.Headers["X-FileBundle"].FirstOrDefault(); var bundleId = eventContext.HttpContext.Request.Headers["X-FileBundle"].FirstOrDefault();
@@ -135,7 +135,7 @@ public abstract class TusService
file.Id, file.Id,
filePool!, filePool!,
bundleId, bundleId,
fileStream, filePath,
fileName, fileName,
contentType, contentType,
encryptPassword, encryptPassword,
@@ -155,11 +155,6 @@ public abstract class TusService
await eventContext.HttpContext.Response.WriteAsync(ex.Message); await eventContext.HttpContext.Response.WriteAsync(ex.Message);
logger.LogError(ex, "Error handling file upload..."); logger.LogError(ex, "Error handling file upload...");
} }
finally
{
// Dispose the stream after all processing is complete
await fileStream.DisposeAsync();
}
}, },
OnBeforeCreateAsync = async eventContext => OnBeforeCreateAsync = async eventContext =>
{ {

View File

@@ -4,7 +4,6 @@ using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Server.Kestrel.Core; using Microsoft.AspNetCore.Server.Kestrel.Core;
using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
namespace DysonNetwork.Shared.Http; namespace DysonNetwork.Shared.Http;