diff --git a/DysonNetwork.Drive/Client/src/views/index.vue b/DysonNetwork.Drive/Client/src/views/index.vue index 69add13..af6cfd2 100644 --- a/DysonNetwork.Drive/Client/src/views/index.vue +++ b/DysonNetwork.Drive/Client/src/views/index.vue @@ -38,20 +38,31 @@ -
-

File Password

- -

- Only available for Stellar Program and certian file pool. -

+
+
+

File Password

+ +

+ Only available for Stellar Program and certain file pool. +

+
+
+

File Expiration Date

+ +
@@ -107,6 +118,7 @@ import { NSelect, NTag, NCollapseTransition, + NDatePicker, type UploadCustomRequestOptions, type UploadSettledFileInfo, type SelectOption, @@ -155,6 +167,8 @@ const renderSingleSelectTag: SelectRenderTag = ({ option }) => { ) } +const perkPrivilegeList = ['Stellar', 'Nova', 'Supernova'] + function renderPoolSelectLabel(option: SelectOption & SnFilePool) { const policy: any = option.policy_config return h( @@ -198,9 +212,14 @@ function renderPoolSelectLabel(option: SelectOption & SnFilePool) { default: () => h('span', policy.accept_types.join(', ')), }, ), - ].flatMap((el, idx, arr) => - idx < arr.length - 1 ? [el, h(NDivider, { vertical: true })] : [el], - ), + policy.require_privilege && + h('span', `Require ${perkPrivilegeList[policy.require_privilege - 1]} Program`), + h('span', `Cost x${option.billing_config.cost_multiplier.toFixed(1)} NSD`) + ] + .filter((el) => el) + .flatMap((el, idx, arr) => + idx < arr.length - 1 ? [el, h(NDivider, { vertical: true })] : [el], + ), ), h( 'div', @@ -244,7 +263,7 @@ function renderPoolSelectLabel(option: SelectOption & SnFilePool) { }, { default: () => 'Allow Encryption' }, ), - option.allow_anonymous && + policy.allow_anonymous && h( NTag, { @@ -254,6 +273,16 @@ function renderPoolSelectLabel(option: SelectOption & SnFilePool) { }, { default: () => 'Allow Anonymous' }, ), + policy.enable_recycle && + h( + NTag, + { + type: 'info', + size: 'small', + round: true, + }, + { default: () => 'Recycle Enabled' }, + ), ], ), ], @@ -264,6 +293,7 @@ const modeAdvanced = ref(false) const filePool = ref(null) const filePass = ref('') +const fileExpire = ref(null) const currentFilePool = computed(() => { if (!filePool.value) return null @@ -285,6 +315,7 @@ function customRequest({ const requestHeaders: Record = {} if (filePool.value) requestHeaders['X-FilePool'] = filePool.value if (filePass.value) requestHeaders['X-FilePass'] = filePass.value + if (fileExpire.value) requestHeaders['X-FileExpire'] = 
fileExpire.value.toString() const upload = new tus.Upload(file.file, { endpoint: '/api/tus', retryDelays: [0, 3000, 5000, 10000, 20000], @@ -352,4 +383,8 @@ function customPreview(file: UploadFileInfo, detail: { event: MouseEvent }) { if (!url) return window.open(url.replace('/api', ''), '_blank') } + +function disablePreviousDate(ts: number) { + return ts <= Date.now() +} diff --git a/DysonNetwork.Drive/Migrations/20250726172039_AddCloudFileExpiration.Designer.cs b/DysonNetwork.Drive/Migrations/20250726172039_AddCloudFileExpiration.Designer.cs new file mode 100644 index 0000000..b99df8b --- /dev/null +++ b/DysonNetwork.Drive/Migrations/20250726172039_AddCloudFileExpiration.Designer.cs @@ -0,0 +1,275 @@ +// +using System; +using System.Collections.Generic; +using DysonNetwork.Drive; +using DysonNetwork.Drive.Storage; +using DysonNetwork.Shared.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using NodaTime; +using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; + +#nullable disable + +namespace DysonNetwork.Drive.Migrations +{ + [DbContext(typeof(AppDatabase))] + [Migration("20250726172039_AddCloudFileExpiration")] + partial class AddCloudFileExpiration + { + /// + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "9.0.7") + .HasAnnotation("Relational:MaxIdentifierLength", 63); + + NpgsqlModelBuilderExtensions.HasPostgresExtension(modelBuilder, "postgis"); + NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder); + + modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b => + { + b.Property("Id") + .HasMaxLength(32) + .HasColumnType("character varying(32)") + .HasColumnName("id"); + + b.Property("AccountId") + .HasColumnType("uuid") + .HasColumnName("account_id"); + + 
b.Property("CreatedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("created_at"); + + b.Property("DeletedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("deleted_at"); + + b.Property("Description") + .HasMaxLength(4096) + .HasColumnType("character varying(4096)") + .HasColumnName("description"); + + b.Property("ExpiredAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("expired_at"); + + b.Property>("FileMeta") + .HasColumnType("jsonb") + .HasColumnName("file_meta"); + + b.Property("HasCompression") + .HasColumnType("boolean") + .HasColumnName("has_compression"); + + b.Property("HasThumbnail") + .HasColumnType("boolean") + .HasColumnName("has_thumbnail"); + + b.Property("Hash") + .HasMaxLength(256) + .HasColumnType("character varying(256)") + .HasColumnName("hash"); + + b.Property("IsEncrypted") + .HasColumnType("boolean") + .HasColumnName("is_encrypted"); + + b.Property("IsMarkedRecycle") + .HasColumnType("boolean") + .HasColumnName("is_marked_recycle"); + + b.Property("MimeType") + .HasMaxLength(256) + .HasColumnType("character varying(256)") + .HasColumnName("mime_type"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(1024) + .HasColumnType("character varying(1024)") + .HasColumnName("name"); + + b.Property("PoolId") + .HasColumnType("uuid") + .HasColumnName("pool_id"); + + b.Property>("SensitiveMarks") + .HasColumnType("jsonb") + .HasColumnName("sensitive_marks"); + + b.Property("Size") + .HasColumnType("bigint") + .HasColumnName("size"); + + b.Property("StorageId") + .HasMaxLength(32) + .HasColumnType("character varying(32)") + .HasColumnName("storage_id"); + + b.Property("StorageUrl") + .HasMaxLength(4096) + .HasColumnType("character varying(4096)") + .HasColumnName("storage_url"); + + b.Property("UpdatedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("updated_at"); + + b.Property("UploadedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("uploaded_at"); + + 
b.Property("UploadedTo") + .HasMaxLength(128) + .HasColumnType("character varying(128)") + .HasColumnName("uploaded_to"); + + b.Property>("UserMeta") + .HasColumnType("jsonb") + .HasColumnName("user_meta"); + + b.HasKey("Id") + .HasName("pk_files"); + + b.HasIndex("PoolId") + .HasDatabaseName("ix_files_pool_id"); + + b.ToTable("files", (string)null); + }); + + modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("uuid") + .HasColumnName("id"); + + b.Property("CreatedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("created_at"); + + b.Property("DeletedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("deleted_at"); + + b.Property("ExpiredAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("expired_at"); + + b.Property("FileId") + .IsRequired() + .HasMaxLength(32) + .HasColumnType("character varying(32)") + .HasColumnName("file_id"); + + b.Property("ResourceId") + .IsRequired() + .HasMaxLength(1024) + .HasColumnType("character varying(1024)") + .HasColumnName("resource_id"); + + b.Property("UpdatedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("updated_at"); + + b.Property("Usage") + .IsRequired() + .HasMaxLength(1024) + .HasColumnType("character varying(1024)") + .HasColumnName("usage"); + + b.HasKey("Id") + .HasName("pk_file_references"); + + b.HasIndex("FileId") + .HasDatabaseName("ix_file_references_file_id"); + + b.ToTable("file_references", (string)null); + }); + + modelBuilder.Entity("DysonNetwork.Drive.Storage.FilePool", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("uuid") + .HasColumnName("id"); + + b.Property("AccountId") + .HasColumnType("uuid") + .HasColumnName("account_id"); + + b.Property("BillingConfig") + .IsRequired() + .HasColumnType("jsonb") + .HasColumnName("billing_config"); + + b.Property("CreatedAt") + .HasColumnType("timestamp with time zone") + 
.HasColumnName("created_at"); + + b.Property("DeletedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("deleted_at"); + + b.Property("Description") + .IsRequired() + .HasMaxLength(8192) + .HasColumnType("character varying(8192)") + .HasColumnName("description"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(1024) + .HasColumnType("character varying(1024)") + .HasColumnName("name"); + + b.Property("PolicyConfig") + .IsRequired() + .HasColumnType("jsonb") + .HasColumnName("policy_config"); + + b.Property("StorageConfig") + .IsRequired() + .HasColumnType("jsonb") + .HasColumnName("storage_config"); + + b.Property("UpdatedAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("updated_at"); + + b.HasKey("Id") + .HasName("pk_pools"); + + b.ToTable("pools", (string)null); + }); + + modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b => + { + b.HasOne("DysonNetwork.Drive.Storage.FilePool", "Pool") + .WithMany() + .HasForeignKey("PoolId") + .HasConstraintName("fk_files_pools_pool_id"); + + b.Navigation("Pool"); + }); + + modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b => + { + b.HasOne("DysonNetwork.Drive.Storage.CloudFile", "File") + .WithMany() + .HasForeignKey("FileId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired() + .HasConstraintName("fk_file_references_files_file_id"); + + b.Navigation("File"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/DysonNetwork.Drive/Migrations/20250726172039_AddCloudFileExpiration.cs b/DysonNetwork.Drive/Migrations/20250726172039_AddCloudFileExpiration.cs new file mode 100644 index 0000000..2c09e8b --- /dev/null +++ b/DysonNetwork.Drive/Migrations/20250726172039_AddCloudFileExpiration.cs @@ -0,0 +1,29 @@ +using Microsoft.EntityFrameworkCore.Migrations; +using NodaTime; + +#nullable disable + +namespace DysonNetwork.Drive.Migrations +{ + /// + public partial class AddCloudFileExpiration : Migration + { + /// + protected override void 
Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "expired_at", + table: "files", + type: "timestamp with time zone", + nullable: true); + } + + /// + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "expired_at", + table: "files"); + } + } +} diff --git a/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs b/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs index f31a577..1f83209 100644 --- a/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs +++ b/DysonNetwork.Drive/Migrations/AppDatabaseModelSnapshot.cs @@ -51,6 +51,10 @@ namespace DysonNetwork.Drive.Migrations .HasColumnType("character varying(4096)") .HasColumnName("description"); + b.Property("ExpiredAt") + .HasColumnType("timestamp with time zone") + .HasColumnName("expired_at"); + b.Property>("FileMeta") .HasColumnType("jsonb") .HasColumnName("file_meta"); diff --git a/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs b/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs index e4fc9eb..e54ee86 100644 --- a/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs +++ b/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs @@ -1,3 +1,4 @@ +using DysonNetwork.Drive.Storage; using Quartz; namespace DysonNetwork.Drive.Startup; @@ -14,6 +15,13 @@ public static class ScheduledJobsConfiguration .ForJob(appDatabaseRecyclingJob) .WithIdentity("AppDatabaseRecyclingTrigger") .WithCronSchedule("0 0 0 * * ?")); + + var cloudFileUnusedRecyclingJob = new JobKey("CloudFileUnusedRecycling"); + q.AddJob(opts => opts.WithIdentity(cloudFileUnusedRecyclingJob)); + q.AddTrigger(opts => opts + .ForJob(cloudFileUnusedRecyclingJob) + .WithIdentity("CloudFileUnusedRecyclingTrigger") + .WithCronSchedule("0 0 0 * * ?")); }); services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true); diff --git a/DysonNetwork.Drive/Storage/CloudFile.cs b/DysonNetwork.Drive/Storage/CloudFile.cs index 155e8d8..f0e1854 
100644 --- a/DysonNetwork.Drive/Storage/CloudFile.cs +++ b/DysonNetwork.Drive/Storage/CloudFile.cs @@ -38,6 +38,7 @@ public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource [Column(TypeName = "jsonb")] public List? SensitiveMarks { get; set; } = []; [MaxLength(256)] public string? MimeType { get; set; } [MaxLength(256)] public string? Hash { get; set; } + public Instant? ExpiredAt { get; set; } public long Size { get; set; } public Instant? UploadedAt { get; set; } public bool HasCompression { get; set; } = false; diff --git a/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs b/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs index 98d4cdb..312a3da 100644 --- a/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs +++ b/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs @@ -15,11 +15,19 @@ public class CloudFileUnusedRecyclingJob( { logger.LogInformation("Marking unused cloud files..."); + var recyclablePools = await db.Pools + .Where(p => p.PolicyConfig.EnableRecycle) + .Select(p => p.Id) + .ToListAsync(); + var now = SystemClock.Instance.GetCurrentInstant(); const int batchSize = 1000; // Process larger batches for efficiency var processedCount = 0; var markedCount = 0; - var totalFiles = await db.Files.Where(f => !f.IsMarkedRecycle).CountAsync(); + var totalFiles = await db.Files + .Where(f => f.PoolId.HasValue && recyclablePools.Contains(f.PoolId.Value)) + .Where(f => !f.IsMarkedRecycle) + .CountAsync(); logger.LogInformation("Found {TotalFiles} files to check for unused status", totalFiles); @@ -35,13 +43,12 @@ public class CloudFileUnusedRecyclingJob( { // Query for the next batch of files using keyset pagination var filesQuery = db.Files + .Where(f => f.PoolId.HasValue && recyclablePools.Contains(f.PoolId.Value)) .Where(f => !f.IsMarkedRecycle) .Where(f => f.CreatedAt <= ageThreshold); // Only process older files first if (lastProcessedId != null) - { filesQuery = filesQuery.Where(f => string.Compare(f.Id, 
lastProcessedId) > 0); - } var fileBatch = await filesQuery .OrderBy(f => f.Id) // Ensure consistent ordering for pagination @@ -84,9 +91,17 @@ public class CloudFileUnusedRecyclingJob( { logger.LogInformation( "Progress: processed {ProcessedCount}/{TotalFiles} files, marked {MarkedCount} for recycling", - processedCount, totalFiles, markedCount); + processedCount, + totalFiles, + markedCount + ); } } + + var expiredCount = await db.Files + .Where(f => f.ExpiredAt.HasValue && f.ExpiredAt.Value <= now && !f.IsMarkedRecycle) + .ExecuteUpdateAsync(s => s.SetProperty(f => f.IsMarkedRecycle, true)); + markedCount += expiredCount; logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount); } diff --git a/DysonNetwork.Drive/Storage/FilePool.cs b/DysonNetwork.Drive/Storage/FilePool.cs index 1f77fe4..6d9d4b2 100644 --- a/DysonNetwork.Drive/Storage/FilePool.cs +++ b/DysonNetwork.Drive/Storage/FilePool.cs @@ -26,6 +26,7 @@ public class BillingConfig public class PolicyConfig { + public bool EnableRecycle { get; set; } = false; public bool PublicIndexable { get; set; } = false; public bool PublicUsable { get; set; } = false; public bool NoOptimization { get; set; } = false; diff --git a/DysonNetwork.Drive/Storage/FileReferenceService.cs b/DysonNetwork.Drive/Storage/FileReferenceService.cs index 01feb4a..31fc1a2 100644 --- a/DysonNetwork.Drive/Storage/FileReferenceService.cs +++ b/DysonNetwork.Drive/Storage/FileReferenceService.cs @@ -32,19 +32,6 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach if (duration.HasValue) finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value; - var file = await db.Files - .Where(f => f.Id == fileId) - .Include(f => f.Pool) - .FirstOrDefaultAsync(); - if (file is null) throw new InvalidOperationException("File not found"); - if (file.Pool?.StorageConfig.Expiration != null) - { - var now = SystemClock.Instance.GetCurrentInstant(); - var expectedDuration = finalExpiration - now; - if 
(finalExpiration == null || expectedDuration > file.Pool.StorageConfig.Expiration) - finalExpiration = now.Plus(file.Pool.StorageConfig.Expiration.Value); - } - var reference = new CloudFileReference { FileId = fileId, diff --git a/DysonNetwork.Drive/Storage/FileService.cs b/DysonNetwork.Drive/Storage/FileService.cs index 869e788..a6eea7b 100644 --- a/DysonNetwork.Drive/Storage/FileService.cs +++ b/DysonNetwork.Drive/Storage/FileService.cs @@ -26,7 +26,7 @@ public class FileService( { private const string CacheKeyPrefix = "file:"; private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15); - + /// /// The api for getting file meta with cache, /// the best use case is for accessing the file data. @@ -108,19 +108,30 @@ public class FileService( Stream stream, string fileName, string? contentType, - string? encryptPassword + string? encryptPassword, + Instant? expiredAt ) { var pool = await GetPoolAsync(Guid.Parse(filePool)); if (pool is null) throw new InvalidOperationException("Pool not found"); + if (pool.StorageConfig.Expiration is not null && expiredAt.HasValue) + { + var expectedExpiration = expiredAt.Value - SystemClock.Instance.GetCurrentInstant(); + var effectiveExpiration = pool.StorageConfig.Expiration < expectedExpiration + ? pool.StorageConfig.Expiration + : expectedExpiration; + expiredAt = SystemClock.Instance.GetCurrentInstant() + effectiveExpiration; + } + var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue("Tus:StorePath"), fileId)); var fileSize = stream.Length; contentType ??= !fileName.Contains('.') ? 
"application/octet-stream" : MimeTypes.GetMimeType(fileName); if (!string.IsNullOrWhiteSpace(encryptPassword)) { - if (!pool.PolicyConfig.AllowEncryption) throw new InvalidOperationException("Encryption is not allowed in this pool"); + if (!pool.PolicyConfig.AllowEncryption) + throw new InvalidOperationException("Encryption is not allowed in this pool"); var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted"); FileEncryptor.EncryptFile(ogFilePath, encryptedPath, encryptPassword); File.Delete(ogFilePath); // Delete original unencrypted @@ -137,6 +148,7 @@ public class FileService( MimeType = contentType, Size = fileSize, Hash = hash, + ExpiredAt = expiredAt, AccountId = Guid.Parse(account.Id), IsEncrypted = !string.IsNullOrWhiteSpace(encryptPassword) && pool.PolicyConfig.AllowEncryption }; @@ -369,6 +381,7 @@ public class FileService( { logger.LogError(ex, "Failed to generate thumbnail for video {FileId}", fileId); } + break; default: @@ -431,7 +444,7 @@ public class FileService( private static async Task HashFastApproximateAsync(string filePath, int chunkSize = 1024 * 1024) { await using var stream = File.OpenRead(filePath); - + // Scale the chunk size to kB level chunkSize *= 1024; diff --git a/DysonNetwork.Drive/Storage/TusService.cs b/DysonNetwork.Drive/Storage/TusService.cs index 9c0137a..11537fc 100644 --- a/DysonNetwork.Drive/Storage/TusService.cs +++ b/DysonNetwork.Drive/Storage/TusService.cs @@ -5,6 +5,7 @@ using DysonNetwork.Shared.Auth; using DysonNetwork.Shared.Proto; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Options; +using NodaTime; using tusdotnet.Interfaces; using tusdotnet.Models; using tusdotnet.Models.Configuration; @@ -112,6 +113,11 @@ public abstract class TusService if (string.IsNullOrEmpty(filePool)) filePool = configuration["Storage:PreferredRemote"]; + Instant? 
expiredAt = null; + var expiredString = httpContext.Request.Headers["X-FileExpire"].FirstOrDefault(); + if (!string.IsNullOrEmpty(expiredString) && long.TryParse(expiredString, out var expired)) + expiredAt = Instant.FromUnixTimeMilliseconds(expired); + try { var fileService = services.GetRequiredService(); @@ -122,7 +128,8 @@ public abstract class TusService fileStream, fileName, contentType, - encryptPassword + encryptPassword, + expiredAt ); using var finalScope = eventContext.HttpContext.RequestServices.CreateScope(); diff --git a/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs b/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs index b3e956c..a890ad6 100644 --- a/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs +++ b/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs @@ -1,3 +1,4 @@ +using DysonNetwork.Pass.Wallet; using DysonNetwork.Shared.Cache; using DysonNetwork.Shared.Proto; using Grpc.Core; @@ -8,6 +9,7 @@ namespace DysonNetwork.Pass.Auth; public class AuthServiceGrpc( AuthService authService, + SubscriptionService subscriptions, ICacheService cache, AppDatabase db ) @@ -36,6 +38,9 @@ public class AuthServiceGrpc( var now = SystemClock.Instance.GetCurrentInstant(); if (session.ExpiredAt.HasValue && session.ExpiredAt < now) return new AuthenticateResponse { Valid = false, Message = "Session has been expired." }; + + var perk = await subscriptions.GetPerkSubscriptionAsync(session.AccountId); + session.Account.PerkSubscription = perk?.ToReference(); await cache.SetWithGroupsAsync( $"auth:{sessionId}",