File expiration

This commit is contained in:
2025-07-27 01:43:54 +08:00
parent 71accd725e
commit 4e68ab4ef0
12 changed files with 420 additions and 40 deletions

View File

@@ -38,20 +38,31 @@
<n-collapse-transition :show="modeAdvanced"> <n-collapse-transition :show="modeAdvanced">
<n-card title="Advanced Options" size="small" class="mb-3"> <n-card title="Advanced Options" size="small" class="mb-3">
<div> <div class="flex flex-col gap-3">
<p class="pl-1 mb-0.5">File Password</p> <div>
<n-input <p class="pl-1 mb-0.5">File Password</p>
v-model:value="filePass" <n-input
:disabled="!currentFilePool?.allow_encryption" v-model:value="filePass"
placeholder="Enter password to protect the file" :disabled="!currentFilePool?.allow_encryption"
show-password-toggle placeholder="Enter password to protect the file"
size="large" show-password-toggle
type="password" size="large"
class="mb-2" type="password"
/> class="mb-2"
<p class="pl-1 text-xs opacity-75 mt-[-4px]"> />
Only available for Stellar Program and certain file pool. <p class="pl-1 text-xs opacity-75 mt-[-4px]">
</p> Only available for Stellar Program and certain file pool.
</p>
</div>
<div>
<p class="pl-1 mb-0.5">File Expiration Date</p>
<n-date-picker
v-model:value="fileExpire"
type="datetime"
clearable
:is-date-disabled="disablePreviousDate"
/>
</div>
</div> </div>
</n-card> </n-card>
</n-collapse-transition> </n-collapse-transition>
@@ -107,6 +118,7 @@ import {
NSelect, NSelect,
NTag, NTag,
NCollapseTransition, NCollapseTransition,
NDatePicker,
type UploadCustomRequestOptions, type UploadCustomRequestOptions,
type UploadSettledFileInfo, type UploadSettledFileInfo,
type SelectOption, type SelectOption,
@@ -155,6 +167,8 @@ const renderSingleSelectTag: SelectRenderTag = ({ option }) => {
) )
} }
const perkPrivilegeList = ['Stellar', 'Nova', 'Supernova']
function renderPoolSelectLabel(option: SelectOption & SnFilePool) { function renderPoolSelectLabel(option: SelectOption & SnFilePool) {
const policy: any = option.policy_config const policy: any = option.policy_config
return h( return h(
@@ -198,9 +212,14 @@ function renderPoolSelectLabel(option: SelectOption & SnFilePool) {
default: () => h('span', policy.accept_types.join(', ')), default: () => h('span', policy.accept_types.join(', ')),
}, },
), ),
].flatMap((el, idx, arr) => policy.require_privilege &&
idx < arr.length - 1 ? [el, h(NDivider, { vertical: true })] : [el], h('span', `Require ${perkPrivilegeList[policy.require_privilege - 1]} Program`),
), h('span', `Cost x${option.billing_config.cost_multiplier.toFixed(1)} NSD`)
]
.filter((el) => el)
.flatMap((el, idx, arr) =>
idx < arr.length - 1 ? [el, h(NDivider, { vertical: true })] : [el],
),
), ),
h( h(
'div', 'div',
@@ -244,7 +263,7 @@ function renderPoolSelectLabel(option: SelectOption & SnFilePool) {
}, },
{ default: () => 'Allow Encryption' }, { default: () => 'Allow Encryption' },
), ),
option.allow_anonymous && policy.allow_anonymous &&
h( h(
NTag, NTag,
{ {
@@ -254,6 +273,16 @@ function renderPoolSelectLabel(option: SelectOption & SnFilePool) {
}, },
{ default: () => 'Allow Anonymous' }, { default: () => 'Allow Anonymous' },
), ),
policy.enable_recycle &&
h(
NTag,
{
type: 'info',
size: 'small',
round: true,
},
{ default: () => 'Recycle Enabled' },
),
], ],
), ),
], ],
@@ -264,6 +293,7 @@ const modeAdvanced = ref(false)
const filePool = ref<string | null>(null) const filePool = ref<string | null>(null)
const filePass = ref<string>('') const filePass = ref<string>('')
const fileExpire = ref<number | null>(null)
const currentFilePool = computed(() => { const currentFilePool = computed(() => {
if (!filePool.value) return null if (!filePool.value) return null
@@ -285,6 +315,7 @@ function customRequest({
const requestHeaders: Record<string, string> = {} const requestHeaders: Record<string, string> = {}
if (filePool.value) requestHeaders['X-FilePool'] = filePool.value if (filePool.value) requestHeaders['X-FilePool'] = filePool.value
if (filePass.value) requestHeaders['X-FilePass'] = filePass.value if (filePass.value) requestHeaders['X-FilePass'] = filePass.value
if (fileExpire.value) requestHeaders['X-FileExpire'] = fileExpire.value.toString()
const upload = new tus.Upload(file.file, { const upload = new tus.Upload(file.file, {
endpoint: '/api/tus', endpoint: '/api/tus',
retryDelays: [0, 3000, 5000, 10000, 20000], retryDelays: [0, 3000, 5000, 10000, 20000],
@@ -352,4 +383,8 @@ function customPreview(file: UploadFileInfo, detail: { event: MouseEvent }) {
if (!url) return if (!url) return
window.open(url.replace('/api', ''), '_blank') window.open(url.replace('/api', ''), '_blank')
} }
function disablePreviousDate(ts: number) {
return ts <= Date.now()
}
</script> </script>

View File

@@ -0,0 +1,275 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Drive;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Shared.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Drive.Migrations
{
    // Auto-generated EF Core model snapshot paired with the AddCloudFileExpiration
    // migration. It records the complete target model as of this migration so the
    // tooling can diff future model changes; do not hand-edit the configuration calls.
    [DbContext(typeof(AppDatabase))]
    [Migration("20250726172039_AddCloudFileExpiration")]
    partial class AddCloudFileExpiration
    {
        /// <summary>
        /// Builds the target model this migration brings the database to, including
        /// the new nullable <c>expired_at</c> column on the <c>files</c> table.
        /// </summary>
        protected override void BuildTargetModel(ModelBuilder modelBuilder)
        {
#pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "9.0.7")
                .HasAnnotation("Relational:MaxIdentifierLength", 63);

            // PostgreSQL-specific setup: PostGIS extension and identity-by-default columns.
            NpgsqlModelBuilderExtensions.HasPostgresExtension(modelBuilder, "postgis");
            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

            // CloudFile entity mapped to the "files" table.
            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
                {
                    b.Property<string>("Id")
                        .HasMaxLength(32)
                        .HasColumnType("character varying(32)")
                        .HasColumnName("id");

                    b.Property<Guid>("AccountId")
                        .HasColumnType("uuid")
                        .HasColumnName("account_id");

                    b.Property<Instant>("CreatedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("created_at");

                    b.Property<Instant?>("DeletedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("deleted_at");

                    b.Property<string>("Description")
                        .HasMaxLength(4096)
                        .HasColumnType("character varying(4096)")
                        .HasColumnName("description");

                    // New in this migration: optional expiration timestamp for the file.
                    b.Property<Instant?>("ExpiredAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("expired_at");

                    b.Property<Dictionary<string, object>>("FileMeta")
                        .HasColumnType("jsonb")
                        .HasColumnName("file_meta");

                    b.Property<bool>("HasCompression")
                        .HasColumnType("boolean")
                        .HasColumnName("has_compression");

                    b.Property<bool>("HasThumbnail")
                        .HasColumnType("boolean")
                        .HasColumnName("has_thumbnail");

                    b.Property<string>("Hash")
                        .HasMaxLength(256)
                        .HasColumnType("character varying(256)")
                        .HasColumnName("hash");

                    b.Property<bool>("IsEncrypted")
                        .HasColumnType("boolean")
                        .HasColumnName("is_encrypted");

                    b.Property<bool>("IsMarkedRecycle")
                        .HasColumnType("boolean")
                        .HasColumnName("is_marked_recycle");

                    b.Property<string>("MimeType")
                        .HasMaxLength(256)
                        .HasColumnType("character varying(256)")
                        .HasColumnName("mime_type");

                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(1024)
                        .HasColumnType("character varying(1024)")
                        .HasColumnName("name");

                    b.Property<Guid?>("PoolId")
                        .HasColumnType("uuid")
                        .HasColumnName("pool_id");

                    b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
                        .HasColumnType("jsonb")
                        .HasColumnName("sensitive_marks");

                    b.Property<long>("Size")
                        .HasColumnType("bigint")
                        .HasColumnName("size");

                    b.Property<string>("StorageId")
                        .HasMaxLength(32)
                        .HasColumnType("character varying(32)")
                        .HasColumnName("storage_id");

                    b.Property<string>("StorageUrl")
                        .HasMaxLength(4096)
                        .HasColumnType("character varying(4096)")
                        .HasColumnName("storage_url");

                    b.Property<Instant>("UpdatedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("updated_at");

                    b.Property<Instant?>("UploadedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("uploaded_at");

                    b.Property<string>("UploadedTo")
                        .HasMaxLength(128)
                        .HasColumnType("character varying(128)")
                        .HasColumnName("uploaded_to");

                    b.Property<Dictionary<string, object>>("UserMeta")
                        .HasColumnType("jsonb")
                        .HasColumnName("user_meta");

                    b.HasKey("Id")
                        .HasName("pk_files");

                    b.HasIndex("PoolId")
                        .HasDatabaseName("ix_files_pool_id");

                    b.ToTable("files", (string)null);
                });

            // CloudFileReference entity mapped to the "file_references" table.
            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
                {
                    b.Property<Guid>("Id")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("uuid")
                        .HasColumnName("id");

                    b.Property<Instant>("CreatedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("created_at");

                    b.Property<Instant?>("DeletedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("deleted_at");

                    b.Property<Instant?>("ExpiredAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("expired_at");

                    b.Property<string>("FileId")
                        .IsRequired()
                        .HasMaxLength(32)
                        .HasColumnType("character varying(32)")
                        .HasColumnName("file_id");

                    b.Property<string>("ResourceId")
                        .IsRequired()
                        .HasMaxLength(1024)
                        .HasColumnType("character varying(1024)")
                        .HasColumnName("resource_id");

                    b.Property<Instant>("UpdatedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("updated_at");

                    b.Property<string>("Usage")
                        .IsRequired()
                        .HasMaxLength(1024)
                        .HasColumnType("character varying(1024)")
                        .HasColumnName("usage");

                    b.HasKey("Id")
                        .HasName("pk_file_references");

                    b.HasIndex("FileId")
                        .HasDatabaseName("ix_file_references_file_id");

                    b.ToTable("file_references", (string)null);
                });

            // FilePool entity mapped to the "pools" table; config objects stored as jsonb.
            modelBuilder.Entity("DysonNetwork.Drive.Storage.FilePool", b =>
                {
                    b.Property<Guid>("Id")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("uuid")
                        .HasColumnName("id");

                    b.Property<Guid?>("AccountId")
                        .HasColumnType("uuid")
                        .HasColumnName("account_id");

                    b.Property<BillingConfig>("BillingConfig")
                        .IsRequired()
                        .HasColumnType("jsonb")
                        .HasColumnName("billing_config");

                    b.Property<Instant>("CreatedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("created_at");

                    b.Property<Instant?>("DeletedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("deleted_at");

                    b.Property<string>("Description")
                        .IsRequired()
                        .HasMaxLength(8192)
                        .HasColumnType("character varying(8192)")
                        .HasColumnName("description");

                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(1024)
                        .HasColumnType("character varying(1024)")
                        .HasColumnName("name");

                    b.Property<PolicyConfig>("PolicyConfig")
                        .IsRequired()
                        .HasColumnType("jsonb")
                        .HasColumnName("policy_config");

                    b.Property<RemoteStorageConfig>("StorageConfig")
                        .IsRequired()
                        .HasColumnType("jsonb")
                        .HasColumnName("storage_config");

                    b.Property<Instant>("UpdatedAt")
                        .HasColumnType("timestamp with time zone")
                        .HasColumnName("updated_at");

                    b.HasKey("Id")
                        .HasName("pk_pools");

                    b.ToTable("pools", (string)null);
                });

            // Relationship: CloudFile optionally belongs to a FilePool via pool_id.
            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
                {
                    b.HasOne("DysonNetwork.Drive.Storage.FilePool", "Pool")
                        .WithMany()
                        .HasForeignKey("PoolId")
                        .HasConstraintName("fk_files_pools_pool_id");

                    b.Navigation("Pool");
                });

            // Relationship: CloudFileReference requires a CloudFile; cascades on delete.
            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
                {
                    b.HasOne("DysonNetwork.Drive.Storage.CloudFile", "File")
                        .WithMany()
                        .HasForeignKey("FileId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired()
                        .HasConstraintName("fk_file_references_files_file_id");

                    b.Navigation("File");
                });
#pragma warning restore 612, 618
        }
    }
}

View File

@@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
#nullable disable
namespace DysonNetwork.Drive.Migrations
{
    /// <summary>
    /// EF Core migration adding the nullable <c>expired_at</c> timestamp column to the
    /// <c>files</c> table (backing the file-expiration feature). Scaffolded by the EF
    /// tooling; keep the schema operations as generated.
    /// </summary>
    public partial class AddCloudFileExpiration : Migration
    {
        /// <summary>
        /// Applies the migration: adds the nullable <c>expired_at</c>
        /// ("timestamp with time zone") column to <c>files</c>.
        /// </summary>
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.AddColumn<Instant>(
                name: "expired_at",
                table: "files",
                type: "timestamp with time zone",
                nullable: true);
        }

        /// <summary>
        /// Reverts the migration: drops the <c>expired_at</c> column from <c>files</c>.
        /// </summary>
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropColumn(
                name: "expired_at",
                table: "files");
        }
    }
}

View File

@@ -51,6 +51,10 @@ namespace DysonNetwork.Drive.Migrations
.HasColumnType("character varying(4096)") .HasColumnType("character varying(4096)")
.HasColumnName("description"); .HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<Dictionary<string, object>>("FileMeta") b.Property<Dictionary<string, object>>("FileMeta")
.HasColumnType("jsonb") .HasColumnType("jsonb")
.HasColumnName("file_meta"); .HasColumnName("file_meta");

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Drive.Storage;
using Quartz; using Quartz;
namespace DysonNetwork.Drive.Startup; namespace DysonNetwork.Drive.Startup;
@@ -14,6 +15,13 @@ public static class ScheduledJobsConfiguration
.ForJob(appDatabaseRecyclingJob) .ForJob(appDatabaseRecyclingJob)
.WithIdentity("AppDatabaseRecyclingTrigger") .WithIdentity("AppDatabaseRecyclingTrigger")
.WithCronSchedule("0 0 0 * * ?")); .WithCronSchedule("0 0 0 * * ?"));
var cloudFileUnusedRecyclingJob = new JobKey("CloudFileUnusedRecycling");
q.AddJob<CloudFileUnusedRecyclingJob>(opts => opts.WithIdentity(cloudFileUnusedRecyclingJob));
q.AddTrigger(opts => opts
.ForJob(cloudFileUnusedRecyclingJob)
.WithIdentity("CloudFileUnusedRecyclingTrigger")
.WithCronSchedule("0 0 0 * * ?"));
}); });
services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true); services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);

View File

@@ -38,6 +38,7 @@ public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource
[Column(TypeName = "jsonb")] public List<ContentSensitiveMark>? SensitiveMarks { get; set; } = []; [Column(TypeName = "jsonb")] public List<ContentSensitiveMark>? SensitiveMarks { get; set; } = [];
[MaxLength(256)] public string? MimeType { get; set; } [MaxLength(256)] public string? MimeType { get; set; }
[MaxLength(256)] public string? Hash { get; set; } [MaxLength(256)] public string? Hash { get; set; }
public Instant? ExpiredAt { get; set; }
public long Size { get; set; } public long Size { get; set; }
public Instant? UploadedAt { get; set; } public Instant? UploadedAt { get; set; }
public bool HasCompression { get; set; } = false; public bool HasCompression { get; set; } = false;

View File

@@ -15,11 +15,19 @@ public class CloudFileUnusedRecyclingJob(
{ {
logger.LogInformation("Marking unused cloud files..."); logger.LogInformation("Marking unused cloud files...");
var recyclablePools = await db.Pools
.Where(p => p.PolicyConfig.EnableRecycle)
.Select(p => p.Id)
.ToListAsync();
var now = SystemClock.Instance.GetCurrentInstant(); var now = SystemClock.Instance.GetCurrentInstant();
const int batchSize = 1000; // Process larger batches for efficiency const int batchSize = 1000; // Process larger batches for efficiency
var processedCount = 0; var processedCount = 0;
var markedCount = 0; var markedCount = 0;
var totalFiles = await db.Files.Where(f => !f.IsMarkedRecycle).CountAsync(); var totalFiles = await db.Files
.Where(f => f.PoolId.HasValue && recyclablePools.Contains(f.PoolId.Value))
.Where(f => !f.IsMarkedRecycle)
.CountAsync();
logger.LogInformation("Found {TotalFiles} files to check for unused status", totalFiles); logger.LogInformation("Found {TotalFiles} files to check for unused status", totalFiles);
@@ -35,13 +43,12 @@ public class CloudFileUnusedRecyclingJob(
{ {
// Query for the next batch of files using keyset pagination // Query for the next batch of files using keyset pagination
var filesQuery = db.Files var filesQuery = db.Files
.Where(f => f.PoolId.HasValue && recyclablePools.Contains(f.PoolId.Value))
.Where(f => !f.IsMarkedRecycle) .Where(f => !f.IsMarkedRecycle)
.Where(f => f.CreatedAt <= ageThreshold); // Only process older files first .Where(f => f.CreatedAt <= ageThreshold); // Only process older files first
if (lastProcessedId != null) if (lastProcessedId != null)
{
filesQuery = filesQuery.Where(f => string.Compare(f.Id, lastProcessedId) > 0); filesQuery = filesQuery.Where(f => string.Compare(f.Id, lastProcessedId) > 0);
}
var fileBatch = await filesQuery var fileBatch = await filesQuery
.OrderBy(f => f.Id) // Ensure consistent ordering for pagination .OrderBy(f => f.Id) // Ensure consistent ordering for pagination
@@ -84,9 +91,17 @@ public class CloudFileUnusedRecyclingJob(
{ {
logger.LogInformation( logger.LogInformation(
"Progress: processed {ProcessedCount}/{TotalFiles} files, marked {MarkedCount} for recycling", "Progress: processed {ProcessedCount}/{TotalFiles} files, marked {MarkedCount} for recycling",
processedCount, totalFiles, markedCount); processedCount,
totalFiles,
markedCount
);
} }
} }
var expiredCount = await db.Files
.Where(f => f.ExpiredAt.HasValue && f.ExpiredAt.Value <= now)
.ExecuteUpdateAsync(s => s.SetProperty(f => f.IsMarkedRecycle, true));
markedCount += expiredCount;
logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount); logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount);
} }

View File

@@ -26,6 +26,7 @@ public class BillingConfig
public class PolicyConfig public class PolicyConfig
{ {
public bool EnableRecycle { get; set; } = false;
public bool PublicIndexable { get; set; } = false; public bool PublicIndexable { get; set; } = false;
public bool PublicUsable { get; set; } = false; public bool PublicUsable { get; set; } = false;
public bool NoOptimization { get; set; } = false; public bool NoOptimization { get; set; } = false;

View File

@@ -32,19 +32,6 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
if (duration.HasValue) if (duration.HasValue)
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value; finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
var file = await db.Files
.Where(f => f.Id == fileId)
.Include(f => f.Pool)
.FirstOrDefaultAsync();
if (file is null) throw new InvalidOperationException("File not found");
if (file.Pool?.StorageConfig.Expiration != null)
{
var now = SystemClock.Instance.GetCurrentInstant();
var expectedDuration = finalExpiration - now;
if (finalExpiration == null || expectedDuration > file.Pool.StorageConfig.Expiration)
finalExpiration = now.Plus(file.Pool.StorageConfig.Expiration.Value);
}
var reference = new CloudFileReference var reference = new CloudFileReference
{ {
FileId = fileId, FileId = fileId,

View File

@@ -26,7 +26,7 @@ public class FileService(
{ {
private const string CacheKeyPrefix = "file:"; private const string CacheKeyPrefix = "file:";
private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15); private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15);
/// <summary> /// <summary>
/// The api for getting file meta with cache, /// The api for getting file meta with cache,
/// the best use case is for accessing the file data. /// the best use case is for accessing the file data.
@@ -108,19 +108,30 @@ public class FileService(
Stream stream, Stream stream,
string fileName, string fileName,
string? contentType, string? contentType,
string? encryptPassword string? encryptPassword,
Instant? expiredAt
) )
{ {
var pool = await GetPoolAsync(Guid.Parse(filePool)); var pool = await GetPoolAsync(Guid.Parse(filePool));
if (pool is null) throw new InvalidOperationException("Pool not found"); if (pool is null) throw new InvalidOperationException("Pool not found");
if (pool.StorageConfig.Expiration is not null && expiredAt.HasValue)
{
var expectedExpiration = SystemClock.Instance.GetCurrentInstant() - expiredAt.Value;
var effectiveExpiration = pool.StorageConfig.Expiration < expectedExpiration
? pool.StorageConfig.Expiration
: expectedExpiration;
expiredAt = SystemClock.Instance.GetCurrentInstant() + effectiveExpiration;
}
var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId)); var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId));
var fileSize = stream.Length; var fileSize = stream.Length;
contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName); contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName);
if (!string.IsNullOrWhiteSpace(encryptPassword)) if (!string.IsNullOrWhiteSpace(encryptPassword))
{ {
if (!pool.PolicyConfig.AllowEncryption) throw new InvalidOperationException("Encryption is not allowed in this pool"); if (!pool.PolicyConfig.AllowEncryption)
throw new InvalidOperationException("Encryption is not allowed in this pool");
var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted"); var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted");
FileEncryptor.EncryptFile(ogFilePath, encryptedPath, encryptPassword); FileEncryptor.EncryptFile(ogFilePath, encryptedPath, encryptPassword);
File.Delete(ogFilePath); // Delete original unencrypted File.Delete(ogFilePath); // Delete original unencrypted
@@ -137,6 +148,7 @@ public class FileService(
MimeType = contentType, MimeType = contentType,
Size = fileSize, Size = fileSize,
Hash = hash, Hash = hash,
ExpiredAt = expiredAt,
AccountId = Guid.Parse(account.Id), AccountId = Guid.Parse(account.Id),
IsEncrypted = !string.IsNullOrWhiteSpace(encryptPassword) && pool.PolicyConfig.AllowEncryption IsEncrypted = !string.IsNullOrWhiteSpace(encryptPassword) && pool.PolicyConfig.AllowEncryption
}; };
@@ -369,6 +381,7 @@ public class FileService(
{ {
logger.LogError(ex, "Failed to generate thumbnail for video {FileId}", fileId); logger.LogError(ex, "Failed to generate thumbnail for video {FileId}", fileId);
} }
break; break;
default: default:
@@ -431,7 +444,7 @@ public class FileService(
private static async Task<string> HashFastApproximateAsync(string filePath, int chunkSize = 1024 * 1024) private static async Task<string> HashFastApproximateAsync(string filePath, int chunkSize = 1024 * 1024)
{ {
await using var stream = File.OpenRead(filePath); await using var stream = File.OpenRead(filePath);
// Scale the chunk size to kB level // Scale the chunk size to kB level
chunkSize *= 1024; chunkSize *= 1024;

View File

@@ -5,6 +5,7 @@ using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Proto; using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using NodaTime;
using tusdotnet.Interfaces; using tusdotnet.Interfaces;
using tusdotnet.Models; using tusdotnet.Models;
using tusdotnet.Models.Configuration; using tusdotnet.Models.Configuration;
@@ -112,6 +113,11 @@ public abstract class TusService
if (string.IsNullOrEmpty(filePool)) if (string.IsNullOrEmpty(filePool))
filePool = configuration["Storage:PreferredRemote"]; filePool = configuration["Storage:PreferredRemote"];
Instant? expiredAt = null;
var expiredString = httpContext.Request.Headers["X-FileExpire"].FirstOrDefault();
if (!string.IsNullOrEmpty(expiredString) && int.TryParse(expiredString, out var expired))
expiredAt = Instant.FromUnixTimeSeconds(expired);
try try
{ {
var fileService = services.GetRequiredService<FileService>(); var fileService = services.GetRequiredService<FileService>();
@@ -122,7 +128,8 @@ public abstract class TusService
fileStream, fileStream,
fileName, fileName,
contentType, contentType,
encryptPassword encryptPassword,
expiredAt
); );
using var finalScope = eventContext.HttpContext.RequestServices.CreateScope(); using var finalScope = eventContext.HttpContext.RequestServices.CreateScope();

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Pass.Wallet;
using DysonNetwork.Shared.Cache; using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Proto; using DysonNetwork.Shared.Proto;
using Grpc.Core; using Grpc.Core;
@@ -8,6 +9,7 @@ namespace DysonNetwork.Pass.Auth;
public class AuthServiceGrpc( public class AuthServiceGrpc(
AuthService authService, AuthService authService,
SubscriptionService subscriptions,
ICacheService cache, ICacheService cache,
AppDatabase db AppDatabase db
) )
@@ -36,6 +38,9 @@ public class AuthServiceGrpc(
var now = SystemClock.Instance.GetCurrentInstant(); var now = SystemClock.Instance.GetCurrentInstant();
if (session.ExpiredAt.HasValue && session.ExpiredAt < now) if (session.ExpiredAt.HasValue && session.ExpiredAt < now)
return new AuthenticateResponse { Valid = false, Message = "Session has been expired." }; return new AuthenticateResponse { Valid = false, Message = "Session has been expired." };
var perk = await subscriptions.GetPerkSubscriptionAsync(session.AccountId);
session.Account.PerkSubscription = perk?.ToReference();
await cache.SetWithGroupsAsync( await cache.SetWithGroupsAsync(
$"auth:{sessionId}", $"auth:{sessionId}",