From afdbde951cd53c0a76820391fb0084c3a274ad45 Mon Sep 17 00:00:00 2001
From: LittleSheep
Date: Sun, 13 Jul 2025 18:36:51 +0800
Subject: [PATCH] :sparkles: Shared auth scheme

---
 DysonNetwork.Drive/AppDatabase.cs             | 179 ++++
 DysonNetwork.Drive/Dockerfile                 |  23 +
 DysonNetwork.Drive/DysonNetwork.Drive.csproj  |  66 +++
 DysonNetwork.Drive/Program.cs                 |  45 ++
 .../Properties/launchSettings.json            |  23 +
 .../Startup/ApplicationBuilderExtensions.cs   |  28 +
 .../Startup/KestrelConfiguration.cs           |  17 +
 .../Startup/ScheduledJobsConfiguration.cs     |  22 +
 .../Startup/ServiceCollectionExtensions.cs    | 130 ++++
 DysonNetwork.Drive/Storage/CloudFile.cs       | 131 +++++
 .../Storage/CloudFileUnusedRecyclingJob.cs    |  93 +++
 DysonNetwork.Drive/Storage/FileController.cs  | 144 +++++
 .../Storage/FileExpirationJob.cs              |  66 +++
 .../Storage/FileReferenceService.cs           | 434 ++++++++++++++
 DysonNetwork.Drive/Storage/FileService.cs     | 555 ++++++++++++++++++
 DysonNetwork.Drive/Storage/TusService.cs      |  79 +++
 DysonNetwork.Drive/appsettings.json           | 129 ++++
 DysonNetwork.Pass/Auth/AuthServiceGrpc.cs     |  46 +-
 DysonNetwork.Pass/DysonNetwork.Pass.csproj    |  52 +-
 .../Permission/PermissionServiceGrpc.cs       |  96 +++
 DysonNetwork.Pass/Program.cs                  |   2 +-
 .../DysonNetwork.Pusher.csproj                |   1 +
 DysonNetwork.Pusher/Program.cs                |   9 +-
 .../Startup/KestrelConfiguration.cs           |   2 +-
 DysonNetwork.Shared/Auth/AuthScheme.cs        | 154 +++++
 DysonNetwork.Shared/Auth/Startup.cs           |  35 ++
 .../DysonNetwork.Shared.csproj                |  19 +-
 .../Middleware/AuthMiddleware.cs              | 107 ----
 DysonNetwork.Shared/Proto/auth.proto          | 118 +++-
 .../Registry/RegistryHostedService.cs         |  45 ++
 DysonNetwork.Shared/Registry/Startup.cs       |   5 +-
 .../DysonNetwork.Sphere.csproj                |  20 +-
 DysonNetwork.sln                              |   6 +
 DysonNetwork.sln.DotSettings.user             |   2 +
 34 files changed, 2704 insertions(+), 179 deletions(-)
 create mode 100644 DysonNetwork.Drive/AppDatabase.cs
 create mode 100644 DysonNetwork.Drive/Dockerfile
 create mode 100644 DysonNetwork.Drive/DysonNetwork.Drive.csproj
 create mode 100644 DysonNetwork.Drive/Program.cs
 create mode 100644 DysonNetwork.Drive/Properties/launchSettings.json
 create mode 100644 DysonNetwork.Drive/Startup/ApplicationBuilderExtensions.cs
 create mode 100644 DysonNetwork.Drive/Startup/KestrelConfiguration.cs
 create mode 100644 DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs
 create mode 100644 DysonNetwork.Drive/Startup/ServiceCollectionExtensions.cs
 create mode 100644 DysonNetwork.Drive/Storage/CloudFile.cs
 create mode 100644 DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs
 create mode 100644 DysonNetwork.Drive/Storage/FileController.cs
 create mode 100644 DysonNetwork.Drive/Storage/FileExpirationJob.cs
 create mode 100644 DysonNetwork.Drive/Storage/FileReferenceService.cs
 create mode 100644 DysonNetwork.Drive/Storage/FileService.cs
 create mode 100644 DysonNetwork.Drive/Storage/TusService.cs
 create mode 100644 DysonNetwork.Drive/appsettings.json
 create mode 100644 DysonNetwork.Pass/Permission/PermissionServiceGrpc.cs
 create mode 100644 DysonNetwork.Shared/Auth/AuthScheme.cs
 create mode 100644 DysonNetwork.Shared/Auth/Startup.cs
 delete mode 100644 DysonNetwork.Shared/Middleware/AuthMiddleware.cs
 create mode 100644 DysonNetwork.Shared/Registry/RegistryHostedService.cs

diff --git a/DysonNetwork.Drive/AppDatabase.cs b/DysonNetwork.Drive/AppDatabase.cs
new file mode 100644
index 0000000..74a7e1c
--- /dev/null
+++ b/DysonNetwork.Drive/AppDatabase.cs
@@ -0,0 +1,179 @@
+using System.Linq.Expressions;
+using System.Reflection;
+using DysonNetwork.Drive.Storage;
+using DysonNetwork.Shared.Data;
+using
Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Design; +using Microsoft.EntityFrameworkCore.Query; +using NodaTime; +using Quartz; + +namespace DysonNetwork.Drive; + +public class AppDatabase( + DbContextOptions options, + IConfiguration configuration +) : DbContext(options) +{ + public DbSet Files { get; set; } = null!; + public DbSet FileReferences { get; set; } = null!; + + protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) + { + optionsBuilder.UseNpgsql( + configuration.GetConnectionString("App"), + opt => opt + .ConfigureDataSource(optSource => optSource.EnableDynamicJson()) + .UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery) + .UseNetTopologySuite() + .UseNodaTime() + ).UseSnakeCaseNamingConvention(); + + base.OnConfiguring(optionsBuilder); + } + + protected override void OnModelCreating(ModelBuilder modelBuilder) + { + base.OnModelCreating(modelBuilder); + + // Automatically apply soft-delete filter to all entities inheriting BaseModel + foreach (var entityType in modelBuilder.Model.GetEntityTypes()) + { + if (!typeof(ModelBase).IsAssignableFrom(entityType.ClrType)) continue; + var method = typeof(AppDatabase) + .GetMethod(nameof(SetSoftDeleteFilter), + BindingFlags.NonPublic | BindingFlags.Static)! + .MakeGenericMethod(entityType.ClrType); + + method.Invoke(null, [modelBuilder]); + } + } + + private static void SetSoftDeleteFilter(ModelBuilder modelBuilder) + where TEntity : ModelBase + { + modelBuilder.Entity().HasQueryFilter(e => e.DeletedAt == null); + } + + public override async Task SaveChangesAsync(CancellationToken cancellationToken = default) + { + var now = SystemClock.Instance.GetCurrentInstant(); + + foreach (var entry in ChangeTracker.Entries()) + { + switch (entry.State) + { + case EntityState.Added: + entry.Entity.CreatedAt = now; + entry.Entity.UpdatedAt = now; + break; + case EntityState.Modified: + entry.Entity.UpdatedAt = now; + break; + case EntityState.Deleted: + entry.State = EntityState.Modified; + entry.Entity.DeletedAt = now; + break; + case EntityState.Detached: + case EntityState.Unchanged: + default: + break; + } + } + + return await base.SaveChangesAsync(cancellationToken); + } +} + +public class AppDatabaseRecyclingJob(AppDatabase db, ILogger logger) : IJob +{ + public async Task Execute(IJobExecutionContext context) + { + var now = SystemClock.Instance.GetCurrentInstant(); + + logger.LogInformation("Deleting soft-deleted records..."); + + var threshold = now - Duration.FromDays(7); + + var entityTypes = db.Model.GetEntityTypes() + .Where(t => typeof(ModelBase).IsAssignableFrom(t.ClrType) && t.ClrType != typeof(ModelBase)) + .Select(t => t.ClrType); + + foreach (var entityType in entityTypes) + { + var set = (IQueryable)db.GetType().GetMethod(nameof(DbContext.Set), Type.EmptyTypes)! 
+ .MakeGenericMethod(entityType).Invoke(db, null)!; + var parameter = Expression.Parameter(entityType, "e"); + var property = Expression.Property(parameter, nameof(ModelBase.DeletedAt)); + var condition = Expression.LessThan(property, Expression.Constant(threshold, typeof(Instant?))); + var notNull = Expression.NotEqual(property, Expression.Constant(null, typeof(Instant?))); + var finalCondition = Expression.AndAlso(notNull, condition); + var lambda = Expression.Lambda(finalCondition, parameter); + + var queryable = set.Provider.CreateQuery( + Expression.Call( + typeof(Queryable), + "Where", + [entityType], + set.Expression, + Expression.Quote(lambda) + ) + ); + + var toListAsync = typeof(EntityFrameworkQueryableExtensions) + .GetMethod(nameof(EntityFrameworkQueryableExtensions.ToListAsync))! + .MakeGenericMethod(entityType); + + var items = await (dynamic)toListAsync.Invoke(null, [queryable, CancellationToken.None])!; + db.RemoveRange(items); + } + + await db.SaveChangesAsync(); + } +} + +public class AppDatabaseFactory : IDesignTimeDbContextFactory +{ + public AppDatabase CreateDbContext(string[] args) + { + var configuration = new ConfigurationBuilder() + .SetBasePath(Directory.GetCurrentDirectory()) + .AddJsonFile("appsettings.json") + .Build(); + + var optionsBuilder = new DbContextOptionsBuilder(); + return new AppDatabase(optionsBuilder.Options, configuration); + } +} + +public static class OptionalQueryExtensions +{ + public static IQueryable If( + this IQueryable source, + bool condition, + Func, IQueryable> transform + ) + { + return condition ? transform(source) : source; + } + + public static IQueryable If( + this IIncludableQueryable source, + bool condition, + Func, IQueryable> transform + ) + where T : class + { + return condition ? transform(source) : source; + } + + public static IQueryable If( + this IIncludableQueryable> source, + bool condition, + Func>, IQueryable> transform + ) + where T : class + { + return condition ? transform(source) : source; + } +} \ No newline at end of file diff --git a/DysonNetwork.Drive/Dockerfile b/DysonNetwork.Drive/Dockerfile new file mode 100644 index 0000000..dd92d67 --- /dev/null +++ b/DysonNetwork.Drive/Dockerfile @@ -0,0 +1,23 @@ +FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base +USER $APP_UID +WORKDIR /app +EXPOSE 8080 +EXPOSE 8081 + +FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build +ARG BUILD_CONFIGURATION=Release +WORKDIR /src +COPY ["DysonNetwork.Drive/DysonNetwork.Drive.csproj", "DysonNetwork.Drive/"] +RUN dotnet restore "DysonNetwork.Drive/DysonNetwork.Drive.csproj" +COPY . . +WORKDIR "/src/DysonNetwork.Drive" +RUN dotnet build "./DysonNetwork.Drive.csproj" -c $BUILD_CONFIGURATION -o /app/build + +FROM build AS publish +ARG BUILD_CONFIGURATION=Release +RUN dotnet publish "./DysonNetwork.Drive.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false + +FROM base AS final +WORKDIR /app +COPY --from=publish /app/publish . 
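# The stages above follow the standard .NET multi-stage pattern: restore and build
# in the SDK image, publish, then copy only the publish output into the slim
# ASP.NET runtime layer. A hypothetical local smoke test, assuming the repository
# root as the build context (image tag and port mapping are illustrative):
#   docker build -f DysonNetwork.Drive/Dockerfile -t dysonnetwork-drive .
#   docker run --rm -p 8080:8080 dysonnetwork-drive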
+ENTRYPOINT ["dotnet", "DysonNetwork.Drive.dll"] diff --git a/DysonNetwork.Drive/DysonNetwork.Drive.csproj b/DysonNetwork.Drive/DysonNetwork.Drive.csproj new file mode 100644 index 0000000..199eb2d --- /dev/null +++ b/DysonNetwork.Drive/DysonNetwork.Drive.csproj @@ -0,0 +1,66 @@ + + + + net9.0 + enable + enable + Linux + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .dockerignore + + + + + + + + diff --git a/DysonNetwork.Drive/Program.cs b/DysonNetwork.Drive/Program.cs new file mode 100644 index 0000000..e701c37 --- /dev/null +++ b/DysonNetwork.Drive/Program.cs @@ -0,0 +1,45 @@ +using DysonNetwork.Drive; +using DysonNetwork.Drive.Startup; +using DysonNetwork.Pusher.Startup; +using DysonNetwork.Shared.Auth; +using DysonNetwork.Shared.Registry; +using Microsoft.EntityFrameworkCore; + +var builder = WebApplication.CreateBuilder(args); + +// Configure Kestrel and server options +builder.ConfigureAppKestrel(); + +// Add application services +builder.Services.AddRegistryService(builder.Configuration); +builder.Services.AddAppServices(builder.Configuration); +builder.Services.AddAppRateLimiting(); +builder.Services.AddAppAuthentication(); +builder.Services.AddAppSwagger(); +builder.Services.AddDysonAuth(builder.Configuration); + +// Add flush handlers and websocket handlers +builder.Services.AddAppFlushHandlers(); + +// Add business services +builder.Services.AddAppBusinessServices(); + +// Add scheduled jobs +builder.Services.AddAppScheduledJobs(); + +var app = builder.Build(); + +// Run database migrations +using (var scope = app.Services.CreateScope()) +{ + var db = scope.ServiceProvider.GetRequiredService(); + await db.Database.MigrateAsync(); +} + +// Configure application middleware pipeline +app.ConfigureAppMiddleware(builder.Configuration); + +// Configure gRPC +app.ConfigureGrpcServices(); + +app.Run(); \ No newline at end of file diff --git a/DysonNetwork.Drive/Properties/launchSettings.json b/DysonNetwork.Drive/Properties/launchSettings.json new file mode 100644 index 0000000..d4fe22b --- /dev/null +++ b/DysonNetwork.Drive/Properties/launchSettings.json @@ -0,0 +1,23 @@ +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5090", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7092;http://localhost:5090", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/DysonNetwork.Drive/Startup/ApplicationBuilderExtensions.cs b/DysonNetwork.Drive/Startup/ApplicationBuilderExtensions.cs new file mode 100644 index 0000000..7504fbe --- /dev/null +++ b/DysonNetwork.Drive/Startup/ApplicationBuilderExtensions.cs @@ -0,0 +1,28 @@ +namespace DysonNetwork.Drive.Startup; + +public static class ApplicationBuilderExtensions +{ + public static WebApplication ConfigureAppMiddleware(this WebApplication app, IConfiguration configuration) + { + // Configure the HTTP request pipeline. 
+ if (app.Environment.IsDevelopment()) + { + app.UseSwagger(); + app.UseSwaggerUI(); + } + + app.UseHttpsRedirection(); + app.UseAuthorization(); + app.MapControllers(); + + return app; + } + + public static WebApplication ConfigureGrpcServices(this WebApplication app) + { + // Map your gRPC services here + // Example: app.MapGrpcService(); + + return app; + } +} diff --git a/DysonNetwork.Drive/Startup/KestrelConfiguration.cs b/DysonNetwork.Drive/Startup/KestrelConfiguration.cs new file mode 100644 index 0000000..f35e4dd --- /dev/null +++ b/DysonNetwork.Drive/Startup/KestrelConfiguration.cs @@ -0,0 +1,17 @@ +namespace DysonNetwork.Pusher.Startup; + +public static class KestrelConfiguration +{ + public static WebApplicationBuilder ConfigureAppKestrel(this WebApplicationBuilder builder) + { + builder.Host.UseContentRoot(Directory.GetCurrentDirectory()); + builder.WebHost.ConfigureKestrel(options => + { + options.Limits.MaxRequestBodySize = 50 * 1024 * 1024; + options.Limits.KeepAliveTimeout = TimeSpan.FromMinutes(2); + options.Limits.RequestHeadersTimeout = TimeSpan.FromSeconds(30); + }); + + return builder; + } +} diff --git a/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs b/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs new file mode 100644 index 0000000..e4fc9eb --- /dev/null +++ b/DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs @@ -0,0 +1,22 @@ +using Quartz; + +namespace DysonNetwork.Drive.Startup; + +public static class ScheduledJobsConfiguration +{ + public static IServiceCollection AddAppScheduledJobs(this IServiceCollection services) + { + services.AddQuartz(q => + { + var appDatabaseRecyclingJob = new JobKey("AppDatabaseRecycling"); + q.AddJob(opts => opts.WithIdentity(appDatabaseRecyclingJob)); + q.AddTrigger(opts => opts + .ForJob(appDatabaseRecyclingJob) + .WithIdentity("AppDatabaseRecyclingTrigger") + .WithCronSchedule("0 0 0 * * ?")); + }); + services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true); + + return services; + } +} diff --git a/DysonNetwork.Drive/Startup/ServiceCollectionExtensions.cs b/DysonNetwork.Drive/Startup/ServiceCollectionExtensions.cs new file mode 100644 index 0000000..a9b4ae1 --- /dev/null +++ b/DysonNetwork.Drive/Startup/ServiceCollectionExtensions.cs @@ -0,0 +1,130 @@ +using System.Text.Json; +using System.Threading.RateLimiting; +using dotnet_etcd.interfaces; +using DysonNetwork.Shared.Cache; +using Microsoft.AspNetCore.RateLimiting; +using Microsoft.OpenApi.Models; +using NodaTime; +using NodaTime.Serialization.SystemTextJson; +using StackExchange.Redis; +using DysonNetwork.Shared.Proto; + +namespace DysonNetwork.Drive.Startup; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddAppServices(this IServiceCollection services, IConfiguration configuration) + { + services.AddDbContext(); // Assuming you'll have an AppDatabase + services.AddSingleton(_ => + { + var connection = configuration.GetConnectionString("FastRetrieve")!; + return ConnectionMultiplexer.Connect(connection); + }); + services.AddSingleton(SystemClock.Instance); + services.AddHttpContextAccessor(); + services.AddSingleton(); // Uncomment if you have CacheServiceRedis + + services.AddHttpClient(); + + // Register gRPC services + services.AddGrpc(options => + { + options.EnableDetailedErrors = true; // Will be adjusted in Program.cs + options.MaxReceiveMessageSize = 16 * 1024 * 1024; // 16MB + options.MaxSendMessageSize = 16 * 1024 * 1024; // 16MB + }); + + // Register gRPC reflection for service discovery + 
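        // (Reflection lets clients such as grpcurl discover and call services
        // without local .proto files, e.g. `grpcurl -plaintext localhost:5090 list`;
        // the port is taken from launchSettings.json and is illustrative only.)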
services.AddGrpcReflection();
+
+        services.AddControllers().AddJsonOptions(options =>
+        {
+            options.JsonSerializerOptions.PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower;
+            options.JsonSerializerOptions.DictionaryKeyPolicy = JsonNamingPolicy.SnakeCaseLower;
+
+            options.JsonSerializerOptions.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
+        });
+
+        return services;
+    }
+
+    public static IServiceCollection AddAppRateLimiting(this IServiceCollection services)
+    {
+        services.AddRateLimiter(o => o.AddFixedWindowLimiter(policyName: "fixed", opts =>
+        {
+            opts.Window = TimeSpan.FromMinutes(1);
+            opts.PermitLimit = 120;
+            opts.QueueLimit = 2;
+            opts.QueueProcessingOrder = QueueProcessingOrder.OldestFirst;
+        }));
+
+        return services;
+    }
+
+    public static IServiceCollection AddAppAuthentication(this IServiceCollection services)
+    {
+        services.AddCors();
+        services.AddAuthorization();
+
+        return services;
+    }
+
+    public static IServiceCollection AddAppFlushHandlers(this IServiceCollection services)
+    {
+        services.AddSingleton<FlushBufferService>(); // assumed service type; the generic argument did not survive extraction
+
+        return services;
+    }
+
+    public static IServiceCollection AddAppSwagger(this IServiceCollection services)
+    {
+        services.AddEndpointsApiExplorer();
+        services.AddSwaggerGen(options =>
+        {
+            options.SwaggerDoc("v1", new OpenApiInfo
+            {
+                Version = "v1",
+                Title = "DysonNetwork.Drive API",
+                Description = "DysonNetwork Drive Service",
+                TermsOfService = new Uri("https://example.com/terms"), // Update with actual terms
+                License = new OpenApiLicense
+                {
+                    Name = "AGPLv3", // Update with actual license
+                    Url = new Uri("https://www.gnu.org/licenses/agpl-3.0.html")
+                }
+            });
+            options.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme
+            {
+                In = ParameterLocation.Header,
+                Description = "Please enter a valid token",
+                Name = "Authorization",
+                Type = SecuritySchemeType.Http,
+                BearerFormat = "JWT",
+                Scheme = "Bearer"
+            });
+            options.AddSecurityRequirement(new OpenApiSecurityRequirement
+            {
+                {
+                    new OpenApiSecurityScheme
+                    {
+                        Reference = new OpenApiReference
+                        {
+                            Type = ReferenceType.SecurityScheme,
+                            Id = "Bearer"
+                        }
+                    },
+                    []
+                }
+            });
+        });
+
+        return services;
+    }
+
+    public static IServiceCollection AddAppBusinessServices(this IServiceCollection services)
+    {
+        // Add your business services here
+        return services;
+    }
+}
diff --git a/DysonNetwork.Drive/Storage/CloudFile.cs b/DysonNetwork.Drive/Storage/CloudFile.cs
new file mode 100644
index 0000000..c5c6f35
--- /dev/null
+++ b/DysonNetwork.Drive/Storage/CloudFile.cs
@@ -0,0 +1,131 @@
+using System.ComponentModel.DataAnnotations;
+using System.ComponentModel.DataAnnotations.Schema;
+using System.Text.Json.Serialization;
+using DysonNetwork.Shared.Data;
+using DysonNetwork.Shared.Proto;
+using NodaTime;
+
+namespace DysonNetwork.Drive.Storage;
+
+public class RemoteStorageConfig
+{
+    public string Id { get; set; } = string.Empty;
+    public string Label { get; set; } = string.Empty;
+    public string Region { get; set; } = string.Empty;
+    public string Bucket { get; set; } = string.Empty;
+    public string Endpoint { get; set; } = string.Empty;
+    public string SecretId { get; set; } = string.Empty;
+    public string SecretKey { get; set; } = string.Empty;
+    public bool EnableSigned { get; set; }
+    public bool EnableSsl { get; set; }
+    public string? ImageProxy { get; set; }
+    public string? AccessProxy { get; set; }
+}
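RemoteStorageConfig is bound from configuration rather than the database: FileService.GetRemoteStorageConfig (later in this patch) reads the Storage:Remote section as a List<RemoteStorageConfig> and selects a destination by Id. A minimal sketch of that resolution, assuming an injected IConfiguration and a hypothetical destination id "minio-main" in appsettings.json:

    // Bind all configured remote destinations (empty list if the section is missing)
    var destinations = configuration.GetSection("Storage:Remote")
        .Get<List<RemoteStorageConfig>>() ?? [];
    // "minio-main" is an illustrative id, not one defined by this patch
    var dest = destinations.FirstOrDefault(d => d.Id == "minio-main")
               ?? throw new InvalidOperationException("Remote destination 'minio-main' not found");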
+/// <summary>
+/// The class used in jsonb columns to reference a cloud file.
+/// Its purpose is to store the properties of a file that won't change, to reduce database load.
+/// </summary>
+public class CloudFileReferenceObject : ModelBase, ICloudFile
+{
+    public string Id { get; set; } = null!;
+    public string Name { get; set; } = string.Empty;
+    public Dictionary<string, object>? FileMeta { get; set; } = null!;
+    public Dictionary<string, object>? UserMeta { get; set; } = null!;
+    public string? MimeType { get; set; }
+    public string? Hash { get; set; }
+    public long Size { get; set; }
+    public bool HasCompression { get; set; } = false;
+}
+
+public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource
+{
+    /// <summary>The id generated by tus, which is essentially a UUID with the dashes removed.</summary>
+    [MaxLength(32)]
+    public string Id { get; set; } = Guid.NewGuid().ToString();
+
+    [MaxLength(1024)] public string Name { get; set; } = string.Empty;
+    [MaxLength(4096)] public string? Description { get; set; }
+    [Column(TypeName = "jsonb")] public Dictionary<string, object>? FileMeta { get; set; } = null!;
+    [Column(TypeName = "jsonb")] public Dictionary<string, object>? UserMeta { get; set; } = null!;
+    [Column(TypeName = "jsonb")] public List<ContentSensitiveMark>? SensitiveMarks { get; set; } = [];
+    [MaxLength(256)] public string? MimeType { get; set; }
+    [MaxLength(256)] public string? Hash { get; set; }
+    public long Size { get; set; }
+    public Instant? UploadedAt { get; set; }
+    [MaxLength(128)] public string? UploadedTo { get; set; }
+    public bool HasCompression { get; set; } = false;
+
+    /// <summary>
+    /// This field is set to true when the recycling job plans to delete the file.
+    /// Because the recycling job is not fully reliable, the file isn't actually deleted until a human verifies it.
+    /// </summary>
+    public bool IsMarkedRecycle { get; set; } = false;
+
+    /// <summary>The object name stored remotely;
+    /// multiple cloud files may share the same storage id to indicate they are the same file.</summary>
+    /// <remarks>
+    /// If the storage id is null while uploaded-at is not, the record is an embedded file,
+    /// meaning the file is stored on another site or is a webpage (based on its MIME type).
+    /// </remarks>
+    [MaxLength(32)]
+    public string? StorageId { get; set; }
+
+    /// <summary>Indicates the off-site accessible URL of the file.
+    /// This field should be null when the storage id is filled.</summary>
+    [MaxLength(4096)]
+    public string? StorageUrl { get; set; }
+
+    public Guid AccountId { get; set; }
+
+    public CloudFileReferenceObject ToReferenceObject()
+    {
+        return new CloudFileReferenceObject
+        {
+            CreatedAt = CreatedAt,
+            UpdatedAt = UpdatedAt,
+            DeletedAt = DeletedAt,
+            Id = Id,
+            Name = Name,
+            FileMeta = FileMeta,
+            UserMeta = UserMeta,
+            MimeType = MimeType,
+            Hash = Hash,
+            Size = Size,
+            HasCompression = HasCompression
+        };
+    }
+
+    public string ResourceIdentifier => $"file/{Id}";
+}
+
+public enum ContentSensitiveMark
+{
+    Language,
+    SexualContent,
+    Violence,
+    Profanity,
+    HateSpeech,
+    Racism,
+    AdultContent,
+    DrugAbuse,
+    AlcoholAbuse,
+    Gambling,
+    SelfHarm,
+    ChildAbuse,
+    Other
+}
+
+public class CloudFileReference : ModelBase
+{
+    public Guid Id { get; set; } = Guid.NewGuid();
+    [MaxLength(32)] public string FileId { get; set; } = null!;
+    public CloudFile File { get; set; } = null!;
+    [MaxLength(1024)] public string Usage { get; set; } = null!;
+    [MaxLength(1024)] public string ResourceId { get; set; } = null!;
+
+    /// <summary>
+    /// Optional expiration date for the file reference
+    /// </summary>
+    public Instant?
ExpiredAt { get; set; } +} \ No newline at end of file diff --git a/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs b/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs new file mode 100644 index 0000000..98d4cdb --- /dev/null +++ b/DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs @@ -0,0 +1,93 @@ +using Microsoft.EntityFrameworkCore; +using NodaTime; +using Quartz; + +namespace DysonNetwork.Drive.Storage; + +public class CloudFileUnusedRecyclingJob( + AppDatabase db, + FileReferenceService fileRefService, + ILogger logger +) + : IJob +{ + public async Task Execute(IJobExecutionContext context) + { + logger.LogInformation("Marking unused cloud files..."); + + var now = SystemClock.Instance.GetCurrentInstant(); + const int batchSize = 1000; // Process larger batches for efficiency + var processedCount = 0; + var markedCount = 0; + var totalFiles = await db.Files.Where(f => !f.IsMarkedRecycle).CountAsync(); + + logger.LogInformation("Found {TotalFiles} files to check for unused status", totalFiles); + + // Define a timestamp to limit the age of files we're processing in this run + // This spreads the processing across multiple job runs for very large databases + var ageThreshold = now - Duration.FromDays(30); // Process files up to 90 days old in this run + + // Instead of loading all files at once, use pagination + var hasMoreFiles = true; + string? lastProcessedId = null; + + while (hasMoreFiles) + { + // Query for the next batch of files using keyset pagination + var filesQuery = db.Files + .Where(f => !f.IsMarkedRecycle) + .Where(f => f.CreatedAt <= ageThreshold); // Only process older files first + + if (lastProcessedId != null) + { + filesQuery = filesQuery.Where(f => string.Compare(f.Id, lastProcessedId) > 0); + } + + var fileBatch = await filesQuery + .OrderBy(f => f.Id) // Ensure consistent ordering for pagination + .Take(batchSize) + .Select(f => f.Id) + .ToListAsync(); + + if (fileBatch.Count == 0) + { + hasMoreFiles = false; + continue; + } + + processedCount += fileBatch.Count; + lastProcessedId = fileBatch.Last(); + + // Get all relevant file references for this batch + var fileReferences = await fileRefService.GetReferencesAsync(fileBatch); + + // Filter to find files that have no references or all expired references + var filesToMark = fileBatch.Where(fileId => + !fileReferences.TryGetValue(fileId, out var references) || + references.Count == 0 || + references.All(r => r.ExpiredAt.HasValue && r.ExpiredAt.Value <= now) + ).ToList(); + + if (filesToMark.Count > 0) + { + // Use a bulk update for better performance - mark all qualifying files at once + var updateCount = await db.Files + .Where(f => filesToMark.Contains(f.Id)) + .ExecuteUpdateAsync(setter => setter + .SetProperty(f => f.IsMarkedRecycle, true)); + + markedCount += updateCount; + } + + // Log progress periodically + if (processedCount % 10000 == 0 || !hasMoreFiles) + { + logger.LogInformation( + "Progress: processed {ProcessedCount}/{TotalFiles} files, marked {MarkedCount} for recycling", + processedCount, totalFiles, markedCount); + } + } + + logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount); + } +} \ No newline at end of file diff --git a/DysonNetwork.Drive/Storage/FileController.cs b/DysonNetwork.Drive/Storage/FileController.cs new file mode 100644 index 0000000..e8b4f49 --- /dev/null +++ b/DysonNetwork.Drive/Storage/FileController.cs @@ -0,0 +1,144 @@ +using DysonNetwork.Shared.Proto; +using Microsoft.AspNetCore.Authorization; +using 
Microsoft.AspNetCore.Mvc; +using Microsoft.EntityFrameworkCore; +using Minio.DataModel.Args; + +namespace DysonNetwork.Drive.Storage; + +[ApiController] +[Route("/api/files")] +public class FileController( + AppDatabase db, + FileService fs, + IConfiguration configuration, + IWebHostEnvironment env +) : ControllerBase +{ + [HttpGet("{id}")] + public async Task OpenFile( + string id, + [FromQuery] bool download = false, + [FromQuery] bool original = false, + [FromQuery] string? overrideMimeType = null + ) + { + // Support the file extension for client side data recognize + string? fileExtension = null; + if (id.Contains('.')) + { + var splitId = id.Split('.'); + id = splitId.First(); + fileExtension = splitId.Last(); + } + + var file = await fs.GetFileAsync(id); + if (file is null) return NotFound(); + + if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl); + + if (file.UploadedTo is null) + { + var tusStorePath = configuration.GetValue("Tus:StorePath")!; + var filePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id); + if (!System.IO.File.Exists(filePath)) return new NotFoundResult(); + return PhysicalFile(filePath, file.MimeType ?? "application/octet-stream", file.Name); + } + + var dest = fs.GetRemoteStorageConfig(file.UploadedTo); + var fileName = string.IsNullOrWhiteSpace(file.StorageId) ? file.Id : file.StorageId; + + if (!original && file.HasCompression) + fileName += ".compressed"; + + if (dest.ImageProxy is not null && (file.MimeType?.StartsWith("image/") ?? false)) + { + var proxyUrl = dest.ImageProxy; + var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/"); + var fullUri = new Uri(baseUri, fileName); + return Redirect(fullUri.ToString()); + } + + if (dest.AccessProxy is not null) + { + var proxyUrl = dest.AccessProxy; + var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/"); + var fullUri = new Uri(baseUri, fileName); + return Redirect(fullUri.ToString()); + } + + if (dest.EnableSigned) + { + var client = fs.CreateMinioClient(dest); + if (client is null) + return BadRequest( + "Failed to configure client for remote destination, file got an invalid storage remote."); + + var headers = new Dictionary(); + if (fileExtension is not null) + { + if (MimeTypes.TryGetMimeType(fileExtension, out var mimeType)) + headers.Add("Response-Content-Type", mimeType); + } + else if (overrideMimeType is not null) + { + headers.Add("Response-Content-Type", overrideMimeType); + } + else if (file.MimeType is not null && !file.MimeType!.EndsWith("unknown")) + { + headers.Add("Response-Content-Type", file.MimeType); + } + + if (download) + { + headers.Add("Response-Content-Disposition", $"attachment; filename=\"{file.Name}\""); + } + + var bucket = dest.Bucket; + var openUrl = await client.PresignedGetObjectAsync( + new PresignedGetObjectArgs() + .WithBucket(bucket) + .WithObject(fileName) + .WithExpiry(3600) + .WithHeaders(headers) + ); + + return Redirect(openUrl); + } + + // Fallback redirect to the S3 endpoint (public read) + var protocol = dest.EnableSsl ? 
"https" : "http"; + // Use the path bucket lookup mode + return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}"); + } + + [HttpGet("{id}/info")] + public async Task> GetFileInfo(string id) + { + var file = await db.Files.FindAsync(id); + if (file is null) return NotFound(); + + return file; + } + + [Authorize] + [HttpDelete("{id}")] + public async Task DeleteFile(string id) + { + if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized(); + var userId = Guid.Parse(currentUser.Id); + + var file = await db.Files + .Where(e => e.Id == id) + .Where(e => e.AccountId == userId) + .FirstOrDefaultAsync(); + if (file is null) return NotFound(); + + await fs.DeleteFileAsync(file); + + db.Files.Remove(file); + await db.SaveChangesAsync(); + + return NoContent(); + } +} \ No newline at end of file diff --git a/DysonNetwork.Drive/Storage/FileExpirationJob.cs b/DysonNetwork.Drive/Storage/FileExpirationJob.cs new file mode 100644 index 0000000..13fde3d --- /dev/null +++ b/DysonNetwork.Drive/Storage/FileExpirationJob.cs @@ -0,0 +1,66 @@ +using Microsoft.EntityFrameworkCore; +using NodaTime; +using Quartz; + +namespace DysonNetwork.Drive.Storage; + +/// +/// Job responsible for cleaning up expired file references +/// +public class FileExpirationJob(AppDatabase db, FileService fileService, ILogger logger) : IJob +{ + public async Task Execute(IJobExecutionContext context) + { + var now = SystemClock.Instance.GetCurrentInstant(); + logger.LogInformation("Running file reference expiration job at {now}", now); + + // Find all expired references + var expiredReferences = await db.FileReferences + .Where(r => r.ExpiredAt < now && r.ExpiredAt != null) + .ToListAsync(); + + if (!expiredReferences.Any()) + { + logger.LogInformation("No expired file references found"); + return; + } + + logger.LogInformation("Found {count} expired file references", expiredReferences.Count); + + // Get unique file IDs + var fileIds = expiredReferences.Select(r => r.FileId).Distinct().ToList(); + var filesAndReferenceCount = new Dictionary(); + + // Delete expired references + db.FileReferences.RemoveRange(expiredReferences); + await db.SaveChangesAsync(); + + // Check remaining references for each file + foreach (var fileId in fileIds) + { + var remainingReferences = await db.FileReferences + .Where(r => r.FileId == fileId) + .CountAsync(); + + filesAndReferenceCount[fileId] = remainingReferences; + + // If no references remain, delete the file + if (remainingReferences == 0) + { + var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId); + if (file != null) + { + logger.LogInformation("Deleting file {fileId} as all references have expired", fileId); + await fileService.DeleteFileAsync(file); + } + } + else + { + // Just purge the cache + await fileService._PurgeCacheAsync(fileId); + } + } + + logger.LogInformation("Completed file reference expiration job"); + } +} diff --git a/DysonNetwork.Drive/Storage/FileReferenceService.cs b/DysonNetwork.Drive/Storage/FileReferenceService.cs new file mode 100644 index 0000000..94a704a --- /dev/null +++ b/DysonNetwork.Drive/Storage/FileReferenceService.cs @@ -0,0 +1,434 @@ +using DysonNetwork.Shared.Cache; +using Microsoft.EntityFrameworkCore; +using NodaTime; + +namespace DysonNetwork.Drive.Storage; + +public class FileReferenceService(AppDatabase db, FileService fileService, ICacheService cache) +{ + private const string CacheKeyPrefix = "fileref:"; + private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15); + + /// + /// 
Creates a new reference to a file for a specific resource + /// + /// The ID of the file to reference + /// The usage context (e.g., "avatar", "post-attachment") + /// The ID of the resource using the file + /// Optional expiration time for the file + /// Optional duration after which the file expires (alternative to expiredAt) + /// The created file reference + public async Task CreateReferenceAsync( + string fileId, + string usage, + string resourceId, + Instant? expiredAt = null, + Duration? duration = null) + { + // Calculate expiration time if needed + var finalExpiration = expiredAt; + if (duration.HasValue) + finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value; + + var reference = new CloudFileReference + { + FileId = fileId, + Usage = usage, + ResourceId = resourceId, + ExpiredAt = finalExpiration + }; + + db.FileReferences.Add(reference); + + await db.SaveChangesAsync(); + await fileService._PurgeCacheAsync(fileId); + + return reference; + } + + /// + /// Gets all references to a file + /// + /// The ID of the file + /// A list of all references to the file + public async Task> GetReferencesAsync(string fileId) + { + var cacheKey = $"{CacheKeyPrefix}list:{fileId}"; + + var cachedReferences = await cache.GetAsync>(cacheKey); + if (cachedReferences is not null) + return cachedReferences; + + var references = await db.FileReferences + .Where(r => r.FileId == fileId) + .ToListAsync(); + + await cache.SetAsync(cacheKey, references, CacheDuration); + + return references; + } + + public async Task>> GetReferencesAsync(IEnumerable fileId) + { + var references = await db.FileReferences + .Where(r => fileId.Contains(r.FileId)) + .GroupBy(r => r.FileId) + .ToDictionaryAsync(r => r.Key, r => r.ToList()); + return references; + } + + /// + /// Gets the number of references to a file + /// + /// The ID of the file + /// The number of references to the file + public async Task GetReferenceCountAsync(string fileId) + { + var cacheKey = $"{CacheKeyPrefix}count:{fileId}"; + + var cachedCount = await cache.GetAsync(cacheKey); + if (cachedCount.HasValue) + return cachedCount.Value; + + var count = await db.FileReferences + .Where(r => r.FileId == fileId) + .CountAsync(); + + await cache.SetAsync(cacheKey, count, CacheDuration); + + return count; + } + + /// + /// Gets all references for a specific resource + /// + /// The ID of the resource + /// A list of file references associated with the resource + public async Task> GetResourceReferencesAsync(string resourceId) + { + var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}"; + + var cachedReferences = await cache.GetAsync>(cacheKey); + if (cachedReferences is not null) + return cachedReferences; + + var references = await db.FileReferences + .Where(r => r.ResourceId == resourceId) + .ToListAsync(); + + await cache.SetAsync(cacheKey, references, CacheDuration); + + return references; + } + + /// + /// Gets all file references for a specific usage context + /// + /// The usage context + /// A list of file references with the specified usage + public async Task> GetUsageReferencesAsync(string usage) + { + return await db.FileReferences + .Where(r => r.Usage == usage) + .ToListAsync(); + } + + /// + /// Deletes references for a specific resource + /// + /// The ID of the resource + /// The number of deleted references + public async Task DeleteResourceReferencesAsync(string resourceId) + { + var references = await db.FileReferences + .Where(r => r.ResourceId == resourceId) + .ToListAsync(); + + var fileIds = references.Select(r 
=> r.FileId).Distinct().ToList(); + + db.FileReferences.RemoveRange(references); + var deletedCount = await db.SaveChangesAsync(); + + // Purge caches + var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList(); + tasks.Add(PurgeCacheForResourceAsync(resourceId)); + await Task.WhenAll(tasks); + + return deletedCount; + } + + /// + /// Deletes references for a specific resource and usage + /// + /// The ID of the resource + /// The usage context + /// The number of deleted references + public async Task DeleteResourceReferencesAsync(string resourceId, string usage) + { + var references = await db.FileReferences + .Where(r => r.ResourceId == resourceId && r.Usage == usage) + .ToListAsync(); + + if (!references.Any()) + { + return 0; + } + + var fileIds = references.Select(r => r.FileId).Distinct().ToList(); + + db.FileReferences.RemoveRange(references); + var deletedCount = await db.SaveChangesAsync(); + + // Purge caches + var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList(); + tasks.Add(PurgeCacheForResourceAsync(resourceId)); + await Task.WhenAll(tasks); + + return deletedCount; + } + + /// + /// Deletes a specific file reference + /// + /// The ID of the reference to delete + /// True if the reference was deleted, false otherwise + public async Task DeleteReferenceAsync(Guid referenceId) + { + var reference = await db.FileReferences + .FirstOrDefaultAsync(r => r.Id == referenceId); + + if (reference == null) + return false; + + db.FileReferences.Remove(reference); + await db.SaveChangesAsync(); + + // Purge caches + await fileService._PurgeCacheAsync(reference.FileId); + await PurgeCacheForResourceAsync(reference.ResourceId); + await PurgeCacheForFileAsync(reference.FileId); + + return true; + } + + /// + /// Updates the files referenced by a resource + /// + /// The ID of the resource + /// The new list of file IDs + /// The usage context + /// Optional expiration time for newly added files + /// Optional duration after which newly added files expire + /// A list of the updated file references + public async Task> UpdateResourceFilesAsync( + string resourceId, + IEnumerable? newFileIds, + string usage, + Instant? expiredAt = null, + Duration? 
duration = null) + { + if (newFileIds == null) + return new List(); + + var existingReferences = await db.FileReferences + .Where(r => r.ResourceId == resourceId && r.Usage == usage) + .ToListAsync(); + + var existingFileIds = existingReferences.Select(r => r.FileId).ToHashSet(); + var newFileIdsList = newFileIds.ToList(); + var newFileIdsSet = newFileIdsList.ToHashSet(); + + // Files to remove + var toRemove = existingReferences + .Where(r => !newFileIdsSet.Contains(r.FileId)) + .ToList(); + + // Files to add + var toAdd = newFileIdsList + .Where(id => !existingFileIds.Contains(id)) + .Select(id => new CloudFileReference + { + FileId = id, + Usage = usage, + ResourceId = resourceId + }) + .ToList(); + + // Apply changes + if (toRemove.Any()) + db.FileReferences.RemoveRange(toRemove); + + if (toAdd.Any()) + db.FileReferences.AddRange(toAdd); + + await db.SaveChangesAsync(); + + // Update expiration for newly added references if specified + if ((expiredAt.HasValue || duration.HasValue) && toAdd.Any()) + { + var finalExpiration = expiredAt; + if (duration.HasValue) + { + finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value; + } + + // Update newly added references with the expiration time + var referenceIds = await db.FileReferences + .Where(r => toAdd.Select(a => a.FileId).Contains(r.FileId) && + r.ResourceId == resourceId && + r.Usage == usage) + .Select(r => r.Id) + .ToListAsync(); + + await db.FileReferences + .Where(r => referenceIds.Contains(r.Id)) + .ExecuteUpdateAsync(setter => setter.SetProperty( + r => r.ExpiredAt, + _ => finalExpiration + )); + } + + // Purge caches + var allFileIds = existingFileIds.Union(newFileIdsSet).ToList(); + var tasks = allFileIds.Select(fileService._PurgeCacheAsync).ToList(); + tasks.Add(PurgeCacheForResourceAsync(resourceId)); + await Task.WhenAll(tasks); + + // Return updated references + return await db.FileReferences + .Where(r => r.ResourceId == resourceId && r.Usage == usage) + .ToListAsync(); + } + + /// + /// Gets all files referenced by a resource + /// + /// The ID of the resource + /// Optional filter by usage context + /// A list of files referenced by the resource + public async Task> GetResourceFilesAsync(string resourceId, string? usage = null) + { + var query = db.FileReferences.Where(r => r.ResourceId == resourceId); + + if (usage != null) + query = query.Where(r => r.Usage == usage); + + var references = await query.ToListAsync(); + var fileIds = references.Select(r => r.FileId).ToList(); + + return await db.Files + .Where(f => fileIds.Contains(f.Id)) + .ToListAsync(); + } + + /// + /// Purges all caches related to a resource + /// + private async Task PurgeCacheForResourceAsync(string resourceId) + { + var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}"; + await cache.RemoveAsync(cacheKey); + } + + /// + /// Purges all caches related to a file + /// + private async Task PurgeCacheForFileAsync(string fileId) + { + var cacheKeys = new[] + { + $"{CacheKeyPrefix}list:{fileId}", + $"{CacheKeyPrefix}count:{fileId}" + }; + + var tasks = cacheKeys.Select(cache.RemoveAsync); + await Task.WhenAll(tasks); + } + + /// + /// Updates the expiration time for a file reference + /// + /// The ID of the reference + /// The new expiration time, or null to remove expiration + /// True if the reference was found and updated, false otherwise + public async Task SetReferenceExpirationAsync(Guid referenceId, Instant? 
expiredAt) + { + var reference = await db.FileReferences + .FirstOrDefaultAsync(r => r.Id == referenceId); + + if (reference == null) + return false; + + reference.ExpiredAt = expiredAt; + await db.SaveChangesAsync(); + + await PurgeCacheForFileAsync(reference.FileId); + await PurgeCacheForResourceAsync(reference.ResourceId); + + return true; + } + + /// + /// Updates the expiration time for all references to a file + /// + /// The ID of the file + /// The new expiration time, or null to remove expiration + /// The number of references updated + public async Task SetFileReferencesExpirationAsync(string fileId, Instant? expiredAt) + { + var rowsAffected = await db.FileReferences + .Where(r => r.FileId == fileId) + .ExecuteUpdateAsync(setter => setter.SetProperty( + r => r.ExpiredAt, + _ => expiredAt + )); + + if (rowsAffected > 0) + { + await fileService._PurgeCacheAsync(fileId); + await PurgeCacheForFileAsync(fileId); + } + + return rowsAffected; + } + + /// + /// Get all file references for a specific resource and usage type + /// + /// The resource ID + /// The usage type + /// List of file references + public async Task> GetResourceReferencesAsync(string resourceId, string usageType) + { + return await db.FileReferences + .Where(r => r.ResourceId == resourceId && r.Usage == usageType) + .ToListAsync(); + } + + /// + /// Check if a file has any references + /// + /// The file ID to check + /// True if the file has references, false otherwise + public async Task HasFileReferencesAsync(string fileId) + { + return await db.FileReferences.AnyAsync(r => r.FileId == fileId); + } + + /// + /// Updates the expiration time for a file reference using a duration from now + /// + /// The ID of the reference + /// The duration after which the reference expires, or null to remove expiration + /// True if the reference was found and updated, false otherwise + public async Task SetReferenceExpirationDurationAsync(Guid referenceId, Duration? duration) + { + Instant? expiredAt = null; + if (duration.HasValue) + { + expiredAt = SystemClock.Instance.GetCurrentInstant() + duration.Value; + } + + return await SetReferenceExpirationAsync(referenceId, expiredAt); + } +} diff --git a/DysonNetwork.Drive/Storage/FileService.cs b/DysonNetwork.Drive/Storage/FileService.cs new file mode 100644 index 0000000..7387de0 --- /dev/null +++ b/DysonNetwork.Drive/Storage/FileService.cs @@ -0,0 +1,555 @@ +using System.Globalization; +using FFMpegCore; +using System.Security.Cryptography; +using DysonNetwork.Shared.Cache; +using DysonNetwork.Shared.Proto; +using Microsoft.EntityFrameworkCore; +using Minio; +using Minio.DataModel.Args; +using NetVips; +using NodaTime; +using tusdotnet.Stores; + +namespace DysonNetwork.Drive.Storage; + +public class FileService( + AppDatabase db, + IConfiguration configuration, + TusDiskStore store, + ILogger logger, + IServiceScopeFactory scopeFactory, + ICacheService cache +) +{ + private const string CacheKeyPrefix = "file:"; + private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15); + + /// + /// The api for getting file meta with cache, + /// the best use case is for accessing the file data. 
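+    /// The lookup is cache-aside: the cache entry is checked first, and a miss
+    /// falls back to the database and is written back with a 15-minute TTL (CacheDuration).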
+ /// + /// This function won't load uploader's information, only keep minimal file meta + /// + /// The id of the cloud file requested + /// The minimal file meta + public async Task GetFileAsync(string fileId) + { + var cacheKey = $"{CacheKeyPrefix}{fileId}"; + + var cachedFile = await cache.GetAsync(cacheKey); + if (cachedFile is not null) + return cachedFile; + + var file = await db.Files + .Where(f => f.Id == fileId) + .FirstOrDefaultAsync(); + + if (file != null) + await cache.SetAsync(cacheKey, file, CacheDuration); + + return file; + } + + private static readonly string TempFilePrefix = "dyn-cloudfile"; + + private static readonly string[] AnimatedImageTypes = + ["image/gif", "image/apng", "image/webp", "image/avif"]; + + // The analysis file method no longer will remove the GPS EXIF data + // It should be handled on the client side, and for some specific cases it should be keep + public async Task ProcessNewFileAsync( + Account account, + string fileId, + Stream stream, + string fileName, + string? contentType + ) + { + var result = new List<(string filePath, string suffix)>(); + + var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue("Tus:StorePath"), fileId)); + var fileSize = stream.Length; + var hash = await HashFileAsync(stream, fileSize: fileSize); + contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName); + + var file = new CloudFile + { + Id = fileId, + Name = fileName, + MimeType = contentType, + Size = fileSize, + Hash = hash, + AccountId = Guid.Parse(account.Id) + }; + + var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == hash); + file.StorageId = existingFile is not null ? existingFile.StorageId : file.Id; + + if (existingFile is not null) + { + file.FileMeta = existingFile.FileMeta; + file.HasCompression = existingFile.HasCompression; + file.SensitiveMarks = existingFile.SensitiveMarks; + + db.Files.Add(file); + await db.SaveChangesAsync(); + return file; + } + + switch (contentType.Split('/')[0]) + { + case "image": + var blurhash = + BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(xComponent: 3, yComponent: 3, filename: ogFilePath); + + // Rewind stream + stream.Position = 0; + + // Use NetVips for the rest + using (var vipsImage = NetVips.Image.NewFromStream(stream)) + { + var width = vipsImage.Width; + var height = vipsImage.Height; + var format = vipsImage.Get("vips-loader") ?? "unknown"; + + // Try to get orientation from exif data + var orientation = 1; + var meta = new Dictionary + { + ["blur"] = blurhash, + ["format"] = format, + ["width"] = width, + ["height"] = height, + ["orientation"] = orientation, + }; + Dictionary exif = []; + + foreach (var field in vipsImage.GetFields()) + { + var value = vipsImage.Get(field); + + // Skip GPS-related EXIF fields to remove location data + if (IsIgnoredField(field)) + continue; + + if (field.StartsWith("exif-")) exif[field.Replace("exif-", "")] = value; + else meta[field] = value; + + if (field == "orientation") orientation = (int)value; + } + + if (orientation is 6 or 8) + (width, height) = (height, width); + + var aspectRatio = height != 0 ? 
(double)width / height : 0; + + meta["exif"] = exif; + meta["ratio"] = aspectRatio; + file.FileMeta = meta; + } + + break; + case "video": + case "audio": + try + { + var mediaInfo = await FFProbe.AnalyseAsync(ogFilePath); + file.FileMeta = new Dictionary + { + ["duration"] = mediaInfo.Duration.TotalSeconds, + ["format_name"] = mediaInfo.Format.FormatName, + ["format_long_name"] = mediaInfo.Format.FormatLongName, + ["start_time"] = mediaInfo.Format.StartTime.ToString(), + ["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture), + ["tags"] = mediaInfo.Format.Tags ?? [], + ["chapters"] = mediaInfo.Chapters, + }; + if (mediaInfo.PrimaryVideoStream is not null) + file.FileMeta["ratio"] = + mediaInfo.PrimaryVideoStream.Width / mediaInfo.PrimaryVideoStream.Height; + } + catch (Exception ex) + { + logger.LogError("File analyzed failed, unable collect video / audio information: {Message}", + ex.Message); + } + + break; + } + + db.Files.Add(file); + await db.SaveChangesAsync(); + + _ = Task.Run(async () => + { + using var scope = scopeFactory.CreateScope(); + var nfs = scope.ServiceProvider.GetRequiredService(); + + try + { + logger.LogInformation("Processed file {fileId}, now trying optimizing if possible...", fileId); + + if (contentType.Split('/')[0] == "image") + { + // Skip compression for animated image types + var animatedMimeTypes = AnimatedImageTypes; + if (Enumerable.Contains(animatedMimeTypes, contentType)) + { + logger.LogInformation( + "File {fileId} is an animated image (MIME: {mime}), skipping WebP conversion.", fileId, + contentType + ); + var tempFilePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}"); + result.Add((tempFilePath, string.Empty)); + return; + } + + file.MimeType = "image/webp"; + + using var vipsImage = Image.NewFromFile(ogFilePath); + var imagePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}"); + vipsImage.Autorot().WriteToFile(imagePath + ".webp", + new VOption { { "lossless", true }, { "strip", true } }); + result.Add((imagePath + ".webp", string.Empty)); + + if (vipsImage.Width * vipsImage.Height >= 1024 * 1024) + { + var scale = 1024.0 / Math.Max(vipsImage.Width, vipsImage.Height); + var imageCompressedPath = + Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}-compressed"); + + // Create and save image within the same synchronous block to avoid disposal issues + using var compressedImage = vipsImage.Resize(scale); + compressedImage.Autorot().WriteToFile(imageCompressedPath + ".webp", + new VOption { { "Q", 80 }, { "strip", true } }); + + result.Add((imageCompressedPath + ".webp", ".compressed")); + file.HasCompression = true; + } + } + else + { + // No extra process for video, add it to the upload queue. 
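+                    // Each (filePath, suffix) pair queued in `result` becomes one remote
+                    // object: an empty suffix is the original upload, while ".compressed"
+                    // marks the WebP-compressed variant; UploadFileToRemoteAsync appends
+                    // the suffix to the object name when it uploads.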
+ result.Add((ogFilePath, string.Empty)); + } + + logger.LogInformation("Optimized file {fileId}, now uploading...", fileId); + + if (result.Count > 0) + { + List> tasks = []; + tasks.AddRange(result.Select(item => + nfs.UploadFileToRemoteAsync(file, item.filePath, null, item.suffix, true)) + ); + + await Task.WhenAll(tasks); + file = await tasks.First(); + } + else + { + file = await nfs.UploadFileToRemoteAsync(file, stream, null); + } + + logger.LogInformation("Uploaded file {fileId} done!", fileId); + + var scopedDb = scope.ServiceProvider.GetRequiredService(); + await scopedDb.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(setter => setter + .SetProperty(f => f.UploadedAt, file.UploadedAt) + .SetProperty(f => f.UploadedTo, file.UploadedTo) + .SetProperty(f => f.MimeType, file.MimeType) + .SetProperty(f => f.HasCompression, file.HasCompression) + ); + } + catch (Exception err) + { + logger.LogError(err, "Failed to process {fileId}", fileId); + } + + await stream.DisposeAsync(); + await store.DeleteFileAsync(file.Id, CancellationToken.None); + await nfs._PurgeCacheAsync(file.Id); + }); + + return file; + } + + private static async Task HashFileAsync(Stream stream, int chunkSize = 1024 * 1024, long? fileSize = null) + { + fileSize ??= stream.Length; + if (fileSize > chunkSize * 1024 * 5) + return await HashFastApproximateAsync(stream, chunkSize); + + using var md5 = MD5.Create(); + var hashBytes = await md5.ComputeHashAsync(stream); + return Convert.ToHexString(hashBytes).ToLowerInvariant(); + } + + private static async Task HashFastApproximateAsync(Stream stream, int chunkSize = 1024 * 1024) + { + // Scale the chunk size to kB level + chunkSize *= 1024; + + using var md5 = MD5.Create(); + + var buffer = new byte[chunkSize * 2]; + var fileLength = stream.Length; + + var bytesRead = await stream.ReadAsync(buffer.AsMemory(0, chunkSize)); + + if (fileLength > chunkSize) + { + stream.Seek(-chunkSize, SeekOrigin.End); + bytesRead += await stream.ReadAsync(buffer.AsMemory(chunkSize, chunkSize)); + } + + var hash = md5.ComputeHash(buffer, 0, bytesRead); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + public async Task UploadFileToRemoteAsync(CloudFile file, string filePath, string? targetRemote, + string? suffix = null, bool selfDestruct = false) + { + var fileStream = File.OpenRead(filePath); + var result = await UploadFileToRemoteAsync(file, fileStream, targetRemote, suffix); + if (selfDestruct) File.Delete(filePath); + return result; + } + + public async Task UploadFileToRemoteAsync(CloudFile file, Stream stream, string? targetRemote, + string? suffix = null) + { + if (file.UploadedAt.HasValue) return file; + + file.UploadedTo = targetRemote ?? configuration.GetValue("Storage:PreferredRemote")!; + + var dest = GetRemoteStorageConfig(file.UploadedTo); + var client = CreateMinioClient(dest); + if (client is null) + throw new InvalidOperationException( + $"Failed to configure client for remote destination '{file.UploadedTo}'" + ); + + var bucket = dest.Bucket; + var contentType = file.MimeType ?? "application/octet-stream"; + + await client.PutObjectAsync(new PutObjectArgs() + .WithBucket(bucket) + .WithObject(string.IsNullOrWhiteSpace(suffix) ? 
file.Id : file.Id + suffix) + .WithStreamData(stream) // Fix this disposed + .WithObjectSize(stream.Length) + .WithContentType(contentType) + ); + + file.UploadedAt = Instant.FromDateTimeUtc(DateTime.UtcNow); + return file; + } + + public async Task DeleteFileAsync(CloudFile file) + { + await DeleteFileDataAsync(file); + + db.Remove(file); + await db.SaveChangesAsync(); + await _PurgeCacheAsync(file.Id); + } + + public async Task DeleteFileDataAsync(CloudFile file) + { + if (file.StorageId is null) return; + if (file.UploadedTo is null) return; + + // Check if any other file with the same storage ID is referenced + var otherFilesWithSameStorageId = await db.Files + .Where(f => f.StorageId == file.StorageId && f.Id != file.Id) + .Select(f => f.Id) + .ToListAsync(); + + // Check if any of these files are referenced + var anyReferenced = false; + if (otherFilesWithSameStorageId.Any()) + { + anyReferenced = await db.FileReferences + .Where(r => otherFilesWithSameStorageId.Contains(r.FileId)) + .AnyAsync(); + } + + // If any other file with the same storage ID is referenced, don't delete the actual file data + if (anyReferenced) return; + + var dest = GetRemoteStorageConfig(file.UploadedTo); + var client = CreateMinioClient(dest); + if (client is null) + throw new InvalidOperationException( + $"Failed to configure client for remote destination '{file.UploadedTo}'" + ); + + var bucket = dest.Bucket; + var objectId = file.StorageId ?? file.Id; // Use StorageId if available, otherwise fall back to Id + + await client.RemoveObjectAsync( + new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId) + ); + + if (file.HasCompression) + { + // Also remove the compressed version if it exists + try + { + await client.RemoveObjectAsync( + new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId + ".compressed") + ); + } + catch + { + // Ignore errors when deleting compressed version + logger.LogWarning("Failed to delete compressed version of file {fileId}", file.Id); + } + } + } + + public RemoteStorageConfig GetRemoteStorageConfig(string destination) + { + var destinations = configuration.GetSection("Storage:Remote").Get>()!; + var dest = destinations.FirstOrDefault(d => d.Id == destination); + if (dest is null) throw new InvalidOperationException($"Remote destination '{destination}' not found"); + return dest; + } + + public IMinioClient? 
CreateMinioClient(RemoteStorageConfig dest) + { + var client = new MinioClient() + .WithEndpoint(dest.Endpoint) + .WithRegion(dest.Region) + .WithCredentials(dest.SecretId, dest.SecretKey); + if (dest.EnableSsl) client = client.WithSSL(); + + return client.Build(); + } + + // Helper method to purge the cache for a specific file + // Made internal to allow FileReferenceService to use it + internal async Task _PurgeCacheAsync(string fileId) + { + var cacheKey = $"{CacheKeyPrefix}{fileId}"; + await cache.RemoveAsync(cacheKey); + } + + // Helper method to purge cache for multiple files + internal async Task _PurgeCacheRangeAsync(IEnumerable fileIds) + { + var tasks = fileIds.Select(_PurgeCacheAsync); + await Task.WhenAll(tasks); + } + + public async Task> LoadFromReference(List references) + { + var cachedFiles = new Dictionary(); + var uncachedIds = new List(); + + // Check cache first + foreach (var reference in references) + { + var cacheKey = $"{CacheKeyPrefix}{reference.Id}"; + var cachedFile = await cache.GetAsync(cacheKey); + + if (cachedFile != null) + { + cachedFiles[reference.Id] = cachedFile; + } + else + { + uncachedIds.Add(reference.Id); + } + } + + // Load uncached files from database + if (uncachedIds.Count > 0) + { + var dbFiles = await db.Files + .Where(f => uncachedIds.Contains(f.Id)) + .ToListAsync(); + + // Add to cache + foreach (var file in dbFiles) + { + var cacheKey = $"{CacheKeyPrefix}{file.Id}"; + await cache.SetAsync(cacheKey, file, CacheDuration); + cachedFiles[file.Id] = file; + } + } + + // Preserve original order + return references + .Select(r => cachedFiles.GetValueOrDefault(r.Id)) + .Where(f => f != null) + .ToList(); + } + + /// + /// Gets the number of references to a file based on CloudFileReference records + /// + /// The ID of the file + /// The number of references to the file + public async Task GetReferenceCountAsync(string fileId) + { + return await db.FileReferences + .Where(r => r.FileId == fileId) + .CountAsync(); + } + + /// + /// Checks if a file is referenced by any resource + /// + /// The ID of the file to check + /// True if the file is referenced, false otherwise + public async Task IsReferencedAsync(string fileId) + { + return await db.FileReferences + .Where(r => r.FileId == fileId) + .AnyAsync(); + } + + /// + /// Checks if an EXIF field contains GPS location data + /// + /// The EXIF field name + /// True if the field contains GPS data, false otherwise + private static bool IsGpsExifField(string fieldName) + { + // Common GPS EXIF field names + var gpsFields = new[] + { + "gps-latitude", + "gps-longitude", + "gps-altitude", + "gps-latitude-ref", + "gps-longitude-ref", + "gps-altitude-ref", + "gps-timestamp", + "gps-datestamp", + "gps-speed", + "gps-speed-ref", + "gps-track", + "gps-track-ref", + "gps-img-direction", + "gps-img-direction-ref", + "gps-dest-latitude", + "gps-dest-longitude", + "gps-dest-latitude-ref", + "gps-dest-longitude-ref", + "gps-processing-method", + "gps-area-information" + }; + + return gpsFields.Any(gpsField => + fieldName.Equals(gpsField, StringComparison.OrdinalIgnoreCase) || + fieldName.StartsWith("gps", StringComparison.OrdinalIgnoreCase)); + } + + private static bool IsIgnoredField(string fieldName) + { + if (IsGpsExifField(fieldName)) return true; + if (fieldName.EndsWith("-data")) return true; + return false; + } +} \ No newline at end of file diff --git a/DysonNetwork.Drive/Storage/TusService.cs b/DysonNetwork.Drive/Storage/TusService.cs new file mode 100644 index 0000000..8f66936 --- /dev/null +++ 
b/DysonNetwork.Drive/Storage/TusService.cs @@ -0,0 +1,79 @@ +using System.Net; +using System.Text; +using System.Text.Json; +using DysonNetwork.Shared.Proto; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Options; +using tusdotnet.Interfaces; +using tusdotnet.Models; +using tusdotnet.Models.Configuration; + +namespace DysonNetwork.Drive.Storage; + +public abstract class TusService +{ + public static DefaultTusConfiguration BuildConfiguration(ITusStore store) => new() + { + Store = store, + Events = new Events + { + OnAuthorizeAsync = async eventContext => + { + if (eventContext.Intent == IntentType.DeleteFile) + { + eventContext.FailRequest( + HttpStatusCode.BadRequest, + "Deleting files from this endpoint was disabled, please refer to the Dyson Network File API." + ); + return; + } + + var httpContext = eventContext.HttpContext; + if (httpContext.Items["CurrentUser"] is not Account user) + { + eventContext.FailRequest(HttpStatusCode.Unauthorized); + return; + } + + if (!user.IsSuperuser) + { + using var scope = httpContext.RequestServices.CreateScope(); + var pm = scope.ServiceProvider.GetRequiredService(); + var allowed = await pm.HasPermissionAsync(new HasPermissionRequest + { Actor = $"user:{user.Id}", Area = "global", Key = "files.create" }); + if (!allowed.HasPermission) + eventContext.FailRequest(HttpStatusCode.Forbidden); + } + }, + OnFileCompleteAsync = async eventContext => + { + using var scope = eventContext.HttpContext.RequestServices.CreateScope(); + var services = scope.ServiceProvider; + + var httpContext = eventContext.HttpContext; + if (httpContext.Items["CurrentUser"] is not Account user) return; + + var file = await eventContext.GetFileAsync(); + var metadata = await file.GetMetadataAsync(eventContext.CancellationToken); + var fileName = metadata.TryGetValue("filename", out var fn) + ? fn.GetString(Encoding.UTF8) + : "uploaded_file"; + var contentType = metadata.TryGetValue("content-type", out var ct) ? 
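+                // tus Upload-Metadata values arrive base64-encoded; tusdotnet's GetString(Encoding) decodes them.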
ct.GetString(Encoding.UTF8) : null; + + var fileStream = await file.GetContentAsync(eventContext.CancellationToken); + + var fileService = services.GetRequiredService(); + var info = await fileService.ProcessNewFileAsync(user, file.Id, fileStream, fileName, contentType); + + using var finalScope = eventContext.HttpContext.RequestServices.CreateScope(); + var jsonOptions = finalScope.ServiceProvider.GetRequiredService>().Value + .JsonSerializerOptions; + var infoJson = JsonSerializer.Serialize(info, jsonOptions); + eventContext.HttpContext.Response.Headers.Append("X-FileInfo", infoJson); + + // Dispose the stream after all processing is complete + await fileStream.DisposeAsync(); + } + } + }; +} \ No newline at end of file diff --git a/DysonNetwork.Drive/appsettings.json b/DysonNetwork.Drive/appsettings.json new file mode 100644 index 0000000..82c1088 --- /dev/null +++ b/DysonNetwork.Drive/appsettings.json @@ -0,0 +1,129 @@ +{ + "Debug": true, + "BaseUrl": "http://localhost:5071", + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*", + "ConnectionStrings": { + "App": "Host=localhost;Port=5432;Database=dyson_network;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60", + "FastRetrieve": "localhost:6379" + }, + "Authentication": { + "Schemes": { + "Bearer": { + "ValidAudiences": [ + "http://localhost:5071", + "https://localhost:7099" + ], + "ValidIssuer": "solar-network" + } + } + }, + "AuthToken": { + "PublicKeyPath": "Keys/PublicKey.pem", + "PrivateKeyPath": "Keys/PrivateKey.pem" + }, + "OidcProvider": { + "IssuerUri": "https://nt.solian.app", + "PublicKeyPath": "Keys/PublicKey.pem", + "PrivateKeyPath": "Keys/PrivateKey.pem", + "AccessTokenLifetime": "01:00:00", + "RefreshTokenLifetime": "30.00:00:00", + "AuthorizationCodeLifetime": "00:30:00", + "RequireHttpsMetadata": true + }, + "Tus": { + "StorePath": "Uploads" + }, + "Storage": { + "PreferredRemote": "minio", + "Remote": [ + { + "Id": "minio", + "Label": "Minio", + "Region": "auto", + "Bucket": "solar-network-development", + "Endpoint": "localhost:9000", + "SecretId": "littlesheep", + "SecretKey": "password", + "EnabledSigned": true, + "EnableSsl": false + }, + { + "Id": "cloudflare", + "Label": "Cloudflare R2", + "Region": "auto", + "Bucket": "solar-network", + "Endpoint": "0a70a6d1b7128888c823359d0008f4e1.r2.cloudflarestorage.com", + "SecretId": "8ff5d06c7b1639829d60bc6838a542e6", + "SecretKey": "fd58158c5201be16d1872c9209d9cf199421dae3c2f9972f94b2305976580d67", + "EnableSigned": true, + "EnableSsl": true + } + ] + }, + "Captcha": { + "Provider": "cloudflare", + "ApiKey": "0x4AAAAAABCDUdOujj4feOb_", + "ApiSecret": "0x4AAAAAABCDUWABiJQweqlB7tYq-IqIm8U" + }, + "Notifications": { + "Topic": "dev.solsynth.solian", + "Endpoint": "http://localhost:8088" + }, + "Email": { + "Server": "smtp4dev.orb.local", + "Port": 25, + "UseSsl": false, + "Username": "no-reply@mail.solsynth.dev", + "Password": "password", + "FromAddress": "no-reply@mail.solsynth.dev", + "FromName": "Alphabot", + "SubjectPrefix": "Solar Network" + }, + "RealtimeChat": { + "Endpoint": "https://solar-network-im44o8gq.livekit.cloud", + "ApiKey": "APIs6TiL8wj3A4j", + "ApiSecret": "SffxRneIwTnlHPtEf3zicmmv3LUEl7xXael4PvWZrEhE" + }, + "GeoIp": { + "DatabasePath": "./Keys/GeoLite2-City.mmdb" + }, + "Oidc": { + "Google": { + "ClientId": "961776991058-963m1qin2vtp8fv693b5fdrab5hmpl89.apps.googleusercontent.com", + "ClientSecret": "" + }, + "Apple": { + 
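+        // Sign in with Apple authenticates with a client assertion signed by the .p8 key below instead of a static client secret.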
"ClientId": "dev.solsynth.solian", + "TeamId": "W7HPZ53V6B", + "KeyId": "B668YP4KBG", + "PrivateKeyPath": "./Keys/Solarpass.p8" + }, + "Microsoft": { + "ClientId": "YOUR_MICROSOFT_CLIENT_ID", + "ClientSecret": "YOUR_MICROSOFT_CLIENT_SECRET", + "DiscoveryEndpoint": "YOUR_MICROSOFT_DISCOVERY_ENDPOINT" + } + }, + "Payment": { + "Auth": { + "Afdian": "" + }, + "Subscriptions": { + "Afdian": { + "7d17aae23c9611f0b5705254001e7c00": "solian.stellar.primary", + "7dfae4743c9611f0b3a55254001e7c00": "solian.stellar.nova", + "141713ee3d6211f085b352540025c377": "solian.stellar.supernova" + } + } + }, + "KnownProxies": [ + "127.0.0.1", + "::1" + ] +} diff --git a/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs b/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs index 0e023ad..996dab4 100644 --- a/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs +++ b/DysonNetwork.Pass/Auth/AuthServiceGrpc.cs @@ -1,27 +1,49 @@ +using DysonNetwork.Shared.Cache; using DysonNetwork.Shared.Proto; using Grpc.Core; using Microsoft.EntityFrameworkCore; +using NodaTime; namespace DysonNetwork.Pass.Auth; -public class AuthServiceGrpc(AuthService authService, AppDatabase db) : Shared.Proto.AuthService.AuthServiceBase +public class AuthServiceGrpc( + AuthService authService, + ICacheService cache, + AppDatabase db +) + : Shared.Proto.AuthService.AuthServiceBase { - public override async Task Authenticate(AuthenticateRequest request, ServerCallContext context) + public override async Task Authenticate( + AuthenticateRequest request, + ServerCallContext context + ) { if (!authService.ValidateToken(request.Token, out var sessionId)) - { - throw new RpcException(new Status(StatusCode.Unauthenticated, "Invalid token.")); - } + return new AuthenticateResponse { Valid = false, Message = "Invalid token." }; + + var session = await cache.GetAsync($"{DysonTokenAuthHandler.AuthCachePrefix}{sessionId}"); + if (session is not null) + return new AuthenticateResponse { Valid = true, Session = session.ToProtoValue() }; - var session = await db.AuthSessions + session = await db.AuthSessions .AsNoTracking() + .Include(e => e.Challenge) + .Include(e => e.Account) + .ThenInclude(e => e.Profile) .FirstOrDefaultAsync(s => s.Id == sessionId); - if (session == null) - { - throw new RpcException(new Status(StatusCode.NotFound, "Session not found.")); - } + return new AuthenticateResponse { Valid = false, Message = "Session was not found." }; + var now = SystemClock.Instance.GetCurrentInstant(); + if (session.ExpiredAt.HasValue && session.ExpiredAt < now) + return new AuthenticateResponse { Valid = false, Message = "Session has been expired." 
}; + + await cache.SetWithGroupsAsync( + $"auth:{sessionId}", + session, + [$"{Account.AccountService.AccountCachePrefix}{session.Account.Id}"], + TimeSpan.FromHours(1) + ); - return session.ToProtoValue(); + return new AuthenticateResponse { Valid = true, Session = session.ToProtoValue() }; } -} +} \ No newline at end of file diff --git a/DysonNetwork.Pass/DysonNetwork.Pass.csproj b/DysonNetwork.Pass/DysonNetwork.Pass.csproj index 3b281ce..e13db9f 100644 --- a/DysonNetwork.Pass/DysonNetwork.Pass.csproj +++ b/DysonNetwork.Pass/DysonNetwork.Pass.csproj @@ -8,33 +8,33 @@ - - - + + + - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + diff --git a/DysonNetwork.Pass/Permission/PermissionServiceGrpc.cs b/DysonNetwork.Pass/Permission/PermissionServiceGrpc.cs new file mode 100644 index 0000000..435c8f6 --- /dev/null +++ b/DysonNetwork.Pass/Permission/PermissionServiceGrpc.cs @@ -0,0 +1,96 @@ +using Grpc.Core; +using Microsoft.EntityFrameworkCore; +using DysonNetwork.Shared.Proto; +using Google.Protobuf.WellKnownTypes; +using System.Text.Json; +using NodaTime.Serialization.Protobuf; + +namespace DysonNetwork.Pass.Permission; + +public class PermissionServiceGrpc( + PermissionService permissionService, + AppDatabase db +) : DysonNetwork.Shared.Proto.PermissionService.PermissionServiceBase +{ + public override async Task HasPermission(HasPermissionRequest request, ServerCallContext context) + { + var hasPermission = await permissionService.HasPermissionAsync(request.Actor, request.Area, request.Key); + return new HasPermissionResponse { HasPermission = hasPermission }; + } + + public override async Task GetPermission(GetPermissionRequest request, ServerCallContext context) + { + var permissionValue = await permissionService.GetPermissionAsync(request.Actor, request.Area, request.Key); + return new GetPermissionResponse { Value = permissionValue != null ? 
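+            // Round-trips the stored JsonDocument as raw JSON to produce a protobuf Value.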
Value.Parser.ParseJson(permissionValue.RootElement.GetRawText()) : null }; + } + + public override async Task AddPermissionNode(AddPermissionNodeRequest request, ServerCallContext context) + { + var node = await permissionService.AddPermissionNode( + request.Actor, + request.Area, + request.Key, + JsonDocument.Parse(request.Value.ToString()), // Convert Value to JsonDocument + request.ExpiredAt?.ToInstant(), + request.AffectedAt?.ToInstant() + ); + return new AddPermissionNodeResponse { Node = node.ToProtoValue() }; + } + + public override async Task AddPermissionNodeToGroup(AddPermissionNodeToGroupRequest request, ServerCallContext context) + { + var group = await db.PermissionGroups.FirstOrDefaultAsync(g => g.Id == Guid.Parse(request.Group.Id)); + if (group == null) + { + throw new RpcException(new Status(StatusCode.NotFound, "Permission group not found.")); + } + + var node = await permissionService.AddPermissionNodeToGroup( + group, + request.Actor, + request.Area, + request.Key, + JsonDocument.Parse(request.Value.ToString()), // Convert Value to JsonDocument + request.ExpiredAt?.ToInstant(), + request.AffectedAt?.ToInstant() + ); + return new AddPermissionNodeToGroupResponse { Node = node.ToProtoValue() }; + } + + public override async Task RemovePermissionNode(RemovePermissionNodeRequest request, ServerCallContext context) + { + await permissionService.RemovePermissionNode(request.Actor, request.Area, request.Key); + return new RemovePermissionNodeResponse { Success = true }; + } + + public override async Task RemovePermissionNodeFromGroup(RemovePermissionNodeFromGroupRequest request, ServerCallContext context) + { + var group = await db.PermissionGroups.FirstOrDefaultAsync(g => g.Id == Guid.Parse(request.Group.Id)); + if (group == null) + { + throw new RpcException(new Status(StatusCode.NotFound, "Permission group not found.")); + } + + await permissionService.RemovePermissionNodeFromGroup(group, request.Actor, request.Area, request.Key); + return new RemovePermissionNodeFromGroupResponse { Success = true }; + } +} + +public static class PermissionExtensions +{ + public static DysonNetwork.Shared.Proto.PermissionNode ToProtoValue(this PermissionNode node) + { + return new DysonNetwork.Shared.Proto.PermissionNode + { + Id = node.Id.ToString(), + Actor = node.Actor, + Area = node.Area, + Key = node.Key, + Value = Value.Parser.ParseJson(node.Value.RootElement.GetRawText()), + ExpiredAt = node.ExpiredAt?.ToTimestamp(), + AffectedAt = node.AffectedAt?.ToTimestamp(), + GroupId = node.GroupId?.ToString() ?? 
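+            // proto3 string fields cannot be null, so "no group" is encoded as an empty string.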
string.Empty + }; + } +} + diff --git a/DysonNetwork.Pass/Program.cs b/DysonNetwork.Pass/Program.cs index c6b9257..831b8f6 100644 --- a/DysonNetwork.Pass/Program.cs +++ b/DysonNetwork.Pass/Program.cs @@ -13,7 +13,7 @@ builder.ConfigureAppKestrel(); builder.Services.AddAppMetrics(); // Add application services -builder.Services.AddEtcdService(builder.Configuration); +builder.Services.AddRegistryService(builder.Configuration); builder.Services.AddAppServices(builder.Configuration); builder.Services.AddAppRateLimiting(); builder.Services.AddAppAuthentication(); diff --git a/DysonNetwork.Pusher/DysonNetwork.Pusher.csproj b/DysonNetwork.Pusher/DysonNetwork.Pusher.csproj index 718f65d..5bf2531 100644 --- a/DysonNetwork.Pusher/DysonNetwork.Pusher.csproj +++ b/DysonNetwork.Pusher/DysonNetwork.Pusher.csproj @@ -15,6 +15,7 @@ + diff --git a/DysonNetwork.Pusher/Program.cs b/DysonNetwork.Pusher/Program.cs index 6f8fc57..c3e1599 100644 --- a/DysonNetwork.Pusher/Program.cs +++ b/DysonNetwork.Pusher/Program.cs @@ -1,6 +1,7 @@ -using DysonNetwork.Pass.Startup; using DysonNetwork.Pusher; using DysonNetwork.Pusher.Startup; +using DysonNetwork.Shared.Auth; +using DysonNetwork.Shared.Registry; using Microsoft.EntityFrameworkCore; var builder = WebApplication.CreateBuilder(args); @@ -9,10 +10,12 @@ var builder = WebApplication.CreateBuilder(args); builder.ConfigureAppKestrel(); // Add application services +builder.Services.AddRegistryService(builder.Configuration); builder.Services.AddAppServices(builder.Configuration); builder.Services.AddAppRateLimiting(); builder.Services.AddAppAuthentication(); builder.Services.AddAppSwagger(); +builder.Services.AddDysonAuth(builder.Configuration); // Add flush handlers and websocket handlers builder.Services.AddAppFlushHandlers(); @@ -23,8 +26,6 @@ builder.Services.AddAppBusinessServices(); // Add scheduled jobs builder.Services.AddAppScheduledJobs(); -builder.Services.AddHostedService(); - var app = builder.Build(); // Run database migrations @@ -37,8 +38,6 @@ using (var scope = app.Services.CreateScope()) // Configure application middleware pipeline app.ConfigureAppMiddleware(builder.Configuration); -app.UseMiddleware(); - // Configure gRPC app.ConfigureGrpcServices(); diff --git a/DysonNetwork.Pusher/Startup/KestrelConfiguration.cs b/DysonNetwork.Pusher/Startup/KestrelConfiguration.cs index b042534..f35e4dd 100644 --- a/DysonNetwork.Pusher/Startup/KestrelConfiguration.cs +++ b/DysonNetwork.Pusher/Startup/KestrelConfiguration.cs @@ -1,4 +1,4 @@ -namespace DysonNetwork.Pass.Startup; +namespace DysonNetwork.Pusher.Startup; public static class KestrelConfiguration { diff --git a/DysonNetwork.Shared/Auth/AuthScheme.cs b/DysonNetwork.Shared/Auth/AuthScheme.cs new file mode 100644 index 0000000..69d4873 --- /dev/null +++ b/DysonNetwork.Shared/Auth/AuthScheme.cs @@ -0,0 +1,154 @@ +using System.Security.Claims; +using System.Text.Encodings.Web; +using DysonNetwork.Shared.Proto; +using Grpc.Core; +using Microsoft.AspNetCore.Authentication; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using SystemClock = NodaTime.SystemClock; + +namespace DysonNetwork.Shared.Auth; + +public class DysonTokenAuthOptions : AuthenticationSchemeOptions; + +public class DysonTokenAuthHandler( + IOptionsMonitor options, + ILoggerFactory logger, + UrlEncoder encoder, + ISystemClock clock, + AuthService.AuthServiceClient auth +) + : AuthenticationHandler(options, logger, encoder, clock) +{ + protected override async Task 
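+    // Resolves the extracted token into an AuthSession via the Pass gRPC AuthService and
+    // stores it in HttpContext.Items for downstream controllers to read.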
HandleAuthenticateAsync() + { + var tokenInfo = _ExtractToken(Request); + + if (tokenInfo == null || string.IsNullOrEmpty(tokenInfo.Token)) + return AuthenticateResult.Fail("No token was provided."); + + try + { + var now = SystemClock.Instance.GetCurrentInstant(); + + // Validate token and extract session ID + AuthSession session; + try + { + session = await ValidateToken(tokenInfo.Token); + } + catch (InvalidOperationException ex) + { + return AuthenticateResult.Fail(ex.Message); + } + catch (RpcException ex) + { + return AuthenticateResult.Fail($"Remote error: {ex.Status.StatusCode} - {ex.Status.Detail}"); + } + + // Store user and session in the HttpContext.Items for easy access in controllers + Context.Items["CurrentUser"] = session.Account; + Context.Items["CurrentSession"] = session; + Context.Items["CurrentTokenType"] = tokenInfo.Type.ToString(); + + // Create claims from the session + var claims = new List + { + new("user_id", session.Account.Id), + new("session_id", session.Id), + new("token_type", tokenInfo.Type.ToString()) + }; + + // return AuthenticateResult.Success(ticket); + return AuthenticateResult.NoResult(); + } + catch (Exception ex) + { + return AuthenticateResult.Fail($"Authentication failed: {ex.Message}"); + } + } + + private async Task ValidateToken(string token) + { + var resp = await auth.AuthenticateAsync(new AuthenticateRequest { Token = token }); + if (!resp.Valid) throw new InvalidOperationException(resp.Message); + if (resp.Session == null) throw new InvalidOperationException("Session not found."); + return resp.Session; + } + + private static byte[] Base64UrlDecode(string base64Url) + { + var padded = base64Url + .Replace('-', '+') + .Replace('_', '/'); + + switch (padded.Length % 4) + { + case 2: padded += "=="; break; + case 3: padded += "="; break; + } + + return Convert.FromBase64String(padded); + } + + private static TokenInfo? _ExtractToken(HttpRequest request) + { + // Check for token in query parameters + if (request.Query.TryGetValue(AuthConstants.TokenQueryParamName, out var queryToken)) + { + return new TokenInfo + { + Token = queryToken.ToString(), + Type = TokenType.AuthKey + }; + } + + + // Check for token in Authorization header + var authHeader = request.Headers["Authorization"].ToString(); + if (!string.IsNullOrEmpty(authHeader)) + { + if (authHeader.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase)) + { + var token = authHeader["Bearer ".Length..].Trim(); + var parts = token.Split('.'); + + return new TokenInfo + { + Token = token, + Type = parts.Length == 3 ? TokenType.OidcKey : TokenType.AuthKey + }; + } + else if (authHeader.StartsWith("AtField ", StringComparison.OrdinalIgnoreCase)) + { + return new TokenInfo + { + Token = authHeader["AtField ".Length..].Trim(), + Type = TokenType.AuthKey + }; + } + else if (authHeader.StartsWith("AkField ", StringComparison.OrdinalIgnoreCase)) + { + return new TokenInfo + { + Token = authHeader["AkField ".Length..].Trim(), + Type = TokenType.ApiKey + }; + } + } + + // Check for token in cookies + if (request.Cookies.TryGetValue(AuthConstants.CookieTokenName, out var cookieToken)) + { + return new TokenInfo + { + Token = cookieToken, + Type = cookieToken.Count(c => c == '.') == 2 ? 
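+                // A JWT has three dot-separated segments (header.payload.signature), hence exactly two dots.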
TokenType.OidcKey : TokenType.AuthKey + }; + } + + + return null; + } +} \ No newline at end of file diff --git a/DysonNetwork.Shared/Auth/Startup.cs b/DysonNetwork.Shared/Auth/Startup.cs new file mode 100644 index 0000000..b5894fb --- /dev/null +++ b/DysonNetwork.Shared/Auth/Startup.cs @@ -0,0 +1,35 @@ +using dotnet_etcd.interfaces; +using DysonNetwork.Shared.Proto; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; + +namespace DysonNetwork.Shared.Auth; + +public static class DysonAuthStartup +{ + public static IServiceCollection AddDysonAuth( + this IServiceCollection services, + IConfiguration configuration + ) + { + services.AddSingleton(sp => + { + var etcdClient = sp.GetRequiredService(); + var config = sp.GetRequiredService(); + var clientCertPath = config["ClientCert:Path"]; + var clientKeyPath = config["ClientKey:Path"]; + var clientCertPassword = config["ClientCert:Password"]; + + return GrpcClientHelper.CreateAuthServiceClient(etcdClient, clientCertPath, clientKeyPath, clientCertPassword); + }); + + services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = AuthConstants.SchemeName; + options.DefaultChallengeScheme = AuthConstants.SchemeName; + }) + .AddScheme(AuthConstants.SchemeName, _ => { }); + + return services; + } +} \ No newline at end of file diff --git a/DysonNetwork.Shared/DysonNetwork.Shared.csproj b/DysonNetwork.Shared/DysonNetwork.Shared.csproj index 0c82947..38296a3 100644 --- a/DysonNetwork.Shared/DysonNetwork.Shared.csproj +++ b/DysonNetwork.Shared/DysonNetwork.Shared.csproj @@ -8,7 +8,7 @@ - + @@ -17,21 +17,22 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + + - - - + + + - - - + + + - + diff --git a/DysonNetwork.Shared/Middleware/AuthMiddleware.cs b/DysonNetwork.Shared/Middleware/AuthMiddleware.cs deleted file mode 100644 index 6356427..0000000 --- a/DysonNetwork.Shared/Middleware/AuthMiddleware.cs +++ /dev/null @@ -1,107 +0,0 @@ -using Grpc.Core; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging; -using DysonNetwork.Shared.Proto; -using System.Threading.Tasks; -using DysonNetwork.Shared.Auth; - -namespace DysonNetwork.Shared.Middleware; - -public class AuthMiddleware -{ - private readonly RequestDelegate _next; - private readonly ILogger _logger; - - public AuthMiddleware(RequestDelegate next, ILogger logger) - { - _next = next; - _logger = logger; - } - - public async Task InvokeAsync(HttpContext context, AuthService.AuthServiceClient authServiceClient) - { - var tokenInfo = _ExtractToken(context.Request); - - if (tokenInfo == null || string.IsNullOrEmpty(tokenInfo.Token)) - { - await _next(context); - return; - } - - try - { - var authSession = await authServiceClient.AuthenticateAsync(new AuthenticateRequest { Token = tokenInfo.Token }); - context.Items["AuthSession"] = authSession; - context.Items["CurrentTokenType"] = tokenInfo.Type.ToString(); - // Assuming AuthSession contains Account information or can be retrieved - // context.Items["CurrentUser"] = authSession.Account; // You might need to fetch Account separately if not embedded - } - catch (RpcException ex) - { - _logger.LogWarning(ex, "Authentication failed for token: {Token}", tokenInfo.Token); - // Optionally, you can return an unauthorized response here - // context.Response.StatusCode = StatusCodes.Status401Unauthorized; - // return; - } - - await _next(context); - } - - private TokenInfo? 
_ExtractToken(HttpRequest request) - { - // Check for token in query parameters - if (request.Query.TryGetValue(AuthConstants.TokenQueryParamName, out var queryToken)) - { - return new TokenInfo - { - Token = queryToken.ToString(), - Type = TokenType.AuthKey - }; - } - - // Check for token in Authorization header - var authHeader = request.Headers["Authorization"].ToString(); - if (!string.IsNullOrEmpty(authHeader)) - { - if (authHeader.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase)) - { - var token = authHeader["Bearer ".Length..].Trim(); - var parts = token.Split('.'); - - return new TokenInfo - { - Token = token, - Type = parts.Length == 3 ? TokenType.OidcKey : TokenType.AuthKey - }; - } - else if (authHeader.StartsWith("AtField ", StringComparison.OrdinalIgnoreCase)) - { - return new TokenInfo - { - Token = authHeader["AtField ".Length..].Trim(), - Type = TokenType.AuthKey - }; - } - else if (authHeader.StartsWith("AkField ", StringComparison.OrdinalIgnoreCase)) - { - return new TokenInfo - { - Token = authHeader["AkField ".Length..].Trim(), - Type = TokenType.ApiKey - }; - } - } - - // Check for token in cookies - if (request.Cookies.TryGetValue(AuthConstants.CookieTokenName, out var cookieToken)) - { - return new TokenInfo - { - Token = cookieToken, - Type = cookieToken.Count(c => c == '.') == 2 ? TokenType.OidcKey : TokenType.AuthKey - }; - } - - return null; - } -} diff --git a/DysonNetwork.Shared/Proto/auth.proto b/DysonNetwork.Shared/Proto/auth.proto index e96f35e..2b70f8c 100644 --- a/DysonNetwork.Shared/Proto/auth.proto +++ b/DysonNetwork.Shared/Proto/auth.proto @@ -6,17 +6,21 @@ option csharp_namespace = "DysonNetwork.Shared.Proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; +import "google/protobuf/struct.proto"; + +import 'account.proto'; // Represents a user session message AuthSession { string id = 1; google.protobuf.StringValue label = 2; - google.protobuf.Timestamp last_granted_at = 3; - google.protobuf.Timestamp expired_at = 4; + optional google.protobuf.Timestamp last_granted_at = 3; + optional google.protobuf.Timestamp expired_at = 4; string account_id = 5; - string challenge_id = 6; - AuthChallenge challenge = 7; - google.protobuf.StringValue app_id = 8; + Account account = 6; + string challenge_id = 7; + AuthChallenge challenge = 8; + google.protobuf.StringValue app_id = 9; } // Represents an authentication challenge @@ -60,9 +64,111 @@ enum ChallengePlatform { } service AuthService { - rpc Authenticate(AuthenticateRequest) returns (AuthSession) {} + rpc Authenticate(AuthenticateRequest) returns (AuthenticateResponse) {} } message AuthenticateRequest { string token = 1; } + +message AuthenticateResponse { + bool valid = 1; + optional string message = 2; + optional AuthSession session = 3; +} + +// Permission related messages and services +message PermissionNode { + string id = 1; + string actor = 2; + string area = 3; + string key = 4; + google.protobuf.Value value = 5; // Using Value to represent generic type + google.protobuf.Timestamp expired_at = 6; + google.protobuf.Timestamp affected_at = 7; + string group_id = 8; // Optional group ID +} + +message PermissionGroup { + string id = 1; + string name = 2; + google.protobuf.Timestamp created_at = 3; +} + +message HasPermissionRequest { + string actor = 1; + string area = 2; + string key = 3; +} + +message HasPermissionResponse { + bool has_permission = 1; +} + +message GetPermissionRequest { + string actor = 1; + string area = 2; + string key = 3; +} + +message 
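+// The value is left unset when the actor has no matching permission node.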
GetPermissionResponse { + google.protobuf.Value value = 1; // Using Value to represent generic type +} + +message AddPermissionNodeRequest { + string actor = 1; + string area = 2; + string key = 3; + google.protobuf.Value value = 4; + google.protobuf.Timestamp expired_at = 5; + google.protobuf.Timestamp affected_at = 6; +} + +message AddPermissionNodeResponse { + PermissionNode node = 1; +} + +message AddPermissionNodeToGroupRequest { + PermissionGroup group = 1; + string actor = 2; + string area = 3; + string key = 4; + google.protobuf.Value value = 5; + google.protobuf.Timestamp expired_at = 6; + google.protobuf.Timestamp affected_at = 7; +} + +message AddPermissionNodeToGroupResponse { + PermissionNode node = 1; +} + +message RemovePermissionNodeRequest { + string actor = 1; + string area = 2; + string key = 3; +} + +message RemovePermissionNodeResponse { + bool success = 1; +} + +message RemovePermissionNodeFromGroupRequest { + PermissionGroup group = 1; + string actor = 2; + string area = 3; + string key = 4; +} + +message RemovePermissionNodeFromGroupResponse { + bool success = 1; +} + +service PermissionService { + rpc HasPermission(HasPermissionRequest) returns (HasPermissionResponse) {} + rpc GetPermission(GetPermissionRequest) returns (GetPermissionResponse) {} + rpc AddPermissionNode(AddPermissionNodeRequest) returns (AddPermissionNodeResponse) {} + rpc AddPermissionNodeToGroup(AddPermissionNodeToGroupRequest) returns (AddPermissionNodeToGroupResponse) {} + rpc RemovePermissionNode(RemovePermissionNodeRequest) returns (RemovePermissionNodeResponse) {} + rpc RemovePermissionNodeFromGroup(RemovePermissionNodeFromGroupRequest) returns (RemovePermissionNodeFromGroupResponse) {} +} + diff --git a/DysonNetwork.Shared/Registry/RegistryHostedService.cs b/DysonNetwork.Shared/Registry/RegistryHostedService.cs new file mode 100644 index 0000000..da7d72d --- /dev/null +++ b/DysonNetwork.Shared/Registry/RegistryHostedService.cs @@ -0,0 +1,45 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace DysonNetwork.Shared.Registry; + +public class RegistryHostedService( + ServiceRegistry serviceRegistry, + IConfiguration configuration, + ILogger logger +) + : IHostedService +{ + public async Task StartAsync(CancellationToken cancellationToken) + { + var serviceName = configuration["Service:Name"]; + var serviceUrl = configuration["Service:Url"]; + + if (string.IsNullOrEmpty(serviceUrl) || string.IsNullOrEmpty(serviceName)) + { + logger.LogWarning("Service URL or Service Name was not configured. Skipping Etcd registration."); + return; + } + + logger.LogInformation("Registering service {ServiceName} at {ServiceUrl} with Etcd.", serviceName, serviceUrl); + try + { + await serviceRegistry.RegisterService(serviceName, serviceUrl); + logger.LogInformation("Service {ServiceName} registered successfully.", serviceName); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to register service {ServiceName} with Etcd.", serviceName); + } + } + + public async Task StopAsync(CancellationToken cancellationToken) + { + // The lease will expire automatically if the service stops ungracefully. 
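+        // Still unregister explicitly so a graceful shutdown removes the endpoint immediately
+        // instead of leaving it visible until the lease TTL runs out.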
+ var serviceName = configuration["Service:Name"]; + if (serviceName is not null) + await serviceRegistry.UnregisterService(serviceName); + logger.LogInformation("Service registration hosted service is stopping."); + } +} \ No newline at end of file diff --git a/DysonNetwork.Shared/Registry/Startup.cs b/DysonNetwork.Shared/Registry/Startup.cs index cceef2d..a5e050c 100644 --- a/DysonNetwork.Shared/Registry/Startup.cs +++ b/DysonNetwork.Shared/Registry/Startup.cs @@ -4,9 +4,9 @@ using Microsoft.Extensions.DependencyInjection; namespace DysonNetwork.Shared.Registry; -public static class EtcdStartup +public static class RegistryStartup { - public static IServiceCollection AddEtcdService( + public static IServiceCollection AddRegistryService( this IServiceCollection services, IConfiguration configuration ) @@ -17,6 +17,7 @@ public static class EtcdStartup options.UseInsecureChannel = configuration.GetValue("Etcd:Insecure"); }); services.AddSingleton(); + services.AddHostedService(); return services; } diff --git a/DysonNetwork.Sphere/DysonNetwork.Sphere.csproj b/DysonNetwork.Sphere/DysonNetwork.Sphere.csproj index 1c05644..b65e109 100644 --- a/DysonNetwork.Sphere/DysonNetwork.Sphere.csproj +++ b/DysonNetwork.Sphere/DysonNetwork.Sphere.csproj @@ -29,7 +29,7 @@ - + all @@ -40,10 +40,10 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - - - - + + + + @@ -66,16 +66,16 @@ - - - - + + + + - + diff --git a/DysonNetwork.sln b/DysonNetwork.sln index 4b2f2be..1ef9fd7 100644 --- a/DysonNetwork.sln +++ b/DysonNetwork.sln @@ -13,6 +13,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DysonNetwork.Shared", "Dyso EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DysonNetwork.Pusher", "DysonNetwork.Pusher\DysonNetwork.Pusher.csproj", "{D5DAFB0D-487E-48EF-BA2F-C581C846F63B}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DysonNetwork.Drive", "DysonNetwork.Drive\DysonNetwork.Drive.csproj", "{8DE0B783-8852-494D-B90A-201ABBB71202}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -35,5 +37,9 @@ Global {D5DAFB0D-487E-48EF-BA2F-C581C846F63B}.Debug|Any CPU.Build.0 = Debug|Any CPU {D5DAFB0D-487E-48EF-BA2F-C581C846F63B}.Release|Any CPU.ActiveCfg = Release|Any CPU {D5DAFB0D-487E-48EF-BA2F-C581C846F63B}.Release|Any CPU.Build.0 = Release|Any CPU + {8DE0B783-8852-494D-B90A-201ABBB71202}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8DE0B783-8852-494D-B90A-201ABBB71202}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8DE0B783-8852-494D-B90A-201ABBB71202}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8DE0B783-8852-494D-B90A-201ABBB71202}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection EndGlobal diff --git a/DysonNetwork.sln.DotSettings.user b/DysonNetwork.sln.DotSettings.user index 95dd6c2..caeddec 100644 --- a/DysonNetwork.sln.DotSettings.user +++ b/DysonNetwork.sln.DotSettings.user @@ -3,7 +3,9 @@ ForceIncluded ForceIncluded ForceIncluded + ForceIncluded ForceIncluded + ForceIncluded ForceIncluded ForceIncluded ForceIncluded
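Reviewer note: a minimal consumption sketch (not part of the diff) showing how a downstream service is expected to use the shared scheme, mirroring the Pusher wiring and the TusService session lookup above. ExampleController and its route are hypothetical.

using DysonNetwork.Shared.Proto;   // Account
using Microsoft.AspNetCore.Mvc;

// Program.cs wiring, as added for Pusher in this patch:
//   builder.Services.AddRegistryService(builder.Configuration);
//   builder.Services.AddDysonAuth(builder.Configuration);

[ApiController]
[Route("example")]
public class ExampleController : ControllerBase
{
    // DysonTokenAuthHandler stores the resolved session in HttpContext.Items,
    // the same lookup TusService performs in this patch.
    [HttpGet]
    public IActionResult Get()
    {
        if (HttpContext.Items["CurrentUser"] is not Account account)
            return Unauthorized();
        return Ok(account.Id);
    }
}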