✨ Shared auth scheme
DysonNetwork.Drive/AppDatabase.cs (new file, 179 lines)
@@ -0,0 +1,179 @@
using System.Linq.Expressions;
using System.Reflection;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Shared.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore.Query;
using NodaTime;
using Quartz;

namespace DysonNetwork.Drive;

public class AppDatabase(
    DbContextOptions<AppDatabase> options,
    IConfiguration configuration
) : DbContext(options)
{
    public DbSet<CloudFile> Files { get; set; } = null!;
    public DbSet<CloudFileReference> FileReferences { get; set; } = null!;

    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
    {
        optionsBuilder.UseNpgsql(
            configuration.GetConnectionString("App"),
            opt => opt
                .ConfigureDataSource(optSource => optSource.EnableDynamicJson())
                .UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery)
                .UseNetTopologySuite()
                .UseNodaTime()
        ).UseSnakeCaseNamingConvention();

        base.OnConfiguring(optionsBuilder);
    }

    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);

        // Automatically apply the soft-delete filter to all entities inheriting ModelBase
        foreach (var entityType in modelBuilder.Model.GetEntityTypes())
        {
            if (!typeof(ModelBase).IsAssignableFrom(entityType.ClrType)) continue;
            var method = typeof(AppDatabase)
                .GetMethod(nameof(SetSoftDeleteFilter),
                    BindingFlags.NonPublic | BindingFlags.Static)!
                .MakeGenericMethod(entityType.ClrType);

            method.Invoke(null, [modelBuilder]);
        }
    }

    private static void SetSoftDeleteFilter<TEntity>(ModelBuilder modelBuilder)
        where TEntity : ModelBase
    {
        modelBuilder.Entity<TEntity>().HasQueryFilter(e => e.DeletedAt == null);
    }

    public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
    {
        var now = SystemClock.Instance.GetCurrentInstant();

        foreach (var entry in ChangeTracker.Entries<ModelBase>())
        {
            switch (entry.State)
            {
                case EntityState.Added:
                    entry.Entity.CreatedAt = now;
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Modified:
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Deleted:
                    entry.State = EntityState.Modified;
                    entry.Entity.DeletedAt = now;
                    break;
                case EntityState.Detached:
                case EntityState.Unchanged:
                default:
                    break;
            }
        }

        return await base.SaveChangesAsync(cancellationToken);
    }
}

public class AppDatabaseRecyclingJob(AppDatabase db, ILogger<AppDatabaseRecyclingJob> logger) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        var now = SystemClock.Instance.GetCurrentInstant();

        logger.LogInformation("Deleting soft-deleted records...");

        var threshold = now - Duration.FromDays(7);

        var entityTypes = db.Model.GetEntityTypes()
            .Where(t => typeof(ModelBase).IsAssignableFrom(t.ClrType) && t.ClrType != typeof(ModelBase))
            .Select(t => t.ClrType);

        foreach (var entityType in entityTypes)
        {
            var set = (IQueryable)db.GetType().GetMethod(nameof(DbContext.Set), Type.EmptyTypes)!
                .MakeGenericMethod(entityType).Invoke(db, null)!;
            var parameter = Expression.Parameter(entityType, "e");
            var property = Expression.Property(parameter, nameof(ModelBase.DeletedAt));
            var condition = Expression.LessThan(property, Expression.Constant(threshold, typeof(Instant?)));
            var notNull = Expression.NotEqual(property, Expression.Constant(null, typeof(Instant?)));
            var finalCondition = Expression.AndAlso(notNull, condition);
            var lambda = Expression.Lambda(finalCondition, parameter);

            var queryable = set.Provider.CreateQuery(
                Expression.Call(
                    typeof(Queryable),
                    "Where",
                    [entityType],
                    set.Expression,
                    Expression.Quote(lambda)
                )
            );

            var toListAsync = typeof(EntityFrameworkQueryableExtensions)
                .GetMethod(nameof(EntityFrameworkQueryableExtensions.ToListAsync))!
                .MakeGenericMethod(entityType);

            var items = await (dynamic)toListAsync.Invoke(null, [queryable, CancellationToken.None])!;
            db.RemoveRange(items);
        }

        await db.SaveChangesAsync();
    }
}

public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
{
    public AppDatabase CreateDbContext(string[] args)
    {
        var configuration = new ConfigurationBuilder()
            .SetBasePath(Directory.GetCurrentDirectory())
            .AddJsonFile("appsettings.json")
            .Build();

        var optionsBuilder = new DbContextOptionsBuilder<AppDatabase>();
        return new AppDatabase(optionsBuilder.Options, configuration);
    }
}

public static class OptionalQueryExtensions
{
    public static IQueryable<T> If<T>(
        this IQueryable<T> source,
        bool condition,
        Func<IQueryable<T>, IQueryable<T>> transform
    )
    {
        return condition ? transform(source) : source;
    }

    public static IQueryable<T> If<T, TP>(
        this IIncludableQueryable<T, TP> source,
        bool condition,
        Func<IIncludableQueryable<T, TP>, IQueryable<T>> transform
    )
        where T : class
    {
        return condition ? transform(source) : source;
    }

    public static IQueryable<T> If<T, TP>(
        this IIncludableQueryable<T, IEnumerable<TP>> source,
        bool condition,
        Func<IIncludableQueryable<T, IEnumerable<TP>>, IQueryable<T>> transform
    )
        where T : class
    {
        return condition ? transform(source) : source;
    }
}
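Illustrative usage of the pieces above (not part of the commit; onlyImages, limit, and file are assumed local variables):

    // The If() extension applies a transform only when the condition holds.
    var query = db.Files
        .If(onlyImages, q => q.Where(f => f.MimeType!.StartsWith("image/")))
        .If(limit > 0, q => q.Take(limit));

    // Soft delete: SaveChangesAsync converts EntityState.Deleted into an update
    // that stamps DeletedAt, and the HasQueryFilter(e => e.DeletedAt == null)
    // filter then hides the row from subsequent queries until the recycling job
    // removes it for good after seven days.
    db.Files.Remove(file);
    await db.SaveChangesAsync();
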
DysonNetwork.Drive/Dockerfile (new file, 23 lines)
@@ -0,0 +1,23 @@
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081

FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Drive/DysonNetwork.Drive.csproj", "DysonNetwork.Drive/"]
RUN dotnet restore "DysonNetwork.Drive/DysonNetwork.Drive.csproj"
COPY . .
WORKDIR "/src/DysonNetwork.Drive"
RUN dotnet build "./DysonNetwork.Drive.csproj" -c $BUILD_CONFIGURATION -o /app/build

FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./DysonNetwork.Drive.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DysonNetwork.Drive.dll"]
DysonNetwork.Drive/DysonNetwork.Drive.csproj (new file, 66 lines)
@@ -0,0 +1,66 @@
<Project Sdk="Microsoft.NET.Sdk.Web">

    <PropertyGroup>
        <TargetFramework>net9.0</TargetFramework>
        <Nullable>enable</Nullable>
        <ImplicitUsings>enable</ImplicitUsings>
        <DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
    </PropertyGroup>

    <ItemGroup>
        <PackageReference Include="BlurHashSharp.SkiaSharp" Version="1.3.4" />
        <PackageReference Include="FFMpegCore" Version="5.2.0" />
        <PackageReference Include="Grpc.AspNetCore.Server" Version="2.71.0" />
        <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.7"/>
        <PackageReference Include="MimeTypes" Version="2.5.2">
            <PrivateAssets>all</PrivateAssets>
            <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
        </PackageReference>
        <PackageReference Include="Minio" Version="6.0.5" />
        <PackageReference Include="NetVips" Version="3.1.0" />
        <PackageReference Include="NetVips.Native.linux-x64" Version="8.17.1" />
        <PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.1" />
        <PackageReference Include="NodaTime" Version="3.2.2"/>
        <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0"/>
        <PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
        <PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0"/>
        <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4"/>
        <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0"/>
        <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite" Version="9.0.4"/>
        <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4"/>
        <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0"/>
        <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0"/>
        <PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.12.0"/>
        <PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.12.0"/>
        <PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0"/>
        <PackageReference Include="prometheus-net.AspNetCore" Version="8.2.1"/>
        <PackageReference Include="prometheus-net.AspNetCore.HealthChecks" Version="8.2.1"/>
        <PackageReference Include="prometheus-net.DotNetRuntime" Version="4.4.1"/>
        <PackageReference Include="prometheus-net.EntityFramework" Version="0.9.5"/>
        <PackageReference Include="prometheus-net.SystemMetrics" Version="3.1.0"/>
        <PackageReference Include="Quartz" Version="3.14.0"/>
        <PackageReference Include="Quartz.AspNetCore" Version="3.14.0"/>
        <PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0"/>
        <PackageReference Include="EFCore.BulkExtensions" Version="9.0.1"/>
        <PackageReference Include="EFCore.BulkExtensions.PostgreSql" Version="9.0.1"/>
        <PackageReference Include="EFCore.NamingConventions" Version="9.0.0"/>
        <PackageReference Include="SkiaSharp" Version="3.119.0" />
        <PackageReference Include="SkiaSharp.NativeAssets.Linux" Version="3.119.0" />
        <PackageReference Include="SkiaSharp.NativeAssets.Linux.NoDependencies" Version="3.119.0" />
        <PackageReference Include="SkiaSharp.NativeAssets.macOS" Version="3.119.0" />
        <PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.3" />
        <PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="9.0.3" />
        <PackageReference Include="tusdotnet" Version="2.10.0" />
    </ItemGroup>

    <ItemGroup>
        <Content Include="..\.dockerignore">
            <Link>.dockerignore</Link>
        </Content>
    </ItemGroup>

    <ItemGroup>
        <ProjectReference Include="..\DysonNetwork.Shared\DysonNetwork.Shared.csproj" />
    </ItemGroup>

</Project>
DysonNetwork.Drive/Program.cs (new file, 45 lines)
@@ -0,0 +1,45 @@
using DysonNetwork.Drive;
using DysonNetwork.Drive.Startup;
using DysonNetwork.Pusher.Startup;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Registry;
using Microsoft.EntityFrameworkCore;

var builder = WebApplication.CreateBuilder(args);

// Configure Kestrel and server options
builder.ConfigureAppKestrel();

// Add application services
builder.Services.AddRegistryService(builder.Configuration);
builder.Services.AddAppServices(builder.Configuration);
builder.Services.AddAppRateLimiting();
builder.Services.AddAppAuthentication();
builder.Services.AddAppSwagger();
builder.Services.AddDysonAuth(builder.Configuration);

// Add flush handlers
builder.Services.AddAppFlushHandlers();

// Add business services
builder.Services.AddAppBusinessServices();

// Add scheduled jobs
builder.Services.AddAppScheduledJobs();

var app = builder.Build();

// Run database migrations
using (var scope = app.Services.CreateScope())
{
    var db = scope.ServiceProvider.GetRequiredService<AppDatabase>();
    await db.Database.MigrateAsync();
}

// Configure application middleware pipeline
app.ConfigureAppMiddleware(builder.Configuration);

// Configure gRPC
app.ConfigureGrpcServices();

app.Run();
DysonNetwork.Drive/Properties/launchSettings.json (new file, 23 lines)
@@ -0,0 +1,23 @@
{
  "$schema": "https://json.schemastore.org/launchsettings.json",
  "profiles": {
    "http": {
      "commandName": "Project",
      "dotnetRunMessages": true,
      "launchBrowser": false,
      "applicationUrl": "http://localhost:5090",
      "environmentVariables": {
        "ASPNETCORE_ENVIRONMENT": "Development"
      }
    },
    "https": {
      "commandName": "Project",
      "dotnetRunMessages": true,
      "launchBrowser": false,
      "applicationUrl": "https://localhost:7092;http://localhost:5090",
      "environmentVariables": {
        "ASPNETCORE_ENVIRONMENT": "Development"
      }
    }
  }
}
DysonNetwork.Drive/Startup/ApplicationBuilderExtensions.cs (new file, 28 lines)
@@ -0,0 +1,28 @@
namespace DysonNetwork.Drive.Startup;

public static class ApplicationBuilderExtensions
{
    public static WebApplication ConfigureAppMiddleware(this WebApplication app, IConfiguration configuration)
    {
        // Configure the HTTP request pipeline.
        if (app.Environment.IsDevelopment())
        {
            app.UseSwagger();
            app.UseSwaggerUI();
        }

        app.UseHttpsRedirection();
        app.UseAuthorization();
        app.MapControllers();

        return app;
    }

    public static WebApplication ConfigureGrpcServices(this WebApplication app)
    {
        // Map your gRPC services here
        // Example: app.MapGrpcService<MyGrpcService>();

        return app;
    }
}
DysonNetwork.Drive/Startup/KestrelConfiguration.cs (new file, 17 lines)
@@ -0,0 +1,17 @@
namespace DysonNetwork.Pusher.Startup;

public static class KestrelConfiguration
{
    public static WebApplicationBuilder ConfigureAppKestrel(this WebApplicationBuilder builder)
    {
        builder.Host.UseContentRoot(Directory.GetCurrentDirectory());
        builder.WebHost.ConfigureKestrel(options =>
        {
            options.Limits.MaxRequestBodySize = 50 * 1024 * 1024;
            options.Limits.KeepAliveTimeout = TimeSpan.FromMinutes(2);
            options.Limits.RequestHeadersTimeout = TimeSpan.FromSeconds(30);
        });

        return builder;
    }
}
DysonNetwork.Drive/Startup/ScheduledJobsConfiguration.cs (new file, 22 lines)
@@ -0,0 +1,22 @@
using Quartz;

namespace DysonNetwork.Drive.Startup;

public static class ScheduledJobsConfiguration
{
    public static IServiceCollection AddAppScheduledJobs(this IServiceCollection services)
    {
        services.AddQuartz(q =>
        {
            var appDatabaseRecyclingJob = new JobKey("AppDatabaseRecycling");
            q.AddJob<AppDatabaseRecyclingJob>(opts => opts.WithIdentity(appDatabaseRecyclingJob));
            q.AddTrigger(opts => opts
                .ForJob(appDatabaseRecyclingJob)
                .WithIdentity("AppDatabaseRecyclingTrigger")
                .WithCronSchedule("0 0 0 * * ?"));
        });
        services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);

        return services;
    }
}
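The same pattern could register the other jobs introduced by this commit, for example FileExpirationJob, inside the AddQuartz(q => ...) callback above. This is a sketch only; the job key, trigger identity, and hourly cron expression are assumptions, not part of the commit:

    var fileExpirationJob = new JobKey("FileExpiration");
    q.AddJob<FileExpirationJob>(opts => opts.WithIdentity(fileExpirationJob));
    q.AddTrigger(opts => opts
        .ForJob(fileExpirationJob)
        .WithIdentity("FileExpirationTrigger")
        .WithCronSchedule("0 0 * * * ?")); // every hour, assumed
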
DysonNetwork.Drive/Startup/ServiceCollectionExtensions.cs (new file, 130 lines)
@@ -0,0 +1,130 @@
using System.Text.Json;
using System.Threading.RateLimiting;
using dotnet_etcd.interfaces;
using DysonNetwork.Shared.Cache;
using Microsoft.AspNetCore.RateLimiting;
using Microsoft.OpenApi.Models;
using NodaTime;
using NodaTime.Serialization.SystemTextJson;
using StackExchange.Redis;
using DysonNetwork.Shared.Proto;

namespace DysonNetwork.Drive.Startup;

public static class ServiceCollectionExtensions
{
    public static IServiceCollection AddAppServices(this IServiceCollection services, IConfiguration configuration)
    {
        services.AddDbContext<AppDatabase>(); // EF Core context; connection is configured in AppDatabase.OnConfiguring
        services.AddSingleton<IConnectionMultiplexer>(_ =>
        {
            var connection = configuration.GetConnectionString("FastRetrieve")!;
            return ConnectionMultiplexer.Connect(connection);
        });
        services.AddSingleton<IClock>(SystemClock.Instance);
        services.AddHttpContextAccessor();
        services.AddSingleton<ICacheService, CacheServiceRedis>(); // Redis-backed ICacheService from DysonNetwork.Shared.Cache

        services.AddHttpClient();

        // Register gRPC services
        services.AddGrpc(options =>
        {
            options.EnableDetailedErrors = true; // Will be adjusted in Program.cs
            options.MaxReceiveMessageSize = 16 * 1024 * 1024; // 16MB
            options.MaxSendMessageSize = 16 * 1024 * 1024; // 16MB
        });

        // Second AddGrpc() registration; actual gRPC reflection for service
        // discovery would additionally require AddGrpcReflection() and its package.
        services.AddGrpc();

        services.AddControllers().AddJsonOptions(options =>
        {
            options.JsonSerializerOptions.PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower;
            options.JsonSerializerOptions.DictionaryKeyPolicy = JsonNamingPolicy.SnakeCaseLower;

            options.JsonSerializerOptions.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
        });

        return services;
    }

    public static IServiceCollection AddAppRateLimiting(this IServiceCollection services)
    {
        services.AddRateLimiter(o => o.AddFixedWindowLimiter(policyName: "fixed", opts =>
        {
            opts.Window = TimeSpan.FromMinutes(1);
            opts.PermitLimit = 120;
            opts.QueueLimit = 2;
            opts.QueueProcessingOrder = QueueProcessingOrder.OldestFirst;
        }));

        return services;
    }

    public static IServiceCollection AddAppAuthentication(this IServiceCollection services)
    {
        services.AddCors();
        services.AddAuthorization();

        return services;
    }

    public static IServiceCollection AddAppFlushHandlers(this IServiceCollection services)
    {
        services.AddSingleton<FlushBufferService>();

        return services;
    }

    public static IServiceCollection AddAppSwagger(this IServiceCollection services)
    {
        services.AddEndpointsApiExplorer();
        services.AddSwaggerGen(options =>
        {
            options.SwaggerDoc("v1", new OpenApiInfo
            {
                Version = "v1",
                Title = "DysonNetwork.Drive API",
                Description = "DysonNetwork Drive Service",
                TermsOfService = new Uri("https://example.com/terms"), // Update with actual terms
                License = new OpenApiLicense
                {
                    Name = "AGPLv3", // Update with actual license
                    Url = new Uri("https://www.gnu.org/licenses/agpl-3.0.html")
                }
            });
            options.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme
            {
                In = ParameterLocation.Header,
                Description = "Please enter a valid token",
                Name = "Authorization",
                Type = SecuritySchemeType.Http,
                BearerFormat = "JWT",
                Scheme = "Bearer"
            });
            options.AddSecurityRequirement(new OpenApiSecurityRequirement
            {
                {
                    new OpenApiSecurityScheme
                    {
                        Reference = new OpenApiReference
                        {
                            Type = ReferenceType.SecurityScheme,
                            Id = "Bearer"
                        }
                    },
                    []
                }
            });
        });

        return services;
    }

    public static IServiceCollection AddAppBusinessServices(this IServiceCollection services)
    {
        // Add your business services here
        return services;
    }
}
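For reference, the "fixed" window policy registered above only applies to endpoints that opt in, and it is enforced only when the rate-limiting middleware (app.UseRateLimiter()) is part of the pipeline, which ConfigureAppMiddleware does not currently add. A hedged sketch of opting a controller in (this attribute is not used anywhere in the commit):

    using Microsoft.AspNetCore.RateLimiting;

    // Requests to this hypothetical controller share the 120-requests-per-minute window.
    [EnableRateLimiting("fixed")]
    [ApiController]
    [Route("/api/example")]
    public class ExampleController : ControllerBase { }
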
DysonNetwork.Drive/Storage/CloudFile.cs (new file, 131 lines)
@@ -0,0 +1,131 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text.Json.Serialization;
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Proto;
using NodaTime;

namespace DysonNetwork.Drive.Storage;

public class RemoteStorageConfig
{
    public string Id { get; set; } = string.Empty;
    public string Label { get; set; } = string.Empty;
    public string Region { get; set; } = string.Empty;
    public string Bucket { get; set; } = string.Empty;
    public string Endpoint { get; set; } = string.Empty;
    public string SecretId { get; set; } = string.Empty;
    public string SecretKey { get; set; } = string.Empty;
    public bool EnableSigned { get; set; }
    public bool EnableSsl { get; set; }
    public string? ImageProxy { get; set; }
    public string? AccessProxy { get; set; }
}

/// <summary>
/// The class used in jsonb columns to reference a cloud file.
/// It stores properties of the file that do not change, to reduce database load.
/// </summary>
public class CloudFileReferenceObject : ModelBase, ICloudFile
{
    public string Id { get; set; } = null!;
    public string Name { get; set; } = string.Empty;
    public Dictionary<string, object>? FileMeta { get; set; } = null!;
    public Dictionary<string, object>? UserMeta { get; set; } = null!;
    public string? MimeType { get; set; }
    public string? Hash { get; set; }
    public long Size { get; set; }
    public bool HasCompression { get; set; } = false;
}

public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource
{
    /// The id generated by tus; essentially a UUID with the dashes removed.
    [MaxLength(32)]
    public string Id { get; set; } = Guid.NewGuid().ToString();

    [MaxLength(1024)] public string Name { get; set; } = string.Empty;
    [MaxLength(4096)] public string? Description { get; set; }
    [Column(TypeName = "jsonb")] public Dictionary<string, object>? FileMeta { get; set; } = null!;
    [Column(TypeName = "jsonb")] public Dictionary<string, object>? UserMeta { get; set; } = null!;
    [Column(TypeName = "jsonb")] public List<ContentSensitiveMark>? SensitiveMarks { get; set; } = [];
    [MaxLength(256)] public string? MimeType { get; set; }
    [MaxLength(256)] public string? Hash { get; set; }
    public long Size { get; set; }
    public Instant? UploadedAt { get; set; }
    [MaxLength(128)] public string? UploadedTo { get; set; }
    public bool HasCompression { get; set; } = false;

    /// <summary>
    /// Set to true when the recycling job plans to delete the file.
    /// Because the recycling job is not fully reliable, the file is not actually deleted until a human verifies it.
    /// </summary>
    public bool IsMarkedRecycle { get; set; } = false;

    /// The object name used in remote storage; multiple cloud files may share the
    /// same storage id to indicate they are the same underlying file.
    ///
    /// If the storage id is null while UploadedAt is not null, the record is an
    /// embedded file: the data is stored on another site, or it is a web page
    /// (based on its MIME type).
    [MaxLength(32)]
    public string? StorageId { get; set; }

    /// The off-site URL at which the file can be accessed.
    /// Should be null when StorageId is set.
    [MaxLength(4096)]
    public string? StorageUrl { get; set; }

    public Guid AccountId { get; set; }

    public CloudFileReferenceObject ToReferenceObject()
    {
        return new CloudFileReferenceObject
        {
            CreatedAt = CreatedAt,
            UpdatedAt = UpdatedAt,
            DeletedAt = DeletedAt,
            Id = Id,
            Name = Name,
            FileMeta = FileMeta,
            UserMeta = UserMeta,
            MimeType = MimeType,
            Hash = Hash,
            Size = Size,
            HasCompression = HasCompression
        };
    }

    public string ResourceIdentifier => $"file/{Id}";
}

public enum ContentSensitiveMark
{
    Language,
    SexualContent,
    Violence,
    Profanity,
    HateSpeech,
    Racism,
    AdultContent,
    DrugAbuse,
    AlcoholAbuse,
    Gambling,
    SelfHarm,
    ChildAbuse,
    Other
}

public class CloudFileReference : ModelBase
{
    public Guid Id { get; set; } = Guid.NewGuid();
    [MaxLength(32)] public string FileId { get; set; } = null!;
    public CloudFile File { get; set; } = null!;
    [MaxLength(1024)] public string Usage { get; set; } = null!;
    [MaxLength(1024)] public string ResourceId { get; set; } = null!;

    /// <summary>
    /// Optional expiration date for the file reference
    /// </summary>
    public Instant? ExpiredAt { get; set; }
}
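A sketch of how the reference object is meant to be embedded by a consuming service (the Post entity, its Attachments column, and the local variables are hypothetical, not part of this commit):

    // Hypothetical owning entity in another service, using the same attributes as above.
    public class Post : ModelBase
    {
        public Guid Id { get; set; } = Guid.NewGuid();

        // Snapshot of file metadata stored inline as jsonb, so rendering a post
        // does not require joining against the files table.
        [Column(TypeName = "jsonb")]
        public List<CloudFileReferenceObject> Attachments { get; set; } = [];
    }

    // Converting a tracked CloudFile into its immutable snapshot:
    post.Attachments.Add(cloudFile.ToReferenceObject());
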
DysonNetwork.Drive/Storage/CloudFileUnusedRecyclingJob.cs (new file, 93 lines)
@@ -0,0 +1,93 @@
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

namespace DysonNetwork.Drive.Storage;

public class CloudFileUnusedRecyclingJob(
    AppDatabase db,
    FileReferenceService fileRefService,
    ILogger<CloudFileUnusedRecyclingJob> logger
)
    : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        logger.LogInformation("Marking unused cloud files...");

        var now = SystemClock.Instance.GetCurrentInstant();
        const int batchSize = 1000; // Process larger batches for efficiency
        var processedCount = 0;
        var markedCount = 0;
        var totalFiles = await db.Files.Where(f => !f.IsMarkedRecycle).CountAsync();

        logger.LogInformation("Found {TotalFiles} files to check for unused status", totalFiles);

        // Define a timestamp to limit the age of files we're processing in this run.
        // This spreads the processing across multiple job runs for very large databases.
        var ageThreshold = now - Duration.FromDays(30); // Only consider files created more than 30 days ago

        // Instead of loading all files at once, use pagination
        var hasMoreFiles = true;
        string? lastProcessedId = null;

        while (hasMoreFiles)
        {
            // Query for the next batch of files using keyset pagination
            var filesQuery = db.Files
                .Where(f => !f.IsMarkedRecycle)
                .Where(f => f.CreatedAt <= ageThreshold); // Only process older files first

            if (lastProcessedId != null)
            {
                filesQuery = filesQuery.Where(f => string.Compare(f.Id, lastProcessedId) > 0);
            }

            var fileBatch = await filesQuery
                .OrderBy(f => f.Id) // Ensure consistent ordering for pagination
                .Take(batchSize)
                .Select(f => f.Id)
                .ToListAsync();

            if (fileBatch.Count == 0)
            {
                hasMoreFiles = false;
                continue;
            }

            processedCount += fileBatch.Count;
            lastProcessedId = fileBatch.Last();

            // Get all relevant file references for this batch
            var fileReferences = await fileRefService.GetReferencesAsync(fileBatch);

            // Filter to find files that have no references or only expired references
            var filesToMark = fileBatch.Where(fileId =>
                !fileReferences.TryGetValue(fileId, out var references) ||
                references.Count == 0 ||
                references.All(r => r.ExpiredAt.HasValue && r.ExpiredAt.Value <= now)
            ).ToList();

            if (filesToMark.Count > 0)
            {
                // Use a bulk update for better performance - mark all qualifying files at once
                var updateCount = await db.Files
                    .Where(f => filesToMark.Contains(f.Id))
                    .ExecuteUpdateAsync(setter => setter
                        .SetProperty(f => f.IsMarkedRecycle, true));

                markedCount += updateCount;
            }

            // Log progress periodically
            if (processedCount % 10000 == 0 || !hasMoreFiles)
            {
                logger.LogInformation(
                    "Progress: processed {ProcessedCount}/{TotalFiles} files, marked {MarkedCount} for recycling",
                    processedCount, totalFiles, markedCount);
            }
        }

        logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount);
    }
}
DysonNetwork.Drive/Storage/FileController.cs (new file, 144 lines)
@@ -0,0 +1,144 @@
using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Minio.DataModel.Args;

namespace DysonNetwork.Drive.Storage;

[ApiController]
[Route("/api/files")]
public class FileController(
    AppDatabase db,
    FileService fs,
    IConfiguration configuration,
    IWebHostEnvironment env
) : ControllerBase
{
    [HttpGet("{id}")]
    public async Task<ActionResult> OpenFile(
        string id,
        [FromQuery] bool download = false,
        [FromQuery] bool original = false,
        [FromQuery] string? overrideMimeType = null
    )
    {
        // Allow an optional file extension suffix on the id so clients can recognize the data type
        string? fileExtension = null;
        if (id.Contains('.'))
        {
            var splitId = id.Split('.');
            id = splitId.First();
            fileExtension = splitId.Last();
        }

        var file = await fs.GetFileAsync(id);
        if (file is null) return NotFound();

        if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl);

        if (file.UploadedTo is null)
        {
            var tusStorePath = configuration.GetValue<string>("Tus:StorePath")!;
            var filePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
            if (!System.IO.File.Exists(filePath)) return new NotFoundResult();
            return PhysicalFile(filePath, file.MimeType ?? "application/octet-stream", file.Name);
        }

        var dest = fs.GetRemoteStorageConfig(file.UploadedTo);
        var fileName = string.IsNullOrWhiteSpace(file.StorageId) ? file.Id : file.StorageId;

        if (!original && file.HasCompression)
            fileName += ".compressed";

        if (dest.ImageProxy is not null && (file.MimeType?.StartsWith("image/") ?? false))
        {
            var proxyUrl = dest.ImageProxy;
            var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
            var fullUri = new Uri(baseUri, fileName);
            return Redirect(fullUri.ToString());
        }

        if (dest.AccessProxy is not null)
        {
            var proxyUrl = dest.AccessProxy;
            var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
            var fullUri = new Uri(baseUri, fileName);
            return Redirect(fullUri.ToString());
        }

        if (dest.EnableSigned)
        {
            var client = fs.CreateMinioClient(dest);
            if (client is null)
                return BadRequest(
                    "Failed to configure a client for the remote destination; the file has an invalid storage remote.");

            var headers = new Dictionary<string, string>();
            if (fileExtension is not null)
            {
                if (MimeTypes.TryGetMimeType(fileExtension, out var mimeType))
                    headers.Add("Response-Content-Type", mimeType);
            }
            else if (overrideMimeType is not null)
            {
                headers.Add("Response-Content-Type", overrideMimeType);
            }
            else if (file.MimeType is not null && !file.MimeType!.EndsWith("unknown"))
            {
                headers.Add("Response-Content-Type", file.MimeType);
            }

            if (download)
            {
                headers.Add("Response-Content-Disposition", $"attachment; filename=\"{file.Name}\"");
            }

            var bucket = dest.Bucket;
            var openUrl = await client.PresignedGetObjectAsync(
                new PresignedGetObjectArgs()
                    .WithBucket(bucket)
                    .WithObject(fileName)
                    .WithExpiry(3600)
                    .WithHeaders(headers)
            );

            return Redirect(openUrl);
        }

        // Fallback redirect to the S3 endpoint (public read)
        var protocol = dest.EnableSsl ? "https" : "http";
        // Use path-style bucket addressing
        return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}");
    }

    [HttpGet("{id}/info")]
    public async Task<ActionResult<CloudFile>> GetFileInfo(string id)
    {
        var file = await db.Files.FindAsync(id);
        if (file is null) return NotFound();

        return file;
    }

    [Authorize]
    [HttpDelete("{id}")]
    public async Task<ActionResult> DeleteFile(string id)
    {
        if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
        var userId = Guid.Parse(currentUser.Id);

        var file = await db.Files
            .Where(e => e.Id == id)
            .Where(e => e.AccountId == userId)
            .FirstOrDefaultAsync();
        if (file is null) return NotFound();

        await fs.DeleteFileAsync(file);

        db.Files.Remove(file);
        await db.SaveChangesAsync();

        return NoContent();
    }
}
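Example requests against the controller above (illustrative ids, not real data):

    GET /api/files/0b9f2a1c3d4e5f60718293a4b5c6d7e8             (open or redirect to the file)
    GET /api/files/0b9f2a1c3d4e5f60718293a4b5c6d7e8.png          (extension hint used for Response-Content-Type)
    GET /api/files/0b9f2a1c3d4e5f60718293a4b5c6d7e8?download=true&original=true
    GET /api/files/0b9f2a1c3d4e5f60718293a4b5c6d7e8/info
    DELETE /api/files/0b9f2a1c3d4e5f60718293a4b5c6d7e8           (requires authentication; only the owner may delete)
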
DysonNetwork.Drive/Storage/FileExpirationJob.cs (new file, 66 lines)
@@ -0,0 +1,66 @@
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

namespace DysonNetwork.Drive.Storage;

/// <summary>
/// Job responsible for cleaning up expired file references
/// </summary>
public class FileExpirationJob(AppDatabase db, FileService fileService, ILogger<FileExpirationJob> logger) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        var now = SystemClock.Instance.GetCurrentInstant();
        logger.LogInformation("Running file reference expiration job at {now}", now);

        // Find all expired references
        var expiredReferences = await db.FileReferences
            .Where(r => r.ExpiredAt < now && r.ExpiredAt != null)
            .ToListAsync();

        if (!expiredReferences.Any())
        {
            logger.LogInformation("No expired file references found");
            return;
        }

        logger.LogInformation("Found {count} expired file references", expiredReferences.Count);

        // Get unique file IDs
        var fileIds = expiredReferences.Select(r => r.FileId).Distinct().ToList();
        var filesAndReferenceCount = new Dictionary<string, int>();

        // Delete expired references
        db.FileReferences.RemoveRange(expiredReferences);
        await db.SaveChangesAsync();

        // Check remaining references for each file
        foreach (var fileId in fileIds)
        {
            var remainingReferences = await db.FileReferences
                .Where(r => r.FileId == fileId)
                .CountAsync();

            filesAndReferenceCount[fileId] = remainingReferences;

            // If no references remain, delete the file
            if (remainingReferences == 0)
            {
                var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId);
                if (file != null)
                {
                    logger.LogInformation("Deleting file {fileId} as all references have expired", fileId);
                    await fileService.DeleteFileAsync(file);
                }
            }
            else
            {
                // Just purge the cache
                await fileService._PurgeCacheAsync(fileId);
            }
        }

        logger.LogInformation("Completed file reference expiration job");
    }
}
434
DysonNetwork.Drive/Storage/FileReferenceService.cs
Normal file
434
DysonNetwork.Drive/Storage/FileReferenceService.cs
Normal file
@ -0,0 +1,434 @@
|
|||||||
|
using DysonNetwork.Shared.Cache;
|
||||||
|
using Microsoft.EntityFrameworkCore;
|
||||||
|
using NodaTime;
|
||||||
|
|
||||||
|
namespace DysonNetwork.Drive.Storage;
|
||||||
|
|
||||||
|
public class FileReferenceService(AppDatabase db, FileService fileService, ICacheService cache)
|
||||||
|
{
|
||||||
|
private const string CacheKeyPrefix = "fileref:";
|
||||||
|
private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15);
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Creates a new reference to a file for a specific resource
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="fileId">The ID of the file to reference</param>
|
||||||
|
/// <param name="usage">The usage context (e.g., "avatar", "post-attachment")</param>
|
||||||
|
/// <param name="resourceId">The ID of the resource using the file</param>
|
||||||
|
/// <param name="expiredAt">Optional expiration time for the file</param>
|
||||||
|
/// <param name="duration">Optional duration after which the file expires (alternative to expiredAt)</param>
|
||||||
|
/// <returns>The created file reference</returns>
|
||||||
|
public async Task<CloudFileReference> CreateReferenceAsync(
|
||||||
|
string fileId,
|
||||||
|
string usage,
|
||||||
|
string resourceId,
|
||||||
|
Instant? expiredAt = null,
|
||||||
|
Duration? duration = null)
|
||||||
|
{
|
||||||
|
// Calculate expiration time if needed
|
||||||
|
var finalExpiration = expiredAt;
|
||||||
|
if (duration.HasValue)
|
||||||
|
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
|
||||||
|
|
||||||
|
var reference = new CloudFileReference
|
||||||
|
{
|
||||||
|
FileId = fileId,
|
||||||
|
Usage = usage,
|
||||||
|
ResourceId = resourceId,
|
||||||
|
ExpiredAt = finalExpiration
|
||||||
|
};
|
||||||
|
|
||||||
|
db.FileReferences.Add(reference);
|
||||||
|
|
||||||
|
await db.SaveChangesAsync();
|
||||||
|
await fileService._PurgeCacheAsync(fileId);
|
||||||
|
|
||||||
|
return reference;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets all references to a file
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="fileId">The ID of the file</param>
|
||||||
|
/// <returns>A list of all references to the file</returns>
|
||||||
|
public async Task<List<CloudFileReference>> GetReferencesAsync(string fileId)
|
||||||
|
{
|
||||||
|
var cacheKey = $"{CacheKeyPrefix}list:{fileId}";
|
||||||
|
|
||||||
|
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
|
||||||
|
if (cachedReferences is not null)
|
||||||
|
return cachedReferences;
|
||||||
|
|
||||||
|
var references = await db.FileReferences
|
||||||
|
.Where(r => r.FileId == fileId)
|
||||||
|
.ToListAsync();
|
||||||
|
|
||||||
|
await cache.SetAsync(cacheKey, references, CacheDuration);
|
||||||
|
|
||||||
|
return references;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task<Dictionary<string, List<CloudFileReference>>> GetReferencesAsync(IEnumerable<string> fileId)
|
||||||
|
{
|
||||||
|
var references = await db.FileReferences
|
||||||
|
.Where(r => fileId.Contains(r.FileId))
|
||||||
|
.GroupBy(r => r.FileId)
|
||||||
|
.ToDictionaryAsync(r => r.Key, r => r.ToList());
|
||||||
|
return references;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets the number of references to a file
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="fileId">The ID of the file</param>
|
||||||
|
/// <returns>The number of references to the file</returns>
|
||||||
|
public async Task<int> GetReferenceCountAsync(string fileId)
|
||||||
|
{
|
||||||
|
var cacheKey = $"{CacheKeyPrefix}count:{fileId}";
|
||||||
|
|
||||||
|
var cachedCount = await cache.GetAsync<int?>(cacheKey);
|
||||||
|
if (cachedCount.HasValue)
|
||||||
|
return cachedCount.Value;
|
||||||
|
|
||||||
|
var count = await db.FileReferences
|
||||||
|
.Where(r => r.FileId == fileId)
|
||||||
|
.CountAsync();
|
||||||
|
|
||||||
|
await cache.SetAsync(cacheKey, count, CacheDuration);
|
||||||
|
|
||||||
|
return count;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets all references for a specific resource
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="resourceId">The ID of the resource</param>
|
||||||
|
/// <returns>A list of file references associated with the resource</returns>
|
||||||
|
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId)
|
||||||
|
{
|
||||||
|
var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}";
|
||||||
|
|
||||||
|
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
|
||||||
|
if (cachedReferences is not null)
|
||||||
|
return cachedReferences;
|
||||||
|
|
||||||
|
var references = await db.FileReferences
|
||||||
|
.Where(r => r.ResourceId == resourceId)
|
||||||
|
.ToListAsync();
|
||||||
|
|
||||||
|
await cache.SetAsync(cacheKey, references, CacheDuration);
|
||||||
|
|
||||||
|
return references;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets all file references for a specific usage context
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="usage">The usage context</param>
|
||||||
|
/// <returns>A list of file references with the specified usage</returns>
|
||||||
|
public async Task<List<CloudFileReference>> GetUsageReferencesAsync(string usage)
|
||||||
|
{
|
||||||
|
return await db.FileReferences
|
||||||
|
.Where(r => r.Usage == usage)
|
||||||
|
.ToListAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Deletes references for a specific resource
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="resourceId">The ID of the resource</param>
|
||||||
|
/// <returns>The number of deleted references</returns>
|
||||||
|
public async Task<int> DeleteResourceReferencesAsync(string resourceId)
|
||||||
|
{
|
||||||
|
var references = await db.FileReferences
|
||||||
|
.Where(r => r.ResourceId == resourceId)
|
||||||
|
.ToListAsync();
|
||||||
|
|
||||||
|
var fileIds = references.Select(r => r.FileId).Distinct().ToList();
|
||||||
|
|
||||||
|
db.FileReferences.RemoveRange(references);
|
||||||
|
var deletedCount = await db.SaveChangesAsync();
|
||||||
|
|
||||||
|
// Purge caches
|
||||||
|
var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||||
|
tasks.Add(PurgeCacheForResourceAsync(resourceId));
|
||||||
|
await Task.WhenAll(tasks);
|
||||||
|
|
||||||
|
return deletedCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Deletes references for a specific resource and usage
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="resourceId">The ID of the resource</param>
|
||||||
|
/// <param name="usage">The usage context</param>
|
||||||
|
/// <returns>The number of deleted references</returns>
|
||||||
|
public async Task<int> DeleteResourceReferencesAsync(string resourceId, string usage)
|
||||||
|
{
|
||||||
|
var references = await db.FileReferences
|
||||||
|
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||||
|
.ToListAsync();
|
||||||
|
|
||||||
|
if (!references.Any())
|
||||||
|
{
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
var fileIds = references.Select(r => r.FileId).Distinct().ToList();
|
||||||
|
|
||||||
|
db.FileReferences.RemoveRange(references);
|
||||||
|
var deletedCount = await db.SaveChangesAsync();
|
||||||
|
|
||||||
|
// Purge caches
|
||||||
|
var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||||
|
tasks.Add(PurgeCacheForResourceAsync(resourceId));
|
||||||
|
await Task.WhenAll(tasks);
|
||||||
|
|
||||||
|
return deletedCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Deletes a specific file reference
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="referenceId">The ID of the reference to delete</param>
|
||||||
|
/// <returns>True if the reference was deleted, false otherwise</returns>
|
||||||
|
public async Task<bool> DeleteReferenceAsync(Guid referenceId)
|
||||||
|
{
|
||||||
|
var reference = await db.FileReferences
|
||||||
|
.FirstOrDefaultAsync(r => r.Id == referenceId);
|
||||||
|
|
||||||
|
if (reference == null)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
db.FileReferences.Remove(reference);
|
||||||
|
await db.SaveChangesAsync();
|
||||||
|
|
||||||
|
// Purge caches
|
||||||
|
await fileService._PurgeCacheAsync(reference.FileId);
|
||||||
|
await PurgeCacheForResourceAsync(reference.ResourceId);
|
||||||
|
await PurgeCacheForFileAsync(reference.FileId);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Updates the files referenced by a resource
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="resourceId">The ID of the resource</param>
|
||||||
|
/// <param name="newFileIds">The new list of file IDs</param>
|
||||||
|
/// <param name="usage">The usage context</param>
|
||||||
|
/// <param name="expiredAt">Optional expiration time for newly added files</param>
|
||||||
|
/// <param name="duration">Optional duration after which newly added files expire</param>
|
||||||
|
/// <returns>A list of the updated file references</returns>
|
||||||
|
public async Task<List<CloudFileReference>> UpdateResourceFilesAsync(
|
||||||
|
string resourceId,
|
||||||
|
IEnumerable<string>? newFileIds,
|
||||||
|
string usage,
|
||||||
|
Instant? expiredAt = null,
|
||||||
|
Duration? duration = null)
|
||||||
|
{
|
||||||
|
if (newFileIds == null)
|
||||||
|
return new List<CloudFileReference>();
|
||||||
|
|
||||||
|
var existingReferences = await db.FileReferences
|
||||||
|
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||||
|
.ToListAsync();
|
||||||
|
|
||||||
|
var existingFileIds = existingReferences.Select(r => r.FileId).ToHashSet();
|
||||||
|
var newFileIdsList = newFileIds.ToList();
|
||||||
|
var newFileIdsSet = newFileIdsList.ToHashSet();
|
||||||
|
|
||||||
|
// Files to remove
|
||||||
|
var toRemove = existingReferences
|
||||||
|
.Where(r => !newFileIdsSet.Contains(r.FileId))
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
// Files to add
|
||||||
|
var toAdd = newFileIdsList
|
||||||
|
.Where(id => !existingFileIds.Contains(id))
|
||||||
|
.Select(id => new CloudFileReference
|
||||||
|
{
|
||||||
|
FileId = id,
|
||||||
|
Usage = usage,
|
||||||
|
ResourceId = resourceId
|
||||||
|
})
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
// Apply changes
|
||||||
|
if (toRemove.Any())
|
||||||
|
db.FileReferences.RemoveRange(toRemove);
|
||||||
|
|
||||||
|
if (toAdd.Any())
|
||||||
|
db.FileReferences.AddRange(toAdd);
|
||||||
|
|
||||||
|
await db.SaveChangesAsync();
|
||||||
|
|
||||||
|
// Update expiration for newly added references if specified
|
||||||
|
if ((expiredAt.HasValue || duration.HasValue) && toAdd.Any())
|
||||||
|
{
|
||||||
|
var finalExpiration = expiredAt;
|
||||||
|
if (duration.HasValue)
|
||||||
|
{
|
||||||
|
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update newly added references with the expiration time
|
||||||
|
var referenceIds = await db.FileReferences
|
||||||
|
.Where(r => toAdd.Select(a => a.FileId).Contains(r.FileId) &&
|
||||||
|
r.ResourceId == resourceId &&
|
||||||
|
r.Usage == usage)
|
||||||
|
.Select(r => r.Id)
|
||||||
|
.ToListAsync();
|
||||||
|
|
||||||
|
await db.FileReferences
|
||||||
|
.Where(r => referenceIds.Contains(r.Id))
|
||||||
|
.ExecuteUpdateAsync(setter => setter.SetProperty(
|
||||||
|
r => r.ExpiredAt,
|
||||||
|
_ => finalExpiration
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Purge caches
|
||||||
|
var allFileIds = existingFileIds.Union(newFileIdsSet).ToList();
|
||||||
|
var tasks = allFileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||||
|
tasks.Add(PurgeCacheForResourceAsync(resourceId));
|
||||||
|
await Task.WhenAll(tasks);
|
||||||
|
|
||||||
|
// Return updated references
|
||||||
|
return await db.FileReferences
|
||||||
|
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||||
|
.ToListAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
    /// Gets all files referenced by a resource
    /// </summary>
    /// <param name="resourceId">The ID of the resource</param>
    /// <param name="usage">Optional filter by usage context</param>
    /// <returns>A list of files referenced by the resource</returns>
    public async Task<List<CloudFile>> GetResourceFilesAsync(string resourceId, string? usage = null)
    {
        var query = db.FileReferences.Where(r => r.ResourceId == resourceId);

        if (usage != null)
            query = query.Where(r => r.Usage == usage);

        var references = await query.ToListAsync();
        var fileIds = references.Select(r => r.FileId).ToList();

        return await db.Files
            .Where(f => fileIds.Contains(f.Id))
            .ToListAsync();
    }

    /// <summary>
    /// Purges all caches related to a resource
    /// </summary>
    private async Task PurgeCacheForResourceAsync(string resourceId)
    {
        var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}";
        await cache.RemoveAsync(cacheKey);
    }

    /// <summary>
    /// Purges all caches related to a file
    /// </summary>
    private async Task PurgeCacheForFileAsync(string fileId)
    {
        var cacheKeys = new[]
        {
            $"{CacheKeyPrefix}list:{fileId}",
            $"{CacheKeyPrefix}count:{fileId}"
        };

        var tasks = cacheKeys.Select(cache.RemoveAsync);
        await Task.WhenAll(tasks);
    }

    /// <summary>
    /// Updates the expiration time for a file reference
    /// </summary>
    /// <param name="referenceId">The ID of the reference</param>
    /// <param name="expiredAt">The new expiration time, or null to remove expiration</param>
    /// <returns>True if the reference was found and updated, false otherwise</returns>
    public async Task<bool> SetReferenceExpirationAsync(Guid referenceId, Instant? expiredAt)
    {
        var reference = await db.FileReferences
            .FirstOrDefaultAsync(r => r.Id == referenceId);

        if (reference == null)
            return false;

        reference.ExpiredAt = expiredAt;
        await db.SaveChangesAsync();

        await PurgeCacheForFileAsync(reference.FileId);
        await PurgeCacheForResourceAsync(reference.ResourceId);

        return true;
    }

    /// <summary>
    /// Updates the expiration time for all references to a file
    /// </summary>
    /// <param name="fileId">The ID of the file</param>
    /// <param name="expiredAt">The new expiration time, or null to remove expiration</param>
    /// <returns>The number of references updated</returns>
    public async Task<int> SetFileReferencesExpirationAsync(string fileId, Instant? expiredAt)
    {
        var rowsAffected = await db.FileReferences
            .Where(r => r.FileId == fileId)
            .ExecuteUpdateAsync(setter => setter.SetProperty(
                r => r.ExpiredAt,
                _ => expiredAt
            ));

        if (rowsAffected > 0)
        {
            await fileService._PurgeCacheAsync(fileId);
            await PurgeCacheForFileAsync(fileId);
        }

        return rowsAffected;
    }

    /// <summary>
    /// Get all file references for a specific resource and usage type
    /// </summary>
    /// <param name="resourceId">The resource ID</param>
    /// <param name="usageType">The usage type</param>
    /// <returns>List of file references</returns>
    public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId, string usageType)
    {
        return await db.FileReferences
            .Where(r => r.ResourceId == resourceId && r.Usage == usageType)
            .ToListAsync();
    }

    /// <summary>
    /// Check if a file has any references
    /// </summary>
    /// <param name="fileId">The file ID to check</param>
    /// <returns>True if the file has references, false otherwise</returns>
    public async Task<bool> HasFileReferencesAsync(string fileId)
    {
        return await db.FileReferences.AnyAsync(r => r.FileId == fileId);
    }

    /// <summary>
    /// Updates the expiration time for a file reference using a duration from now
    /// </summary>
    /// <param name="referenceId">The ID of the reference</param>
    /// <param name="duration">The duration after which the reference expires, or null to remove expiration</param>
    /// <returns>True if the reference was found and updated, false otherwise</returns>
    public async Task<bool> SetReferenceExpirationDurationAsync(Guid referenceId, Duration? duration)
    {
        Instant? expiredAt = null;
        if (duration.HasValue)
        {
            expiredAt = SystemClock.Instance.GetCurrentInstant() + duration.Value;
        }

        return await SetReferenceExpirationAsync(referenceId, expiredAt);
    }
}
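A minimal usage sketch of the expiration helpers above. The surrounding CleanupExample class and the way the reference ID is obtained are assumptions for illustration; only the FileReferenceService methods themselves come from this commit.

// Sketch only: shows the intended call pattern for the expiration helpers.
using NodaTime;

public class CleanupExample(FileReferenceService fileRefService)
{
    public async Task ExpireAvatarAsync(Guid referenceId, string fileId)
    {
        // Expire a single reference 30 days from now.
        await fileRefService.SetReferenceExpirationDurationAsync(referenceId, Duration.FromDays(30));

        // Or clear the expiration on every reference that points at the file.
        var updated = await fileRefService.SetFileReferencesExpirationAsync(fileId, expiredAt: null);

        // HasFileReferencesAsync tells callers whether the file is still pinned by anything.
        var stillPinned = await fileRefService.HasFileReferencesAsync(fileId);
    }
}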
555 DysonNetwork.Drive/Storage/FileService.cs (Normal file)
@@ -0,0 +1,555 @@
using System.Globalization;
using FFMpegCore;
using System.Security.Cryptography;
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Proto;
using Microsoft.EntityFrameworkCore;
using Minio;
using Minio.DataModel.Args;
using NetVips;
using NodaTime;
using tusdotnet.Stores;

namespace DysonNetwork.Drive.Storage;

public class FileService(
    AppDatabase db,
    IConfiguration configuration,
    TusDiskStore store,
    ILogger<FileService> logger,
    IServiceScopeFactory scopeFactory,
    ICacheService cache
)
{
    private const string CacheKeyPrefix = "file:";
    private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15);

    /// <summary>
    /// The API for getting file metadata with cache;
    /// the best use case is for accessing the file data.
    ///
    /// <b>This function won't load the uploader's information, it only keeps minimal file meta</b>
    /// </summary>
    /// <param name="fileId">The id of the cloud file requested</param>
    /// <returns>The minimal file meta</returns>
    public async Task<CloudFile?> GetFileAsync(string fileId)
    {
        var cacheKey = $"{CacheKeyPrefix}{fileId}";

        var cachedFile = await cache.GetAsync<CloudFile>(cacheKey);
        if (cachedFile is not null)
            return cachedFile;

        var file = await db.Files
            .Where(f => f.Id == fileId)
            .FirstOrDefaultAsync();

        if (file != null)
            await cache.SetAsync(cacheKey, file, CacheDuration);

        return file;
    }

    private static readonly string TempFilePrefix = "dyn-cloudfile";

    private static readonly string[] AnimatedImageTypes =
        ["image/gif", "image/apng", "image/webp", "image/avif"];

    // The file analysis method no longer removes GPS EXIF data.
    // Stripping it should be handled on the client side, and in some specific cases it should be kept.
    public async Task<CloudFile> ProcessNewFileAsync(
        Account account,
        string fileId,
        Stream stream,
        string fileName,
        string? contentType
    )
    {
        var result = new List<(string filePath, string suffix)>();

        var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId));
        var fileSize = stream.Length;
        var hash = await HashFileAsync(stream, fileSize: fileSize);
        contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName);

        var file = new CloudFile
        {
            Id = fileId,
            Name = fileName,
            MimeType = contentType,
            Size = fileSize,
            Hash = hash,
            AccountId = Guid.Parse(account.Id)
        };

        var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == hash);
        file.StorageId = existingFile is not null ? existingFile.StorageId : file.Id;

        if (existingFile is not null)
        {
            file.FileMeta = existingFile.FileMeta;
            file.HasCompression = existingFile.HasCompression;
            file.SensitiveMarks = existingFile.SensitiveMarks;

            db.Files.Add(file);
            await db.SaveChangesAsync();
            return file;
        }

        switch (contentType.Split('/')[0])
        {
            case "image":
                var blurhash =
                    BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(xComponent: 3, yComponent: 3, filename: ogFilePath);

                // Rewind stream
                stream.Position = 0;

                // Use NetVips for the rest
                using (var vipsImage = NetVips.Image.NewFromStream(stream))
                {
                    var width = vipsImage.Width;
                    var height = vipsImage.Height;
                    var format = vipsImage.Get("vips-loader") ?? "unknown";

                    // Try to get orientation from exif data
                    var orientation = 1;
                    var meta = new Dictionary<string, object>
                    {
                        ["blur"] = blurhash,
                        ["format"] = format,
                        ["width"] = width,
                        ["height"] = height,
                        ["orientation"] = orientation,
                    };
                    Dictionary<string, object> exif = [];

                    foreach (var field in vipsImage.GetFields())
                    {
                        var value = vipsImage.Get(field);

                        // Skip GPS-related EXIF fields to remove location data
                        if (IsIgnoredField(field))
                            continue;

                        if (field.StartsWith("exif-")) exif[field.Replace("exif-", "")] = value;
                        else meta[field] = value;

                        if (field == "orientation") orientation = (int)value;
                    }

                    if (orientation is 6 or 8)
                        (width, height) = (height, width);

                    var aspectRatio = height != 0 ? (double)width / height : 0;

                    meta["exif"] = exif;
                    meta["ratio"] = aspectRatio;
                    file.FileMeta = meta;
                }

                break;
            case "video":
            case "audio":
                try
                {
                    var mediaInfo = await FFProbe.AnalyseAsync(ogFilePath);
                    file.FileMeta = new Dictionary<string, object>
                    {
                        ["duration"] = mediaInfo.Duration.TotalSeconds,
                        ["format_name"] = mediaInfo.Format.FormatName,
                        ["format_long_name"] = mediaInfo.Format.FormatLongName,
                        ["start_time"] = mediaInfo.Format.StartTime.ToString(),
                        ["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture),
                        ["tags"] = mediaInfo.Format.Tags ?? [],
                        ["chapters"] = mediaInfo.Chapters,
                    };
                    if (mediaInfo.PrimaryVideoStream is not null)
                        file.FileMeta["ratio"] =
                            (double)mediaInfo.PrimaryVideoStream.Width / mediaInfo.PrimaryVideoStream.Height;
                }
                catch (Exception ex)
                {
                    logger.LogError("File analysis failed, unable to collect video / audio information: {Message}",
                        ex.Message);
                }

                break;
        }

        db.Files.Add(file);
        await db.SaveChangesAsync();

        _ = Task.Run(async () =>
        {
            using var scope = scopeFactory.CreateScope();
            var nfs = scope.ServiceProvider.GetRequiredService<FileService>();

            try
            {
                logger.LogInformation("Processed file {fileId}, now trying to optimize if possible...", fileId);

                if (contentType.Split('/')[0] == "image")
                {
                    // Skip compression for animated image types
                    var animatedMimeTypes = AnimatedImageTypes;
                    if (Enumerable.Contains(animatedMimeTypes, contentType))
                    {
                        logger.LogInformation(
                            "File {fileId} is an animated image (MIME: {mime}), skipping WebP conversion.", fileId,
                            contentType
                        );
                        var tempFilePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
                        result.Add((tempFilePath, string.Empty));
                        return;
                    }

                    file.MimeType = "image/webp";

                    using var vipsImage = Image.NewFromFile(ogFilePath);
                    var imagePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
                    vipsImage.Autorot().WriteToFile(imagePath + ".webp",
                        new VOption { { "lossless", true }, { "strip", true } });
                    result.Add((imagePath + ".webp", string.Empty));

                    if (vipsImage.Width * vipsImage.Height >= 1024 * 1024)
                    {
                        var scale = 1024.0 / Math.Max(vipsImage.Width, vipsImage.Height);
                        var imageCompressedPath =
                            Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}-compressed");

                        // Create and save image within the same synchronous block to avoid disposal issues
                        using var compressedImage = vipsImage.Resize(scale);
                        compressedImage.Autorot().WriteToFile(imageCompressedPath + ".webp",
                            new VOption { { "Q", 80 }, { "strip", true } });

                        result.Add((imageCompressedPath + ".webp", ".compressed"));
                        file.HasCompression = true;
                    }
                }
                else
                {
                    // No extra processing for video; add it to the upload queue as-is.
                    result.Add((ogFilePath, string.Empty));
                }

                logger.LogInformation("Optimized file {fileId}, now uploading...", fileId);

                if (result.Count > 0)
                {
                    List<Task<CloudFile>> tasks = [];
                    tasks.AddRange(result.Select(item =>
                        nfs.UploadFileToRemoteAsync(file, item.filePath, null, item.suffix, true))
                    );

                    await Task.WhenAll(tasks);
                    file = await tasks.First();
                }
                else
                {
                    file = await nfs.UploadFileToRemoteAsync(file, stream, null);
                }

                logger.LogInformation("Uploaded file {fileId} done!", fileId);

                var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
                await scopedDb.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(setter => setter
                    .SetProperty(f => f.UploadedAt, file.UploadedAt)
                    .SetProperty(f => f.UploadedTo, file.UploadedTo)
                    .SetProperty(f => f.MimeType, file.MimeType)
                    .SetProperty(f => f.HasCompression, file.HasCompression)
                );
            }
            catch (Exception err)
            {
                logger.LogError(err, "Failed to process {fileId}", fileId);
            }

            await stream.DisposeAsync();
            await store.DeleteFileAsync(file.Id, CancellationToken.None);
            await nfs._PurgeCacheAsync(file.Id);
        });

        return file;
    }

    private static async Task<string> HashFileAsync(Stream stream, int chunkSize = 1024 * 1024, long? fileSize = null)
    {
        fileSize ??= stream.Length;
        if (fileSize > chunkSize * 1024 * 5)
            return await HashFastApproximateAsync(stream, chunkSize);

        using var md5 = MD5.Create();
        var hashBytes = await md5.ComputeHashAsync(stream);
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }

    private static async Task<string> HashFastApproximateAsync(Stream stream, int chunkSize = 1024 * 1024)
    {
        // Scale the chunk size to kB level
        chunkSize *= 1024;

        using var md5 = MD5.Create();

        var buffer = new byte[chunkSize * 2];
        var fileLength = stream.Length;

        var bytesRead = await stream.ReadAsync(buffer.AsMemory(0, chunkSize));

        if (fileLength > chunkSize)
        {
            stream.Seek(-chunkSize, SeekOrigin.End);
            bytesRead += await stream.ReadAsync(buffer.AsMemory(chunkSize, chunkSize));
        }

        var hash = md5.ComputeHash(buffer, 0, bytesRead);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, string filePath, string? targetRemote,
        string? suffix = null, bool selfDestruct = false)
    {
        var fileStream = File.OpenRead(filePath);
        var result = await UploadFileToRemoteAsync(file, fileStream, targetRemote, suffix);
        if (selfDestruct) File.Delete(filePath);
        return result;
    }

    public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, Stream stream, string? targetRemote,
        string? suffix = null)
    {
        if (file.UploadedAt.HasValue) return file;

        file.UploadedTo = targetRemote ?? configuration.GetValue<string>("Storage:PreferredRemote")!;

        var dest = GetRemoteStorageConfig(file.UploadedTo);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
                $"Failed to configure client for remote destination '{file.UploadedTo}'"
            );

        var bucket = dest.Bucket;
        var contentType = file.MimeType ?? "application/octet-stream";

        await client.PutObjectAsync(new PutObjectArgs()
            .WithBucket(bucket)
            .WithObject(string.IsNullOrWhiteSpace(suffix) ? file.Id : file.Id + suffix)
            .WithStreamData(stream) // TODO: ensure this stream is not disposed before the upload completes
            .WithObjectSize(stream.Length)
            .WithContentType(contentType)
        );

        file.UploadedAt = Instant.FromDateTimeUtc(DateTime.UtcNow);
        return file;
    }

    public async Task DeleteFileAsync(CloudFile file)
    {
        await DeleteFileDataAsync(file);

        db.Remove(file);
        await db.SaveChangesAsync();
        await _PurgeCacheAsync(file.Id);
    }

    public async Task DeleteFileDataAsync(CloudFile file)
    {
        if (file.StorageId is null) return;
        if (file.UploadedTo is null) return;

        // Check if any other file with the same storage ID is referenced
        var otherFilesWithSameStorageId = await db.Files
            .Where(f => f.StorageId == file.StorageId && f.Id != file.Id)
            .Select(f => f.Id)
            .ToListAsync();

        // Check if any of these files are referenced
        var anyReferenced = false;
        if (otherFilesWithSameStorageId.Any())
        {
            anyReferenced = await db.FileReferences
                .Where(r => otherFilesWithSameStorageId.Contains(r.FileId))
                .AnyAsync();
        }

        // If any other file with the same storage ID is referenced, don't delete the actual file data
        if (anyReferenced) return;

        var dest = GetRemoteStorageConfig(file.UploadedTo);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
                $"Failed to configure client for remote destination '{file.UploadedTo}'"
            );

        var bucket = dest.Bucket;
        var objectId = file.StorageId ?? file.Id; // Use StorageId if available, otherwise fall back to Id

        await client.RemoveObjectAsync(
            new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId)
        );

        if (file.HasCompression)
        {
            // Also remove the compressed version if it exists
            try
            {
                await client.RemoveObjectAsync(
                    new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId + ".compressed")
                );
            }
            catch
            {
                // Ignore errors when deleting compressed version
                logger.LogWarning("Failed to delete compressed version of file {fileId}", file.Id);
            }
        }
    }

    public RemoteStorageConfig GetRemoteStorageConfig(string destination)
    {
        var destinations = configuration.GetSection("Storage:Remote").Get<List<RemoteStorageConfig>>()!;
        var dest = destinations.FirstOrDefault(d => d.Id == destination);
        if (dest is null) throw new InvalidOperationException($"Remote destination '{destination}' not found");
        return dest;
    }

    public IMinioClient? CreateMinioClient(RemoteStorageConfig dest)
    {
        var client = new MinioClient()
            .WithEndpoint(dest.Endpoint)
            .WithRegion(dest.Region)
            .WithCredentials(dest.SecretId, dest.SecretKey);
        if (dest.EnableSsl) client = client.WithSSL();

        return client.Build();
    }

    // Helper method to purge the cache for a specific file
    // Made internal to allow FileReferenceService to use it
    internal async Task _PurgeCacheAsync(string fileId)
    {
        var cacheKey = $"{CacheKeyPrefix}{fileId}";
        await cache.RemoveAsync(cacheKey);
    }

    // Helper method to purge cache for multiple files
    internal async Task _PurgeCacheRangeAsync(IEnumerable<string> fileIds)
    {
        var tasks = fileIds.Select(_PurgeCacheAsync);
        await Task.WhenAll(tasks);
    }

    public async Task<List<CloudFile?>> LoadFromReference(List<CloudFileReferenceObject> references)
    {
        var cachedFiles = new Dictionary<string, CloudFile>();
        var uncachedIds = new List<string>();

        // Check cache first
        foreach (var reference in references)
        {
            var cacheKey = $"{CacheKeyPrefix}{reference.Id}";
            var cachedFile = await cache.GetAsync<CloudFile>(cacheKey);

            if (cachedFile != null)
            {
                cachedFiles[reference.Id] = cachedFile;
            }
            else
            {
                uncachedIds.Add(reference.Id);
            }
        }

        // Load uncached files from database
        if (uncachedIds.Count > 0)
        {
            var dbFiles = await db.Files
                .Where(f => uncachedIds.Contains(f.Id))
                .ToListAsync();

            // Add to cache
            foreach (var file in dbFiles)
            {
                var cacheKey = $"{CacheKeyPrefix}{file.Id}";
                await cache.SetAsync(cacheKey, file, CacheDuration);
                cachedFiles[file.Id] = file;
            }
        }

        // Preserve original order
        return references
            .Select(r => cachedFiles.GetValueOrDefault(r.Id))
            .Where(f => f != null)
            .ToList();
    }

    /// <summary>
    /// Gets the number of references to a file based on CloudFileReference records
    /// </summary>
    /// <param name="fileId">The ID of the file</param>
    /// <returns>The number of references to the file</returns>
    public async Task<int> GetReferenceCountAsync(string fileId)
    {
        return await db.FileReferences
            .Where(r => r.FileId == fileId)
            .CountAsync();
    }

    /// <summary>
    /// Checks if a file is referenced by any resource
    /// </summary>
    /// <param name="fileId">The ID of the file to check</param>
    /// <returns>True if the file is referenced, false otherwise</returns>
    public async Task<bool> IsReferencedAsync(string fileId)
    {
        return await db.FileReferences
            .Where(r => r.FileId == fileId)
            .AnyAsync();
    }

    /// <summary>
    /// Checks if an EXIF field contains GPS location data
    /// </summary>
    /// <param name="fieldName">The EXIF field name</param>
    /// <returns>True if the field contains GPS data, false otherwise</returns>
    private static bool IsGpsExifField(string fieldName)
    {
        // Common GPS EXIF field names
        var gpsFields = new[]
        {
            "gps-latitude",
            "gps-longitude",
            "gps-altitude",
            "gps-latitude-ref",
            "gps-longitude-ref",
            "gps-altitude-ref",
            "gps-timestamp",
            "gps-datestamp",
            "gps-speed",
            "gps-speed-ref",
            "gps-track",
            "gps-track-ref",
            "gps-img-direction",
            "gps-img-direction-ref",
            "gps-dest-latitude",
            "gps-dest-longitude",
            "gps-dest-latitude-ref",
            "gps-dest-longitude-ref",
            "gps-processing-method",
            "gps-area-information"
        };

        return gpsFields.Any(gpsField =>
            fieldName.Equals(gpsField, StringComparison.OrdinalIgnoreCase) ||
            fieldName.StartsWith("gps", StringComparison.OrdinalIgnoreCase));
    }

    private static bool IsIgnoredField(string fieldName)
    {
        if (IsGpsExifField(fieldName)) return true;
        if (fieldName.EndsWith("-data")) return true;
        return false;
    }
}
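A rough consumer-side sketch of the cache-backed metadata lookup above, assuming an ASP.NET Core minimal-API host; the endpoint path and route shape are illustrative only and not part of this commit.

// Illustrative endpoint: repeated lookups hit the 15-minute cache instead of the database.
app.MapGet("/api/files/{id}", async (string id, FileService files) =>
{
    var file = await files.GetFileAsync(id);
    return file is null ? Results.NotFound() : Results.Ok(file);
});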
79 DysonNetwork.Drive/Storage/TusService.cs (Normal file)
@@ -0,0 +1,79 @@
using System.Net;
using System.Text;
using System.Text.Json;
using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using tusdotnet.Interfaces;
using tusdotnet.Models;
using tusdotnet.Models.Configuration;

namespace DysonNetwork.Drive.Storage;

public abstract class TusService
{
    public static DefaultTusConfiguration BuildConfiguration(ITusStore store) => new()
    {
        Store = store,
        Events = new Events
        {
            OnAuthorizeAsync = async eventContext =>
            {
                if (eventContext.Intent == IntentType.DeleteFile)
                {
                    eventContext.FailRequest(
                        HttpStatusCode.BadRequest,
                        "Deleting files from this endpoint was disabled, please refer to the Dyson Network File API."
                    );
                    return;
                }

                var httpContext = eventContext.HttpContext;
                if (httpContext.Items["CurrentUser"] is not Account user)
                {
                    eventContext.FailRequest(HttpStatusCode.Unauthorized);
                    return;
                }

                if (!user.IsSuperuser)
                {
                    using var scope = httpContext.RequestServices.CreateScope();
                    var pm = scope.ServiceProvider.GetRequiredService<PermissionService.PermissionServiceClient>();
                    var allowed = await pm.HasPermissionAsync(new HasPermissionRequest
                        { Actor = $"user:{user.Id}", Area = "global", Key = "files.create" });
                    if (!allowed.HasPermission)
                        eventContext.FailRequest(HttpStatusCode.Forbidden);
                }
            },
            OnFileCompleteAsync = async eventContext =>
            {
                using var scope = eventContext.HttpContext.RequestServices.CreateScope();
                var services = scope.ServiceProvider;

                var httpContext = eventContext.HttpContext;
                if (httpContext.Items["CurrentUser"] is not Account user) return;

                var file = await eventContext.GetFileAsync();
                var metadata = await file.GetMetadataAsync(eventContext.CancellationToken);
                var fileName = metadata.TryGetValue("filename", out var fn)
                    ? fn.GetString(Encoding.UTF8)
                    : "uploaded_file";
                var contentType = metadata.TryGetValue("content-type", out var ct) ? ct.GetString(Encoding.UTF8) : null;

                var fileStream = await file.GetContentAsync(eventContext.CancellationToken);

                var fileService = services.GetRequiredService<FileService>();
                var info = await fileService.ProcessNewFileAsync(user, file.Id, fileStream, fileName, contentType);

                using var finalScope = eventContext.HttpContext.RequestServices.CreateScope();
                var jsonOptions = finalScope.ServiceProvider.GetRequiredService<IOptions<JsonOptions>>().Value
                    .JsonSerializerOptions;
                var infoJson = JsonSerializer.Serialize(info, jsonOptions);
                eventContext.HttpContext.Response.Headers.Append("X-FileInfo", infoJson);

                // Dispose the stream after all processing is complete
                await fileStream.DisposeAsync();
            }
        }
    };
}
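One possible way to mount this configuration with tusdotnet's classic middleware; the "/api/tus" path, the explicit UrlPath assignment, and the singleton TusDiskStore registration are assumptions for the sketch (the exact mounting call depends on how the Drive host wires tusdotnet elsewhere).

// Sketch: wiring the tus endpoint in Program.cs (path and store registration assumed).
builder.Services.AddSingleton(new TusDiskStore(builder.Configuration["Tus:StorePath"]!));

app.UseTus(httpContext =>
{
    var config = TusService.BuildConfiguration(
        httpContext.RequestServices.GetRequiredService<TusDiskStore>());
    config.UrlPath = "/api/tus"; // assumed mount point
    return config;
});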
129 DysonNetwork.Drive/appsettings.json (Normal file)
@@ -0,0 +1,129 @@
{
  "Debug": true,
  "BaseUrl": "http://localhost:5071",
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft.AspNetCore": "Warning"
    }
  },
  "AllowedHosts": "*",
  "ConnectionStrings": {
    "App": "Host=localhost;Port=5432;Database=dyson_network;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60",
    "FastRetrieve": "localhost:6379"
  },
  "Authentication": {
    "Schemes": {
      "Bearer": {
        "ValidAudiences": [
          "http://localhost:5071",
          "https://localhost:7099"
        ],
        "ValidIssuer": "solar-network"
      }
    }
  },
  "AuthToken": {
    "PublicKeyPath": "Keys/PublicKey.pem",
    "PrivateKeyPath": "Keys/PrivateKey.pem"
  },
  "OidcProvider": {
    "IssuerUri": "https://nt.solian.app",
    "PublicKeyPath": "Keys/PublicKey.pem",
    "PrivateKeyPath": "Keys/PrivateKey.pem",
    "AccessTokenLifetime": "01:00:00",
    "RefreshTokenLifetime": "30.00:00:00",
    "AuthorizationCodeLifetime": "00:30:00",
    "RequireHttpsMetadata": true
  },
  "Tus": {
    "StorePath": "Uploads"
  },
  "Storage": {
    "PreferredRemote": "minio",
    "Remote": [
      {
        "Id": "minio",
        "Label": "Minio",
        "Region": "auto",
        "Bucket": "solar-network-development",
        "Endpoint": "localhost:9000",
        "SecretId": "littlesheep",
        "SecretKey": "password",
        "EnabledSigned": true,
        "EnableSsl": false
      },
      {
        "Id": "cloudflare",
        "Label": "Cloudflare R2",
        "Region": "auto",
        "Bucket": "solar-network",
        "Endpoint": "0a70a6d1b7128888c823359d0008f4e1.r2.cloudflarestorage.com",
        "SecretId": "8ff5d06c7b1639829d60bc6838a542e6",
        "SecretKey": "fd58158c5201be16d1872c9209d9cf199421dae3c2f9972f94b2305976580d67",
        "EnableSigned": true,
        "EnableSsl": true
      }
    ]
  },
  "Captcha": {
    "Provider": "cloudflare",
    "ApiKey": "0x4AAAAAABCDUdOujj4feOb_",
    "ApiSecret": "0x4AAAAAABCDUWABiJQweqlB7tYq-IqIm8U"
  },
  "Notifications": {
    "Topic": "dev.solsynth.solian",
    "Endpoint": "http://localhost:8088"
  },
  "Email": {
    "Server": "smtp4dev.orb.local",
    "Port": 25,
    "UseSsl": false,
    "Username": "no-reply@mail.solsynth.dev",
    "Password": "password",
    "FromAddress": "no-reply@mail.solsynth.dev",
    "FromName": "Alphabot",
    "SubjectPrefix": "Solar Network"
  },
  "RealtimeChat": {
    "Endpoint": "https://solar-network-im44o8gq.livekit.cloud",
    "ApiKey": "APIs6TiL8wj3A4j",
    "ApiSecret": "SffxRneIwTnlHPtEf3zicmmv3LUEl7xXael4PvWZrEhE"
  },
  "GeoIp": {
    "DatabasePath": "./Keys/GeoLite2-City.mmdb"
  },
  "Oidc": {
    "Google": {
      "ClientId": "961776991058-963m1qin2vtp8fv693b5fdrab5hmpl89.apps.googleusercontent.com",
      "ClientSecret": ""
    },
    "Apple": {
      "ClientId": "dev.solsynth.solian",
      "TeamId": "W7HPZ53V6B",
      "KeyId": "B668YP4KBG",
      "PrivateKeyPath": "./Keys/Solarpass.p8"
    },
    "Microsoft": {
      "ClientId": "YOUR_MICROSOFT_CLIENT_ID",
      "ClientSecret": "YOUR_MICROSOFT_CLIENT_SECRET",
      "DiscoveryEndpoint": "YOUR_MICROSOFT_DISCOVERY_ENDPOINT"
    }
  },
  "Payment": {
    "Auth": {
      "Afdian": "<token here>"
    },
    "Subscriptions": {
      "Afdian": {
        "7d17aae23c9611f0b5705254001e7c00": "solian.stellar.primary",
        "7dfae4743c9611f0b3a55254001e7c00": "solian.stellar.nova",
        "141713ee3d6211f085b352540025c377": "solian.stellar.supernova"
      }
    }
  },
  "KnownProxies": [
    "127.0.0.1",
    "::1"
  ]
}
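FileService binds the Storage:Remote array with GetSection("Storage:Remote").Get<List<RemoteStorageConfig>>(), so the config class presumably mirrors these keys one-to-one. Note that the minio entry spells the signing flag "EnabledSigned" while the cloudflare entry uses "EnableSigned"; only one of those spellings will actually bind to the real property, whichever it is. A sketch of the inferred POCO follows (property names taken from the JSON above and from the properties FileService reads; the real class lives elsewhere in the repo and may differ).

// Inferred shape of RemoteStorageConfig; not copied from this commit.
public class RemoteStorageConfig
{
    public string Id { get; set; } = null!;
    public string Label { get; set; } = null!;
    public string Region { get; set; } = null!;
    public string Bucket { get; set; } = null!;
    public string Endpoint { get; set; } = null!;
    public string SecretId { get; set; } = null!;
    public string SecretKey { get; set; } = null!;
    public bool EnableSigned { get; set; }
    public bool EnableSsl { get; set; }
}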
@@ -1,27 +1,49 @@
+using DysonNetwork.Shared.Cache;
 using DysonNetwork.Shared.Proto;
 using Grpc.Core;
 using Microsoft.EntityFrameworkCore;
+using NodaTime;

 namespace DysonNetwork.Pass.Auth;

-public class AuthServiceGrpc(AuthService authService, AppDatabase db) : Shared.Proto.AuthService.AuthServiceBase
+public class AuthServiceGrpc(
+    AuthService authService,
+    ICacheService cache,
+    AppDatabase db
+)
+    : Shared.Proto.AuthService.AuthServiceBase
 {
-    public override async Task<Shared.Proto.AuthSession> Authenticate(AuthenticateRequest request, ServerCallContext context)
+    public override async Task<AuthenticateResponse> Authenticate(
+        AuthenticateRequest request,
+        ServerCallContext context
+    )
     {
         if (!authService.ValidateToken(request.Token, out var sessionId))
-        {
-            throw new RpcException(new Status(StatusCode.Unauthenticated, "Invalid token."));
-        }
+            return new AuthenticateResponse { Valid = false, Message = "Invalid token." };

-        var session = await db.AuthSessions
+        var session = await cache.GetAsync<AuthSession>($"{DysonTokenAuthHandler.AuthCachePrefix}{sessionId}");
+        if (session is not null)
+            return new AuthenticateResponse { Valid = true, Session = session.ToProtoValue() };
+
+        session = await db.AuthSessions
             .AsNoTracking()
+            .Include(e => e.Challenge)
+            .Include(e => e.Account)
+            .ThenInclude(e => e.Profile)
             .FirstOrDefaultAsync(s => s.Id == sessionId);

         if (session == null)
-        {
-            throw new RpcException(new Status(StatusCode.NotFound, "Session not found."));
-        }
+            return new AuthenticateResponse { Valid = false, Message = "Session was not found." };

+        var now = SystemClock.Instance.GetCurrentInstant();
+        if (session.ExpiredAt.HasValue && session.ExpiredAt < now)
+            return new AuthenticateResponse { Valid = false, Message = "Session has been expired." };
+
+        await cache.SetWithGroupsAsync(
+            $"auth:{sessionId}",
+            session,
+            [$"{Account.AccountService.AccountCachePrefix}{session.Account.Id}"],
+            TimeSpan.FromHours(1)
+        );
+
-        return session.ToProtoValue();
+        return new AuthenticateResponse { Valid = true, Session = session.ToProtoValue() };
     }
 }
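The Authenticate RPC now reports failures through the response message instead of throwing RpcException, so callers check Valid rather than catching gRPC status codes. A minimal caller-side sketch (channel and client setup omitted; field names taken from the diff above):

// Sketch of the new call pattern against the shared AuthService gRPC client.
var resp = await authClient.AuthenticateAsync(new AuthenticateRequest { Token = token });
if (!resp.Valid)
{
    logger.LogWarning("Token rejected: {Message}", resp.Message);
    return;
}
var session = resp.Session; // proto AuthSession, already cache-backed on the server side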
@@ -8,33 +8,33 @@

   <ItemGroup>
     <PackageReference Include="Grpc.AspNetCore.Server" Version="2.71.0" />
-    <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.7"/>
+    <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.7" />
-    <PackageReference Include="NodaTime" Version="3.2.2"/>
+    <PackageReference Include="NodaTime" Version="3.2.2" />
-    <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0"/>
+    <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0" />
     <PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
-    <PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0"/>
+    <PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0" />
-    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4"/>
+    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
-    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0"/>
+    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0" />
-    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite" Version="9.0.4"/>
+    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NetTopologySuite" Version="9.0.4" />
-    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4"/>
+    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
-    <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0"/>
+    <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0" />
-    <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0"/>
+    <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" />
-    <PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.12.0"/>
+    <PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.12.0" />
-    <PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.12.0"/>
+    <PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.12.0" />
-    <PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0"/>
+    <PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" />
-    <PackageReference Include="Otp.NET" Version="1.4.0"/>
+    <PackageReference Include="Otp.NET" Version="1.4.0" />
-    <PackageReference Include="prometheus-net.AspNetCore" Version="8.2.1"/>
+    <PackageReference Include="prometheus-net.AspNetCore" Version="8.2.1" />
-    <PackageReference Include="prometheus-net.AspNetCore.HealthChecks" Version="8.2.1"/>
+    <PackageReference Include="prometheus-net.AspNetCore.HealthChecks" Version="8.2.1" />
-    <PackageReference Include="prometheus-net.DotNetRuntime" Version="4.4.1"/>
+    <PackageReference Include="prometheus-net.DotNetRuntime" Version="4.4.1" />
-    <PackageReference Include="prometheus-net.EntityFramework" Version="0.9.5"/>
+    <PackageReference Include="prometheus-net.EntityFramework" Version="0.9.5" />
-    <PackageReference Include="prometheus-net.SystemMetrics" Version="3.1.0"/>
+    <PackageReference Include="prometheus-net.SystemMetrics" Version="3.1.0" />
-    <PackageReference Include="Quartz" Version="3.14.0"/>
+    <PackageReference Include="Quartz" Version="3.14.0" />
-    <PackageReference Include="Quartz.AspNetCore" Version="3.14.0"/>
+    <PackageReference Include="Quartz.AspNetCore" Version="3.14.0" />
-    <PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0"/>
+    <PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
-    <PackageReference Include="BCrypt.Net-Next" Version="4.0.3"/>
+    <PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
-    <PackageReference Include="EFCore.BulkExtensions" Version="9.0.1"/>
+    <PackageReference Include="EFCore.BulkExtensions" Version="9.0.1" />
-    <PackageReference Include="EFCore.BulkExtensions.PostgreSql" Version="9.0.1"/>
+    <PackageReference Include="EFCore.BulkExtensions.PostgreSql" Version="9.0.1" />
-    <PackageReference Include="EFCore.NamingConventions" Version="9.0.0"/>
+    <PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
     <PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.3" />
     <PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="9.0.3" />
   </ItemGroup>
96 DysonNetwork.Pass/Permission/PermissionServiceGrpc.cs (Normal file)
@@ -0,0 +1,96 @@
using Grpc.Core;
using Microsoft.EntityFrameworkCore;
using DysonNetwork.Shared.Proto;
using Google.Protobuf.WellKnownTypes;
using System.Text.Json;
using NodaTime.Serialization.Protobuf;

namespace DysonNetwork.Pass.Permission;

public class PermissionServiceGrpc(
    PermissionService permissionService,
    AppDatabase db
) : DysonNetwork.Shared.Proto.PermissionService.PermissionServiceBase
{
    public override async Task<HasPermissionResponse> HasPermission(HasPermissionRequest request, ServerCallContext context)
    {
        var hasPermission = await permissionService.HasPermissionAsync(request.Actor, request.Area, request.Key);
        return new HasPermissionResponse { HasPermission = hasPermission };
    }

    public override async Task<GetPermissionResponse> GetPermission(GetPermissionRequest request, ServerCallContext context)
    {
        var permissionValue = await permissionService.GetPermissionAsync<JsonDocument>(request.Actor, request.Area, request.Key);
        return new GetPermissionResponse { Value = permissionValue != null ? Value.Parser.ParseJson(permissionValue.RootElement.GetRawText()) : null };
    }

    public override async Task<AddPermissionNodeResponse> AddPermissionNode(AddPermissionNodeRequest request, ServerCallContext context)
    {
        var node = await permissionService.AddPermissionNode(
            request.Actor,
            request.Area,
            request.Key,
            JsonDocument.Parse(request.Value.ToString()), // Convert Value to JsonDocument
            request.ExpiredAt?.ToInstant(),
            request.AffectedAt?.ToInstant()
        );
        return new AddPermissionNodeResponse { Node = node.ToProtoValue() };
    }

    public override async Task<AddPermissionNodeToGroupResponse> AddPermissionNodeToGroup(AddPermissionNodeToGroupRequest request, ServerCallContext context)
    {
        var group = await db.PermissionGroups.FirstOrDefaultAsync(g => g.Id == Guid.Parse(request.Group.Id));
        if (group == null)
        {
            throw new RpcException(new Status(StatusCode.NotFound, "Permission group not found."));
        }

        var node = await permissionService.AddPermissionNodeToGroup(
            group,
            request.Actor,
            request.Area,
            request.Key,
            JsonDocument.Parse(request.Value.ToString()), // Convert Value to JsonDocument
            request.ExpiredAt?.ToInstant(),
            request.AffectedAt?.ToInstant()
        );
        return new AddPermissionNodeToGroupResponse { Node = node.ToProtoValue() };
    }

    public override async Task<RemovePermissionNodeResponse> RemovePermissionNode(RemovePermissionNodeRequest request, ServerCallContext context)
    {
        await permissionService.RemovePermissionNode(request.Actor, request.Area, request.Key);
        return new RemovePermissionNodeResponse { Success = true };
    }

    public override async Task<RemovePermissionNodeFromGroupResponse> RemovePermissionNodeFromGroup(RemovePermissionNodeFromGroupRequest request, ServerCallContext context)
    {
        var group = await db.PermissionGroups.FirstOrDefaultAsync(g => g.Id == Guid.Parse(request.Group.Id));
        if (group == null)
        {
            throw new RpcException(new Status(StatusCode.NotFound, "Permission group not found."));
        }

        await permissionService.RemovePermissionNodeFromGroup<JsonDocument>(group, request.Actor, request.Area, request.Key);
        return new RemovePermissionNodeFromGroupResponse { Success = true };
    }
}

public static class PermissionExtensions
{
    public static DysonNetwork.Shared.Proto.PermissionNode ToProtoValue(this PermissionNode node)
    {
        return new DysonNetwork.Shared.Proto.PermissionNode
        {
            Id = node.Id.ToString(),
            Actor = node.Actor,
            Area = node.Area,
            Key = node.Key,
            Value = Value.Parser.ParseJson(node.Value.RootElement.GetRawText()),
            ExpiredAt = node.ExpiredAt?.ToTimestamp(),
            AffectedAt = node.AffectedAt?.ToTimestamp(),
            GroupId = node.GroupId?.ToString() ?? string.Empty
        };
    }
}
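A remote permission check through this gRPC surface looks like the call TusService already makes on the Drive side; a minimal sketch, assuming a PermissionService.PermissionServiceClient has been registered in DI as pm and that the actor/area/key values are illustrative:

// Sketch: remote permission check against the Pass service.
var allowed = await pm.HasPermissionAsync(new HasPermissionRequest
{
    Actor = $"user:{userId}",
    Area = "global",
    Key = "files.create"
});
if (!allowed.HasPermission)
    throw new UnauthorizedAccessException();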
@@ -13,7 +13,7 @@ builder.ConfigureAppKestrel();
 builder.Services.AddAppMetrics();

 // Add application services
-builder.Services.AddEtcdService(builder.Configuration);
+builder.Services.AddRegistryService(builder.Configuration);
 builder.Services.AddAppServices(builder.Configuration);
 builder.Services.AddAppRateLimiting();
 builder.Services.AddAppAuthentication();
@@ -15,6 +15,7 @@
     <PackageReference Include="MailKit" Version="4.13.0" />
     <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.7"/>
     <PackageReference Include="NodaTime" Version="3.2.2" />
+    <PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
     <PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0" />
     <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
     <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
@@ -1,6 +1,7 @@
-using DysonNetwork.Pass.Startup;
 using DysonNetwork.Pusher;
 using DysonNetwork.Pusher.Startup;
+using DysonNetwork.Shared.Auth;
+using DysonNetwork.Shared.Registry;
 using Microsoft.EntityFrameworkCore;

 var builder = WebApplication.CreateBuilder(args);
@@ -9,10 +10,12 @@ var builder = WebApplication.CreateBuilder(args);
 builder.ConfigureAppKestrel();

 // Add application services
+builder.Services.AddRegistryService(builder.Configuration);
 builder.Services.AddAppServices(builder.Configuration);
 builder.Services.AddAppRateLimiting();
 builder.Services.AddAppAuthentication();
 builder.Services.AddAppSwagger();
+builder.Services.AddDysonAuth(builder.Configuration);

 // Add flush handlers and websocket handlers
 builder.Services.AddAppFlushHandlers();
@@ -23,8 +26,6 @@ builder.Services.AddAppBusinessServices();
 // Add scheduled jobs
 builder.Services.AddAppScheduledJobs();

-builder.Services.AddHostedService<ServiceRegistrationHostedService>();
-
 var app = builder.Build();

 // Run database migrations
@@ -37,8 +38,6 @@ using (var scope = app.Services.CreateScope())
 // Configure application middleware pipeline
 app.ConfigureAppMiddleware(builder.Configuration);

-app.UseMiddleware<DysonNetwork.Shared.Middleware.AuthMiddleware>();
-
 // Configure gRPC
 app.ConfigureGrpcServices();

@@ -1,4 +1,4 @@
-namespace DysonNetwork.Pass.Startup;
+namespace DysonNetwork.Pusher.Startup;

 public static class KestrelConfiguration
 {
|
154
DysonNetwork.Shared/Auth/AuthScheme.cs
Normal file
154
DysonNetwork.Shared/Auth/AuthScheme.cs
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
using System.Security.Claims;
|
||||||
|
using System.Text.Encodings.Web;
|
||||||
|
using DysonNetwork.Shared.Proto;
|
||||||
|
using Grpc.Core;
|
||||||
|
using Microsoft.AspNetCore.Authentication;
|
||||||
|
using Microsoft.AspNetCore.Http;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using Microsoft.Extensions.Options;
|
||||||
|
using SystemClock = NodaTime.SystemClock;
|
||||||
|
|
||||||
|
namespace DysonNetwork.Shared.Auth;
|
||||||
|
|
||||||
|
public class DysonTokenAuthOptions : AuthenticationSchemeOptions;
|
||||||
|
|
||||||
|
public class DysonTokenAuthHandler(
|
||||||
|
IOptionsMonitor<DysonTokenAuthOptions> options,
|
||||||
|
ILoggerFactory logger,
|
||||||
|
UrlEncoder encoder,
|
||||||
|
ISystemClock clock,
|
||||||
|
AuthService.AuthServiceClient auth
|
||||||
|
)
|
||||||
|
: AuthenticationHandler<DysonTokenAuthOptions>(options, logger, encoder, clock)
|
||||||
|
{
|
||||||
|
protected override async Task<AuthenticateResult> HandleAuthenticateAsync()
|
||||||
|
{
|
||||||
|
var tokenInfo = _ExtractToken(Request);
|
||||||
|
|
||||||
|
if (tokenInfo == null || string.IsNullOrEmpty(tokenInfo.Token))
|
||||||
|
return AuthenticateResult.Fail("No token was provided.");
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var now = SystemClock.Instance.GetCurrentInstant();
|
||||||
|
|
||||||
|
// Validate token and extract session ID
|
||||||
|
AuthSession session;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
session = await ValidateToken(tokenInfo.Token);
|
||||||
|
}
|
||||||
|
catch (InvalidOperationException ex)
|
||||||
|
{
|
||||||
|
return AuthenticateResult.Fail(ex.Message);
|
||||||
|
}
|
||||||
|
catch (RpcException ex)
|
||||||
|
{
|
||||||
|
return AuthenticateResult.Fail($"Remote error: {ex.Status.StatusCode} - {ex.Status.Detail}");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store user and session in the HttpContext.Items for easy access in controllers
|
||||||
|
Context.Items["CurrentUser"] = session.Account;
|
||||||
|
Context.Items["CurrentSession"] = session;
|
||||||
|
Context.Items["CurrentTokenType"] = tokenInfo.Type.ToString();
|
||||||
|
|
||||||
|
// Create claims from the session
|
||||||
|
var claims = new List<Claim>
|
||||||
|
{
|
||||||
|
new("user_id", session.Account.Id),
|
||||||
|
new("session_id", session.Id),
|
||||||
|
new("token_type", tokenInfo.Type.ToString())
|
||||||
|
};
|
||||||
|
|
||||||
|
// return AuthenticateResult.Success(ticket);
|
||||||
|
return AuthenticateResult.NoResult();
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
return AuthenticateResult.Fail($"Authentication failed: {ex.Message}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task<AuthSession> ValidateToken(string token)
|
||||||
|
{
|
||||||
|
var resp = await auth.AuthenticateAsync(new AuthenticateRequest { Token = token });
|
||||||
|
if (!resp.Valid) throw new InvalidOperationException(resp.Message);
|
||||||
|
if (resp.Session == null) throw new InvalidOperationException("Session not found.");
|
||||||
|
return resp.Session;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static byte[] Base64UrlDecode(string base64Url)
|
||||||
|
{
|
||||||
|
var padded = base64Url
|
||||||
|
.Replace('-', '+')
|
||||||
|
.Replace('_', '/');
|
||||||
|
|
||||||
|
switch (padded.Length % 4)
|
||||||
|
{
|
||||||
|
case 2: padded += "=="; break;
|
||||||
|
case 3: padded += "="; break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return Convert.FromBase64String(padded);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static TokenInfo? _ExtractToken(HttpRequest request)
|
||||||
|
{
|
||||||
|
// Check for token in query parameters
|
||||||
|
if (request.Query.TryGetValue(AuthConstants.TokenQueryParamName, out var queryToken))
|
||||||
|
{
|
||||||
|
return new TokenInfo
|
||||||
|
{
|
||||||
|
Token = queryToken.ToString(),
|
||||||
|
Type = TokenType.AuthKey
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Check for token in Authorization header
|
||||||
|
var authHeader = request.Headers["Authorization"].ToString();
|
||||||
|
if (!string.IsNullOrEmpty(authHeader))
|
||||||
|
{
|
||||||
|
if (authHeader.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
var token = authHeader["Bearer ".Length..].Trim();
|
||||||
|
var parts = token.Split('.');
|
||||||
|
|
||||||
|
return new TokenInfo
|
||||||
|
{
|
||||||
|
Token = token,
|
||||||
|
Type = parts.Length == 3 ? TokenType.OidcKey : TokenType.AuthKey
|
||||||
|
};
|
||||||
|
}
|
||||||
|
else if (authHeader.StartsWith("AtField ", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
return new TokenInfo
|
||||||
|
{
|
||||||
|
Token = authHeader["AtField ".Length..].Trim(),
|
||||||
|
Type = TokenType.AuthKey
|
||||||
|
};
|
||||||
|
}
|
||||||
|
else if (authHeader.StartsWith("AkField ", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
return new TokenInfo
|
||||||
|
{
|
||||||
|
Token = authHeader["AkField ".Length..].Trim(),
|
||||||
|
Type = TokenType.ApiKey
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for token in cookies
|
||||||
|
if (request.Cookies.TryGetValue(AuthConstants.CookieTokenName, out var cookieToken))
|
||||||
|
{
|
||||||
|
return new TokenInfo
|
||||||
|
{
|
||||||
|
Token = cookieToken,
|
||||||
|
Type = cookieToken.Count(c => c == '.') == 2 ? TokenType.OidcKey : TokenType.AuthKey
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
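Downstream code reads the authenticated identity from HttpContext.Items rather than from claims, since the handler currently returns NoResult and never issues a ticket. A controller-side sketch, with the route and controller name assumed for illustration:

// Sketch: consuming what DysonTokenAuthHandler stashes in HttpContext.Items.
[ApiController]
[Route("api/example")]
public class ExampleController : ControllerBase
{
    [HttpGet("me")]
    public IActionResult Me()
    {
        if (HttpContext.Items["CurrentUser"] is not Account account)
            return Unauthorized();
        return Ok(account.Id);
    }
}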
35 DysonNetwork.Shared/Auth/Startup.cs (Normal file)
@@ -0,0 +1,35 @@
using dotnet_etcd.interfaces;
using DysonNetwork.Shared.Proto;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

namespace DysonNetwork.Shared.Auth;

public static class DysonAuthStartup
{
    public static IServiceCollection AddDysonAuth(
        this IServiceCollection services,
        IConfiguration configuration
    )
    {
        services.AddSingleton(sp =>
        {
            var etcdClient = sp.GetRequiredService<IEtcdClient>();
            var config = sp.GetRequiredService<IConfiguration>();
            var clientCertPath = config["ClientCert:Path"];
            var clientKeyPath = config["ClientKey:Path"];
            var clientCertPassword = config["ClientCert:Password"];

            return GrpcClientHelper.CreateAuthServiceClient(etcdClient, clientCertPath, clientKeyPath, clientCertPassword);
        });

        services.AddAuthentication(options =>
        {
            options.DefaultAuthenticateScheme = AuthConstants.SchemeName;
            options.DefaultChallengeScheme = AuthConstants.SchemeName;
        })
        .AddScheme<DysonTokenAuthOptions, DysonTokenAuthHandler>(AuthConstants.SchemeName, _ => { });

        return services;
    }
}
@@ -8,7 +8,7 @@

   <ItemGroup>
     <PackageReference Include="dotnet-etcd" Version="8.0.1" />
-    <PackageReference Include="Google.Api.CommonProtos" Version="2.17.0"/>
+    <PackageReference Include="Google.Api.CommonProtos" Version="2.17.0" />
     <PackageReference Include="Google.Protobuf" Version="3.31.1" />
     <PackageReference Include="Google.Protobuf.Tools" Version="3.31.1" />
     <PackageReference Include="Grpc" Version="2.46.6" />
@@ -17,21 +17,22 @@
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
-    <PackageReference Include="MaxMind.GeoIP2" Version="5.3.0"/>
+    <PackageReference Include="MaxMind.GeoIP2" Version="5.3.0" />
+    <PackageReference Include="Microsoft.AspNetCore.Authentication" Version="2.3.0" />
     <PackageReference Include="Microsoft.AspNetCore.Http" Version="2.3.0" />
     <PackageReference Include="Microsoft.AspNetCore.Http.Abstractions" Version="2.3.0" />
-    <PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.7"/>
+    <PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.7" />
-    <PackageReference Include="NetTopologySuite" Version="2.6.0"/>
+    <PackageReference Include="NetTopologySuite" Version="2.6.0" />
-    <PackageReference Include="Newtonsoft.Json" Version="13.0.3"/>
+    <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
     <PackageReference Include="NodaTime" Version="3.2.2" />
-    <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0"/>
+    <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0" />
-    <PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2"/>
+    <PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
-    <PackageReference Include="StackExchange.Redis" Version="2.8.41"/>
+    <PackageReference Include="StackExchange.Redis" Version="2.8.41" />
     <PackageReference Include="System.Net.Http" Version="4.3.4" />
   </ItemGroup>

   <ItemGroup>
-    <Protobuf Include="Proto\*.proto" ProtoRoot="Proto" GrpcServices="Both" AdditionalFileExtensions="Proto\"/>
+    <Protobuf Include="Proto\*.proto" ProtoRoot="Proto" GrpcServices="Both" AdditionalFileExtensions="Proto\" />
   </ItemGroup>

 </Project>
@@ -1,107 +0,0 @@
using Grpc.Core;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using DysonNetwork.Shared.Proto;
using System.Threading.Tasks;
using DysonNetwork.Shared.Auth;

namespace DysonNetwork.Shared.Middleware;

public class AuthMiddleware
{
    private readonly RequestDelegate _next;
    private readonly ILogger<AuthMiddleware> _logger;

    public AuthMiddleware(RequestDelegate next, ILogger<AuthMiddleware> logger)
    {
        _next = next;
        _logger = logger;
    }

    public async Task InvokeAsync(HttpContext context, AuthService.AuthServiceClient authServiceClient)
    {
        var tokenInfo = _ExtractToken(context.Request);

        if (tokenInfo == null || string.IsNullOrEmpty(tokenInfo.Token))
        {
            await _next(context);
            return;
        }

        try
        {
            var authSession = await authServiceClient.AuthenticateAsync(new AuthenticateRequest { Token = tokenInfo.Token });
            context.Items["AuthSession"] = authSession;
            context.Items["CurrentTokenType"] = tokenInfo.Type.ToString();
            // Assuming AuthSession contains Account information or can be retrieved
            // context.Items["CurrentUser"] = authSession.Account; // You might need to fetch Account separately if not embedded
        }
        catch (RpcException ex)
        {
            _logger.LogWarning(ex, "Authentication failed for token: {Token}", tokenInfo.Token);
            // Optionally, you can return an unauthorized response here
            // context.Response.StatusCode = StatusCodes.Status401Unauthorized;
            // return;
        }

        await _next(context);
    }

    private TokenInfo? _ExtractToken(HttpRequest request)
    {
        // Check for token in query parameters
        if (request.Query.TryGetValue(AuthConstants.TokenQueryParamName, out var queryToken))
        {
            return new TokenInfo
            {
                Token = queryToken.ToString(),
                Type = TokenType.AuthKey
            };
        }

        // Check for token in Authorization header
        var authHeader = request.Headers["Authorization"].ToString();
        if (!string.IsNullOrEmpty(authHeader))
        {
            if (authHeader.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
            {
                var token = authHeader["Bearer ".Length..].Trim();
                var parts = token.Split('.');

                return new TokenInfo
                {
                    Token = token,
                    Type = parts.Length == 3 ? TokenType.OidcKey : TokenType.AuthKey
                };
            }
            else if (authHeader.StartsWith("AtField ", StringComparison.OrdinalIgnoreCase))
            {
                return new TokenInfo
                {
                    Token = authHeader["AtField ".Length..].Trim(),
                    Type = TokenType.AuthKey
                };
            }
            else if (authHeader.StartsWith("AkField ", StringComparison.OrdinalIgnoreCase))
            {
                return new TokenInfo
                {
                    Token = authHeader["AkField ".Length..].Trim(),
                    Type = TokenType.ApiKey
                };
            }
        }

        // Check for token in cookies
        if (request.Cookies.TryGetValue(AuthConstants.CookieTokenName, out var cookieToken))
        {
            return new TokenInfo
            {
                Token = cookieToken,
                Type = cookieToken.Count(c => c == '.') == 2 ? TokenType.OidcKey : TokenType.AuthKey
            };
        }

        return null;
    }
}
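This per-request middleware is superseded by the shared authentication scheme registered in AddDysonAuth above. Before the change, downstream code read what the middleware had stashed in HttpContext.Items; a minimal sketch of that older consumption pattern (only the item keys come from the removed code, the endpoint itself is illustrative):

app.MapGet("/whoami", (HttpContext ctx) =>
{
    // Keys set by the removed AuthMiddleware.
    var session = ctx.Items["AuthSession"] as AuthSession;
    var tokenType = ctx.Items["CurrentTokenType"] as string;
    return session is null
        ? Results.Unauthorized()
        : Results.Ok(new { session.Id, session.AccountId, TokenType = tokenType });
});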
@@ -6,17 +6,21 @@ option csharp_namespace = "DysonNetwork.Shared.Proto";

 import "google/protobuf/timestamp.proto";
 import "google/protobuf/wrappers.proto";
+import "google/protobuf/struct.proto";
+
+import 'account.proto';

 // Represents a user session
 message AuthSession {
   string id = 1;
   google.protobuf.StringValue label = 2;
-  google.protobuf.Timestamp last_granted_at = 3;
+  optional google.protobuf.Timestamp last_granted_at = 3;
-  google.protobuf.Timestamp expired_at = 4;
+  optional google.protobuf.Timestamp expired_at = 4;
   string account_id = 5;
-  string challenge_id = 6;
+  Account account = 6;
-  AuthChallenge challenge = 7;
+  string challenge_id = 7;
-  google.protobuf.StringValue app_id = 8;
+  AuthChallenge challenge = 8;
+  google.protobuf.StringValue app_id = 9;
 }

 // Represents an authentication challenge
@@ -60,9 +64,111 @@ enum ChallengePlatform {
 }

 service AuthService {
-  rpc Authenticate(AuthenticateRequest) returns (AuthSession) {}
+  rpc Authenticate(AuthenticateRequest) returns (AuthenticateResponse) {}
 }

 message AuthenticateRequest {
   string token = 1;
 }
+
+message AuthenticateResponse {
+  bool valid = 1;
+  optional string message = 2;
+  optional AuthSession session = 3;
+}
+
+// Permission related messages and services
+message PermissionNode {
+  string id = 1;
+  string actor = 2;
+  string area = 3;
+  string key = 4;
+  google.protobuf.Value value = 5; // Using Value to represent generic type
+  google.protobuf.Timestamp expired_at = 6;
+  google.protobuf.Timestamp affected_at = 7;
+  string group_id = 8; // Optional group ID
+}
+
+message PermissionGroup {
+  string id = 1;
+  string name = 2;
+  google.protobuf.Timestamp created_at = 3;
+}
+
+message HasPermissionRequest {
+  string actor = 1;
+  string area = 2;
+  string key = 3;
+}
+
+message HasPermissionResponse {
+  bool has_permission = 1;
+}
+
+message GetPermissionRequest {
+  string actor = 1;
+  string area = 2;
+  string key = 3;
+}
+
+message GetPermissionResponse {
+  google.protobuf.Value value = 1; // Using Value to represent generic type
+}
+
+message AddPermissionNodeRequest {
+  string actor = 1;
+  string area = 2;
+  string key = 3;
+  google.protobuf.Value value = 4;
+  google.protobuf.Timestamp expired_at = 5;
+  google.protobuf.Timestamp affected_at = 6;
+}
+
+message AddPermissionNodeResponse {
+  PermissionNode node = 1;
+}
+
+message AddPermissionNodeToGroupRequest {
+  PermissionGroup group = 1;
+  string actor = 2;
+  string area = 3;
+  string key = 4;
+  google.protobuf.Value value = 5;
+  google.protobuf.Timestamp expired_at = 6;
+  google.protobuf.Timestamp affected_at = 7;
+}
+
+message AddPermissionNodeToGroupResponse {
+  PermissionNode node = 1;
+}
+
+message RemovePermissionNodeRequest {
+  string actor = 1;
+  string area = 2;
+  string key = 3;
+}
+
+message RemovePermissionNodeResponse {
+  bool success = 1;
+}
+
+message RemovePermissionNodeFromGroupRequest {
+  PermissionGroup group = 1;
+  string actor = 2;
+  string area = 3;
+  string key = 4;
+}
+
+message RemovePermissionNodeFromGroupResponse {
+  bool success = 1;
+}
+
+service PermissionService {
+  rpc HasPermission(HasPermissionRequest) returns (HasPermissionResponse) {}
+  rpc GetPermission(GetPermissionRequest) returns (GetPermissionResponse) {}
+  rpc AddPermissionNode(AddPermissionNodeRequest) returns (AddPermissionNodeResponse) {}
+  rpc AddPermissionNodeToGroup(AddPermissionNodeToGroupRequest) returns (AddPermissionNodeToGroupResponse) {}
+  rpc RemovePermissionNode(RemovePermissionNodeRequest) returns (RemovePermissionNodeResponse) {}
+  rpc RemovePermissionNodeFromGroup(RemovePermissionNodeFromGroupRequest) returns (RemovePermissionNodeFromGroupResponse) {}
+}
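Two usage notes on these proto changes. First, Authenticate now returns an explicit AuthenticateResponse rather than the session itself, so callers can distinguish a rejected token from a transport error. A minimal consumption sketch, using the AuthService.AuthServiceClient seen in the removed middleware; the surrounding variables (token, logger) are illustrative:

var response = await authServiceClient.AuthenticateAsync(new AuthenticateRequest { Token = token });
if (!response.Valid)
{
    // Message is optional and, when present, explains the rejection.
    logger.LogWarning("Token rejected: {Reason}", response.Message);
    return null;
}
// The session now carries the Account directly (field 6).
return response.Session;

Second, a permission check against the new PermissionService might look as follows; PermissionService.PermissionServiceClient is the conventional name Grpc.Tools generates for this service, and the channel plus the actor/area/key values are assumptions for illustration:

var permissions = new PermissionService.PermissionServiceClient(channel); // 'channel' assumed
var result = await permissions.HasPermissionAsync(new HasPermissionRequest
{
    Actor = "user:1234",   // illustrative
    Area = "drive",        // illustrative
    Key = "files.upload"   // illustrative
});
if (!result.HasPermission)
    throw new UnauthorizedAccessException("Missing permission.");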
45
DysonNetwork.Shared/Registry/RegistryHostedService.cs
Normal file
@@ -0,0 +1,45 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;

namespace DysonNetwork.Shared.Registry;

public class RegistryHostedService(
    ServiceRegistry serviceRegistry,
    IConfiguration configuration,
    ILogger<RegistryHostedService> logger
)
    : IHostedService
{
    public async Task StartAsync(CancellationToken cancellationToken)
    {
        var serviceName = configuration["Service:Name"];
        var serviceUrl = configuration["Service:Url"];

        if (string.IsNullOrEmpty(serviceUrl) || string.IsNullOrEmpty(serviceName))
        {
            logger.LogWarning("Service URL or Service Name was not configured. Skipping Etcd registration.");
            return;
        }

        logger.LogInformation("Registering service {ServiceName} at {ServiceUrl} with Etcd.", serviceName, serviceUrl);
        try
        {
            await serviceRegistry.RegisterService(serviceName, serviceUrl);
            logger.LogInformation("Service {ServiceName} registered successfully.", serviceName);
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Failed to register service {ServiceName} with Etcd.", serviceName);
        }
    }

    public async Task StopAsync(CancellationToken cancellationToken)
    {
        // The lease will expire automatically if the service stops ungracefully.
        var serviceName = configuration["Service:Name"];
        if (serviceName is not null)
            await serviceRegistry.UnregisterService(serviceName);
        logger.LogInformation("Service registration hosted service is stopping.");
    }
}
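The hosted service only registers the instance when both configuration keys are present. A minimal sketch of the two keys it reads, expressed as an in-memory configuration for illustration; the values are assumptions and would normally come from appsettings.json or environment variables:

// Hedged sketch: the two configuration keys RegistryHostedService reads at startup.
var config = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["Service:Name"] = "DysonNetwork.Drive",    // assumed example value
        ["Service:Url"] = "https://localhost:5001", // assumed example value
    })
    .Build();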
@@ -4,9 +4,9 @@ using Microsoft.Extensions.DependencyInjection;

 namespace DysonNetwork.Shared.Registry;

-public static class EtcdStartup
+public static class RegistryStartup
 {
-    public static IServiceCollection AddEtcdService(
+    public static IServiceCollection AddRegistryService(
         this IServiceCollection services,
         IConfiguration configuration
     )
@@ -17,6 +17,7 @@ public static class EtcdStartup
             options.UseInsecureChannel = configuration.GetValue<bool>("Etcd:Insecure");
         });
         services.AddSingleton<ServiceRegistry>();
+        services.AddHostedService<RegistryHostedService>();

         return services;
     }
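Since the extension method was renamed, existing callers must switch from AddEtcdService to AddRegistryService; the new call also wires up the hosted registration above. A one-line sketch, with builder being the usual WebApplicationBuilder (illustrative):

// Before: builder.Services.AddEtcdService(builder.Configuration);
builder.Services.AddRegistryService(builder.Configuration); // now also registers RegistryHostedService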
@@ -29,7 +29,7 @@
     <PackageReference Include="MailKit" Version="4.13.0" />
     <PackageReference Include="Markdig" Version="0.41.3" />
     <PackageReference Include="MaxMind.GeoIP2" Version="5.3.0" />
-    <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="9.0.4" />
+    <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="9.0.7" />
     <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.2" />
     <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.3">
         <PrivateAssets>all</PrivateAssets>
@@ -40,10 +40,10 @@
         <PrivateAssets>all</PrivateAssets>
         <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
-    <PackageReference Include="Minio" Version="6.0.4" />
+    <PackageReference Include="Minio" Version="6.0.5" />
-    <PackageReference Include="NetVips" Version="3.0.1" />
+    <PackageReference Include="NetVips" Version="3.1.0" />
-    <PackageReference Include="NetVips.Native.linux-x64" Version="8.16.1" />
+    <PackageReference Include="NetVips.Native.linux-x64" Version="8.17.1" />
-    <PackageReference Include="NetVips.Native.osx-arm64" Version="8.16.1" />
+    <PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.1" />
     <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
     <PackageReference Include="NodaTime" Version="3.2.2" />
     <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0" />
@@ -66,16 +66,16 @@
     <PackageReference Include="Quartz" Version="3.14.0" />
     <PackageReference Include="Quartz.AspNetCore" Version="3.14.0" />
     <PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
-    <PackageReference Include="SkiaSharp" Version="2.88.9" />
+    <PackageReference Include="SkiaSharp" Version="3.119.0" />
-    <PackageReference Include="SkiaSharp.NativeAssets.Linux" Version="2.88.9" />
+    <PackageReference Include="SkiaSharp.NativeAssets.Linux" Version="3.119.0" />
-    <PackageReference Include="SkiaSharp.NativeAssets.Linux.NoDependencies" Version="2.88.9" />
+    <PackageReference Include="SkiaSharp.NativeAssets.Linux.NoDependencies" Version="3.119.0" />
-    <PackageReference Include="SkiaSharp.NativeAssets.macOS" Version="2.88.9" />
+    <PackageReference Include="SkiaSharp.NativeAssets.macOS" Version="3.119.0" />
     <PackageReference Include="StackExchange.Redis" Version="2.8.41" />
     <PackageReference Include="StackExchange.Redis.Extensions.AspNetCore" Version="11.0.0" />
     <PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.3" />
     <PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="9.0.3" />
     <PackageReference Include="System.ServiceModel.Syndication" Version="9.0.6" />
-    <PackageReference Include="tusdotnet" Version="2.8.1" />
+    <PackageReference Include="tusdotnet" Version="2.10.0" />
   </ItemGroup>

   <ItemGroup>
@@ -13,6 +13,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DysonNetwork.Shared", "Dyso
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DysonNetwork.Pusher", "DysonNetwork.Pusher\DysonNetwork.Pusher.csproj", "{D5DAFB0D-487E-48EF-BA2F-C581C846F63B}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DysonNetwork.Drive", "DysonNetwork.Drive\DysonNetwork.Drive.csproj", "{8DE0B783-8852-494D-B90A-201ABBB71202}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -35,5 +37,9 @@ Global
 		{D5DAFB0D-487E-48EF-BA2F-C581C846F63B}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{D5DAFB0D-487E-48EF-BA2F-C581C846F63B}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{D5DAFB0D-487E-48EF-BA2F-C581C846F63B}.Release|Any CPU.Build.0 = Release|Any CPU
+		{8DE0B783-8852-494D-B90A-201ABBB71202}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{8DE0B783-8852-494D-B90A-201ABBB71202}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{8DE0B783-8852-494D-B90A-201ABBB71202}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{8DE0B783-8852-494D-B90A-201ABBB71202}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 EndGlobal
@@ -3,7 +3,9 @@
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AAny_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F331aca3f6f414013b09964063341351379060_003F67_003F87f868e3_003FAny_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AApnSender_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F6aadc2cf048f477d8636fb2def7b73648200_003Fc5_003F2a1973a9_003FApnSender_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AApnSettings_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F6aadc2cf048f477d8636fb2def7b73648200_003F0f_003F51443844_003FApnSettings_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
+<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AAuthenticationHandler_00601_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F1f1354e4dbf943ecb04840af5ff9a527fa20_003F5d_003F1fb111f6_003FAuthenticationHandler_00601_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AAuthenticationMiddleware_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003Fe49de78932194d52a02b07486c6d023a24600_003F2f_003F7ab1cc57_003FAuthenticationMiddleware_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
+<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AAuthenticationSchemeOptions_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F9b24a56e61ae4d86a9e8ba13482a2db924600_003F5b_003F9e854504_003FAuthenticationSchemeOptions_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AAuthenticationSchemeOptions_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003Fe49de78932194d52a02b07486c6d023a24600_003Ff0_003F595b6eda_003FAuthenticationSchemeOptions_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AAuthorizationAppBuilderExtensions_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2024_002E3_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F2ff26593f91746d7a53418a46dc419d1f200_003F4b_003F56550da2_003FAuthorizationAppBuilderExtensions_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
 <s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003ABlurHashEncoder_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FSourcesCache_003Fb87f853683828cb934127af9a42b22cf516412af1e61ae2ff4935ae82aff_003FBlurHashEncoder_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>