♻️ Extract the Storage service to DysonNetwork.Drive microservice
@@ -1,180 +0,0 @@
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Permission;
using DysonNetwork.Sphere.Realm;
using DysonNetwork.Sphere.Sticker;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Query;

namespace DysonNetwork.Sphere;

public class AppDatabase(
DbContextOptions<AppDatabase> options,
IConfiguration configuration
) : DbContext(options)
{
public DbSet<CloudFile> Files { get; set; }
public DbSet<CloudFileReference> FileReferences { get; set; }

public DbSet<Common.Models.Publisher> Publishers { get; set; }

public DbSet<PublisherFeature> PublisherFeatures { get; set; }

public DbSet<Common.Models.Post> Posts { get; set; }
public DbSet<PostReaction> PostReactions { get; set; }
public DbSet<PostTag> PostTags { get; set; }
public DbSet<PostCategory> PostCategories { get; set; }
public DbSet<PostCollection> PostCollections { get; set; }

public DbSet<Common.Models.Realm> Realms { get; set; }
public DbSet<RealmMember> RealmMembers { get; set; }
public DbSet<Tag> Tags { get; set; }
public DbSet<RealmTag> RealmTags { get; set; }

public DbSet<ChatRoom> ChatRooms { get; set; }
public DbSet<ChatMember> ChatMembers { get; set; }
public DbSet<Message> ChatMessages { get; set; }
public DbSet<RealtimeCall> ChatRealtimeCall { get; set; }
public DbSet<MessageReaction> ChatReactions { get; set; }

public DbSet<Sticker.Sticker> Stickers { get; set; }
public DbSet<StickerPack> StickerPacks { get; set; }

public DbSet<Common.Models.Wallet> Wallets { get; set; }
public DbSet<WalletPocket> WalletPockets { get; set; }
public DbSet<Order> PaymentOrders { get; set; }
public DbSet<Transaction> PaymentTransactions { get; set; }

public DbSet<CustomApp> CustomApps { get; set; }
public DbSet<CustomAppSecret> CustomAppSecrets { get; set; }

public DbSet<Subscription> WalletSubscriptions { get; set; }
public DbSet<Coupon> WalletCoupons { get; set; }
public DbSet<Connection.WebReader.WebArticle> WebArticles { get; set; }
public DbSet<Connection.WebReader.WebFeed> WebFeeds { get; set; }

protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
optionsBuilder.UseNpgsql(
configuration.GetConnectionString("App"),
opt => opt
.ConfigureDataSource(optSource => optSource.EnableDynamicJson())
.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery)
.UseNetTopologySuite()
.UseNodaTime()
).UseSnakeCaseNamingConvention();

base.OnConfiguring(optionsBuilder);
}

protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);

modelBuilder.Entity<PublisherMember>()
.HasKey(pm => new { pm.PublisherId, pm.AccountId });
modelBuilder.Entity<PublisherMember>()
.HasOne(pm => pm.Publisher)
.WithMany(p => p.Members)
.HasForeignKey(pm => pm.PublisherId)
.OnDelete(DeleteBehavior.Cascade);

modelBuilder.Entity<Common.Models.Post>()
.HasGeneratedTsVectorColumn(p => p.SearchVector, "simple", p => new { p.Title, p.Description, p.Content })
.HasIndex(p => p.SearchVector)
.HasMethod("GIN");

modelBuilder.Entity<CustomAppSecret>()
.HasIndex(s => s.Secret)
.IsUnique();

modelBuilder.Entity<CustomApp>()
.HasMany(c => c.Secrets)
.WithOne(s => s.App)
.HasForeignKey(s => s.AppId)
.OnDelete(DeleteBehavior.Cascade);

modelBuilder.Entity<Common.Models.Post>()
.HasOne(p => p.RepliedPost)
.WithMany()
.HasForeignKey(p => p.RepliedPostId)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Common.Models.Post>()
.HasOne(p => p.ForwardedPost)
.WithMany()
.HasForeignKey(p => p.ForwardedPostId)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Common.Models.Post>()
.HasMany(p => p.Tags)
.WithMany(t => t.Posts)
.UsingEntity(j => j.ToTable("post_tag_links"));
modelBuilder.Entity<Common.Models.Post>()
.HasMany(p => p.Categories)
.WithMany(c => c.Posts)
.UsingEntity(j => j.ToTable("post_category_links"));
modelBuilder.Entity<Common.Models.Post>()
.HasMany(p => p.Collections)
.WithMany(c => c.Posts)
.UsingEntity(j => j.ToTable("post_collection_links"));

modelBuilder.Entity<ChatMember>()
.HasKey(pm => new { pm.Id });
modelBuilder.Entity<ChatMember>()
.HasAlternateKey(pm => new { pm.ChatRoomId, pm.AccountId });

modelBuilder.Entity<Message>()
.HasOne(m => m.ForwardedMessage)
.WithMany()
.HasForeignKey(m => m.ForwardedMessageId)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Message>()
.HasOne(m => m.RepliedMessage)
.WithMany()
.HasForeignKey(m => m.RepliedMessageId)
.OnDelete(DeleteBehavior.Restrict);

modelBuilder.Entity<Connection.WebReader.WebFeed>()
.HasIndex(f => f.Url)
.IsUnique();

modelBuilder.Entity<Connection.WebReader.WebArticle>()
.HasIndex(a => a.Url)
.IsUnique();

public static class OptionalQueryExtensions
{
public static IQueryable<T> If<T>(
this IQueryable<T> source,
bool condition,
Func<IQueryable<T>, IQueryable<T>> transform
)
{
return condition ? transform(source) : source;
}

public static IQueryable<T> If<T, TP>(
this IIncludableQueryable<T, TP> source,
bool condition,
Func<IIncludableQueryable<T, TP>, IQueryable<T>> transform
)
where T : class
{
return condition ? transform(source) : source;
}

public static IQueryable<T> If<T, TP>(
this IIncludableQueryable<T, IEnumerable<TP>> source,
bool condition,
Func<IIncludableQueryable<T, IEnumerable<TP>>, IQueryable<T>> transform
)
where T : class
{
return condition ? transform(source) : source;
}
}
@@ -1,6 +1,7 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using System.ComponentModel.DataAnnotations;
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Localization;
using DysonNetwork.Sphere.Permission;
@@ -16,7 +17,7 @@ namespace DysonNetwork.Sphere.Chat;
[Route("/chat")]
public class ChatRoomController(
AppDatabase db,
FileReferenceService fileRefService,
IFileReferenceServiceClient fileRefService,
ChatRoomService crs,
RealmService rs,
ActionLogService als,
@@ -272,12 +273,12 @@ public class ChatRoomController(
if (picture is null) return BadRequest("Invalid picture id, unable to find the file on cloud.");

// Remove old references for pictures
await fileRefService.DeleteResourceReferencesAsync(chatRoom.ResourceIdentifier, "chat.room.picture");
await fileRefService.DeleteResourceReferencesAsync(chatRoom.ResourceIdentifier, "chat-room.picture");

// Add a new reference
await fileRefService.CreateReferenceAsync(
picture.Id,
"chat.room.picture",
picture.Id.ToString(),
"chat-room.picture",
chatRoom.ResourceIdentifier
);

@@ -290,12 +291,12 @@ public class ChatRoomController(
if (background is null) return BadRequest("Invalid background id, unable to find the file on cloud.");

// Remove old references for backgrounds
await fileRefService.DeleteResourceReferencesAsync(chatRoom.ResourceIdentifier, "chat.room.background");
await fileRefService.DeleteResourceReferencesAsync(chatRoom.ResourceIdentifier, "chat-room.background");

// Add a new reference
await fileRefService.CreateReferenceAsync(
background.Id,
"chat.room.background",
background.Id.ToString(),
"chat-room.background",
chatRoom.ResourceIdentifier
);

@@ -1,5 +1,6 @@
using System.Text.RegularExpressions;
using DysonNetwork.Pass.Features.Account;
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Chat.Realtime;
using DysonNetwork.Sphere.Connection;
@@ -11,7 +12,7 @@ namespace DysonNetwork.Sphere.Chat;

public partial class ChatService(
AppDatabase db,
FileReferenceService fileRefService,
IFileReferenceServiceClient fileRefService,
IServiceScopeFactory scopeFactory,
IRealtimeService realtime,
ILogger<ChatService> logger
@@ -162,10 +163,9 @@ public partial class ChatService(
foreach (var file in files)
{
await fileRefService.CreateReferenceAsync(
file.Id,
file.Id.ToString(),
ChatFileUsageIdentifier,
messageResourceId,
duration: Duration.FromDays(30)
messageResourceId
);
}
}

DysonNetwork.Sphere/Data/AppDatabase.cs (new file, 59 lines)
@@ -0,0 +1,59 @@
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Realm;
using DysonNetwork.Sphere.Sticker;
using DysonNetwork.Sphere.Connection;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Query;
using Microsoft.Extensions.Configuration;

namespace DysonNetwork.Sphere.Data;

public class AppDatabase : DbContext
{
private readonly IConfiguration _configuration;

public AppDatabase(DbContextOptions<AppDatabase> options, IConfiguration configuration)
: base(options)
{
_configuration = configuration;
}

public DbSet<CloudFile> Files { get; set; } = null!;
public DbSet<CloudFileReference> FileReferences { get; set; } = null!;
public DbSet<Common.Models.Publisher> Publishers { get; set; } = null!;
public DbSet<PublisherFeature> PublisherFeatures { get; set; } = null!;
public DbSet<Common.Models.Post> Posts { get; set; } = null!;
public DbSet<PostReaction> PostReactions { get; set; } = null!;
public DbSet<PostTag> PostTags { get; set; } = null!;
public DbSet<PostCategory> PostCategories { get; set; } = null!;
public DbSet<PostCollection> PostCollections { get; set; } = null!;
public DbSet<Common.Models.Realm> Realms { get; set; } = null!;
public DbSet<RealmMember> RealmMembers { get; set; } = null!;
public DbSet<Tag> Tags { get; set; } = null!;
public DbSet<RealmTag> RealmTags { get; set; } = null!;
public DbSet<ChatRoom> ChatRooms { get; set; } = null!;
public DbSet<ChatMember> ChatMembers { get; set; } = null!;
public DbSet<Message> ChatMessages { get; set; } = null!;
public DbSet<RealtimeCall> ChatRealtimeCall { get; set; } = null!;
public DbSet<MessageReaction> ChatReactions { get; set; } = null!;
public DbSet<Sticker.Sticker> Stickers { get; set; } = null!;
public DbSet<StickerPack> StickerPacks { get; set; } = null!;
public DbSet<Common.Models.Wallet> Wallets { get; set; } = null!;
public DbSet<WalletPocket> WalletPockets { get; set; } = null!;
public DbSet<Order> PaymentOrders { get; set; } = null!;
public DbSet<Transaction> PaymentTransactions { get; set; } = null!;
public DbSet<CustomApp> CustomApps { get; set; } = null!;
public DbSet<CustomAppSecret> CustomAppSecrets { get; set; } = null!;
public DbSet<Subscription> WalletSubscriptions { get; set; } = null!;
// TODO: Fix Connection type - no Connection class found in DysonNetwork.Sphere.Connection
// public DbSet<Connection> Connections { get; set; } = null!;
// public DbSet<Connection> Followers { get; set; } = null!;

protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);

// Configure the database schema and relationships here
// This will be moved from the original AppDatabase class
}
}
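Reviewer note: the relocated context above drops the old OnConfiguring override, so the Npgsql, NodaTime and snake_case setup now has to be supplied where the context is registered. A minimal sketch of that registration, mirroring the ServiceCollectionExtensions hunk later in this commit (the connection string name and option calls are taken from that hunk, nothing new is assumed):

services.AddDbContext<AppDatabase>(options =>
    options.UseNpgsql(
        configuration.GetConnectionString("DefaultConnection"),
        o => o.UseNodaTime()
    )
    .UseSnakeCaseNamingConvention()
);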
@@ -1,10 +1,11 @@
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Sphere.Publisher;
using DysonNetwork.Sphere.Storage;
using Microsoft.EntityFrameworkCore;

namespace DysonNetwork.Sphere.Developer;

public class CustomAppService(AppDatabase db, FileReferenceService fileRefService)
public class CustomAppService(AppDatabase db, IFileReferenceServiceClient fileRefService)
{
public async Task<CustomApp?> CreateAppAsync(
Publisher.Publisher pub,
@@ -32,7 +33,7 @@ public class CustomAppService(AppDatabase db, FileReferenceService fileRefServic

// Create a new reference
await fileRefService.CreateReferenceAsync(
picture.Id,
picture.Id.ToString(),
"custom-apps.picture",
app.ResourceIdentifier
);
@@ -101,9 +102,9 @@ public class CustomAppService(AppDatabase db, FileReferenceService fileRefServic

// Create a new reference
await fileRefService.CreateReferenceAsync(
picture.Id,
picture.Id.ToString(),
"custom-apps.picture",
app.ResourceIdentifier
app.ResourceIdentifier
);
}

@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk.Web">

<PropertyGroup>
@@ -39,10 +39,10 @@
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Minio" Version="6.0.4" />
<PackageReference Include="NetVips" Version="3.0.1" />
<PackageReference Include="NetVips.Native.linux-x64" Version="8.16.1" />
<PackageReference Include="NetVips.Native.osx-arm64" Version="8.16.1" />
<PackageReference Include="Minio" Version="6.0.5" />
<PackageReference Include="NetVips" Version="3.1.0" />
<PackageReference Include="NetVips.Native.linux-x64" Version="8.17.0" />
<PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="NodaTime" Version="3.2.2" />
<PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0" />
@@ -76,10 +76,10 @@
<PackageReference Include="SkiaSharp.NativeAssets.macOS" Version="2.88.9" />
<PackageReference Include="StackExchange.Redis" Version="2.8.41" />
<PackageReference Include="StackExchange.Redis.Extensions.AspNetCore" Version="11.0.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="8.1.0" />
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="8.1.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="9.0.1" />
<PackageReference Include="System.ServiceModel.Syndication" Version="9.0.6" />
<PackageReference Include="tusdotnet" Version="2.8.1" />
<PackageReference Include="tusdotnet" Version="2.10.0" />
</ItemGroup>

<ItemGroup>
@@ -89,16 +89,21 @@
</ItemGroup>

<ItemGroup>
<Folder Include="Migrations\" />
<Folder Include="Discovery\" />
<Folder Include="Services\PassClient\" />
<ProjectReference Include="..\DysonNetwork.Pass\DysonNetwork.Pass.csproj" />
<ProjectReference Include="..\DysonNetwork.Drive\DysonNetwork.Drive.csproj" />
</ItemGroup>

<ItemGroup>
<Folder Include="Migrations\" />
<Folder Include="Discovery\" />
<Folder Include="Services\PassClient\" />

</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\DysonNetwork.Drive\DysonNetwork.Drive.csproj" />
<ProjectReference Include="..\DysonNetwork.Common\DysonNetwork.Common.csproj" />
</ItemGroup>

<ItemGroup>
<EmbeddedResource Update="Resources\SharedResource.resx">
<Generator>ResXFileCodeGenerator</Generator>

@@ -1,4 +1,5 @@
using System.Text.RegularExpressions;
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Common.Services;
using DysonNetwork.Sphere.Connection.WebReader;
@@ -13,7 +14,7 @@ namespace DysonNetwork.Sphere.Post;

public partial class PostService(
AppDatabase db,
FileReferenceService fileRefService,
IFileReferenceServiceClient fileRefService,
IStringLocalizer<NotificationResource> localizer,
IServiceScopeFactory factory,
FlushBufferService flushBuffer,
@@ -135,9 +136,9 @@ public partial class PostService(
foreach (var file in post.Attachments)
{
await fileRefService.CreateReferenceAsync(
file.Id,
file.Id.ToString(),
PostFileUsageIdentifier,
postResourceId
post.ResourceIdentifier
);
}
}
@@ -218,12 +219,18 @@ public partial class PostService(
{
var postResourceId = $"post:{post.Id}";

// Update resource references using the new file list
await fileRefService.UpdateResourceFilesAsync(
postResourceId,
attachments,
PostFileUsageIdentifier
);
// Delete existing references for this resource and usage
await fileRefService.DeleteResourceReferencesAsync(post.ResourceIdentifier, PostFileUsageIdentifier);

// Create new references for each file
foreach (var fileId in attachments)
{
await fileRefService.CreateReferenceAsync(
fileId.ToString(),
PostFileUsageIdentifier,
post.ResourceIdentifier
);
}

// Update post attachments by getting files from database
var files = await db.Files

@@ -1,4 +1,5 @@
using System.ComponentModel.DataAnnotations;
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Permission;
using DysonNetwork.Sphere.Realm;
@@ -15,7 +16,7 @@ namespace DysonNetwork.Sphere.Publisher;
public class PublisherController(
AppDatabase db,
PublisherService ps,
FileReferenceService fileRefService,
IFileReferenceServiceClient fileRefService,
ActionLogService als)
: ControllerBase
{
@@ -362,7 +363,7 @@ public class PublisherController(

// Create a new reference
await fileRefService.CreateReferenceAsync(
picture.Id,
picture.Id.ToString(),
"publisher.picture",
publisher.ResourceIdentifier
);
@@ -384,7 +385,7 @@ public class PublisherController(

// Create a new reference
await fileRefService.CreateReferenceAsync(
background.Id,
background.Id.ToString(),
"publisher.background",
publisher.ResourceIdentifier
);

@@ -1,3 +1,4 @@
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Common.Services;
using DysonNetwork.Sphere.Post;
@@ -8,7 +9,7 @@ using NodaTime;

namespace DysonNetwork.Sphere.Publisher;

public class PublisherService(AppDatabase db, FileReferenceService fileRefService, ICacheService cache)
public class PublisherService(AppDatabase db, IFileReferenceServiceClient fileRefService, ICacheService cache)
{
public async Task<Publisher?> GetPublisherByName(string name)
{

@@ -1,4 +1,5 @@
using System.ComponentModel.DataAnnotations;
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Storage;
using Microsoft.AspNetCore.Mvc;
@@ -13,7 +14,7 @@ namespace DysonNetwork.Sphere.Realm;
public class RealmController(
AppDatabase db,
RealmService rs,
FileReferenceService fileRefService,
IFileReferenceServiceClient fileRefService,
RelationshipService rels,
ActionLogService als,
AccountEventService aes
@@ -424,7 +425,7 @@ public class RealmController(

// Create a new reference
await fileRefService.CreateReferenceAsync(
picture.Id,
picture.Id.ToString(),
"realm.picture",
realm.ResourceIdentifier
);
@@ -445,7 +446,7 @@ public class RealmController(

// Create a new reference
await fileRefService.CreateReferenceAsync(
background.Id,
background.Id.ToString(),
"realm.background",
realm.ResourceIdentifier
);

@@ -27,6 +27,11 @@ using DysonNetwork.Sphere.Discovery;
using DysonNetwork.Sphere.Safety;
using DysonNetwork.Sphere.Wallet.PaymentHandlers;
using tusdotnet.Stores;
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Drive.Clients;
using DysonNetwork.Sphere.Data;
using Npgsql.EntityFrameworkCore.PostgreSQL;
using Microsoft.EntityFrameworkCore;

namespace DysonNetwork.Sphere.Startup;

@@ -36,7 +41,13 @@ public static class ServiceCollectionExtensions
{
services.AddLocalization(options => options.ResourcesPath = "Resources");

services.AddDbContext<AppDatabase>();
services.AddDbContext<AppDatabase>(options =>
options.UseNpgsql(
configuration.GetConnectionString("DefaultConnection"),
o => o.UseNodaTime()
)
.UseSnakeCaseNamingConvention()
);
services.AddSingleton<IConnectionMultiplexer>(_ =>
{
var connection = configuration.GetConnectionString("FastRetrieve")!;
@@ -49,6 +60,19 @@ public static class ServiceCollectionExtensions
services.AddHttpClient<PassClient>();
services.AddScoped<PassClient>();

// Register HTTP clients for Drive microservice
services.AddHttpClient<IFileServiceClient, FileServiceClient>(client =>
{
var baseUrl = configuration["DriveService:BaseUrl"] ?? throw new InvalidOperationException("DriveService:BaseUrl is not configured");
client.BaseAddress = new Uri(baseUrl);
});

services.AddHttpClient<IFileReferenceServiceClient, FileReferenceServiceClient>(client =>
{
var baseUrl = configuration["DriveService:BaseUrl"] ?? throw new InvalidOperationException("DriveService:BaseUrl is not configured");
client.BaseAddress = new Uri(baseUrl);
});

// Register OIDC services

@@ -181,7 +205,6 @@ public static class ServiceCollectionExtensions

services.AddScoped<FileService>();
services.AddScoped<FileReferenceService>();
services.AddScoped<FileReferenceMigrationService>();
services.AddScoped<PublisherService>();
services.AddScoped<PublisherSubscriptionService>();

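Reviewer note: the typed clients registered above (FileServiceClient, FileReferenceServiceClient) are not included in this commit. A minimal sketch of what the reference client could look like on the Sphere side, assuming the Drive service exposes plain REST endpoints; only two members are shown, and the routes and payload shape are illustrative rather than taken from the source:

using System.Net.Http.Json;
using DysonNetwork.Common.Interfaces;

namespace DysonNetwork.Drive.Clients;

public class FileReferenceServiceClient(HttpClient http) : IFileReferenceServiceClient
{
    public async Task CreateReferenceAsync(string fileId, string usage, string resourceId)
    {
        // POST the new reference to the Drive microservice (route is hypothetical)
        var resp = await http.PostAsJsonAsync("/api/files/references", new { fileId, usage, resourceId });
        resp.EnsureSuccessStatusCode();
    }

    public async Task DeleteResourceReferencesAsync(string resourceId, string usage)
    {
        // Remove all references for a resource/usage pair (route is hypothetical)
        var resp = await http.DeleteAsync(
            $"/api/files/references?resourceId={Uri.EscapeDataString(resourceId)}&usage={Uri.EscapeDataString(usage)}");
        resp.EnsureSuccessStatusCode();
    }
}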
@@ -1,10 +1,13 @@
using DysonNetwork.Common.Interfaces;
using DysonNetwork.Common.Models;
using DysonNetwork.Common.Services;
using DysonNetwork.Sphere.Storage;
using Microsoft.EntityFrameworkCore;
using NodaTime;

namespace DysonNetwork.Sphere.Sticker;

public class StickerService(AppDatabase db, FileService fs, FileReferenceService fileRefService, ICacheService cache)
public class StickerService(AppDatabase db, IFileReferenceServiceClient fileRefService, ICacheService cache)
{
public const string StickerFileUsageIdentifier = "sticker";

@@ -19,9 +22,9 @@ public class StickerService(AppDatabase db, FileService fs, FileReferenceService

var stickerResourceId = $"sticker:{sticker.Id}";
await fileRefService.CreateReferenceAsync(
sticker.Image.Id,
StickerFileUsageIdentifier,
stickerResourceId
fileId: sticker.Image.Id.ToString(),
usage: StickerFileUsageIdentifier,
resourceId: stickerResourceId
);

return sticker;
@@ -34,20 +37,23 @@ public class StickerService(AppDatabase db, FileService fs, FileReferenceService
var stickerResourceId = $"sticker:{sticker.Id}";

// Delete old references
var oldRefs =
await fileRefService.GetResourceReferencesAsync(stickerResourceId, StickerFileUsageIdentifier);
var oldRefs = await fileRefService.GetResourceReferencesAsync(
resourceId: stickerResourceId,
usage: StickerFileUsageIdentifier
);

foreach (var oldRef in oldRefs)
{
await fileRefService.DeleteReferenceAsync(oldRef.Id);
await fileRefService.DeleteReferenceAsync(oldRef.Id.ToString());
}

sticker.Image = newImage.ToReferenceObject();

// Create new reference
await fileRefService.CreateReferenceAsync(
newImage.Id,
StickerFileUsageIdentifier,
stickerResourceId
fileId: newImage.Id.ToString(),
usage: StickerFileUsageIdentifier,
resourceId: stickerResourceId
);
}

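Reviewer note: IFileReferenceServiceClient itself lives in DysonNetwork.Common.Interfaces and is not part of this diff. Inferred purely from the call sites changed in this commit, it needs at least the following surface; parameter names match the named arguments used in StickerService above, and the real definition may differ:

public interface IFileReferenceServiceClient
{
    Task CreateReferenceAsync(string fileId, string usage, string resourceId);
    Task DeleteResourceReferencesAsync(string resourceId, string usage);
    Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId, string usage);
    Task DeleteReferenceAsync(string referenceId);
}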
@@ -1,93 +0,0 @@
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

namespace DysonNetwork.Sphere.Storage;

public class CloudFileUnusedRecyclingJob(
AppDatabase db,
FileReferenceService fileRefService,
ILogger<CloudFileUnusedRecyclingJob> logger
)
: IJob
{
public async Task Execute(IJobExecutionContext context)
{
logger.LogInformation("Marking unused cloud files...");

var now = SystemClock.Instance.GetCurrentInstant();
const int batchSize = 1000; // Process larger batches for efficiency
var processedCount = 0;
var markedCount = 0;
var totalFiles = await db.Files.Where(f => !f.IsMarkedRecycle).CountAsync();

logger.LogInformation("Found {TotalFiles} files to check for unused status", totalFiles);

// Define a timestamp to limit the age of files we're processing in this run
// This spreads the processing across multiple job runs for very large databases
var ageThreshold = now - Duration.FromDays(30); // Process files up to 90 days old in this run

// Instead of loading all files at once, use pagination
var hasMoreFiles = true;
string? lastProcessedId = null;

while (hasMoreFiles)
{
// Query for the next batch of files using keyset pagination
var filesQuery = db.Files
.Where(f => !f.IsMarkedRecycle)
.Where(f => f.CreatedAt <= ageThreshold); // Only process older files first

if (lastProcessedId != null)
{
filesQuery = filesQuery.Where(f => string.Compare(f.Id, lastProcessedId) > 0);
}

var fileBatch = await filesQuery
.OrderBy(f => f.Id) // Ensure consistent ordering for pagination
.Take(batchSize)
.Select(f => f.Id)
.ToListAsync();

if (fileBatch.Count == 0)
{
hasMoreFiles = false;
continue;
}

processedCount += fileBatch.Count;
lastProcessedId = fileBatch.Last();

// Get all relevant file references for this batch
var fileReferences = await fileRefService.GetReferencesAsync(fileBatch);

// Filter to find files that have no references or all expired references
var filesToMark = fileBatch.Where(fileId =>
!fileReferences.TryGetValue(fileId, out var references) ||
references.Count == 0 ||
references.All(r => r.ExpiredAt.HasValue && r.ExpiredAt.Value <= now)
).ToList();

if (filesToMark.Count > 0)
{
// Use a bulk update for better performance - mark all qualifying files at once
var updateCount = await db.Files
.Where(f => filesToMark.Contains(f.Id))
.ExecuteUpdateAsync(setter => setter
.SetProperty(f => f.IsMarkedRecycle, true));

markedCount += updateCount;
}

// Log progress periodically
if (processedCount % 10000 == 0 || !hasMoreFiles)
{
logger.LogInformation(
"Progress: processed {ProcessedCount}/{TotalFiles} files, marked {MarkedCount} for recycling",
processedCount, totalFiles, markedCount);
}
}

logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount);
}
}
@ -1,119 +0,0 @@
|
||||
using DysonNetwork.Sphere.Permission;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Minio.DataModel.Args;
|
||||
|
||||
namespace DysonNetwork.Sphere.Storage;
|
||||
|
||||
[ApiController]
|
||||
[Route("/files")]
|
||||
public class FileController(
|
||||
AppDatabase db,
|
||||
FileService fs,
|
||||
IConfiguration configuration,
|
||||
IWebHostEnvironment env,
|
||||
FileReferenceMigrationService rms
|
||||
) : ControllerBase
|
||||
{
|
||||
[HttpGet("{id}")]
|
||||
public async Task<ActionResult> OpenFile(string id, [FromQuery] bool original = false)
|
||||
{
|
||||
var file = await fs.GetFileAsync(id);
|
||||
if (file is null) return NotFound();
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl);
|
||||
|
||||
if (file.UploadedTo is null)
|
||||
{
|
||||
var tusStorePath = configuration.GetValue<string>("Tus:StorePath")!;
|
||||
var filePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
|
||||
if (!System.IO.File.Exists(filePath)) return new NotFoundResult();
|
||||
return PhysicalFile(filePath, file.MimeType ?? "application/octet-stream", file.Name);
|
||||
}
|
||||
|
||||
var dest = fs.GetRemoteStorageConfig(file.UploadedTo);
|
||||
var fileName = string.IsNullOrWhiteSpace(file.StorageId) ? file.Id : file.StorageId;
|
||||
|
||||
if (!original && file.HasCompression)
|
||||
fileName += ".compressed";
|
||||
|
||||
if (dest.ImageProxy is not null && (file.MimeType?.StartsWith("image/") ?? false))
|
||||
{
|
||||
var proxyUrl = dest.ImageProxy;
|
||||
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
|
||||
var fullUri = new Uri(baseUri, fileName);
|
||||
return Redirect(fullUri.ToString());
|
||||
}
|
||||
|
||||
if (dest.AccessProxy is not null)
|
||||
{
|
||||
var proxyUrl = dest.AccessProxy;
|
||||
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
|
||||
var fullUri = new Uri(baseUri, fileName);
|
||||
return Redirect(fullUri.ToString());
|
||||
}
|
||||
|
||||
if (dest.EnableSigned)
|
||||
{
|
||||
var client = fs.CreateMinioClient(dest);
|
||||
if (client is null)
|
||||
return BadRequest(
|
||||
"Failed to configure client for remote destination, file got an invalid storage remote.");
|
||||
|
||||
var bucket = dest.Bucket;
|
||||
var openUrl = await client.PresignedGetObjectAsync(
|
||||
new PresignedGetObjectArgs()
|
||||
.WithBucket(bucket)
|
||||
.WithObject(fileName)
|
||||
.WithExpiry(3600)
|
||||
);
|
||||
|
||||
return Redirect(openUrl);
|
||||
}
|
||||
|
||||
// Fallback redirect to the S3 endpoint (public read)
|
||||
var protocol = dest.EnableSsl ? "https" : "http";
|
||||
// Use the path bucket lookup mode
|
||||
return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}");
|
||||
}
|
||||
|
||||
[HttpGet("{id}/info")]
|
||||
public async Task<ActionResult<CloudFile>> GetFileInfo(string id)
|
||||
{
|
||||
var file = await db.Files.FindAsync(id);
|
||||
if (file is null) return NotFound();
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
[HttpDelete("{id}")]
|
||||
public async Task<ActionResult> DeleteFile(string id)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account.Account currentUser) return Unauthorized();
|
||||
var userId = currentUser.Id;
|
||||
|
||||
var file = await db.Files
|
||||
.Where(e => e.Id == id)
|
||||
.Where(e => e.Account.Id == userId)
|
||||
.FirstOrDefaultAsync();
|
||||
if (file is null) return NotFound();
|
||||
|
||||
await fs.DeleteFileAsync(file);
|
||||
|
||||
db.Files.Remove(file);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return NoContent();
|
||||
}
|
||||
|
||||
[HttpPost("/maintenance/migrateReferences")]
|
||||
[Authorize]
|
||||
[RequiredPermission("maintenance", "files.references")]
|
||||
public async Task<ActionResult> MigrateFileReferences()
|
||||
{
|
||||
await rms.ScanAndMigrateReferences();
|
||||
return Ok();
|
||||
}
|
||||
}
|
@ -1,66 +0,0 @@
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NodaTime;
|
||||
using Quartz;
|
||||
|
||||
namespace DysonNetwork.Sphere.Storage;
|
||||
|
||||
/// <summary>
|
||||
/// Job responsible for cleaning up expired file references
|
||||
/// </summary>
|
||||
public class FileExpirationJob(AppDatabase db, FileService fileService, ILogger<FileExpirationJob> logger) : IJob
|
||||
{
|
||||
public async Task Execute(IJobExecutionContext context)
|
||||
{
|
||||
var now = SystemClock.Instance.GetCurrentInstant();
|
||||
logger.LogInformation("Running file reference expiration job at {now}", now);
|
||||
|
||||
// Find all expired references
|
||||
var expiredReferences = await db.FileReferences
|
||||
.Where(r => r.ExpiredAt < now && r.ExpiredAt != null)
|
||||
.ToListAsync();
|
||||
|
||||
if (!expiredReferences.Any())
|
||||
{
|
||||
logger.LogInformation("No expired file references found");
|
||||
return;
|
||||
}
|
||||
|
||||
logger.LogInformation("Found {count} expired file references", expiredReferences.Count);
|
||||
|
||||
// Get unique file IDs
|
||||
var fileIds = expiredReferences.Select(r => r.FileId).Distinct().ToList();
|
||||
var filesAndReferenceCount = new Dictionary<string, int>();
|
||||
|
||||
// Delete expired references
|
||||
db.FileReferences.RemoveRange(expiredReferences);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
// Check remaining references for each file
|
||||
foreach (var fileId in fileIds)
|
||||
{
|
||||
var remainingReferences = await db.FileReferences
|
||||
.Where(r => r.FileId == fileId)
|
||||
.CountAsync();
|
||||
|
||||
filesAndReferenceCount[fileId] = remainingReferences;
|
||||
|
||||
// If no references remain, delete the file
|
||||
if (remainingReferences == 0)
|
||||
{
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId);
|
||||
if (file != null)
|
||||
{
|
||||
logger.LogInformation("Deleting file {fileId} as all references have expired", fileId);
|
||||
await fileService.DeleteFileAsync(file);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Just purge the cache
|
||||
await fileService._PurgeCacheAsync(fileId);
|
||||
}
|
||||
}
|
||||
|
||||
logger.LogInformation("Completed file reference expiration job");
|
||||
}
|
||||
}
|
@ -1,434 +0,0 @@
|
||||
using DysonNetwork.Common.Services;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NodaTime;
|
||||
|
||||
namespace DysonNetwork.Sphere.Storage;
|
||||
|
||||
public class FileReferenceService(AppDatabase db, FileService fileService, ICacheService cache)
|
||||
{
|
||||
private const string CacheKeyPrefix = "fileref:";
|
||||
private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15);
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new reference to a file for a specific resource
|
||||
/// </summary>
|
||||
/// <param name="fileId">The ID of the file to reference</param>
|
||||
/// <param name="usage">The usage context (e.g., "avatar", "post-attachment")</param>
|
||||
/// <param name="resourceId">The ID of the resource using the file</param>
|
||||
/// <param name="expiredAt">Optional expiration time for the file</param>
|
||||
/// <param name="duration">Optional duration after which the file expires (alternative to expiredAt)</param>
|
||||
/// <returns>The created file reference</returns>
|
||||
public async Task<CloudFileReference> CreateReferenceAsync(
|
||||
string fileId,
|
||||
string usage,
|
||||
string resourceId,
|
||||
Instant? expiredAt = null,
|
||||
Duration? duration = null)
|
||||
{
|
||||
// Calculate expiration time if needed
|
||||
var finalExpiration = expiredAt;
|
||||
if (duration.HasValue)
|
||||
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
|
||||
|
||||
var reference = new CloudFileReference
|
||||
{
|
||||
FileId = fileId,
|
||||
Usage = usage,
|
||||
ResourceId = resourceId,
|
||||
ExpiredAt = finalExpiration
|
||||
};
|
||||
|
||||
db.FileReferences.Add(reference);
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
await fileService._PurgeCacheAsync(fileId);
|
||||
|
||||
return reference;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all references to a file
|
||||
/// </summary>
|
||||
/// <param name="fileId">The ID of the file</param>
|
||||
/// <returns>A list of all references to the file</returns>
|
||||
public async Task<List<CloudFileReference>> GetReferencesAsync(string fileId)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}list:{fileId}";
|
||||
|
||||
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
|
||||
if (cachedReferences is not null)
|
||||
return cachedReferences;
|
||||
|
||||
var references = await db.FileReferences
|
||||
.Where(r => r.FileId == fileId)
|
||||
.ToListAsync();
|
||||
|
||||
await cache.SetAsync(cacheKey, references, CacheDuration);
|
||||
|
||||
return references;
|
||||
}
|
||||
|
||||
public async Task<Dictionary<string, List<CloudFileReference>>> GetReferencesAsync(IEnumerable<string> fileId)
|
||||
{
|
||||
var references = await db.FileReferences
|
||||
.Where(r => fileId.Contains(r.FileId))
|
||||
.GroupBy(r => r.FileId)
|
||||
.ToDictionaryAsync(r => r.Key, r => r.ToList());
|
||||
return references;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the number of references to a file
|
||||
/// </summary>
|
||||
/// <param name="fileId">The ID of the file</param>
|
||||
/// <returns>The number of references to the file</returns>
|
||||
public async Task<int> GetReferenceCountAsync(string fileId)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}count:{fileId}";
|
||||
|
||||
var cachedCount = await cache.GetAsync<int?>(cacheKey);
|
||||
if (cachedCount.HasValue)
|
||||
return cachedCount.Value;
|
||||
|
||||
var count = await db.FileReferences
|
||||
.Where(r => r.FileId == fileId)
|
||||
.CountAsync();
|
||||
|
||||
await cache.SetAsync(cacheKey, count, CacheDuration);
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all references for a specific resource
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The ID of the resource</param>
|
||||
/// <returns>A list of file references associated with the resource</returns>
|
||||
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}";
|
||||
|
||||
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
|
||||
if (cachedReferences is not null)
|
||||
return cachedReferences;
|
||||
|
||||
var references = await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId)
|
||||
.ToListAsync();
|
||||
|
||||
await cache.SetAsync(cacheKey, references, CacheDuration);
|
||||
|
||||
return references;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all file references for a specific usage context
|
||||
/// </summary>
|
||||
/// <param name="usage">The usage context</param>
|
||||
/// <returns>A list of file references with the specified usage</returns>
|
||||
public async Task<List<CloudFileReference>> GetUsageReferencesAsync(string usage)
|
||||
{
|
||||
return await db.FileReferences
|
||||
.Where(r => r.Usage == usage)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deletes references for a specific resource
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The ID of the resource</param>
|
||||
/// <returns>The number of deleted references</returns>
|
||||
public async Task<int> DeleteResourceReferencesAsync(string resourceId)
|
||||
{
|
||||
var references = await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId)
|
||||
.ToListAsync();
|
||||
|
||||
var fileIds = references.Select(r => r.FileId).Distinct().ToList();
|
||||
|
||||
db.FileReferences.RemoveRange(references);
|
||||
var deletedCount = await db.SaveChangesAsync();
|
||||
|
||||
// Purge caches
|
||||
var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||
tasks.Add(PurgeCacheForResourceAsync(resourceId));
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
return deletedCount;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deletes references for a specific resource and usage
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The ID of the resource</param>
|
||||
/// <param name="usage">The usage context</param>
|
||||
/// <returns>The number of deleted references</returns>
|
||||
public async Task<int> DeleteResourceReferencesAsync(string resourceId, string usage)
|
||||
{
|
||||
var references = await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||
.ToListAsync();
|
||||
|
||||
if (!references.Any())
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
var fileIds = references.Select(r => r.FileId).Distinct().ToList();
|
||||
|
||||
db.FileReferences.RemoveRange(references);
|
||||
var deletedCount = await db.SaveChangesAsync();
|
||||
|
||||
// Purge caches
|
||||
var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||
tasks.Add(PurgeCacheForResourceAsync(resourceId));
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
return deletedCount;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deletes a specific file reference
|
||||
/// </summary>
|
||||
/// <param name="referenceId">The ID of the reference to delete</param>
|
||||
/// <returns>True if the reference was deleted, false otherwise</returns>
|
||||
public async Task<bool> DeleteReferenceAsync(Guid referenceId)
|
||||
{
|
||||
var reference = await db.FileReferences
|
||||
.FirstOrDefaultAsync(r => r.Id == referenceId);
|
||||
|
||||
if (reference == null)
|
||||
return false;
|
||||
|
||||
db.FileReferences.Remove(reference);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
// Purge caches
|
||||
await fileService._PurgeCacheAsync(reference.FileId);
|
||||
await PurgeCacheForResourceAsync(reference.ResourceId);
|
||||
await PurgeCacheForFileAsync(reference.FileId);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates the files referenced by a resource
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The ID of the resource</param>
|
||||
/// <param name="newFileIds">The new list of file IDs</param>
|
||||
/// <param name="usage">The usage context</param>
|
||||
/// <param name="expiredAt">Optional expiration time for newly added files</param>
|
||||
/// <param name="duration">Optional duration after which newly added files expire</param>
|
||||
/// <returns>A list of the updated file references</returns>
|
||||
public async Task<List<CloudFileReference>> UpdateResourceFilesAsync(
|
||||
string resourceId,
|
||||
IEnumerable<string>? newFileIds,
|
||||
string usage,
|
||||
Instant? expiredAt = null,
|
||||
Duration? duration = null)
|
||||
{
|
||||
if (newFileIds == null)
|
||||
return new List<CloudFileReference>();
|
||||
|
||||
var existingReferences = await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||
.ToListAsync();
|
||||
|
||||
var existingFileIds = existingReferences.Select(r => r.FileId).ToHashSet();
|
||||
var newFileIdsList = newFileIds.ToList();
|
||||
var newFileIdsSet = newFileIdsList.ToHashSet();
|
||||
|
||||
// Files to remove
|
||||
var toRemove = existingReferences
|
||||
.Where(r => !newFileIdsSet.Contains(r.FileId))
|
||||
.ToList();
|
||||
|
||||
// Files to add
|
||||
var toAdd = newFileIdsList
|
||||
.Where(id => !existingFileIds.Contains(id))
|
||||
.Select(id => new CloudFileReference
|
||||
{
|
||||
FileId = id,
|
||||
Usage = usage,
|
||||
ResourceId = resourceId
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Apply changes
|
||||
if (toRemove.Any())
|
||||
db.FileReferences.RemoveRange(toRemove);
|
||||
|
||||
if (toAdd.Any())
|
||||
db.FileReferences.AddRange(toAdd);
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
// Update expiration for newly added references if specified
|
||||
if ((expiredAt.HasValue || duration.HasValue) && toAdd.Any())
|
||||
{
|
||||
var finalExpiration = expiredAt;
|
||||
if (duration.HasValue)
|
||||
{
|
||||
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
|
||||
}
|
||||
|
||||
// Update newly added references with the expiration time
|
||||
var referenceIds = await db.FileReferences
|
||||
.Where(r => toAdd.Select(a => a.FileId).Contains(r.FileId) &&
|
||||
r.ResourceId == resourceId &&
|
||||
r.Usage == usage)
|
||||
.Select(r => r.Id)
|
||||
.ToListAsync();
|
||||
|
||||
await db.FileReferences
|
||||
.Where(r => referenceIds.Contains(r.Id))
|
||||
.ExecuteUpdateAsync(setter => setter.SetProperty(
|
||||
r => r.ExpiredAt,
|
||||
_ => finalExpiration
|
||||
));
|
||||
}
|
||||
|
||||
// Purge caches
|
||||
var allFileIds = existingFileIds.Union(newFileIdsSet).ToList();
|
||||
var tasks = allFileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||
tasks.Add(PurgeCacheForResourceAsync(resourceId));
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
// Return updated references
|
||||
return await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all files referenced by a resource
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The ID of the resource</param>
|
||||
/// <param name="usage">Optional filter by usage context</param>
|
||||
/// <returns>A list of files referenced by the resource</returns>
|
||||
public async Task<List<CloudFile>> GetResourceFilesAsync(string resourceId, string? usage = null)
|
||||
{
|
||||
var query = db.FileReferences.Where(r => r.ResourceId == resourceId);
|
||||
|
||||
if (usage != null)
|
||||
query = query.Where(r => r.Usage == usage);
|
||||
|
||||
var references = await query.ToListAsync();
|
||||
var fileIds = references.Select(r => r.FileId).ToList();
|
||||
|
||||
return await db.Files
|
||||
.Where(f => fileIds.Contains(f.Id))
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Purges all caches related to a resource
|
||||
/// </summary>
|
||||
private async Task PurgeCacheForResourceAsync(string resourceId)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}";
|
||||
await cache.RemoveAsync(cacheKey);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Purges all caches related to a file
|
||||
/// </summary>
|
||||
private async Task PurgeCacheForFileAsync(string fileId)
|
||||
{
|
||||
var cacheKeys = new[]
|
||||
{
|
||||
$"{CacheKeyPrefix}list:{fileId}",
|
||||
$"{CacheKeyPrefix}count:{fileId}"
|
||||
};
|
||||
|
||||
var tasks = cacheKeys.Select(cache.RemoveAsync);
|
||||
await Task.WhenAll(tasks);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates the expiration time for a file reference
|
||||
/// </summary>
|
||||
/// <param name="referenceId">The ID of the reference</param>
|
||||
/// <param name="expiredAt">The new expiration time, or null to remove expiration</param>
|
||||
/// <returns>True if the reference was found and updated, false otherwise</returns>
|
||||
public async Task<bool> SetReferenceExpirationAsync(Guid referenceId, Instant? expiredAt)
|
||||
{
|
||||
var reference = await db.FileReferences
|
||||
.FirstOrDefaultAsync(r => r.Id == referenceId);
|
||||
|
||||
if (reference == null)
|
||||
return false;
|
||||
|
||||
reference.ExpiredAt = expiredAt;
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
await PurgeCacheForFileAsync(reference.FileId);
|
||||
await PurgeCacheForResourceAsync(reference.ResourceId);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates the expiration time for all references to a file
|
||||
/// </summary>
|
||||
/// <param name="fileId">The ID of the file</param>
|
||||
/// <param name="expiredAt">The new expiration time, or null to remove expiration</param>
|
||||
/// <returns>The number of references updated</returns>
|
||||
public async Task<int> SetFileReferencesExpirationAsync(string fileId, Instant? expiredAt)
|
||||
{
|
||||
var rowsAffected = await db.FileReferences
|
||||
.Where(r => r.FileId == fileId)
|
||||
.ExecuteUpdateAsync(setter => setter.SetProperty(
|
||||
r => r.ExpiredAt,
|
||||
_ => expiredAt
|
||||
));
|
||||
|
||||
if (rowsAffected > 0)
|
||||
{
|
||||
await fileService._PurgeCacheAsync(fileId);
|
||||
await PurgeCacheForFileAsync(fileId);
|
||||
}
|
||||
|
||||
return rowsAffected;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get all file references for a specific resource and usage type
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The resource ID</param>
|
||||
/// <param name="usageType">The usage type</param>
|
||||
/// <returns>List of file references</returns>
|
||||
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId, string usageType)
|
||||
{
|
||||
return await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId && r.Usage == usageType)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Check if a file has any references
|
||||
/// </summary>
|
||||
/// <param name="fileId">The file ID to check</param>
|
||||
/// <returns>True if the file has references, false otherwise</returns>
|
||||
public async Task<bool> HasFileReferencesAsync(string fileId)
|
||||
{
|
||||
return await db.FileReferences.AnyAsync(r => r.FileId == fileId);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates the expiration time for a file reference using a duration from now
|
||||
/// </summary>
|
||||
/// <param name="referenceId">The ID of the reference</param>
|
||||
/// <param name="duration">The duration after which the reference expires, or null to remove expiration</param>
|
||||
/// <returns>True if the reference was found and updated, false otherwise</returns>
|
||||
public async Task<bool> SetReferenceExpirationDurationAsync(Guid referenceId, Duration? duration)
|
||||
{
|
||||
Instant? expiredAt = null;
|
||||
if (duration.HasValue)
|
||||
{
|
||||
expiredAt = SystemClock.Instance.GetCurrentInstant() + duration.Value;
|
||||
}
|
||||
|
||||
return await SetReferenceExpirationAsync(referenceId, expiredAt);
|
||||
}
|
||||
}
|
@ -1,290 +0,0 @@
|
||||
using EFCore.BulkExtensions;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NodaTime;
|
||||
|
||||
namespace DysonNetwork.Sphere.Storage;
|
||||
|
||||
public class FileReferenceMigrationService(AppDatabase db)
|
||||
{
|
||||
public async Task ScanAndMigrateReferences()
|
||||
{
|
||||
// Scan Posts for file references
|
||||
await ScanPosts();
|
||||
|
||||
// Scan Messages for file references
|
||||
await ScanMessages();
|
||||
|
||||
// Scan Profiles for file references
|
||||
await ScanProfiles();
|
||||
|
||||
// Scan Chat entities for file references
|
||||
await ScanChatRooms();
|
||||
|
||||
// Scan Realms for file references
|
||||
await ScanRealms();
|
||||
|
||||
// Scan Publishers for file references
|
||||
await ScanPublishers();
|
||||
|
||||
// Scan Stickers for file references
|
||||
await ScanStickers();
|
||||
}
|
||||
|
||||
private async Task ScanPosts()
|
||||
{
|
||||
var posts = await db.Posts
|
||||
.Include(p => p.OutdatedAttachments)
|
||||
.Where(p => p.OutdatedAttachments.Any())
|
||||
.ToListAsync();
|
||||
|
||||
foreach (var post in posts)
|
||||
{
|
||||
var updatedAttachments = new List<CloudFileReferenceObject>();
|
||||
|
||||
foreach (var attachment in post.OutdatedAttachments)
|
||||
{
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == attachment.Id);
|
||||
if (file != null)
|
||||
{
|
||||
// Create a reference for the file
|
||||
var reference = new CloudFileReference
|
||||
{
|
||||
FileId = file.Id,
|
||||
File = file,
|
||||
Usage = "post",
|
||||
ResourceId = post.ResourceIdentifier
|
||||
};
|
||||
|
||||
await db.FileReferences.AddAsync(reference);
|
||||
updatedAttachments.Add(file.ToReferenceObject());
|
||||
}
|
||||
else
|
||||
{
|
||||
// Keep the existing reference object if file not found
|
||||
updatedAttachments.Add(attachment.ToReferenceObject());
|
||||
}
|
||||
}
|
||||
|
||||
post.Attachments = updatedAttachments;
|
||||
db.Posts.Update(post);
|
||||
}
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task ScanMessages()
|
||||
{
|
||||
var messages = await db.ChatMessages
|
||||
.Include(m => m.OutdatedAttachments)
|
||||
.Where(m => m.OutdatedAttachments.Any())
|
||||
.ToListAsync();
|
||||
|
||||
var fileReferences = messages.SelectMany(message => message.OutdatedAttachments.Select(attachment =>
|
||||
new CloudFileReference
|
||||
{
|
||||
FileId = attachment.Id,
|
||||
File = attachment,
|
||||
Usage = "chat",
|
||||
ResourceId = message.ResourceIdentifier,
|
||||
CreatedAt = SystemClock.Instance.GetCurrentInstant(),
|
||||
UpdatedAt = SystemClock.Instance.GetCurrentInstant()
|
||||
})
|
||||
).ToList();
|
||||
|
||||
foreach (var message in messages)
|
||||
{
|
||||
message.Attachments = message.OutdatedAttachments.Select(a => a.ToReferenceObject()).ToList();
|
||||
db.ChatMessages.Update(message);
|
||||
}
|
||||
|
||||
await db.BulkInsertAsync(fileReferences);
|
||||
await db.SaveChangesAsync();
|
||||
}
|
||||
|
||||
|
||||
|
||||
private async Task ScanChatRooms()
|
||||
{
|
||||
var chatRooms = await db.ChatRooms
|
||||
.Where(c => c.PictureId != null || c.BackgroundId != null)
|
||||
.ToListAsync();
|
||||
|
||||
foreach (var chatRoom in chatRooms)
|
||||
{
|
||||
if (chatRoom is { PictureId: not null, Picture: null })
|
||||
{
|
||||
var avatarFile = await db.Files.FirstOrDefaultAsync(f => f.Id == chatRoom.PictureId);
|
||||
if (avatarFile != null)
|
||||
{
|
||||
// Create a reference for the avatar file
|
||||
var reference = new CloudFileReference
|
||||
{
|
||||
FileId = avatarFile.Id,
|
||||
File = avatarFile,
|
||||
Usage = "chatroom.picture",
|
||||
ResourceId = chatRoom.ResourceIdentifier
|
||||
};
|
||||
|
||||
await db.FileReferences.AddAsync(reference);
|
||||
chatRoom.Picture = avatarFile.ToReferenceObject();
|
||||
db.ChatRooms.Update(chatRoom);
|
||||
}
|
||||
}
|
||||
|
||||
if (chatRoom is not { BackgroundId: not null, Background: null }) continue;
|
||||
var bannerFile = await db.Files.FirstOrDefaultAsync(f => f.Id == chatRoom.BackgroundId);
|
||||
if (bannerFile == null) continue;
|
||||
{
|
||||
// Create a reference for the banner file
|
||||
var reference = new CloudFileReference
|
||||
{
|
||||
FileId = bannerFile.Id,
|
||||
File = bannerFile,
|
||||
Usage = "chatroom.background",
|
||||
ResourceId = chatRoom.ResourceIdentifier
|
||||
};
|
||||
|
||||
await db.FileReferences.AddAsync(reference);
|
||||
chatRoom.Background = bannerFile.ToReferenceObject();
|
||||
db.ChatRooms.Update(chatRoom);
|
||||
}
|
||||
}
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
}
|
||||
|
||||
    private async Task ScanRealms()
    {
        var realms = await db.Realms
            .Where(r => r.PictureId != null || r.BackgroundId != null)
            .ToListAsync();

        foreach (var realm in realms)
        {
            // Process avatar if it exists
            if (realm is { PictureId: not null, Picture: null })
            {
                var avatarFile = await db.Files.FirstOrDefaultAsync(f => f.Id == realm.PictureId);
                if (avatarFile != null)
                {
                    // Create a reference for the avatar file
                    var reference = new CloudFileReference
                    {
                        FileId = avatarFile.Id,
                        File = avatarFile,
                        Usage = "realm.picture",
                        ResourceId = realm.ResourceIdentifier
                    };

                    await db.FileReferences.AddAsync(reference);
                    realm.Picture = avatarFile.ToReferenceObject();
                }
            }

            // Process banner if it exists
            if (realm is { BackgroundId: not null, Background: null })
            {
                var bannerFile = await db.Files.FirstOrDefaultAsync(f => f.Id == realm.BackgroundId);
                if (bannerFile != null)
                {
                    // Create a reference for the banner file
                    var reference = new CloudFileReference
                    {
                        FileId = bannerFile.Id,
                        File = bannerFile,
                        Usage = "realm.background",
                        ResourceId = realm.ResourceIdentifier
                    };

                    await db.FileReferences.AddAsync(reference);
                    realm.Background = bannerFile.ToReferenceObject();
                }
            }

            db.Realms.Update(realm);
        }

        await db.SaveChangesAsync();
    }

    private async Task ScanPublishers()
    {
        var publishers = await db.Publishers
            .Where(p => p.PictureId != null || p.BackgroundId != null)
            .ToListAsync();

        foreach (var publisher in publishers)
        {
            if (publisher is { PictureId: not null, Picture: null })
            {
                var pictureFile = await db.Files.FirstOrDefaultAsync(f => f.Id == publisher.PictureId);
                if (pictureFile != null)
                {
                    // Create a reference for the picture file
                    var reference = new CloudFileReference
                    {
                        FileId = pictureFile.Id,
                        File = pictureFile,
                        Usage = "publisher.picture",
                        ResourceId = publisher.Id.ToString()
                    };

                    await db.FileReferences.AddAsync(reference);
                    publisher.Picture = pictureFile.ToReferenceObject();
                }
            }

            if (publisher is { BackgroundId: not null, Background: null })
            {
                var backgroundFile = await db.Files.FirstOrDefaultAsync(f => f.Id == publisher.BackgroundId);
                if (backgroundFile != null)
                {
                    // Create a reference for the background file
                    var reference = new CloudFileReference
                    {
                        FileId = backgroundFile.Id,
                        File = backgroundFile,
                        Usage = "publisher.background",
                        ResourceId = publisher.ResourceIdentifier
                    };

                    await db.FileReferences.AddAsync(reference);
                    publisher.Background = backgroundFile.ToReferenceObject();
                }
            }

            db.Publishers.Update(publisher);
        }

        await db.SaveChangesAsync();
    }

    private async Task ScanStickers()
    {
        var stickers = await db.Stickers
            .Where(s => s.ImageId != null && s.Image == null)
            .ToListAsync();

        foreach (var sticker in stickers)
        {
            var imageFile = await db.Files.FirstOrDefaultAsync(f => f.Id == sticker.ImageId);
            if (imageFile != null)
            {
                // Create a reference for the sticker image file
                var reference = new CloudFileReference
                {
                    FileId = imageFile.Id,
                    File = imageFile,
                    Usage = "sticker.image",
                    ResourceId = sticker.ResourceIdentifier
                };

                await db.FileReferences.AddAsync(reference);
                sticker.Image = imageFile.ToReferenceObject();
                db.Stickers.Update(sticker);
            }
        }

        await db.SaveChangesAsync();
    }
}
@ -1,556 +0,0 @@
using System.Globalization;
using FFMpegCore;
using System.Security.Cryptography;
using AngleSharp.Text;
using DysonNetwork.Common.Services;
using Microsoft.EntityFrameworkCore;
using Minio;
using Minio.DataModel.Args;
using NetVips;
using NodaTime;
using tusdotnet.Stores;

namespace DysonNetwork.Sphere.Storage;

public class FileService(
    AppDatabase db,
    IConfiguration configuration,
    TusDiskStore store,
    ILogger<FileService> logger,
    IServiceScopeFactory scopeFactory,
    ICacheService cache
)
{
    private const string CacheKeyPrefix = "file:";
    private static readonly TimeSpan CacheDuration = TimeSpan.FromMinutes(15);

    /// <summary>
    /// The API for getting file meta with caching;
    /// the best use case is accessing the file data.
    ///
    /// <b>This function won't load the uploader's information; it only keeps the minimal file meta.</b>
    /// </summary>
    /// <param name="fileId">The id of the cloud file requested</param>
    /// <returns>The minimal file meta</returns>
    public async Task<CloudFile?> GetFileAsync(string fileId)
    {
        var cacheKey = $"{CacheKeyPrefix}{fileId}";

        var cachedFile = await cache.GetAsync<CloudFile>(cacheKey);
        if (cachedFile is not null)
            return cachedFile;

        var file = await db.Files
            .Include(f => f.Account)
            .Where(f => f.Id == fileId)
            .FirstOrDefaultAsync();

        if (file != null)
            await cache.SetAsync(cacheKey, file, CacheDuration);

        return file;
    }

    private static readonly string TempFilePrefix = "dyn-cloudfile";

    private static readonly string[] AnimatedImageTypes =
        ["image/gif", "image/apng", "image/webp", "image/avif"];

    // The file analysis method no longer removes the GPS EXIF data.
    // Stripping it should be handled on the client side, and in some specific cases the data should be kept.
    public async Task<CloudFile> ProcessNewFileAsync(
        Account.Account account,
        string fileId,
        Stream stream,
        string fileName,
        string? contentType
    )
    {
        var result = new List<(string filePath, string suffix)>();

        var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId));
        var fileSize = stream.Length;
        var hash = await HashFileAsync(stream, fileSize: fileSize);
        contentType ??= !fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName);

        var file = new CloudFile
        {
            Id = fileId,
            Name = fileName,
            MimeType = contentType,
            Size = fileSize,
            Hash = hash,
            AccountId = account.Id
        };

        var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == hash);
        file.StorageId = existingFile is not null ? existingFile.StorageId : file.Id;

        if (existingFile is not null)
        {
            file.FileMeta = existingFile.FileMeta;
            file.HasCompression = existingFile.HasCompression;
            file.SensitiveMarks = existingFile.SensitiveMarks;

            db.Files.Add(file);
            await db.SaveChangesAsync();
            return file;
        }

        switch (contentType.Split('/')[0])
        {
            case "image":
                var blurhash =
                    BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(xComponent: 3, yComponent: 3, filename: ogFilePath);

                // Rewind stream
                stream.Position = 0;

                // Use NetVips for the rest
                using (var vipsImage = NetVips.Image.NewFromStream(stream))
                {
                    var width = vipsImage.Width;
                    var height = vipsImage.Height;
                    var format = vipsImage.Get("vips-loader") ?? "unknown";

                    // Try to get orientation from exif data
                    var orientation = 1;
                    var meta = new Dictionary<string, object>
                    {
                        ["blur"] = blurhash,
                        ["format"] = format,
                        ["width"] = width,
                        ["height"] = height,
                        ["orientation"] = orientation,
                    };
                    Dictionary<string, object> exif = [];

                    foreach (var field in vipsImage.GetFields())
                    {
                        var value = vipsImage.Get(field);

                        // Skip GPS-related EXIF fields to remove location data
                        if (IsIgnoredField(field))
                            continue;

                        if (field.StartsWith("exif-")) exif[field.Replace("exif-", "")] = value;
                        else meta[field] = value;

                        if (field == "orientation") orientation = (int)value;
                    }

                    if (orientation is 6 or 8)
                        (width, height) = (height, width);

                    var aspectRatio = height != 0 ? (double)width / height : 0;

                    meta["exif"] = exif;
                    meta["ratio"] = aspectRatio;
                    file.FileMeta = meta;
                }

                break;
            case "video":
            case "audio":
                try
                {
                    var mediaInfo = await FFProbe.AnalyseAsync(ogFilePath);
                    file.FileMeta = new Dictionary<string, object>
                    {
                        ["duration"] = mediaInfo.Duration.TotalSeconds,
                        ["format_name"] = mediaInfo.Format.FormatName,
                        ["format_long_name"] = mediaInfo.Format.FormatLongName,
                        ["start_time"] = mediaInfo.Format.StartTime.ToString(),
                        ["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture),
                        ["tags"] = mediaInfo.Format.Tags ?? [],
                        ["chapters"] = mediaInfo.Chapters,
                    };
                    if (mediaInfo.PrimaryVideoStream is not null)
                        file.FileMeta["ratio"] =
                            (double)mediaInfo.PrimaryVideoStream.Width / mediaInfo.PrimaryVideoStream.Height;
                }
                catch (Exception ex)
                {
                    logger.LogError("File analysis failed, unable to collect video / audio information: {Message}",
                        ex.Message);
                }

                break;
        }

        db.Files.Add(file);
        await db.SaveChangesAsync();

        _ = Task.Run(async () =>
        {
            using var scope = scopeFactory.CreateScope();
            var nfs = scope.ServiceProvider.GetRequiredService<FileService>();

            try
            {
                logger.LogInformation("Processed file {fileId}, now trying optimizing if possible...", fileId);

                if (contentType.Split('/')[0] == "image")
                {
                    // Skip compression for animated image types
                    var animatedMimeTypes = AnimatedImageTypes;
                    if (animatedMimeTypes.Contains(contentType))
                    {
                        logger.LogInformation(
                            "File {fileId} is an animated image (MIME: {mime}), skipping WebP conversion.", fileId,
                            contentType
                        );
                        var tempFilePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
                        result.Add((tempFilePath, string.Empty));
                        return;
                    }

                    file.MimeType = "image/webp";

                    using var vipsImage = Image.NewFromFile(ogFilePath);
                    var imagePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
                    vipsImage.Autorot().WriteToFile(imagePath + ".webp",
                        new VOption { { "lossless", true }, { "strip", true } });
                    result.Add((imagePath + ".webp", string.Empty));

                    if (vipsImage.Width * vipsImage.Height >= 1024 * 1024)
                    {
                        var scale = 1024.0 / Math.Max(vipsImage.Width, vipsImage.Height);
                        var imageCompressedPath =
                            Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}-compressed");

                        // Create and save image within the same synchronous block to avoid disposal issues
                        using var compressedImage = vipsImage.Resize(scale);
                        compressedImage.Autorot().WriteToFile(imageCompressedPath + ".webp",
                            new VOption { { "Q", 80 }, { "strip", true } });

                        result.Add((imageCompressedPath + ".webp", ".compressed"));
                        file.HasCompression = true;
                    }
                }
                else
                {
                    // No extra processing for video; just add it to the upload queue.
                    result.Add((ogFilePath, string.Empty));
                }

                logger.LogInformation("Optimized file {fileId}, now uploading...", fileId);

                if (result.Count > 0)
                {
                    List<Task<CloudFile>> tasks = [];
                    tasks.AddRange(result.Select(item =>
                        nfs.UploadFileToRemoteAsync(file, item.filePath, null, item.suffix, true))
                    );

                    await Task.WhenAll(tasks);
                    file = await tasks.First();
                }
                else
                {
                    file = await nfs.UploadFileToRemoteAsync(file, stream, null);
                }

                logger.LogInformation("Uploaded file {fileId} done!", fileId);

                var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
                await scopedDb.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(setter => setter
                    .SetProperty(f => f.UploadedAt, file.UploadedAt)
                    .SetProperty(f => f.UploadedTo, file.UploadedTo)
                    .SetProperty(f => f.MimeType, file.MimeType)
                    .SetProperty(f => f.HasCompression, file.HasCompression)
                );
            }
            catch (Exception err)
            {
                logger.LogError(err, "Failed to process {fileId}", fileId);
            }

            await stream.DisposeAsync();
            await store.DeleteFileAsync(file.Id, CancellationToken.None);
            await nfs._PurgeCacheAsync(file.Id);
        });

        return file;
    }

    private static async Task<string> HashFileAsync(Stream stream, int chunkSize = 1024 * 1024, long? fileSize = null)
    {
        fileSize ??= stream.Length;
        if (fileSize > chunkSize * 1024 * 5)
            return await HashFastApproximateAsync(stream, chunkSize);

        using var md5 = MD5.Create();
        var hashBytes = await md5.ComputeHashAsync(stream);
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }

    private static async Task<string> HashFastApproximateAsync(Stream stream, int chunkSize = 1024 * 1024)
    {
        // Scale the chunk size up by 1024x before sampling the stream
        chunkSize *= 1024;

        using var md5 = MD5.Create();

        var buffer = new byte[chunkSize * 2];
        var fileLength = stream.Length;

        var bytesRead = await stream.ReadAsync(buffer.AsMemory(0, chunkSize));

        if (fileLength > chunkSize)
        {
            stream.Seek(-chunkSize, SeekOrigin.End);
            bytesRead += await stream.ReadAsync(buffer.AsMemory(chunkSize, chunkSize));
        }

        var hash = md5.ComputeHash(buffer, 0, bytesRead);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, string filePath, string? targetRemote,
        string? suffix = null, bool selfDestruct = false)
    {
        var fileStream = File.OpenRead(filePath);
        var result = await UploadFileToRemoteAsync(file, fileStream, targetRemote, suffix);
        // Release the file handle before (optionally) deleting the source file
        await fileStream.DisposeAsync();
        if (selfDestruct) File.Delete(filePath);
        return result;
    }

    public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, Stream stream, string? targetRemote,
        string? suffix = null)
    {
        if (file.UploadedAt.HasValue) return file;

        file.UploadedTo = targetRemote ?? configuration.GetValue<string>("Storage:PreferredRemote")!;

        var dest = GetRemoteStorageConfig(file.UploadedTo);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
                $"Failed to configure client for remote destination '{file.UploadedTo}'"
            );

        var bucket = dest.Bucket;
        var contentType = file.MimeType ?? "application/octet-stream";

        await client.PutObjectAsync(new PutObjectArgs()
            .WithBucket(bucket)
            .WithObject(string.IsNullOrWhiteSpace(suffix) ? file.Id : file.Id + suffix)
            .WithStreamData(stream) // Fix this disposed
            .WithObjectSize(stream.Length)
            .WithContentType(contentType)
        );

        file.UploadedAt = Instant.FromDateTimeUtc(DateTime.UtcNow);
        return file;
    }

    public async Task DeleteFileAsync(CloudFile file)
    {
        await DeleteFileDataAsync(file);

        db.Remove(file);
        await db.SaveChangesAsync();
        await _PurgeCacheAsync(file.Id);
    }

    public async Task DeleteFileDataAsync(CloudFile file)
    {
        if (file.StorageId is null) return;
        if (file.UploadedTo is null) return;

        // Check if any other file with the same storage ID is referenced
        var otherFilesWithSameStorageId = await db.Files
            .Where(f => f.StorageId == file.StorageId && f.Id != file.Id)
            .Select(f => f.Id)
            .ToListAsync();

        // Check if any of these files are referenced
        var anyReferenced = false;
        if (otherFilesWithSameStorageId.Any())
        {
            anyReferenced = await db.FileReferences
                .Where(r => otherFilesWithSameStorageId.Contains(r.FileId))
                .AnyAsync();
        }

        // If any other file with the same storage ID is referenced, don't delete the actual file data
        if (anyReferenced) return;

        var dest = GetRemoteStorageConfig(file.UploadedTo);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
                $"Failed to configure client for remote destination '{file.UploadedTo}'"
            );

        var bucket = dest.Bucket;
        var objectId = file.StorageId ?? file.Id; // Use StorageId if available, otherwise fall back to Id

        await client.RemoveObjectAsync(
            new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId)
        );

        if (file.HasCompression)
        {
            // Also remove the compressed version if it exists
            try
            {
                await client.RemoveObjectAsync(
                    new RemoveObjectArgs().WithBucket(bucket).WithObject(objectId + ".compressed")
                );
            }
            catch
            {
                // Ignore errors when deleting compressed version
                logger.LogWarning("Failed to delete compressed version of file {fileId}", file.Id);
            }
        }
    }

    public RemoteStorageConfig GetRemoteStorageConfig(string destination)
    {
        var destinations = configuration.GetSection("Storage:Remote").Get<List<RemoteStorageConfig>>()!;
        var dest = destinations.FirstOrDefault(d => d.Id == destination);
        if (dest is null) throw new InvalidOperationException($"Remote destination '{destination}' not found");
        return dest;
    }

    public IMinioClient? CreateMinioClient(RemoteStorageConfig dest)
    {
        var client = new MinioClient()
            .WithEndpoint(dest.Endpoint)
            .WithRegion(dest.Region)
            .WithCredentials(dest.SecretId, dest.SecretKey);
        if (dest.EnableSsl) client = client.WithSSL();

        return client.Build();
    }

    // Helper method to purge the cache for a specific file
    // Made internal to allow FileReferenceService to use it
    internal async Task _PurgeCacheAsync(string fileId)
    {
        var cacheKey = $"{CacheKeyPrefix}{fileId}";
        await cache.RemoveAsync(cacheKey);
    }

    // Helper method to purge cache for multiple files
    internal async Task _PurgeCacheRangeAsync(IEnumerable<string> fileIds)
    {
        var tasks = fileIds.Select(_PurgeCacheAsync);
        await Task.WhenAll(tasks);
    }

    public async Task<List<CloudFile?>> LoadFromReference(List<CloudFileReferenceObject> references)
    {
        var cachedFiles = new Dictionary<string, CloudFile>();
        var uncachedIds = new List<string>();

        // Check cache first
        foreach (var reference in references)
        {
            var cacheKey = $"{CacheKeyPrefix}{reference.Id}";
            var cachedFile = await cache.GetAsync<CloudFile>(cacheKey);

            if (cachedFile != null)
            {
                cachedFiles[reference.Id] = cachedFile;
            }
            else
            {
                uncachedIds.Add(reference.Id);
            }
        }

        // Load uncached files from database
        if (uncachedIds.Count > 0)
        {
            var dbFiles = await db.Files
                .Include(f => f.Account)
                .Where(f => uncachedIds.Contains(f.Id))
                .ToListAsync();

            // Add to cache
            foreach (var file in dbFiles)
            {
                var cacheKey = $"{CacheKeyPrefix}{file.Id}";
                await cache.SetAsync(cacheKey, file, CacheDuration);
                cachedFiles[file.Id] = file;
            }
        }

        // Preserve original order
        return references
            .Select(r => cachedFiles.GetValueOrDefault(r.Id))
            .Where(f => f != null)
            .ToList();
    }

    /// <summary>
    /// Gets the number of references to a file based on CloudFileReference records
    /// </summary>
    /// <param name="fileId">The ID of the file</param>
    /// <returns>The number of references to the file</returns>
    public async Task<int> GetReferenceCountAsync(string fileId)
    {
        return await db.FileReferences
            .Where(r => r.FileId == fileId)
            .CountAsync();
    }

    /// <summary>
    /// Checks if a file is referenced by any resource
    /// </summary>
    /// <param name="fileId">The ID of the file to check</param>
    /// <returns>True if the file is referenced, false otherwise</returns>
    public async Task<bool> IsReferencedAsync(string fileId)
    {
        return await db.FileReferences
            .Where(r => r.FileId == fileId)
            .AnyAsync();
    }

    /// <summary>
    /// Checks if an EXIF field contains GPS location data
    /// </summary>
    /// <param name="fieldName">The EXIF field name</param>
    /// <returns>True if the field contains GPS data, false otherwise</returns>
    private static bool IsGpsExifField(string fieldName)
    {
        // Common GPS EXIF field names
        var gpsFields = new[]
        {
            "gps-latitude",
            "gps-longitude",
            "gps-altitude",
            "gps-latitude-ref",
            "gps-longitude-ref",
            "gps-altitude-ref",
            "gps-timestamp",
            "gps-datestamp",
            "gps-speed",
            "gps-speed-ref",
            "gps-track",
            "gps-track-ref",
            "gps-img-direction",
            "gps-img-direction-ref",
            "gps-dest-latitude",
            "gps-dest-longitude",
            "gps-dest-latitude-ref",
            "gps-dest-longitude-ref",
            "gps-processing-method",
            "gps-area-information"
        };

        return gpsFields.Any(gpsField =>
            fieldName.Equals(gpsField, StringComparison.OrdinalIgnoreCase) ||
            fieldName.StartsWith("gps", StringComparison.OrdinalIgnoreCase));
    }

    private static bool IsIgnoredField(string fieldName)
    {
        if (IsGpsExifField(fieldName)) return true;
        if (fieldName.EndsWith("-data")) return true;
        return false;
    }
}
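The commit removes this service wholesale, so its call sites are not shown here. A minimal sketch of how a caller would typically drive it, assuming an injected fileService, a resolved account, and an open uploadStream (all hypothetical names):

    // Hypothetical caller: persist a finished upload, then resolve reference objects later.
    var fileId = Guid.NewGuid().ToString();
    var file = await fileService.ProcessNewFileAsync(account, fileId, uploadStream, "photo.jpg", "image/jpeg");

    // Attachments stored as CloudFileReferenceObject can be resolved back to full CloudFile rows.
    var attachments = await fileService.LoadFromReference([file.ToReferenceObject()]);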
@ -1,66 +0,0 @@
using System.Collections.Concurrent;

namespace DysonNetwork.Sphere.Storage;

public interface IFlushHandler<T>
{
    Task FlushAsync(IReadOnlyList<T> items);
}

public class FlushBufferService
{
    private readonly Dictionary<Type, object> _buffers = new();
    private readonly Lock _lockObject = new();

    private ConcurrentQueue<T> _GetOrCreateBuffer<T>()
    {
        var type = typeof(T);
        lock (_lockObject)
        {
            if (!_buffers.TryGetValue(type, out var buffer))
            {
                buffer = new ConcurrentQueue<T>();
                _buffers[type] = buffer;
            }
            return (ConcurrentQueue<T>)buffer;
        }
    }

    public void Enqueue<T>(T item)
    {
        var buffer = _GetOrCreateBuffer<T>();
        buffer.Enqueue(item);
    }

    public async Task FlushAsync<T>(IFlushHandler<T> handler)
    {
        var buffer = _GetOrCreateBuffer<T>();
        var workingQueue = new List<T>();

        while (buffer.TryDequeue(out var item))
        {
            workingQueue.Add(item);
        }

        if (workingQueue.Count == 0)
            return;

        try
        {
            await handler.FlushAsync(workingQueue);
        }
        catch (Exception)
        {
            // If flush fails, re-queue the items
            foreach (var item in workingQueue)
                buffer.Enqueue(item);
            throw;
        }
    }

    public int GetPendingCount<T>()
    {
        var buffer = _GetOrCreateBuffer<T>();
        return buffer.Count;
    }
}
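A rough usage sketch, assuming the service is registered as a singleton; the item type and handler instance below are illustrative only:

    // Producer side: cheap enqueue from hot request paths.
    flushBuffer.Enqueue(new MessageReadReceipt { SenderId = memberId });

    // Consumer side: a periodic job drains the queue through a typed handler.
    // On failure, FlushAsync re-queues the drained items and rethrows.
    await flushBuffer.FlushAsync(readReceiptHandler);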
@ -1,24 +0,0 @@
using EFCore.BulkExtensions;
using Quartz;

namespace DysonNetwork.Sphere.Storage.Handlers;

public class ActionLogFlushHandler(IServiceProvider serviceProvider) : IFlushHandler<ActionLog>
{
    public async Task FlushAsync(IReadOnlyList<ActionLog> items)
    {
        using var scope = serviceProvider.CreateScope();
        var db = scope.ServiceProvider.GetRequiredService<AppDatabase>();

        await db.BulkInsertAsync(items, config => config.ConflictOption = ConflictOption.Ignore);
    }
}

public class ActionLogFlushJob(FlushBufferService fbs, ActionLogFlushHandler hdl) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        await fbs.FlushAsync(hdl);
    }
}
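How these IJob wrappers get scheduled is outside this file; a plausible Quartz registration (job identity and interval are assumptions) would look like:

    services.AddQuartz(q =>
    {
        var jobKey = new JobKey("ActionLogFlush");
        q.AddJob<ActionLogFlushJob>(opts => opts.WithIdentity(jobKey));
        q.AddTrigger(t => t
            .ForJob(jobKey)
            .WithSimpleSchedule(s => s.WithIntervalInSeconds(30).RepeatForever()));
    });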
@ -1,60 +0,0 @@
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

namespace DysonNetwork.Sphere.Storage.Handlers;

public class LastActiveInfo
{
    public Auth.Session Session { get; set; } = null!;
    public Account.Account Account { get; set; } = null!;
    public Instant SeenAt { get; set; }
}

public class LastActiveFlushHandler(IServiceProvider serviceProvider) : IFlushHandler<LastActiveInfo>
{
    public async Task FlushAsync(IReadOnlyList<LastActiveInfo> items)
    {
        using var scope = serviceProvider.CreateScope();
        var db = scope.ServiceProvider.GetRequiredService<AppDatabase>();

        // Remove duplicates by grouping on (sessionId, accountId), taking the most recent SeenAt
        var distinctItems = items
            .GroupBy(x => (SessionId: x.Session.Id, AccountId: x.Account.Id))
            .Select(g => g.OrderByDescending(x => x.SeenAt).First())
            .ToList();

        // Build dictionaries so we can match session/account IDs to their new "last seen" timestamps
        var sessionIdMap = distinctItems
            .GroupBy(x => x.Session.Id)
            .ToDictionary(g => g.Key, g => g.Last().SeenAt);

        var accountIdMap = distinctItems
            .GroupBy(x => x.Account.Id)
            .ToDictionary(g => g.Key, g => g.Last().SeenAt);

        // Update sessions using native EF Core ExecuteUpdateAsync
        foreach (var kvp in sessionIdMap)
        {
            await db.AuthSessions
                .Where(s => s.Id == kvp.Key)
                .ExecuteUpdateAsync(s => s.SetProperty(x => x.LastGrantedAt, kvp.Value));
        }

        // Update account profiles using native EF Core ExecuteUpdateAsync
        foreach (var kvp in accountIdMap)
        {
            await db.AccountProfiles
                .Where(a => a.AccountId == kvp.Key)
                .ExecuteUpdateAsync(a => a.SetProperty(x => x.LastSeenAt, kvp.Value));
        }
    }
}

public class LastActiveFlushJob(FlushBufferService fbs, LastActiveFlushHandler hdl) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        await fbs.FlushAsync(hdl);
    }
}
@ -1,34 +0,0 @@
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Chat;
using EFCore.BulkExtensions;
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

namespace DysonNetwork.Sphere.Storage.Handlers;

public class MessageReadReceiptFlushHandler(IServiceProvider serviceProvider) : IFlushHandler<MessageReadReceipt>
{
    public async Task FlushAsync(IReadOnlyList<MessageReadReceipt> items)
    {
        var now = SystemClock.Instance.GetCurrentInstant();
        var distinctId = items
            .DistinctBy(x => x.SenderId)
            .Select(x => x.SenderId)
            .ToList();

        using var scope = serviceProvider.CreateScope();
        var db = scope.ServiceProvider.GetRequiredService<AppDatabase>();
        await db.ChatMembers.Where(r => distinctId.Contains(r.Id))
            .ExecuteUpdateAsync(s => s.SetProperty(m => m.LastReadAt, now));
    }
}

public class ReadReceiptFlushJob(FlushBufferService fbs, MessageReadReceiptFlushHandler hdl) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        await fbs.FlushAsync(hdl);
    }
}
@ -1,53 +0,0 @@
using DysonNetwork.Common.Services;
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

namespace DysonNetwork.Sphere.Storage.Handlers;

public class PostViewFlushHandler(IServiceProvider serviceProvider) : IFlushHandler<Post.PostViewInfo>
{
    public async Task FlushAsync(IReadOnlyList<Post.PostViewInfo> items)
    {
        using var scope = serviceProvider.CreateScope();
        var db = scope.ServiceProvider.GetRequiredService<AppDatabase>();
        var cache = scope.ServiceProvider.GetRequiredService<ICacheService>();

        // Group views by post
        var postViews = items
            .GroupBy(x => x.PostId)
            .ToDictionary(g => g.Key, g => g.ToList());

        // Calculate total views and unique views per post
        foreach (var postId in postViews.Keys)
        {
            // Calculate unique views by distinct viewer IDs (not null)
            var uniqueViews = postViews[postId]
                .Where(v => !string.IsNullOrEmpty(v.ViewerId))
                .Select(v => v.ViewerId)
                .Distinct()
                .Count();

            // Total views is just the count of all items for this post
            var totalViews = postViews[postId].Count;

            // Update the post in the database
            await db.Posts
                .Where(p => p.Id == postId)
                .ExecuteUpdateAsync(p => p
                    .SetProperty(x => x.ViewsTotal, x => x.ViewsTotal + totalViews)
                    .SetProperty(x => x.ViewsUnique, x => x.ViewsUnique + uniqueViews));

            // Invalidate any cache entries for this post
            await cache.RemoveAsync($"post:{postId}");
        }
    }
}

public class PostViewFlushJob(FlushBufferService fbs, PostViewFlushHandler hdl) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        await fbs.FlushAsync(hdl);
    }
}
@ -1,33 +0,0 @@
using System.Globalization;
using System.Text;

namespace DysonNetwork.Sphere.Storage;

public abstract class TextSanitizer
{
    public static string? Sanitize(string? text)
    {
        if (string.IsNullOrEmpty(text)) return text;

        // List of control characters to preserve
        var preserveControlChars = new[] { '\n', '\r', '\t', ' ' };

        var filtered = new StringBuilder();
        foreach (var ch in text)
        {
            var category = CharUnicodeInfo.GetUnicodeCategory(ch);

            // Keep whitespace and other specified control characters
            if (category is not UnicodeCategory.Control || preserveControlChars.Contains(ch))
            {
                // Still filter out Format and NonSpacingMark categories
                if (category is not (UnicodeCategory.Format or UnicodeCategory.NonSpacingMark))
                {
                    filtered.Append(ch);
                }
            }
        }

        return filtered.ToString();
    }
}
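A quick illustration of the intended effect; the input string is made up:

    // Format characters (e.g. a zero-width space) are dropped,
    // while ordinary whitespace and the preserved control characters survive.
    var clean = TextSanitizer.Sanitize("Hello\u200B world\r\n");
    // clean == "Hello world\r\n"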
@ -1,78 +0,0 @@
using System.Net;
using System.Text;
using System.Text.Json;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using tusdotnet.Interfaces;
using tusdotnet.Models;
using tusdotnet.Models.Configuration;

namespace DysonNetwork.Sphere.Storage;

public abstract class TusService
{
    public static DefaultTusConfiguration BuildConfiguration(ITusStore store) => new()
    {
        Store = store,
        Events = new Events
        {
            OnAuthorizeAsync = async eventContext =>
            {
                if (eventContext.Intent == IntentType.DeleteFile)
                {
                    eventContext.FailRequest(
                        HttpStatusCode.BadRequest,
                        "Deleting files from this endpoint was disabled, please refer to the Dyson Network File API."
                    );
                    return;
                }

                var httpContext = eventContext.HttpContext;
                if (httpContext.Items["CurrentUser"] is not Account.Account user)
                {
                    eventContext.FailRequest(HttpStatusCode.Unauthorized);
                    return;
                }

                if (!user.IsSuperuser)
                {
                    using var scope = httpContext.RequestServices.CreateScope();
                    var pm = scope.ServiceProvider.GetRequiredService<PermissionService>();
                    var allowed = await pm.HasPermissionAsync($"user:{user.Id}", "global", "files.create");
                    if (!allowed)
                        eventContext.FailRequest(HttpStatusCode.Forbidden);
                }
            },
            OnFileCompleteAsync = async eventContext =>
            {
                using var scope = eventContext.HttpContext.RequestServices.CreateScope();
                var services = scope.ServiceProvider;

                var httpContext = eventContext.HttpContext;
                if (httpContext.Items["CurrentUser"] is not Account.Account user) return;

                var file = await eventContext.GetFileAsync();
                var metadata = await file.GetMetadataAsync(eventContext.CancellationToken);
                var fileName = metadata.TryGetValue("filename", out var fn)
                    ? fn.GetString(Encoding.UTF8)
                    : "uploaded_file";
                var contentType = metadata.TryGetValue("content-type", out var ct) ? ct.GetString(Encoding.UTF8) : null;

                var fileStream = await file.GetContentAsync(eventContext.CancellationToken);

                var fileService = services.GetRequiredService<FileService>();
                var info = await fileService.ProcessNewFileAsync(user, file.Id, fileStream, fileName, contentType);

                using var finalScope = eventContext.HttpContext.RequestServices.CreateScope();
                var jsonOptions = finalScope.ServiceProvider.GetRequiredService<IOptions<JsonOptions>>().Value
                    .JsonSerializerOptions;
                var infoJson = JsonSerializer.Serialize(info, jsonOptions);
                eventContext.HttpContext.Response.Headers.Append("X-FileInfo", infoJson);

                // Dispose the stream after all processing is complete
                await fileStream.DisposeAsync();
            }
        }
    };
}
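The configuration above is presumably handed to the tusdotnet middleware at application startup; a minimal sketch, assuming endpoint routing via MapTus and the Tus:StorePath setting used elsewhere in this commit:

    var store = new TusDiskStore(builder.Configuration.GetValue<string>("Tus:StorePath")!);
    app.MapTus("/api/tus", _ => Task.FromResult(TusService.BuildConfiguration(store)));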
@ -111,5 +111,8 @@
  "KnownProxies": [
    "127.0.0.1",
    "::1"
  ]
  ],
  "DriveService": {
    "BaseUrl": "http://localhost:5073"
  }
}
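Nothing in this hunk shows how the Sphere side consumes the new section; a plausible reading of it when registering an HTTP client (the client name and usage are assumptions):

    builder.Services.AddHttpClient("DriveService", client =>
    {
        // Falls back to the same default the config ships with.
        var baseUrl = builder.Configuration["DriveService:BaseUrl"] ?? "http://localhost:5073";
        client.BaseAddress = new Uri(baseUrl);
    });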