:drunk: Write shit code trying to split up the Auth (WIP)

2025-07-06 12:58:18 +08:00
parent 5757526ea5
commit 6a3d04af3d
224 changed files with 1889 additions and 36885 deletions

View File

@@ -1,396 +0,0 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using NodaTime;
using NodaTime.Serialization.JsonNet;
using StackExchange.Redis;
namespace DysonNetwork.Sphere.Storage;
/// <summary>
/// Represents a distributed lock that can be used to synchronize access across multiple processes
/// </summary>
public interface IDistributedLock : IAsyncDisposable
{
/// <summary>
/// The resource identifier this lock is protecting
/// </summary>
string Resource { get; }
/// <summary>
/// Unique identifier for this lock instance
/// </summary>
string LockId { get; }
/// <summary>
/// Extends the lock's expiration time
/// </summary>
Task<bool> ExtendAsync(TimeSpan timeSpan);
/// <summary>
/// Releases the lock immediately
/// </summary>
Task ReleaseAsync();
}
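// Usage sketch (editorial note, not part of the original file): locks are typically scoped with
// `await using` so ReleaseAsync runs even when the protected work throws, and ExtendAsync renews
// the expiry during long-running work. Assumes an ICacheService instance named `cache` (declared
// below); the resource name and ProcessFileAsync helper are hypothetical.
//
//     await using var fileLock = await cache.AcquireLockAsync(
//         "file:process:abc123", expiry: TimeSpan.FromSeconds(30), waitTime: TimeSpan.FromSeconds(5));
//     if (fileLock is null) return;                          // another process holds the lock
//     await ProcessFileAsync();                              // hypothetical long-running work
//     await fileLock.ExtendAsync(TimeSpan.FromSeconds(30));  // renew before the expiry elapses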
public interface ICacheService
{
/// <summary>
/// Sets a value in the cache with an optional expiration time
/// </summary>
Task<bool> SetAsync<T>(string key, T value, TimeSpan? expiry = null);
/// <summary>
/// Gets a value from the cache
/// </summary>
Task<T?> GetAsync<T>(string key);
/// <summary>
/// Gets a value from the cache together with a flag indicating whether the key was found
/// </summary>
Task<(bool found, T? value)> GetAsyncWithStatus<T>(string key);
/// <summary>
/// Removes a specific key from the cache
/// </summary>
Task<bool> RemoveAsync(string key);
/// <summary>
/// Adds a key to a group for group-based operations
/// </summary>
Task AddToGroupAsync(string key, string group);
/// <summary>
/// Removes all keys associated with a specific group
/// </summary>
Task RemoveGroupAsync(string group);
/// <summary>
/// Gets all keys belonging to a specific group
/// </summary>
Task<IEnumerable<string>> GetGroupKeysAsync(string group);
/// <summary>
/// Helper method to set a value in the cache and associate it with multiple groups in one operation
/// </summary>
/// <typeparam name="T">The type of value being cached</typeparam>
/// <param name="key">Cache key</param>
/// <param name="value">The value to cache</param>
/// <param name="groups">Optional collection of group names to associate the key with</param>
/// <param name="expiry">Optional expiration time for the cached item</param>
/// <returns>True if the set operation was successful</returns>
Task<bool> SetWithGroupsAsync<T>(string key, T value, IEnumerable<string>? groups = null, TimeSpan? expiry = null);
/// <summary>
/// Acquires a distributed lock on the specified resource
/// </summary>
/// <param name="resource">The resource identifier to lock</param>
/// <param name="expiry">How long the lock should be held before automatically expiring</param>
/// <param name="waitTime">How long to wait for the lock before giving up</param>
/// <param name="retryInterval">How often to retry acquiring the lock during the wait time</param>
/// <returns>A distributed lock instance if acquired, null otherwise</returns>
Task<IDistributedLock?> AcquireLockAsync(string resource, TimeSpan expiry, TimeSpan? waitTime = null,
TimeSpan? retryInterval = null);
/// <summary>
/// Executes an action with a distributed lock, ensuring the lock is properly released afterwards
/// </summary>
/// <param name="resource">The resource identifier to lock</param>
/// <param name="action">The action to execute while holding the lock</param>
/// <param name="expiry">How long the lock should be held before automatically expiring</param>
/// <param name="waitTime">How long to wait for the lock before giving up</param>
/// <param name="retryInterval">How often to retry acquiring the lock during the wait time</param>
/// <returns>True if the lock was acquired and the action was executed, false otherwise</returns>
Task<bool> ExecuteWithLockAsync(string resource, Func<Task> action, TimeSpan expiry, TimeSpan? waitTime = null,
TimeSpan? retryInterval = null);
/// <summary>
/// Executes a function with a distributed lock, ensuring the lock is properly released afterwards
/// </summary>
/// <typeparam name="T">The return type of the function</typeparam>
/// <param name="resource">The resource identifier to lock</param>
/// <param name="func">The function to execute while holding the lock</param>
/// <param name="expiry">How long the lock should be held before automatically expiring</param>
/// <param name="waitTime">How long to wait for the lock before giving up</param>
/// <param name="retryInterval">How often to retry acquiring the lock during the wait time</param>
/// <returns>The result of the function if the lock was acquired, default(T) otherwise</returns>
Task<(bool Acquired, T? Result)> ExecuteWithLockAsync<T>(string resource, Func<Task<T>> func, TimeSpan expiry,
TimeSpan? waitTime = null, TimeSpan? retryInterval = null);
}
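// Usage sketch (editorial note, not part of the original file): group membership lets related cache
// entries be evicted in one call, and ExecuteWithLockAsync wraps a critical section without manual
// lock handling. The key and group names below are hypothetical.
//
//     await cache.SetWithGroupsAsync($"post:{post.Id}", post,
//         groups: [$"account:{post.AccountId}"], expiry: TimeSpan.FromMinutes(10));
//     await cache.RemoveGroupAsync($"account:{post.AccountId}");   // evict every cached post of the account
//     var published = await cache.ExecuteWithLockAsync($"post:publish:{post.Id}",
//         async () => await PublishAsync(post), TimeSpan.FromSeconds(30));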
public class RedisDistributedLock : IDistributedLock
{
private readonly IDatabase _database;
private bool _disposed;
public string Resource { get; }
public string LockId { get; }
internal RedisDistributedLock(IDatabase database, string resource, string lockId)
{
_database = database;
Resource = resource;
LockId = lockId;
}
public async Task<bool> ExtendAsync(TimeSpan timeSpan)
{
if (_disposed)
throw new ObjectDisposedException(nameof(RedisDistributedLock));
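// Compare-and-expire atomically via Lua so the expiry is only extended while this instance still owns the lock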
var script = @"
if redis.call('get', KEYS[1]) == ARGV[1] then
return redis.call('pexpire', KEYS[1], ARGV[2])
else
return 0
end
";
var result = await _database.ScriptEvaluateAsync(
script,
[$"{CacheServiceRedis.LockKeyPrefix}{Resource}"],
[LockId, (long)timeSpan.TotalMilliseconds]
);
return (long)result! == 1;
}
public async Task ReleaseAsync()
{
if (_disposed)
return;
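// Compare-and-delete atomically via Lua so a lock that has since been acquired by another instance is never released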
var script = @"
if redis.call('get', KEYS[1]) == ARGV[1] then
return redis.call('del', KEYS[1])
else
return 0
end
";
await _database.ScriptEvaluateAsync(
script,
[$"{CacheServiceRedis.LockKeyPrefix}{Resource}"],
[LockId]
);
_disposed = true;
}
public async ValueTask DisposeAsync()
{
await ReleaseAsync();
GC.SuppressFinalize(this);
}
}
public class CacheServiceRedis : ICacheService
{
private readonly IDatabase _database;
private readonly JsonSerializerSettings _serializerSettings;
// Global prefix for all cache keys
public const string GlobalKeyPrefix = "dyson:";
// Using prefixes for different types of keys
public const string GroupKeyPrefix = GlobalKeyPrefix + "cg:";
public const string LockKeyPrefix = GlobalKeyPrefix + "lock:";
public CacheServiceRedis(IConnectionMultiplexer redis)
{
var rds = redis ?? throw new ArgumentNullException(nameof(redis));
_database = rds.GetDatabase();
// Configure Newtonsoft.Json with proper NodaTime serialization
_serializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
PreserveReferencesHandling = PreserveReferencesHandling.Objects,
NullValueHandling = NullValueHandling.Include,
DateParseHandling = DateParseHandling.None
};
// Configure NodaTime serializers
_serializerSettings.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
}
public async Task<bool> SetAsync<T>(string key, T value, TimeSpan? expiry = null)
{
if (string.IsNullOrEmpty(key))
throw new ArgumentException("Key cannot be null or empty", nameof(key));
key = $"{GlobalKeyPrefix}{key}";
var serializedValue = JsonConvert.SerializeObject(value, _serializerSettings);
return await _database.StringSetAsync(key, serializedValue, expiry);
}
public async Task<T?> GetAsync<T>(string key)
{
if (string.IsNullOrEmpty(key))
throw new ArgumentException("Key cannot be null or empty", nameof(key));
key = $"{GlobalKeyPrefix}{key}";
var value = await _database.StringGetAsync(key);
if (value.IsNullOrEmpty)
return default;
// For NodaTime serialization, use the configured serializer settings
return JsonConvert.DeserializeObject<T>(value!, _serializerSettings);
}
public async Task<(bool found, T? value)> GetAsyncWithStatus<T>(string key)
{
if (string.IsNullOrEmpty(key))
throw new ArgumentException("Key cannot be null or empty", nameof(key));
key = $"{GlobalKeyPrefix}{key}";
var value = await _database.StringGetAsync(key);
if (value.IsNullOrEmpty)
return (false, default);
// For NodaTime serialization, use the configured serializer settings
return (true, JsonConvert.DeserializeObject<T>(value!, _serializerSettings));
}
public async Task<bool> RemoveAsync(string key)
{
if (string.IsNullOrEmpty(key))
throw new ArgumentException("Key cannot be null or empty", nameof(key));
key = $"{GlobalKeyPrefix}{key}";
// Before removing the key, find all groups it belongs to and remove it from them
var script = @"
local groups = redis.call('KEYS', ARGV[1])
for _, group in ipairs(groups) do
redis.call('SREM', group, ARGV[2])
end
return redis.call('DEL', ARGV[2])
";
var result = await _database.ScriptEvaluateAsync(
script,
values: [$"{GroupKeyPrefix}*", key]
);
return (long)result! > 0;
}
public async Task AddToGroupAsync(string key, string group)
{
if (string.IsNullOrEmpty(key))
throw new ArgumentException(@"Key cannot be null or empty.", nameof(key));
if (string.IsNullOrEmpty(group))
throw new ArgumentException(@"Group cannot be null or empty.", nameof(group));
var groupKey = $"{GroupKeyPrefix}{group}";
key = $"{GlobalKeyPrefix}{key}";
await _database.SetAddAsync(groupKey, key);
}
public async Task RemoveGroupAsync(string group)
{
if (string.IsNullOrEmpty(group))
throw new ArgumentException(@"Group cannot be null or empty.", nameof(group));
var groupKey = $"{GroupKeyPrefix}{group}";
// Get all keys in the group
var keys = await _database.SetMembersAsync(groupKey);
if (keys.Length > 0)
{
// Delete all the keys
var keysTasks = keys.Select(key => _database.KeyDeleteAsync(key.ToString()));
await Task.WhenAll(keysTasks);
}
// Delete the group itself
await _database.KeyDeleteAsync(groupKey);
}
public async Task<IEnumerable<string>> GetGroupKeysAsync(string group)
{
if (string.IsNullOrEmpty(group))
throw new ArgumentException(@"Group cannot be null or empty.", nameof(group));
var groupKey = $"{GroupKeyPrefix}{group}";
var members = await _database.SetMembersAsync(groupKey);
return members.Select(m => m.ToString());
}
public async Task<bool> SetWithGroupsAsync<T>(string key, T value, IEnumerable<string>? groups = null,
TimeSpan? expiry = null)
{
// First, set the value in the cache
var setResult = await SetAsync(key, value, expiry);
// If successful and there are groups to associate, add the key to each group
if (!setResult || groups == null) return setResult;
var groupsArray = groups.Where(g => !string.IsNullOrEmpty(g)).ToArray();
if (groupsArray.Length <= 0) return setResult;
var tasks = groupsArray.Select(group => AddToGroupAsync(key, group));
await Task.WhenAll(tasks);
return setResult;
}
public async Task<IDistributedLock?> AcquireLockAsync(string resource, TimeSpan expiry, TimeSpan? waitTime = null,
TimeSpan? retryInterval = null)
{
if (string.IsNullOrEmpty(resource))
throw new ArgumentException("Resource cannot be null or empty", nameof(resource));
var lockKey = $"{LockKeyPrefix}{resource}";
var lockId = Guid.NewGuid().ToString("N");
var waitTimeSpan = waitTime ?? TimeSpan.Zero;
var retryIntervalSpan = retryInterval ?? TimeSpan.FromMilliseconds(100);
var startTime = DateTime.UtcNow;
// Try to acquire the lock once immediately, then retry until waitTime is exceeded
var acquired = await _database.StringSetAsync(lockKey, lockId, expiry, When.NotExists);
while (!acquired && (DateTime.UtcNow - startTime) < waitTimeSpan)
{
await Task.Delay(retryIntervalSpan);
acquired = await _database.StringSetAsync(lockKey, lockId, expiry, When.NotExists);
}
if (!acquired)
{
return null; // Could not acquire the lock within the wait time
}
return new RedisDistributedLock(_database, resource, lockId);
}
public async Task<bool> ExecuteWithLockAsync(string resource, Func<Task> action, TimeSpan expiry,
TimeSpan? waitTime = null, TimeSpan? retryInterval = null)
{
await using var lockObj = await AcquireLockAsync(resource, expiry, waitTime, retryInterval);
if (lockObj == null)
return false; // Could not acquire the lock
await action();
return true;
}
public async Task<(bool Acquired, T? Result)> ExecuteWithLockAsync<T>(string resource, Func<Task<T>> func,
TimeSpan expiry, TimeSpan? waitTime = null, TimeSpan? retryInterval = null)
{
await using var lockObj = await AcquireLockAsync(resource, expiry, waitTime, retryInterval);
if (lockObj == null)
return (false, default); // Could not acquire the lock
var result = await func();
return (true, result);
}
}
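For reference, a minimal dependency-injection wiring sketch for this service (not taken from this repository; the "Redis" connection-string name is an assumption):

builder.Services.AddSingleton<IConnectionMultiplexer>(_ =>
ConnectionMultiplexer.Connect(builder.Configuration.GetConnectionString("Redis")!));
builder.Services.AddSingleton<ICacheService, CacheServiceRedis>();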

View File

@@ -1,130 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text.Json.Serialization;
using NodaTime;
namespace DysonNetwork.Sphere.Storage;
public class RemoteStorageConfig
{
public string Id { get; set; } = string.Empty;
public string Label { get; set; } = string.Empty;
public string Region { get; set; } = string.Empty;
public string Bucket { get; set; } = string.Empty;
public string Endpoint { get; set; } = string.Empty;
public string SecretId { get; set; } = string.Empty;
public string SecretKey { get; set; } = string.Empty;
public bool EnableSigned { get; set; }
public bool EnableSsl { get; set; }
public string? ImageProxy { get; set; }
public string? AccessProxy { get; set; }
}
/// <summary>
/// The class used in jsonb columns to reference a cloud file.
/// It stores a snapshot of the file's immutable properties to reduce database load.
/// </summary>
public class CloudFileReferenceObject : ModelBase, ICloudFile
{
public string Id { get; set; } = null!;
public string Name { get; set; } = string.Empty;
public Dictionary<string, object>? FileMeta { get; set; } = null!;
public Dictionary<string, object>? UserMeta { get; set; } = null!;
public string? MimeType { get; set; }
public string? Hash { get; set; }
public long Size { get; set; }
public bool HasCompression { get; set; } = false;
}
public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource
{
/// The ID generated by tus, essentially a UUID with the dashes removed
[MaxLength(32)]
public string Id { get; set; } = Guid.NewGuid().ToString("N"); // "N" keeps the default at 32 chars, within MaxLength(32)
[MaxLength(1024)] public string Name { get; set; } = string.Empty;
[MaxLength(4096)] public string? Description { get; set; }
[Column(TypeName = "jsonb")] public Dictionary<string, object>? FileMeta { get; set; } = null!;
[Column(TypeName = "jsonb")] public Dictionary<string, object>? UserMeta { get; set; } = null!;
[Column(TypeName = "jsonb")] public List<ContentSensitiveMark>? SensitiveMarks { get; set; } = [];
[MaxLength(256)] public string? MimeType { get; set; }
[MaxLength(256)] public string? Hash { get; set; }
public long Size { get; set; }
public Instant? UploadedAt { get; set; }
[MaxLength(128)] public string? UploadedTo { get; set; }
public bool HasCompression { get; set; } = false;
/// <summary>
/// Set to true when the recycling job plans to delete the file.
/// Because the recycling job is unreliable, the file is not actually deleted until a human verifies it.
/// </summary>
public bool IsMarkedRecycle { get; set; } = false;
/// The object name under which the file is stored remotely.
/// Multiple cloud files may share the same storage id to indicate they are the same underlying file.
///
/// If the storage id is null but the uploaded-at timestamp is set, the file is an embedded file,
/// meaning it is stored on another site or is a web page (based on its MIME type).
[MaxLength(32)]
public string? StorageId { get; set; }
/// The off-site URL at which the file can be accessed.
/// This field should be null when the storage id is filled.
[MaxLength(4096)]
public string? StorageUrl { get; set; }
[JsonIgnore] public Account.Account Account { get; set; } = null!;
public Guid AccountId { get; set; }
public CloudFileReferenceObject ToReferenceObject()
{
return new CloudFileReferenceObject
{
CreatedAt = CreatedAt,
UpdatedAt = UpdatedAt,
DeletedAt = DeletedAt,
Id = Id,
Name = Name,
FileMeta = FileMeta,
UserMeta = UserMeta,
MimeType = MimeType,
Hash = Hash,
Size = Size,
HasCompression = HasCompression
};
}
public string ResourceIdentifier => $"file/{Id}";
}
public enum ContentSensitiveMark
{
Language,
SexualContent,
Violence,
Profanity,
HateSpeech,
Racism,
AdultContent,
DrugAbuse,
AlcoholAbuse,
Gambling,
SelfHarm,
ChildAbuse,
Other
}
public class CloudFileReference : ModelBase
{
public Guid Id { get; set; } = Guid.NewGuid();
[MaxLength(32)] public string FileId { get; set; } = null!;
public CloudFile File { get; set; } = null!;
[MaxLength(1024)] public string Usage { get; set; } = null!;
[MaxLength(1024)] public string ResourceId { get; set; } = null!;
/// <summary>
/// Optional expiration date for the file reference
/// </summary>
public Instant? ExpiredAt { get; set; }
}
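A sketch of how these pieces fit together when attaching an uploaded file to a resource: a CloudFileReference row records the usage while the owning entity stores an immutable jsonb snapshot via ToReferenceObject(). This mirrors the pattern the migration service applies to profiles further down; the variable names here are illustrative.

var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId);
if (file is not null)
{
await db.FileReferences.AddAsync(new CloudFileReference
{
FileId = file.Id,
File = file,
Usage = "profile.picture",
ResourceId = profile.Id.ToString()
});
profile.Picture = file.ToReferenceObject(); // jsonb snapshot of the file's stable properties
await db.SaveChangesAsync();
}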

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Common.Services;
using Microsoft.EntityFrameworkCore;
using NodaTime;

View File

@@ -101,56 +101,7 @@ public class FileReferenceMigrationService(AppDatabase db)
await db.SaveChangesAsync();
}
private async Task ScanProfiles()
{
var profiles = await db.AccountProfiles
.Where(p => p.PictureId != null || p.BackgroundId != null)
.ToListAsync();
foreach (var profile in profiles)
{
if (profile is { PictureId: not null, Picture: null })
{
var avatarFile = await db.Files.FirstOrDefaultAsync(f => f.Id == profile.PictureId);
if (avatarFile != null)
{
// Create a reference for the avatar file
var reference = new CloudFileReference
{
FileId = avatarFile.Id,
File = avatarFile,
Usage = "profile.picture",
ResourceId = profile.Id.ToString()
};
await db.FileReferences.AddAsync(reference);
profile.Picture = avatarFile.ToReferenceObject();
db.AccountProfiles.Update(profile);
}
}
// Also check for the banner if it exists
if (profile is not { BackgroundId: not null, Background: null }) continue;
var bannerFile = await db.Files.FirstOrDefaultAsync(f => f.Id == profile.BackgroundId);
if (bannerFile == null) continue;
// Create a reference for the banner file
var reference = new CloudFileReference
{
FileId = bannerFile.Id,
File = bannerFile,
Usage = "profile.background",
ResourceId = profile.Id.ToString()
};
await db.FileReferences.AddAsync(reference);
profile.Background = bannerFile.ToReferenceObject();
db.AccountProfiles.Update(profile);
}
await db.SaveChangesAsync();
}
private async Task ScanChatRooms()
{

View File

@@ -2,6 +2,7 @@ using System.Globalization;
using FFMpegCore;
using System.Security.Cryptography;
using AngleSharp.Text;
using DysonNetwork.Common.Services;
using Microsoft.EntityFrameworkCore;
using Minio;
using Minio.DataModel.Args;
@@ -79,7 +80,7 @@ public class FileService(
MimeType = contentType,
Size = fileSize,
Hash = hash,
AccountId = account.Id
AccountId = accountId
};
var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == hash);

View File

@@ -1,4 +1,4 @@
using DysonNetwork.Sphere.Account;
using EFCore.BulkExtensions;
using Quartz;

View File

@@ -26,11 +26,11 @@ public class LastActiveFlushHandler(IServiceProvider serviceProvider) : IFlushHa
// Build dictionaries so we can match session/account IDs to their new "last seen" timestamps
var sessionIdMap = distinctItems
.GroupBy(x => x.Session.Id)
.GroupBy(x => x.SessionId)
.ToDictionary(g => g.Key, g => g.Last().SeenAt);
var accountIdMap = distinctItems
.GroupBy(x => x.Account.Id)
.GroupBy(x => x.AccountId)
.ToDictionary(g => g.Key, g => g.Last().SeenAt);
// Update sessions using native EF Core ExecuteUpdateAsync

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Common.Models;
using DysonNetwork.Sphere.Chat;
using EFCore.BulkExtensions;
using Microsoft.EntityFrameworkCore;

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Common.Services;
using Microsoft.EntityFrameworkCore;
using NodaTime;
using Quartz;

View File

@@ -1,55 +0,0 @@
using NodaTime;
namespace DysonNetwork.Sphere.Storage;
/// <summary>
/// Common interface for cloud file entities that can be used in file operations.
/// This interface exposes the essential properties needed for file operations
/// and is implemented by both CloudFile and CloudFileReferenceObject.
/// </summary>
public interface ICloudFile
{
public Instant CreatedAt { get; }
public Instant UpdatedAt { get; }
public Instant? DeletedAt { get; }
/// <summary>
/// Gets the unique identifier of the cloud file.
/// </summary>
string Id { get; }
/// <summary>
/// Gets the name of the cloud file.
/// </summary>
string Name { get; }
/// <summary>
/// Gets the file metadata dictionary.
/// </summary>
Dictionary<string, object>? FileMeta { get; }
/// <summary>
/// Gets the user metadata dictionary.
/// </summary>
Dictionary<string, object>? UserMeta { get; }
/// <summary>
/// Gets the MIME type of the file.
/// </summary>
string? MimeType { get; }
/// <summary>
/// Gets the hash of the file content.
/// </summary>
string? Hash { get; }
/// <summary>
/// Gets the size of the file in bytes.
/// </summary>
long Size { get; }
/// <summary>
/// Gets whether the file has a compressed version available.
/// </summary>
bool HasCompression { get; }
}

View File

@@ -1,7 +1,7 @@
using System.Net;
using System.Text;
using System.Text.Json;
using DysonNetwork.Sphere.Permission;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using tusdotnet.Interfaces;