2 Commits

Author SHA1 Message Date
28d60c722a Directly message signaling 2025-10-19 17:34:44 +08:00
4626529eb5 ♻️ Replace LiveKit with built-in WebRTC 2025-10-19 17:30:51 +08:00
425 changed files with 7978 additions and 89101 deletions

View File

@@ -7,69 +7,27 @@ on:
workflow_dispatch:
jobs:
determine-changes:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.changes.outputs.matrix }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get changed files
id: changed-files
run: |
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.sha }} | xargs)" >> $GITHUB_OUTPUT
- name: Determine changed services
id: changes
run: |
files="${{ steps.changed-files.outputs.files }}"
matrix="{\"include\":[]}"
services=("Sphere" "Pass" "Ring" "Drive" "Develop" "Gateway" "Insight" "Zone")
images=("sphere" "pass" "ring" "drive" "develop" "gateway" "insight" "zone")
changed_services=()
for file in $files; do
if [[ "$file" == DysonNetwork.Shared/* ]]; then
changed_services=("${services[@]}")
break
fi
for i in "${!services[@]}"; do
if [[ "$file" == DysonNetwork.${services[$i]}/* ]]; then
# check if service is already in changed_services
if [[ ! " ${changed_services[@]} " =~ " ${services[$i]} " ]]; then
changed_services+=("${services[$i]}")
fi
fi
done
done
if [ ${#changed_services[@]} -gt 0 ]; then
json_objects=""
for service in "${changed_services[@]}"; do
for i in "${!services[@]}"; do
if [[ "${services[$i]}" == "$service" ]]; then
image="${images[$i]}"
break
fi
done
json_objects+="{\"service\":\"$service\",\"image\":\"$image\"},"
done
matrix="{\"include\":[${json_objects%,}]}"
fi
echo "matrix=$matrix" >> $GITHUB_OUTPUT
build-and-push:
needs: determine-changes
if: ${{ needs.determine-changes.outputs.matrix != '{"include":[]}' }}
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
strategy:
matrix: ${{ fromJson(needs.determine-changes.outputs.matrix) }}
matrix:
include:
- service: Sphere
image: sphere
- service: Pass
image: pass
- service: Ring
image: ring
- service: Drive
image: drive
- service: Develop
image: develop
- service: Gateway
image: gateway
steps:
- name: Checkout repository

View File

@@ -21,22 +21,11 @@ var developService = builder.AddProject<Projects.DysonNetwork_Develop>("develop"
.WithReference(passService)
.WithReference(ringService)
.WithReference(sphereService);
var insightService = builder.AddProject<Projects.DysonNetwork_Insight>("insight")
.WithReference(passService)
.WithReference(ringService)
.WithReference(sphereService)
.WithReference(developService);
var zoneService = builder.AddProject<Projects.DysonNetwork_Zone>("zone")
.WithReference(passService)
.WithReference(ringService)
.WithReference(sphereService)
.WithReference(developService)
.WithReference(insightService);
passService.WithReference(developService).WithReference(driveService);
List<IResourceBuilder<ProjectResource>> services =
[ringService, passService, driveService, sphereService, developService, insightService, zoneService];
[ringService, passService, driveService, sphereService, developService];
for (var idx = 0; idx < services.Count; idx++)
{

View File

@@ -1,29 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">
<Sdk Name="Aspire.AppHost.Sdk" Version="13.0.0"/>
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UserSecretsId>a68b3195-a00d-40c2-b5ed-d675356b7cde</UserSecretsId>
<RootNamespace>DysonNetwork.Control</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Aspire.Hosting.AppHost" Version="13.0.0"/>
<PackageReference Include="Aspire.Hosting.Docker" Version="13.0.0-preview.1.25560.3"/>
<PackageReference Include="Aspire.Hosting.Nats" Version="13.0.0"/>
<PackageReference Include="Aspire.Hosting.Redis" Version="13.0.0"/>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\DysonNetwork.Develop\DysonNetwork.Develop.csproj"/>
<ProjectReference Include="..\DysonNetwork.Drive\DysonNetwork.Drive.csproj"/>
<ProjectReference Include="..\DysonNetwork.Pass\DysonNetwork.Pass.csproj"/>
<ProjectReference Include="..\DysonNetwork.Ring\DysonNetwork.Ring.csproj"/>
<ProjectReference Include="..\DysonNetwork.Sphere\DysonNetwork.Sphere.csproj"/>
<ProjectReference Include="..\DysonNetwork.Gateway\DysonNetwork.Gateway.csproj"/>
<ProjectReference Include="..\DysonNetwork.Insight\DysonNetwork.Insight.csproj"/>
<ProjectReference Include="..\DysonNetwork.Zone\DysonNetwork.Zone.csproj"/>
</ItemGroup>
</Project>
<Sdk Name="Aspire.AppHost.Sdk" Version="9.5.1" />
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UserSecretsId>a68b3195-a00d-40c2-b5ed-d675356b7cde</UserSecretsId>
<RootNamespace>DysonNetwork.Control</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Aspire.Hosting.AppHost" Version="9.5.1" />
<PackageReference Include="Aspire.Hosting.Docker" Version="9.4.2-preview.1.25428.12" />
<PackageReference Include="Aspire.Hosting.Nats" Version="9.5.1" />
<PackageReference Include="Aspire.Hosting.Redis" Version="9.5.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\DysonNetwork.Develop\DysonNetwork.Develop.csproj" />
<ProjectReference Include="..\DysonNetwork.Drive\DysonNetwork.Drive.csproj" />
<ProjectReference Include="..\DysonNetwork.Pass\DysonNetwork.Pass.csproj" />
<ProjectReference Include="..\DysonNetwork.Ring\DysonNetwork.Ring.csproj" />
<ProjectReference Include="..\DysonNetwork.Sphere\DysonNetwork.Sphere.csproj" />
<ProjectReference Include="..\DysonNetwork.Gateway\DysonNetwork.Gateway.csproj" />
</ItemGroup>
</Project>

View File

@@ -5,7 +5,7 @@
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "https://localhost:17169;http://localhost:15057",
"applicationUrl": "https://localhost:17025;http://localhost:15057",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"DOTNET_ENVIRONMENT": "Development",

View File

@@ -1,357 +0,0 @@
{
"$schema": "https://json.schemastore.org/aspire-8.0.json",
"resources": {
"cache": {
"type": "container.v1",
"connectionString": "{cache.bindings.tcp.host}:{cache.bindings.tcp.port},password={cache-password.value}",
"image": "docker.io/library/redis:8.2",
"entrypoint": "/bin/sh",
"args": [
"-c",
"redis-server --requirepass $REDIS_PASSWORD"
],
"env": {
"REDIS_PASSWORD": "{cache-password.value}"
},
"bindings": {
"tcp": {
"scheme": "tcp",
"protocol": "tcp",
"transport": "tcp",
"targetPort": 6379
}
}
},
"queue": {
"type": "container.v1",
"connectionString": "nats://nats:{queue-password.value}@{queue.bindings.tcp.host}:{queue.bindings.tcp.port}",
"image": "docker.io/library/nats:2.11",
"args": [
"--user",
"nats",
"--pass",
"{queue-password.value}",
"-js"
],
"bindings": {
"tcp": {
"scheme": "tcp",
"protocol": "tcp",
"transport": "tcp",
"targetPort": 4222
}
}
},
"ring": {
"type": "project.v1",
"path": "../DysonNetwork.Ring/DysonNetwork.Ring.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "8001",
"HTTPS_PORTS": "{ring.bindings.grpc.targetPort}",
"ConnectionStrings__cache": "{cache.connectionString}",
"ConnectionStrings__queue": "{queue.connectionString}",
"GRPC_PORT": "7002",
"services__pass__http__0": "{pass.bindings.http.url}",
"services__pass__grpc__0": "{pass.bindings.grpc.url}",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "ring"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 8001
},
"grpc": {
"scheme": "https",
"protocol": "tcp",
"transport": "http",
"targetPort": 7002
}
}
},
"pass": {
"type": "project.v1",
"path": "../DysonNetwork.Pass/DysonNetwork.Pass.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "8002",
"HTTPS_PORTS": "{pass.bindings.grpc.targetPort}",
"services__ring__http__0": "{ring.bindings.http.url}",
"services__ring__grpc__0": "{ring.bindings.grpc.url}",
"services__develop__http__0": "{develop.bindings.http.url}",
"services__develop__grpc__0": "{develop.bindings.grpc.url}",
"services__drive__http__0": "{drive.bindings.http.url}",
"services__drive__grpc__0": "{drive.bindings.grpc.url}",
"ConnectionStrings__cache": "{cache.connectionString}",
"ConnectionStrings__queue": "{queue.connectionString}",
"GRPC_PORT": "7003",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "pass"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 8002
},
"grpc": {
"scheme": "https",
"protocol": "tcp",
"transport": "http",
"targetPort": 7003
}
}
},
"drive": {
"type": "project.v1",
"path": "../DysonNetwork.Drive/DysonNetwork.Drive.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "8003",
"HTTPS_PORTS": "{drive.bindings.grpc.targetPort}",
"services__pass__http__0": "{pass.bindings.http.url}",
"services__pass__grpc__0": "{pass.bindings.grpc.url}",
"services__ring__http__0": "{ring.bindings.http.url}",
"services__ring__grpc__0": "{ring.bindings.grpc.url}",
"ConnectionStrings__cache": "{cache.connectionString}",
"ConnectionStrings__queue": "{queue.connectionString}",
"GRPC_PORT": "7004",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "drive"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 8003
},
"grpc": {
"scheme": "https",
"protocol": "tcp",
"transport": "http",
"targetPort": 7004
}
}
},
"sphere": {
"type": "project.v1",
"path": "../DysonNetwork.Sphere/DysonNetwork.Sphere.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "8004",
"HTTPS_PORTS": "{sphere.bindings.grpc.targetPort}",
"services__pass__http__0": "{pass.bindings.http.url}",
"services__pass__grpc__0": "{pass.bindings.grpc.url}",
"services__ring__http__0": "{ring.bindings.http.url}",
"services__ring__grpc__0": "{ring.bindings.grpc.url}",
"services__drive__http__0": "{drive.bindings.http.url}",
"services__drive__grpc__0": "{drive.bindings.grpc.url}",
"ConnectionStrings__cache": "{cache.connectionString}",
"ConnectionStrings__queue": "{queue.connectionString}",
"GRPC_PORT": "7005",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "sphere"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 8004
},
"grpc": {
"scheme": "https",
"protocol": "tcp",
"transport": "http",
"targetPort": 7005
}
}
},
"develop": {
"type": "project.v1",
"path": "../DysonNetwork.Develop/DysonNetwork.Develop.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "8005",
"HTTPS_PORTS": "{develop.bindings.grpc.targetPort}",
"services__pass__http__0": "{pass.bindings.http.url}",
"services__pass__grpc__0": "{pass.bindings.grpc.url}",
"services__ring__http__0": "{ring.bindings.http.url}",
"services__ring__grpc__0": "{ring.bindings.grpc.url}",
"services__sphere__http__0": "{sphere.bindings.http.url}",
"services__sphere__grpc__0": "{sphere.bindings.grpc.url}",
"ConnectionStrings__cache": "{cache.connectionString}",
"ConnectionStrings__queue": "{queue.connectionString}",
"GRPC_PORT": "7006",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "develop"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 8005
},
"grpc": {
"scheme": "https",
"protocol": "tcp",
"transport": "http",
"targetPort": 7006
}
}
},
"insight": {
"type": "project.v1",
"path": "../DysonNetwork.Insight/DysonNetwork.Insight.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "8006",
"HTTPS_PORTS": "{insight.bindings.grpc.targetPort}",
"services__pass__http__0": "{pass.bindings.http.url}",
"services__pass__grpc__0": "{pass.bindings.grpc.url}",
"services__ring__http__0": "{ring.bindings.http.url}",
"services__ring__grpc__0": "{ring.bindings.grpc.url}",
"services__sphere__http__0": "{sphere.bindings.http.url}",
"services__sphere__grpc__0": "{sphere.bindings.grpc.url}",
"services__develop__http__0": "{develop.bindings.http.url}",
"services__develop__grpc__0": "{develop.bindings.grpc.url}",
"ConnectionStrings__cache": "{cache.connectionString}",
"ConnectionStrings__queue": "{queue.connectionString}",
"GRPC_PORT": "7007",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "insight"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 8006
},
"grpc": {
"scheme": "https",
"protocol": "tcp",
"transport": "http",
"targetPort": 7007
}
}
},
"gateway": {
"type": "project.v1",
"path": "../DysonNetwork.Gateway/DysonNetwork.Gateway.csproj",
"env": {
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
"OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
"ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
"HTTP_PORTS": "5001",
"services__ring__http__0": "{ring.bindings.http.url}",
"services__ring__grpc__0": "{ring.bindings.grpc.url}",
"services__pass__http__0": "{pass.bindings.http.url}",
"services__pass__grpc__0": "{pass.bindings.grpc.url}",
"services__drive__http__0": "{drive.bindings.http.url}",
"services__drive__grpc__0": "{drive.bindings.grpc.url}",
"services__sphere__http__0": "{sphere.bindings.http.url}",
"services__sphere__grpc__0": "{sphere.bindings.grpc.url}",
"services__develop__http__0": "{develop.bindings.http.url}",
"services__develop__grpc__0": "{develop.bindings.grpc.url}",
"services__insight__http__0": "{insight.bindings.http.url}",
"services__insight__grpc__0": "{insight.bindings.grpc.url}",
"OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
"OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
"OTEL_SERVICE_NAME": "gateway"
},
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 5001
}
}
},
"docker-compose": {
"error": "This resource does not support generation in the manifest."
},
"cache-password": {
"type": "parameter.v0",
"value": "{cache-password.inputs.value}",
"inputs": {
"value": {
"type": "string",
"secret": true,
"default": {
"generate": {
"minLength": 22,
"special": false
}
}
}
}
},
"queue-password": {
"type": "parameter.v0",
"value": "{queue-password.inputs.value}",
"inputs": {
"value": {
"type": "string",
"secret": true,
"default": {
"generate": {
"minLength": 22,
"special": false
}
}
}
}
},
"docker-compose-dashboard": {
"type": "container.v1",
"image": "mcr.microsoft.com/dotnet/nightly/aspire-dashboard:latest",
"bindings": {
"http": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 18888
},
"otlp-grpc": {
"scheme": "http",
"protocol": "tcp",
"transport": "http",
"targetPort": 18889
}
}
}
}
}

View File

@@ -1,8 +1,6 @@
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using NodaTime;
namespace DysonNetwork.Develop;
@@ -31,18 +29,10 @@ public class AppDatabase(
base.OnConfiguring(optionsBuilder);
}
public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
this.ApplyAuditableAndSoftDelete();
return await base.SaveChangesAsync(cancellationToken);
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
modelBuilder.ApplySoftDeleteFilters();
}
}

View File

@@ -1,10 +1,10 @@
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Develop/DysonNetwork.Develop.csproj", "DysonNetwork.Develop/"]

View File

@@ -1,19 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.7"/>
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4"/>
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
<PackageReference Include="prometheus-net.AspNetCore" Version="8.2.1"/>
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.4" />
<PackageReference Include="NodaTime" Version="3.2.2"/>
<PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0"/>
<PackageReference Include="Grpc.AspNetCore.Server" Version="2.71.0"/>

View File

@@ -19,7 +19,7 @@ public class BotAccountController(
DeveloperService ds,
DevProjectService projectService,
ILogger<BotAccountController> logger,
RemoteAccountService remoteAccounts,
AccountClientHelper accounts,
BotAccountReceiverService.BotAccountReceiverServiceClient accountsReceiver
)
: ControllerBase
@@ -222,7 +222,7 @@ public class BotAccountController(
if (bot is null || bot.ProjectId != projectId)
return NotFound("Bot not found");
var botAccount = await remoteAccounts.GetBotAccount(bot.Id);
var botAccount = await accounts.GetBotAccount(bot.Id);
if (request.Name is not null) botAccount.Name = request.Name;
if (request.Nick is not null) botAccount.Nick = request.Nick;

View File

@@ -10,7 +10,7 @@ namespace DysonNetwork.Develop.Identity;
public class BotAccountService(
AppDatabase db,
BotAccountReceiverService.BotAccountReceiverServiceClient accountReceiver,
RemoteAccountService remoteAccounts
AccountClientHelper accounts
)
{
public async Task<SnBotAccount?> GetBotByIdAsync(Guid id)
@@ -158,7 +158,7 @@ public class BotAccountService(
public async Task<List<SnBotAccount>> LoadBotsAccountAsync(List<SnBotAccount> bots)
{
var automatedIds = bots.Select(b => b.Id).ToList();
var data = await remoteAccounts.GetBotAccountBatch(automatedIds);
var data = await accounts.GetBotAccountBatch(automatedIds);
foreach (var bot in bots)
{

View File

@@ -69,7 +69,7 @@ public class DeveloperController(
[HttpPost("{name}/enroll")]
[Authorize]
[AskPermission("developers.create")]
[RequiredPermission("global", "developers.create")]
public async Task<ActionResult<SnDeveloper>> EnrollDeveloperProgram(string name)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
@@ -79,7 +79,7 @@ public class DeveloperController(
try
{
var pubResponse = await ps.GetPublisherAsync(new GetPublisherRequest { Name = name });
pub = SnPublisher.FromProtoValue(pubResponse.Publisher);
pub = SnPublisher.FromProto(pubResponse.Publisher);
} catch (RpcException ex)
{
return NotFound(ex.Status.Detail);

View File

@@ -13,7 +13,7 @@ public class DeveloperService(
public async Task<SnDeveloper> LoadDeveloperPublisher(SnDeveloper developer)
{
var pubResponse = await ps.GetPublisherAsync(new GetPublisherRequest { Id = developer.PublisherId.ToString() });
developer.Publisher = SnPublisher.FromProtoValue(pubResponse.Publisher);
developer.Publisher = SnPublisher.FromProto(pubResponse.Publisher);
return developer;
}
@@ -25,7 +25,7 @@ public class DeveloperService(
var pubRequest = new GetPublisherBatchRequest();
pubIds.ForEach(x => pubRequest.Ids.Add(x.ToString()));
var pubResponse = await ps.GetPublisherBatchAsync(pubRequest);
var pubs = pubResponse.Publishers.ToDictionary(p => Guid.Parse(p.Id), SnPublisher.FromProtoValue);
var pubs = pubResponse.Publishers.ToDictionary(p => Guid.Parse(p.Id), SnPublisher.FromProto);
return enumerable.Select(d =>
{

View File

@@ -14,7 +14,7 @@ builder.ConfigureAppKestrel(builder.Configuration);
builder.Services.AddAppServices(builder.Configuration);
builder.Services.AddAppAuthentication();
builder.Services.AddDysonAuth();
builder.Services.AddSphereService();
builder.Services.AddPublisherService();
builder.Services.AddAccountService();
builder.Services.AddDriveService();
@@ -35,6 +35,6 @@ using (var scope = app.Services.CreateScope())
app.ConfigureAppMiddleware(builder.Configuration);
app.UseSwaggerManifest("DysonNetwork.Develop");
app.UseSwaggerManifest();
app.Run();

View File

@@ -4,7 +4,11 @@ using DysonNetwork.Shared.Models;
namespace DysonNetwork.Develop.Project;
public class DevProjectService(AppDatabase db )
public class DevProjectService(
AppDatabase db,
FileReferenceService.FileReferenceServiceClient fileRefs,
FileService.FileServiceClient files
)
{
public async Task<SnDevProject> CreateProjectAsync(
SnDeveloper developer,
@@ -21,14 +25,14 @@ public class DevProjectService(AppDatabase db )
db.DevProjects.Add(project);
await db.SaveChangesAsync();
return project;
}
public async Task<SnDevProject?> GetProjectAsync(Guid id, Guid? developerId = null)
{
var query = db.DevProjects.AsQueryable();
if (developerId.HasValue)
{
query = query.Where(p => p.DeveloperId == developerId.Value);
@@ -70,4 +74,4 @@ public class DevProjectService(AppDatabase db )
await db.SaveChangesAsync();
return true;
}
}
}

View File

@@ -1,6 +1,7 @@
using DysonNetwork.Develop.Identity;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Http;
using Prometheus;
namespace DysonNetwork.Develop.Startup;
@@ -8,6 +9,7 @@ public static class ApplicationConfiguration
{
public static WebApplication ConfigureAppMiddleware(this WebApplication app, IConfiguration configuration)
{
app.MapMetrics();
app.MapOpenApi();
app.UseRequestLocalization();
@@ -16,12 +18,11 @@ public static class ApplicationConfiguration
app.UseAuthentication();
app.UseAuthorization();
app.UseMiddleware<RemotePermissionMiddleware>();
app.UseMiddleware<PermissionMiddleware>();
app.MapControllers();
app.MapGrpcService<CustomAppServiceGrpc>();
app.MapGrpcReflectionService();
return app;
}

View File

@@ -16,7 +16,9 @@ public static class ServiceCollectionExtensions
services.AddLocalization();
services.AddDbContext<AppDatabase>();
services.AddSingleton<IClock>(SystemClock.Instance);
services.AddHttpContextAccessor();
services.AddSingleton<ICacheService, CacheServiceRedis>();
services.AddHttpClient();
@@ -30,7 +32,6 @@ public static class ServiceCollectionExtensions
});
services.AddGrpc(options => { options.EnableDetailedErrors = true; });
services.AddGrpcReflection();
services.Configure<RequestLocalizationOptions>(options =>
{

View File

@@ -1,28 +1,26 @@
{
"Debug": true,
"BaseUrl": "http://localhost:5071",
"SiteUrl": "https://solian.app",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"ConnectionStrings": {
"App": "Host=localhost;Port=5432;Database=dyson_develop;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60"
},
"KnownProxies": [
"127.0.0.1",
"::1"
],
"Swagger": {
"PublicBasePath": "/develop"
},
"Cache": {
"Serializer": "MessagePack"
},
"Etcd": {
"Insecure": true
"Debug": true,
"BaseUrl": "http://localhost:5071",
"SiteUrl": "https://solian.app",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"ConnectionStrings": {
"App": "Host=localhost;Port=5432;Database=dyson_develop;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60"
},
"KnownProxies": ["127.0.0.1", "::1"],
"Swagger": {
"PublicBasePath": "/develop"
},
"Etcd": {
"Insecure": true
},
"Service": {
"Name": "DysonNetwork.Develop",
"Url": "https://localhost:7192"
}
}

View File

@@ -1,14 +1,12 @@
using System.Linq.Expressions;
using System.Reflection;
using DysonNetwork.Drive.Billing;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Drive.Storage.Model;
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore.Query;
using NodaTime;
using Quartz;
using TaskStatus = DysonNetwork.Drive.Storage.Model.TaskStatus;
namespace DysonNetwork.Drive;
@@ -23,11 +21,7 @@ public class AppDatabase(
public DbSet<QuotaRecord> QuotaRecords { get; set; } = null!;
public DbSet<SnCloudFile> Files { get; set; } = null!;
public DbSet<SnCloudFileReference> FileReferences { get; set; } = null!;
public DbSet<SnCloudFileIndex> FileIndexes { get; set; }
public DbSet<PersistentTask> Tasks { get; set; } = null!;
public DbSet<PersistentUploadTask> UploadTasks { get; set; } = null!; // Backward compatibility
public DbSet<CloudFileReference> FileReferences { get; set; } = null!;
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
@@ -45,12 +39,52 @@ public class AppDatabase(
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
modelBuilder.ApplySoftDeleteFilters();
// Automatically apply the soft-delete filter to all entities inheriting ModelBase
foreach (var entityType in modelBuilder.Model.GetEntityTypes())
{
if (!typeof(ModelBase).IsAssignableFrom(entityType.ClrType)) continue;
var method = typeof(AppDatabase)
.GetMethod(nameof(SetSoftDeleteFilter),
BindingFlags.NonPublic | BindingFlags.Static)!
.MakeGenericMethod(entityType.ClrType);
method.Invoke(null, [modelBuilder]);
}
}
private static void SetSoftDeleteFilter<TEntity>(ModelBuilder modelBuilder)
where TEntity : ModelBase
{
modelBuilder.Entity<TEntity>().HasQueryFilter(e => e.DeletedAt == null);
}
public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
this.ApplyAuditableAndSoftDelete();
var now = SystemClock.Instance.GetCurrentInstant();
foreach (var entry in ChangeTracker.Entries<ModelBase>())
{
switch (entry.State)
{
case EntityState.Added:
entry.Entity.CreatedAt = now;
entry.Entity.UpdatedAt = now;
break;
case EntityState.Modified:
entry.Entity.UpdatedAt = now;
break;
case EntityState.Deleted:
entry.State = EntityState.Modified;
entry.Entity.DeletedAt = now;
break;
case EntityState.Detached:
case EntityState.Unchanged:
default:
break;
}
}
return await base.SaveChangesAsync(cancellationToken);
}
}
@@ -102,45 +136,6 @@ public class AppDatabaseRecyclingJob(AppDatabase db, ILogger<AppDatabaseRecyclin
}
}
public class PersistentTaskCleanupJob(
IServiceProvider serviceProvider,
ILogger<PersistentTaskCleanupJob> logger
) : IJob
{
public async Task Execute(IJobExecutionContext context)
{
logger.LogInformation("Cleaning up stale persistent tasks...");
// Get the PersistentTaskService from DI
using var scope = serviceProvider.CreateScope();
var persistentTaskService = scope.ServiceProvider.GetService(typeof(PersistentTaskService));
if (persistentTaskService is PersistentTaskService service)
{
// Clean up tasks for all users (you might want to add user-specific logic here)
// For now, we'll clean up tasks older than 30 days for all users
var cutoff = SystemClock.Instance.GetCurrentInstant() - Duration.FromDays(30);
var tasksToClean = await service.GetUserTasksAsync(
Guid.Empty, // This would need to be adjusted for multi-user cleanup
status: TaskStatus.Completed | TaskStatus.Failed | TaskStatus.Cancelled | TaskStatus.Expired
);
var cleanedCount = 0;
foreach (var task in tasksToClean.Items.Where(t => t.UpdatedAt < cutoff))
{
await service.CancelTaskAsync(task.TaskId); // Or implement a proper cleanup method
cleanedCount++;
}
logger.LogInformation("Cleaned up {Count} stale persistent tasks", cleanedCount);
}
else
{
logger.LogWarning("PersistentTaskService not found in DI container");
}
}
}
public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
{
public AppDatabase CreateDbContext(string[] args)
@@ -154,3 +149,35 @@ public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
return new AppDatabase(optionsBuilder.Options, configuration);
}
}
public static class OptionalQueryExtensions
{
public static IQueryable<T> If<T>(
this IQueryable<T> source,
bool condition,
Func<IQueryable<T>, IQueryable<T>> transform
)
{
return condition ? transform(source) : source;
}
public static IQueryable<T> If<T, TP>(
this IIncludableQueryable<T, TP> source,
bool condition,
Func<IIncludableQueryable<T, TP>, IQueryable<T>> transform
)
where T : class
{
return condition ? transform(source) : source;
}
public static IQueryable<T> If<T, TP>(
this IIncludableQueryable<T, IEnumerable<TP>> source,
bool condition,
Func<IIncludableQueryable<T, IEnumerable<TP>>, IQueryable<T>> transform
)
where T : class
{
return condition ? transform(source) : source;
}
}

View File

@@ -1,4 +1,4 @@
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
@@ -20,7 +20,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
USER $APP_UID
# Stage 2: Build .NET application
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Drive/DysonNetwork.Drive.csproj", "DysonNetwork.Drive/"]

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
@@ -10,35 +10,55 @@
<ItemGroup>
<PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
<PackageReference Include="BlurHashSharp.SkiaSharp" Version="1.3.4" />
<PackageReference Include="FFMpegCore" Version="5.4.0" />
<PackageReference Include="FFMpegCore" Version="5.2.0" />
<PackageReference Include="Grpc.AspNetCore.Server" Version="2.71.0" />
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
<PrivateAssets>all</PrivateAssets>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.7">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="MimeKit" Version="4.14.0" />
<PackageReference Include="MimeKit" Version="4.13.0" />
<PackageReference Include="MimeTypes" Version="2.5.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Minio" Version="7.0.0" />
<PackageReference Include="Minio" Version="6.0.5" />
<PackageReference Include="Nanoid" Version="3.1.0" />
<PackageReference Include="Nerdbank.GitVersioning" Version="3.7.115">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="NetVips" Version="3.1.0" />
<PackageReference Include="NetVips.Native.linux-x64" Version="8.17.3" />
<PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.3" />
<PackageReference Include="NetVips.Native.linux-x64" Version="8.17.1" />
<PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.1" />
<PackageReference Include="NodaTime" Version="3.2.2" />
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0" />
<PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
<PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0" />
<PackageReference Include="Quartz" Version="3.15.1" />
<PackageReference Include="Quartz.AspNetCore" Version="3.15.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
<PackageReference Include="EFCore.BulkExtensions.PostgreSql" Version="9.0.2" />
<!-- Pin SkiaSharp at 2.88.9 because BlurHash requires this specific version -->
<PackageReference Include="SkiaSharp" Version="2.88.9" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
<PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0" />
<PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" />
<PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.12.0" />
<PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.12.0" />
<PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" />
<PackageReference Include="prometheus-net.AspNetCore" Version="8.2.1" />
<PackageReference Include="prometheus-net.AspNetCore.HealthChecks" Version="8.2.1" />
<PackageReference Include="prometheus-net.DotNetRuntime" Version="4.4.1" />
<PackageReference Include="prometheus-net.EntityFramework" Version="0.9.5" />
<PackageReference Include="prometheus-net.SystemMetrics" Version="3.1.0" />
<PackageReference Include="Quartz" Version="3.14.0" />
<PackageReference Include="Quartz.AspNetCore" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
<PackageReference Include="EFCore.BulkExtensions" Version="9.0.1" />
<PackageReference Include="EFCore.BulkExtensions.PostgreSql" Version="9.0.1" />
<PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
<PackageReference Include="SkiaSharp.NativeAssets.Linux" Version="2.88.9" />
<PackageReference Include="SkiaSharp.NativeAssets.Linux.NoDependencies" Version="2.88.9" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.4" />
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="9.0.4" />
<PackageReference Include="tusdotnet" Version="2.10.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,585 +0,0 @@
using System.ComponentModel.DataAnnotations;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Http;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DysonNetwork.Drive.Index;
[ApiController]
[Route("/api/index")]
[Authorize]
public class FileIndexController(
FileIndexService fileIndexService,
AppDatabase db,
ILogger<FileIndexController> logger
) : ControllerBase
{
/// <summary>
/// Gets files in a specific path for the current user
/// </summary>
/// <param name="path">The path to browse (defaults to root "/")</param>
/// <param name="query">Optional query to filter files by name</param>
/// <param name="order">The field to order by (date, size, name - defaults to date)</param>
/// <param name="orderDesc">Whether to order in descending order (defaults to true)</param>
/// <returns>List of files in the specified path</returns>
[HttpGet("browse")]
public async Task<IActionResult> BrowseFiles(
[FromQuery] string path = "/",
[FromQuery] string? query = null,
[FromQuery] string order = "date",
[FromQuery] bool orderDesc = true
)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
var fileIndexes = await fileIndexService.GetByPathAsync(accountId, path);
if (!string.IsNullOrWhiteSpace(query))
{
fileIndexes = fileIndexes
.Where(fi => fi.File.Name.Contains(query, StringComparison.OrdinalIgnoreCase))
.ToList();
}
// Apply sorting
fileIndexes = order.ToLower() switch
{
"name" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Name).ToList()
: fileIndexes.OrderBy(fi => fi.File.Name).ToList(),
"size" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Size).ToList()
: fileIndexes.OrderBy(fi => fi.File.Size).ToList(),
_ => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.CreatedAt).ToList()
: fileIndexes.OrderBy(fi => fi.File.CreatedAt).ToList()
};
// Get all file indexes for this account to extract child folders
var allFileIndexes = await fileIndexService.GetByAccountIdAsync(accountId);
// Extract unique child folder paths
var childFolders = ExtractChildFolders(allFileIndexes, path);
return Ok(new
{
Path = path,
Files = fileIndexes,
Folders = childFolders,
TotalCount = fileIndexes.Count
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to browse files for account {AccountId} at path {Path}", accountId, path);
return new ObjectResult(new ApiError
{
Code = "BROWSE_FAILED",
Message = "Failed to browse files",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Extracts unique child folder paths from all file indexes for a given parent path
/// </summary>
/// <param name="allFileIndexes">All file indexes for the account</param>
/// <param name="parentPath">The parent path to find children for</param>
/// <returns>List of unique child folder names</returns>
private List<string> ExtractChildFolders(List<SnCloudFileIndex> allFileIndexes, string parentPath)
{
var normalizedParentPath = FileIndexService.NormalizePath(parentPath);
var childFolders = new HashSet<string>();
foreach (var index in allFileIndexes)
{
var normalizedIndexPath = FileIndexService.NormalizePath(index.Path);
// Check if this path is a direct child of the parent path
if (normalizedIndexPath.StartsWith(normalizedParentPath) &&
normalizedIndexPath != normalizedParentPath)
{
// Remove the parent path prefix to get the relative path
var relativePath = normalizedIndexPath.Substring(normalizedParentPath.Length);
// Extract the first folder name (direct child)
var firstSlashIndex = relativePath.IndexOf('/');
if (firstSlashIndex > 0)
{
var folderName = relativePath.Substring(0, firstSlashIndex);
childFolders.Add(folderName);
}
}
}
return childFolders.OrderBy(f => f).ToList();
}
/// <summary>
/// Gets all files for the current user (across all paths)
/// </summary>
/// <param name="query">Optional query to filter files by name</param>
/// <param name="order">The field to order by (date, size, name - defaults to date)</param>
/// <param name="orderDesc">Whether to order in descending order (defaults to true)</param>
/// <returns>List of all files for the user</returns>
[HttpGet("all")]
public async Task<IActionResult> GetAllFiles(
[FromQuery] string? query = null,
[FromQuery] string order = "date",
[FromQuery] bool orderDesc = true
)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
var fileIndexes = await fileIndexService.GetByAccountIdAsync(accountId);
if (!string.IsNullOrWhiteSpace(query))
{
fileIndexes = fileIndexes
.Where(fi => fi.File.Name.Contains(query, StringComparison.OrdinalIgnoreCase))
.ToList();
}
// Apply sorting
fileIndexes = order.ToLower() switch
{
"name" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Name).ToList()
: fileIndexes.OrderBy(fi => fi.File.Name).ToList(),
"size" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Size).ToList()
: fileIndexes.OrderBy(fi => fi.File.Size).ToList(),
_ => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.CreatedAt).ToList()
: fileIndexes.OrderBy(fi => fi.File.CreatedAt).ToList()
};
return Ok(new
{
Files = fileIndexes,
TotalCount = fileIndexes.Count()
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to get all files for account {AccountId}", accountId);
return new ObjectResult(new ApiError
{
Code = "GET_ALL_FAILED",
Message = "Failed to get files",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Gets files that have not been indexed for the current user.
/// </summary>
/// <param name="recycled">Shows recycled files or not</param>
/// <param name="offset">The number of files to skip</param>
/// <param name="take">The number of files to return</param>
/// <param name="pool">The pool ID of those files</param>
/// <param name="query">Optional query to filter files by name</param>
/// <param name="order">The field to order by (date, size, name - defaults to date)</param>
/// <param name="orderDesc">Whether to order in descending order (defaults to true)</param>
/// <returns>List of unindexed files</returns>
[HttpGet("unindexed")]
public async Task<IActionResult> GetUnindexedFiles(
[FromQuery] Guid? pool,
[FromQuery] bool recycled = false,
[FromQuery] int offset = 0,
[FromQuery] int take = 20,
[FromQuery] string? query = null,
[FromQuery] string order = "date",
[FromQuery] bool orderDesc = true
)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
var filesQuery = db.Files
.Where(f => f.AccountId == accountId
&& f.IsMarkedRecycle == recycled
&& !db.FileIndexes.Any(fi => fi.FileId == f.Id && fi.AccountId == accountId)
)
.AsQueryable();
// Apply sorting
filesQuery = order.ToLower() switch
{
"name" => orderDesc ? filesQuery.OrderByDescending(f => f.Name)
: filesQuery.OrderBy(f => f.Name),
"size" => orderDesc ? filesQuery.OrderByDescending(f => f.Size)
: filesQuery.OrderBy(f => f.Size),
_ => orderDesc ? filesQuery.OrderByDescending(f => f.CreatedAt)
: filesQuery.OrderBy(f => f.CreatedAt)
};
if (pool.HasValue) filesQuery = filesQuery.Where(f => f.PoolId == pool);
if (!string.IsNullOrWhiteSpace(query))
{
filesQuery = filesQuery.Where(f => f.Name.Contains(query));
}
var totalCount = await filesQuery.CountAsync();
Response.Headers.Append("X-Total", totalCount.ToString());
var unindexedFiles = await filesQuery
.Skip(offset)
.Take(take)
.ToListAsync();
return Ok(unindexedFiles);
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to get unindexed files for account {AccountId}", accountId);
return new ObjectResult(new ApiError
{
Code = "GET_UNINDEXED_FAILED",
Message = "Failed to get unindexed files",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Moves a file to a new path
/// </summary>
/// <param name="indexId">The file index ID</param>
/// <param name="newPath">The new path</param>
/// <returns>The updated file index</returns>
[HttpPost("move/{indexId}")]
public async Task<IActionResult> MoveFile(Guid indexId, [FromBody] MoveFileRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
// Verify ownership
var existingIndex = await db.FileIndexes
.Include(fi => fi.File)
.FirstOrDefaultAsync(fi => fi.Id == indexId && fi.AccountId == accountId);
if (existingIndex == null)
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
var updatedIndex = await fileIndexService.UpdateAsync(indexId, request.NewPath);
if (updatedIndex == null)
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
return Ok(new
{
updatedIndex.FileId,
IndexId = updatedIndex.Id,
OldPath = existingIndex.Path,
NewPath = updatedIndex.Path,
Message = "File moved successfully"
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to move file index {IndexId} for account {AccountId}", indexId, accountId);
return new ObjectResult(new ApiError
{
Code = "MOVE_FAILED",
Message = "Failed to move file",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Removes a file index (does not delete the actual file by default)
/// </summary>
/// <param name="indexId">The file index ID</param>
/// <param name="deleteFile">Whether to also delete the actual file data</param>
/// <returns>Success message</returns>
[HttpDelete("remove/{indexId}")]
public async Task<IActionResult> RemoveFileIndex(Guid indexId, [FromQuery] bool deleteFile = false)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
// Verify ownership
var existingIndex = await db.FileIndexes
.Include(fi => fi.File)
.FirstOrDefaultAsync(fi => fi.Id == indexId && fi.AccountId == accountId);
if (existingIndex == null)
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
var fileId = existingIndex.FileId;
var fileName = existingIndex.File.Name;
var filePath = existingIndex.Path;
// Remove the index
var removed = await fileIndexService.RemoveAsync(indexId);
if (!removed)
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
// Optionally delete the actual file
if (!deleteFile)
return Ok(new
{
Message = deleteFile
? "File index and file data removed successfully"
: "File index removed successfully",
FileId = fileId,
FileName = fileName,
Path = filePath,
FileDataDeleted = deleteFile
});
try
{
// Check if there are any other indexes for this file
var remainingIndexes = await fileIndexService.GetByFileIdAsync(fileId);
if (remainingIndexes.Count == 0)
{
// No other indexes exist, safe to delete the file
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId.ToString());
if (file != null)
{
db.Files.Remove(file);
await db.SaveChangesAsync();
logger.LogInformation("Deleted file {FileId} ({FileName}) as requested", fileId, fileName);
}
}
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to delete file {FileId} while removing index", fileId);
// Continue even if file deletion fails
}
return Ok(new
{
Message = deleteFile
? "File index and file data removed successfully"
: "File index removed successfully",
FileId = fileId,
FileName = fileName,
Path = filePath,
FileDataDeleted = deleteFile
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to remove file index {IndexId} for account {AccountId}", indexId, accountId);
return new ObjectResult(new ApiError
{
Code = "REMOVE_FAILED",
Message = "Failed to remove file",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Removes all file indexes in a specific path
/// </summary>
/// <param name="path">The path to clear</param>
/// <param name="deleteFiles">Whether to also delete the actual file data</param>
/// <returns>Success message with count of removed items</returns>
[HttpDelete("clear-path")]
public async Task<IActionResult> ClearPath([FromQuery] string path = "/", [FromQuery] bool deleteFiles = false)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
var removedCount = await fileIndexService.RemoveByPathAsync(accountId, path);
if (!deleteFiles || removedCount <= 0)
return Ok(new
{
Message = deleteFiles
? $"Cleared {removedCount} file indexes from path and deleted orphaned files"
: $"Cleared {removedCount} file indexes from path",
Path = path,
RemovedCount = removedCount,
FilesDeleted = deleteFiles
});
// Get the files that were in this path and check if they have other indexes
var filesInPath = await fileIndexService.GetByPathAsync(accountId, path);
var fileIdsToCheck = filesInPath.Select(fi => fi.FileId).Distinct().ToList();
foreach (var fileId in fileIdsToCheck)
{
var remainingIndexes = await fileIndexService.GetByFileIdAsync(fileId);
if (remainingIndexes.Count != 0) continue;
// No other indexes exist, safe to delete the file
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId.ToString());
if (file == null) continue;
db.Files.Remove(file);
logger.LogInformation("Deleted orphaned file {FileId} after clearing path {Path}", fileId, path);
}
await db.SaveChangesAsync();
return Ok(new
{
Message = deleteFiles
? $"Cleared {removedCount} file indexes from path and deleted orphaned files"
: $"Cleared {removedCount} file indexes from path",
Path = path,
RemovedCount = removedCount,
FilesDeleted = deleteFiles
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to clear path {Path} for account {AccountId}", path, accountId);
return new ObjectResult(new ApiError
{
Code = "CLEAR_PATH_FAILED",
Message = "Failed to clear path",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Creates a new file index (useful for adding existing files to a path)
/// </summary>
/// <param name="request">The create index request</param>
/// <returns>The created file index</returns>
[HttpPost("create")]
public async Task<IActionResult> CreateFileIndex([FromBody] CreateFileIndexRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
// Verify the file exists and belongs to the user
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == request.FileId);
if (file == null)
return new ObjectResult(ApiError.NotFound("File")) { StatusCode = 404 };
if (file.AccountId != accountId)
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
// Check if index already exists for this file and path
var existingIndex = await db.FileIndexes
.FirstOrDefaultAsync(fi =>
fi.FileId == request.FileId && fi.Path == request.Path && fi.AccountId == accountId);
if (existingIndex != null)
return new ObjectResult(ApiError.Validation(new Dictionary<string, string[]>
{
{ "fileId", ["File index already exists for this path"] }
})) { StatusCode = 400 };
var fileIndex = await fileIndexService.CreateAsync(request.Path, request.FileId, accountId);
return Ok(new
{
IndexId = fileIndex.Id,
fileIndex.FileId,
fileIndex.Path,
Message = "File index created successfully"
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to create file index for file {FileId} at path {Path} for account {AccountId}",
request.FileId, request.Path, accountId);
return new ObjectResult(new ApiError
{
Code = "CREATE_INDEX_FAILED",
Message = "Failed to create file index",
Status = 500
}) { StatusCode = 500 };
}
}
/// <summary>
/// Searches for files by name or metadata
/// </summary>
/// <param name="query">The search query</param>
/// <param name="path">Optional path to limit search to</param>
/// <returns>Matching files</returns>
[HttpGet("search")]
public async Task<IActionResult> SearchFiles([FromQuery] string query, [FromQuery] string? path = null)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
try
{
// Build the query with all conditions at once
var searchTerm = query.ToLower();
var fileIndexes = await db.FileIndexes
.Where(fi => fi.AccountId == accountId)
.Include(fi => fi.File)
.Where(fi =>
(string.IsNullOrEmpty(path) || fi.Path == FileIndexService.NormalizePath(path)) &&
(fi.File.Name.ToLower().Contains(searchTerm) ||
(fi.File.Description != null && fi.File.Description.ToLower().Contains(searchTerm)) ||
(fi.File.MimeType != null && fi.File.MimeType.ToLower().Contains(searchTerm))))
.ToListAsync();
return Ok(new
{
Query = query,
Path = path,
Results = fileIndexes,
TotalCount = fileIndexes.Count()
});
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to search files for account {AccountId} with query {Query}", accountId, query);
return new ObjectResult(new ApiError
{
Code = "SEARCH_FAILED",
Message = "Failed to search files",
Status = 500
}) { StatusCode = 500 };
}
}
}
public class MoveFileRequest
{
public string NewPath { get; set; } = null!;
}
public class CreateFileIndexRequest
{
[MaxLength(32)] public string FileId { get; set; } = null!;
public string Path { get; set; } = null!;
}

View File

@@ -1,197 +0,0 @@
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
namespace DysonNetwork.Drive.Index;
public class FileIndexService(AppDatabase db)
{
/// <summary>
/// Creates a new file index entry
/// </summary>
/// <param name="path">The parent folder path with a trailing slash</param>
/// <param name="fileId">The file ID</param>
/// <param name="accountId">The account ID</param>
/// <returns>The created file index</returns>
public async Task<SnCloudFileIndex> CreateAsync(string path, string fileId, Guid accountId)
{
// Ensure a path has a trailing slash and is query-safe
var normalizedPath = NormalizePath(path);
// Check if an index for this file already exists in the same path for this account
var existingFileIndex = await db.FileIndexes
.FirstOrDefaultAsync(fi => fi.AccountId == accountId && fi.Path == normalizedPath && fi.FileId == fileId);
if (existingFileIndex != null)
{
throw new InvalidOperationException(
$"A file with ID '{fileId}' already exists in path '{normalizedPath}' for account '{accountId}'");
}
var fileIndex = new SnCloudFileIndex
{
Path = normalizedPath,
FileId = fileId,
AccountId = accountId
};
db.FileIndexes.Add(fileIndex);
await db.SaveChangesAsync();
return fileIndex;
}
/// <summary>
/// Updates an existing file index entry by removing the old one and creating a new one
/// </summary>
/// <param name="id">The file index ID</param>
/// <param name="newPath">The new parent folder path with trailing slash</param>
/// <returns>The updated file index</returns>
public async Task<SnCloudFileIndex?> UpdateAsync(Guid id, string newPath)
{
var fileIndex = await db.FileIndexes.FindAsync(id);
if (fileIndex == null)
return null;
// Since properties are init-only, we need to remove the old index and create a new one
db.FileIndexes.Remove(fileIndex);
var newFileIndex = new SnCloudFileIndex
{
Path = NormalizePath(newPath),
FileId = fileIndex.FileId,
AccountId = fileIndex.AccountId
};
db.FileIndexes.Add(newFileIndex);
await db.SaveChangesAsync();
return newFileIndex;
}
/// <summary>
/// Removes a file index entry by ID
/// </summary>
/// <param name="id">The file index ID</param>
/// <returns>True if the index was found and removed, false otherwise</returns>
public async Task<bool> RemoveAsync(Guid id)
{
var fileIndex = await db.FileIndexes.FindAsync(id);
if (fileIndex == null)
return false;
db.FileIndexes.Remove(fileIndex);
await db.SaveChangesAsync();
return true;
}
/// <summary>
/// Removes file index entries by file ID
/// </summary>
/// <param name="fileId">The file ID</param>
/// <returns>The number of indexes removed</returns>
public async Task<int> RemoveByFileIdAsync(string fileId)
{
var indexes = await db.FileIndexes
.Where(fi => fi.FileId == fileId)
.ToListAsync();
if (indexes.Count == 0)
return 0;
db.FileIndexes.RemoveRange(indexes);
await db.SaveChangesAsync();
return indexes.Count;
}
/// <summary>
/// Removes file index entries by account ID and path
/// </summary>
/// <param name="accountId">The account ID</param>
/// <param name="path">The parent folder path</param>
/// <returns>The number of indexes removed</returns>
public async Task<int> RemoveByPathAsync(Guid accountId, string path)
{
var normalizedPath = NormalizePath(path);
var indexes = await db.FileIndexes
.Where(fi => fi.AccountId == accountId && fi.Path == normalizedPath)
.ToListAsync();
if (indexes.Count == 0)
return 0;
db.FileIndexes.RemoveRange(indexes);
await db.SaveChangesAsync();
return indexes.Count;
}
/// <summary>
/// Gets file indexes by account ID and path
/// </summary>
/// <param name="accountId">The account ID</param>
/// <param name="path">The parent folder path</param>
/// <returns>List of file indexes</returns>
public async Task<List<SnCloudFileIndex>> GetByPathAsync(Guid accountId, string path)
{
var normalizedPath = NormalizePath(path);
return await db.FileIndexes
.Where(fi => fi.AccountId == accountId && fi.Path == normalizedPath)
.Include(fi => fi.File)
.ToListAsync();
}
/// <summary>
/// Gets file indexes by file ID
/// </summary>
/// <param name="fileId">The file ID</param>
/// <returns>List of file indexes</returns>
public async Task<List<SnCloudFileIndex>> GetByFileIdAsync(string fileId)
{
return await db.FileIndexes
.Where(fi => fi.FileId == fileId)
.Include(fi => fi.File)
.ToListAsync();
}
/// <summary>
/// Gets all file indexes for an account
/// </summary>
/// <param name="accountId">The account ID</param>
/// <returns>List of file indexes</returns>
public async Task<List<SnCloudFileIndex>> GetByAccountIdAsync(Guid accountId)
{
return await db.FileIndexes
.Where(fi => fi.AccountId == accountId)
.Include(fi => fi.File)
.ToListAsync();
}
/// <summary>
/// Normalizes the path to ensure it has a trailing slash and is query-safe
/// </summary>
/// <param name="path">The original path</param>
/// <returns>The normalized path</returns>
public static string NormalizePath(string path)
{
if (string.IsNullOrEmpty(path))
return "/";
// Ensure the path starts with a slash
if (!path.StartsWith('/'))
path = "/" + path;
// Ensure the path ends with a slash (unless it's just the root)
if (path != "/" && !path.EndsWith('/'))
path += "/";
// Make path query-safe by removing problematic characters
// This is a basic implementation - you might want to add more robust validation
path = path.Replace("%", "").Replace("'", "").Replace("\"", "");
return path;
}
}

View File

@@ -1,341 +0,0 @@
# File Indexing System Documentation
## Overview
The File Indexing System provides a hierarchical file organization layer on top of the existing file storage system in DysonNetwork Drive. It allows users to organize their files in folders and paths while maintaining the underlying file storage capabilities.
When calling these endpoints through the gateway, replace the `/api` prefix with `/drive` (for example, `/api/index/browse` becomes `/drive/index/browse`).
The gateway also converts argument names to snake_case, so a query parameter such as `deleteFile` is sent as `delete_file`.
## Architecture
### Core Components
1. **SnCloudFileIndex Model** - Represents the file-to-path mapping
2. **FileIndexService** - Business logic for file index operations
3. **FileIndexController** - REST API endpoints for file management
4. **FileUploadController Integration** - Automatic index creation during upload
### Database Schema
```sql
-- File indexes table (matches the AddFileIndex migration later in this diff)
CREATE TABLE file_indexes (
id uuid NOT NULL,
path character varying(8192) NOT NULL,
file_id character varying(32) NOT NULL,
account_id uuid NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
deleted_at timestamp with time zone,
CONSTRAINT pk_file_indexes PRIMARY KEY (id),
CONSTRAINT fk_file_indexes_files_file_id FOREIGN KEY (file_id) REFERENCES files (id) ON DELETE CASCADE
);
CREATE INDEX ix_file_indexes_file_id ON file_indexes (file_id);
CREATE INDEX ix_file_indexes_path_account_id ON file_indexes (path, account_id);
```
## API Endpoints
### Browse Files
**GET** `/api/index/browse?path=/documents/`
Browse files in a specific path.
**Query Parameters:**
- `path` (optional, default: "/") - The path to browse
**Response:**
```json
{
"path": "/documents/",
"files": [
{
"id": "guid",
"path": "/documents/",
"fileId": "guid",
"accountId": "guid",
"createdAt": "2024-01-01T00:00:00Z",
"updatedAt": "2024-01-01T00:00:00Z",
"file": {
"id": "string",
"name": "document.pdf",
"size": 1024,
"mimeType": "application/pdf",
"hash": "sha256-hash",
"uploadedAt": "2024-01-01T00:00:00Z",
"expiredAt": null,
"hasCompression": false,
"hasThumbnail": true,
"isEncrypted": false,
"description": null
}
}
],
"totalCount": 1
}
```
### Get All Files
**GET** `/api/index/all`
Get all files for the current user across all paths.
**Response:**
```json
{
"files": [
// Same structure as browse endpoint
],
"totalCount": 10
}
```
### Move File
**POST** `/api/index/move/{indexId}`
Move a file to a new path.
**Path Parameters:**
- `indexId` - The file index ID
**Request Body:**
```json
{
"newPath": "/archived/"
}
```
**Response:**
```json
{
"fileId": "guid",
"indexId": "guid",
"oldPath": "/documents/",
"newPath": "/archived/",
"message": "File moved successfully"
}
```
### Remove File Index
**DELETE** `/api/index/remove/{indexId}?deleteFile=false`
Remove a file index. Optionally delete the actual file data.
**Path Parameters:**
- `indexId` - The file index ID
**Query Parameters:**
- `deleteFile` (optional, default: false) - Whether to also delete the file data
**Response:**
```json
{
"message": "File index removed successfully",
"fileId": "guid",
"fileName": "document.pdf",
"path": "/documents/",
"fileDataDeleted": false
}
```
### Clear Path
**DELETE** `/api/index/clear-path?path=/temp/&deleteFiles=false`
Remove all file indexes in a specific path.
**Query Parameters:**
- `path` (optional, default: "/") - The path to clear
- `deleteFiles` (optional, default: false) - Whether to also delete orphaned files
**Response:**
```json
{
"message": "Cleared 5 file indexes from path",
"path": "/temp/",
"removedCount": 5,
"filesDeleted": false
}
```
### Create File Index
**POST** `/api/index/create`
Create a new file index for an existing file.
**Request Body:**
```json
{
"fileId": "guid",
"path": "/documents/"
}
```
**Response:**
```json
{
"indexId": "guid",
"fileId": "guid",
"path": "/documents/",
"message": "File index created successfully"
}
```
### Search Files
**GET** `/api/index/search?query=report&path=/documents/`
Search for files by name or metadata.
**Query Parameters:**
- `query` (required) - The search query
- `path` (optional) - Limit search to specific path
**Response:**
```json
{
"query": "report",
"path": "/documents/",
"results": [
// Same structure as browse endpoint
],
"totalCount": 3
}
```
## Path Normalization
The system automatically normalizes paths to ensure consistency:
- **Trailing Slash**: All paths end with `/`
- **Root Path**: User home folder is represented as `/`
- **Query Safety**: Problematic characters (`%`, `'`, `"`) are stripped so stored paths stay safe to use in queries
- **Examples**:
- `/documents/` ✅ (correct)
- `/documents` → `/documents/` ✅ (normalized)
- `/documents/reports/` ✅ (correct)
- `/documents/reports` → `/documents/reports/` ✅ (normalized)
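The examples above map directly onto the static `NormalizePath` helper shown earlier in this diff; a minimal sketch of its behaviour (illustrative only):
```csharp
// Relies on the FileIndexService.NormalizePath helper from the service code above.
Console.WriteLine(FileIndexService.NormalizePath(""));                   // "/"
Console.WriteLine(FileIndexService.NormalizePath("documents"));          // "/documents/"
Console.WriteLine(FileIndexService.NormalizePath("/documents/reports")); // "/documents/reports/"
Console.WriteLine(FileIndexService.NormalizePath("/tmp/it's%20here"));   // "/tmp/its20here/" (unsafe characters stripped)
```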
## File Upload Integration
When uploading files with the `FileUploadController`, you can specify a path to automatically create file indexes:
**Create Upload Task Request:**
```json
{
"fileName": "document.pdf",
"fileSize": 1024,
"contentType": "application/pdf",
"hash": "sha256-hash",
"path": "/documents/" // New field for file indexing
}
```
The system will automatically create a file index when the upload completes successfully.
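The exact hook inside `FileUploadController` is not reproduced in this document, but the flow reduces to a call like the following sketch. The method and parameter names here are hypothetical; only `FileIndexService.CreateAsync` comes from the service shown above:
```csharp
// Hypothetical completion handler; assumes the stored file id, the uploader's account id,
// and the optional "path" field from the create-upload-task request are available here.
static async Task CreateIndexForCompletedUploadAsync(
    FileIndexService indexService, string fileId, Guid accountId, string? requestedPath)
{
    if (string.IsNullOrWhiteSpace(requestedPath))
        return; // no path supplied, so no index is created

    // CreateAsync normalizes the path and throws if the same file is already indexed there.
    await indexService.CreateAsync(requestedPath, fileId, accountId);
}
```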
## Service Methods
### FileIndexService
```csharp
public class FileIndexService
{
// Create a new file index
Task<SnCloudFileIndex> CreateAsync(string path, string fileId, Guid accountId);
// Get files by path
Task<List<SnCloudFileIndex>> GetByPathAsync(Guid accountId, string path);
// Get all files for account
Task<List<SnCloudFileIndex>> GetByAccountIdAsync(Guid accountId);
// Get indexes for specific file
Task<List<SnCloudFileIndex>> GetByFileIdAsync(string fileId);
// Move file to new path
Task<SnCloudFileIndex?> UpdateAsync(Guid indexId, string newPath);
// Remove file index
Task<bool> RemoveAsync(Guid indexId);
// Remove all indexes in path
Task<int> RemoveByPathAsync(Guid accountId, string path);
// Normalize path format
public static string NormalizePath(string path);
}
```
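A short usage sketch from a hypothetical caller (dependency-injection wiring omitted; in the real controllers `FileIndexService` is resolved from DI):
```csharp
// Hypothetical caller listing a user's files under a fixed folder.
static async Task<List<SnCloudFileIndex>> ListReportsAsync(
    FileIndexService indexService, Guid accountId)
{
    // Paths are normalized internally, so "/documents/reports" and "/documents/reports/" are equivalent.
    return await indexService.GetByPathAsync(accountId, "/documents/reports/");
}
```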
## Error Handling
The API returns appropriate HTTP status codes and error messages:
- **400 Bad Request**: Invalid input parameters
- **401 Unauthorized**: User not authenticated
- **403 Forbidden**: User lacks permission
- **404 Not Found**: Resource not found
- **500 Internal Server Error**: Server-side error
**Error Response Format:**
```json
{
"code": "BROWSE_FAILED",
"message": "Failed to browse files",
"status": 500
}
```
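On the client side this envelope can be deserialized into a small convenience type; a sketch (the type name is hypothetical, the fields follow the JSON above):
```csharp
// Client-side view of the error envelope returned by the Drive endpoints.
public sealed record DriveErrorResponse(string Code, string Message, int Status);
```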
## Security Considerations
1. **Ownership Verification**: All operations verify that the user owns the file indexes
2. **Path Validation**: Paths are normalized and validated
3. **Cascade Deletion**: File indexes are automatically removed when files are deleted
4. **Safe File Deletion**: Files are only deleted when no other indexes reference them
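Point 4 boils down to a reference check before any file data is purged; a minimal sketch, assuming the caller has already removed its own index (the actual purge lives in the storage layer, not in `FileIndexService`):
```csharp
// Sketch: file data may be deleted only when no index still references the file.
static async Task<bool> CanDeleteFileDataAsync(FileIndexService indexService, string fileId)
{
    var remaining = await indexService.GetByFileIdAsync(fileId);
    return remaining.Count == 0; // any surviving index keeps the underlying data alive
}
```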
## Usage Examples
### Upload File to Specific Path
```bash
# Create upload task with path
curl -X POST /api/files/upload/create \
-H "Authorization: Bearer {token}" \
-H "Content-Type: application/json" \
-d '{
"fileName": "report.pdf",
"fileSize": 2048,
"contentType": "application/pdf",
"path": "/documents/reports/"
}'
```
### Browse Files
```bash
curl -X GET "/api/index/browse?path=/documents/reports/" \
-H "Authorization: Bearer {token}"
```
### Move File
```bash
curl -X POST "/api/index/move/{indexId}" \
-H "Authorization: Bearer {token}" \
-H "Content-Type: application/json" \
-d '{"newPath": "/archived/"}'
```
### Search Files
```bash
curl -X GET "/api/index/search?query=invoice&path=/documents/" \
-H "Authorization: Bearer {token}"
```
## Best Practices
1. **Use Trailing Slashes**: Always include trailing slashes in paths
2. **Organize Hierarchically**: Use meaningful folder structures
3. **Search Efficiently**: Use the search endpoint instead of client-side filtering
4. **Clean Up**: Use the clear-path endpoint for temporary directories
5. **Monitor Usage**: Check total file counts for quota management
## Integration Notes
- The file indexing system works alongside the existing file storage
- Files can exist in multiple paths (hard links)
- File deletion is optional and only removes data when safe
- The system maintains referential integrity between files and indexes

View File

@@ -1,567 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Drive;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Drive.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251108191230_AddPersistentTask")]
partial class AddPersistentTask
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.10")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Drive.Billing.QuotaRecord", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text")
.HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text")
.HasColumnName("name");
b.Property<long>("Quota")
.HasColumnType("bigint")
.HasColumnName("quota");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_quota_records");
b.ToTable("quota_records", (string)null);
});
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant?>("CompletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("completed_at");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("description");
b.Property<string>("Discriminator")
.IsRequired()
.HasMaxLength(21)
.HasColumnType("character varying(21)")
.HasColumnName("discriminator");
b.Property<string>("ErrorMessage")
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("error_message");
b.Property<long?>("EstimatedDurationSeconds")
.HasColumnType("bigint")
.HasColumnName("estimated_duration_seconds");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<Instant>("LastActivity")
.HasColumnType("timestamp with time zone")
.HasColumnName("last_activity");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("name");
b.Property<Dictionary<string, object>>("Parameters")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parameters");
b.Property<int>("Priority")
.HasColumnType("integer")
.HasColumnName("priority");
b.Property<double>("Progress")
.HasColumnType("double precision")
.HasColumnName("progress");
b.Property<Dictionary<string, object>>("Results")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("results");
b.Property<Instant?>("StartedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("started_at");
b.Property<int>("Status")
.HasColumnType("integer")
.HasColumnName("status");
b.Property<string>("TaskId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)")
.HasColumnName("task_id");
b.Property<int>("Type")
.HasColumnType("integer")
.HasColumnName("type");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_tasks");
b.ToTable("tasks", (string)null);
b.HasDiscriminator().HasValue("PersistentTask");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("FileId")
.IsRequired()
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("file_id");
b.Property<string>("ResourceId")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("resource_id");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.Property<string>("Usage")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("usage");
b.HasKey("Id")
.HasName("pk_file_references");
b.HasIndex("FileId")
.HasDatabaseName("ix_file_references_file_id");
b.ToTable("file_references", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid?>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<BillingConfig>("BillingConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("billing_config");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.IsRequired()
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<bool>("IsHidden")
.HasColumnType("boolean")
.HasColumnName("is_hidden");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<PolicyConfig>("PolicyConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("policy_config");
b.Property<RemoteStorageConfig>("StorageConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("storage_config");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_pools");
b.ToTable("pools", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.Property<string>("Id")
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Guid?>("BundleId")
.HasColumnType("uuid")
.HasColumnName("bundle_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<Dictionary<string, object>>("FileMeta")
.HasColumnType("jsonb")
.HasColumnName("file_meta");
b.Property<bool>("HasCompression")
.HasColumnType("boolean")
.HasColumnName("has_compression");
b.Property<bool>("HasThumbnail")
.HasColumnType("boolean")
.HasColumnName("has_thumbnail");
b.Property<string>("Hash")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("hash");
b.Property<bool>("IsEncrypted")
.HasColumnType("boolean")
.HasColumnName("is_encrypted");
b.Property<bool>("IsMarkedRecycle")
.HasColumnType("boolean")
.HasColumnName("is_marked_recycle");
b.Property<string>("MimeType")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("mime_type");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<Guid?>("PoolId")
.HasColumnType("uuid")
.HasColumnName("pool_id");
b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
.HasColumnType("jsonb")
.HasColumnName("sensitive_marks");
b.Property<long>("Size")
.HasColumnType("bigint")
.HasColumnName("size");
b.Property<string>("StorageId")
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("storage_id");
b.Property<string>("StorageUrl")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("storage_url");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.Property<Instant?>("UploadedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("uploaded_at");
b.Property<Dictionary<string, object>>("UserMeta")
.HasColumnType("jsonb")
.HasColumnName("user_meta");
b.HasKey("Id")
.HasName("pk_files");
b.HasIndex("BundleId")
.HasDatabaseName("ix_files_bundle_id");
b.HasIndex("PoolId")
.HasDatabaseName("ix_files_pool_id");
b.ToTable("files", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<string>("Passcode")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("passcode");
b.Property<string>("Slug")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("slug");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_bundles");
b.HasIndex("Slug")
.IsUnique()
.HasDatabaseName("ix_bundles_slug");
b.ToTable("bundles", (string)null);
});
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
{
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
b.Property<Guid?>("BundleId")
.HasColumnType("uuid")
.HasColumnName("bundle_id");
b.Property<long>("ChunkSize")
.HasColumnType("bigint")
.HasColumnName("chunk_size");
b.Property<int>("ChunksCount")
.HasColumnType("integer")
.HasColumnName("chunks_count");
b.Property<int>("ChunksUploaded")
.HasColumnType("integer")
.HasColumnName("chunks_uploaded");
b.Property<string>("ContentType")
.IsRequired()
.HasMaxLength(128)
.HasColumnType("character varying(128)")
.HasColumnName("content_type");
b.Property<string>("EncryptPassword")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("encrypt_password");
b.Property<string>("FileName")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("file_name");
b.Property<long>("FileSize")
.HasColumnType("bigint")
.HasColumnName("file_size");
b.Property<string>("Hash")
.IsRequired()
.HasColumnType("text")
.HasColumnName("hash");
b.Property<Guid>("PoolId")
.HasColumnType("uuid")
.HasColumnName("pool_id");
b.PrimitiveCollection<List<int>>("UploadedChunks")
.IsRequired()
.HasColumnType("integer[]")
.HasColumnName("uploaded_chunks");
b.HasDiscriminator().HasValue("PersistentUploadTask");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
.WithMany("References")
.HasForeignKey("FileId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_file_references_files_file_id");
b.Navigation("File");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
.WithMany("Files")
.HasForeignKey("BundleId")
.HasConstraintName("fk_files_bundles_bundle_id");
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
.WithMany()
.HasForeignKey("PoolId")
.HasConstraintName("fk_files_pools_pool_id");
b.Navigation("Bundle");
b.Navigation("Pool");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.Navigation("References");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
{
b.Navigation("Files");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,66 +0,0 @@
using System;
using System.Collections.Generic;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
#nullable disable
namespace DysonNetwork.Drive.Migrations
{
/// <inheritdoc />
public partial class AddPersistentTask : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "tasks",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
task_id = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
name = table.Column<string>(type: "character varying(256)", maxLength: 256, nullable: false),
description = table.Column<string>(type: "character varying(1024)", maxLength: 1024, nullable: true),
type = table.Column<int>(type: "integer", nullable: false),
status = table.Column<int>(type: "integer", nullable: false),
account_id = table.Column<Guid>(type: "uuid", nullable: false),
progress = table.Column<double>(type: "double precision", nullable: false),
parameters = table.Column<Dictionary<string, object>>(type: "jsonb", nullable: false),
results = table.Column<Dictionary<string, object>>(type: "jsonb", nullable: false),
error_message = table.Column<string>(type: "character varying(1024)", maxLength: 1024, nullable: true),
started_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
completed_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
expired_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
last_activity = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
priority = table.Column<int>(type: "integer", nullable: false),
estimated_duration_seconds = table.Column<long>(type: "bigint", nullable: true),
discriminator = table.Column<string>(type: "character varying(21)", maxLength: 21, nullable: false),
file_name = table.Column<string>(type: "character varying(256)", maxLength: 256, nullable: true),
file_size = table.Column<long>(type: "bigint", nullable: true),
content_type = table.Column<string>(type: "character varying(128)", maxLength: 128, nullable: true),
chunk_size = table.Column<long>(type: "bigint", nullable: true),
chunks_count = table.Column<int>(type: "integer", nullable: true),
chunks_uploaded = table.Column<int>(type: "integer", nullable: true),
pool_id = table.Column<Guid>(type: "uuid", nullable: true),
bundle_id = table.Column<Guid>(type: "uuid", nullable: true),
encrypt_password = table.Column<string>(type: "character varying(256)", maxLength: 256, nullable: true),
hash = table.Column<string>(type: "text", nullable: true),
uploaded_chunks = table.Column<List<int>>(type: "integer[]", nullable: true),
created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
updated_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
deleted_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("pk_tasks", x => x.id);
});
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "tasks");
}
}
}

View File

@@ -1,632 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Drive;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Drive.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251112135535_AddFileIndex")]
partial class AddFileIndex
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.10")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Drive.Billing.QuotaRecord", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text")
.HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text")
.HasColumnName("name");
b.Property<long>("Quota")
.HasColumnType("bigint")
.HasColumnName("quota");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_quota_records");
b.ToTable("quota_records", (string)null);
});
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant?>("CompletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("completed_at");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("description");
b.Property<string>("Discriminator")
.IsRequired()
.HasMaxLength(21)
.HasColumnType("character varying(21)")
.HasColumnName("discriminator");
b.Property<string>("ErrorMessage")
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("error_message");
b.Property<long?>("EstimatedDurationSeconds")
.HasColumnType("bigint")
.HasColumnName("estimated_duration_seconds");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<Instant>("LastActivity")
.HasColumnType("timestamp with time zone")
.HasColumnName("last_activity");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("name");
b.Property<Dictionary<string, object>>("Parameters")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parameters");
b.Property<int>("Priority")
.HasColumnType("integer")
.HasColumnName("priority");
b.Property<double>("Progress")
.HasColumnType("double precision")
.HasColumnName("progress");
b.Property<Dictionary<string, object>>("Results")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("results");
b.Property<Instant?>("StartedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("started_at");
b.Property<int>("Status")
.HasColumnType("integer")
.HasColumnName("status");
b.Property<string>("TaskId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)")
.HasColumnName("task_id");
b.Property<int>("Type")
.HasColumnType("integer")
.HasColumnName("type");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_tasks");
b.ToTable("tasks", (string)null);
b.HasDiscriminator().HasValue("PersistentTask");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("FileId")
.IsRequired()
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("file_id");
b.Property<string>("ResourceId")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("resource_id");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.Property<string>("Usage")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("usage");
b.HasKey("Id")
.HasName("pk_file_references");
b.HasIndex("FileId")
.HasDatabaseName("ix_file_references_file_id");
b.ToTable("file_references", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid?>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<BillingConfig>("BillingConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("billing_config");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.IsRequired()
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<bool>("IsHidden")
.HasColumnType("boolean")
.HasColumnName("is_hidden");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<PolicyConfig>("PolicyConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("policy_config");
b.Property<RemoteStorageConfig>("StorageConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("storage_config");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_pools");
b.ToTable("pools", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.Property<string>("Id")
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Guid?>("BundleId")
.HasColumnType("uuid")
.HasColumnName("bundle_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<Dictionary<string, object>>("FileMeta")
.HasColumnType("jsonb")
.HasColumnName("file_meta");
b.Property<bool>("HasCompression")
.HasColumnType("boolean")
.HasColumnName("has_compression");
b.Property<bool>("HasThumbnail")
.HasColumnType("boolean")
.HasColumnName("has_thumbnail");
b.Property<string>("Hash")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("hash");
b.Property<bool>("IsEncrypted")
.HasColumnType("boolean")
.HasColumnName("is_encrypted");
b.Property<bool>("IsMarkedRecycle")
.HasColumnType("boolean")
.HasColumnName("is_marked_recycle");
b.Property<string>("MimeType")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("mime_type");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<Guid?>("PoolId")
.HasColumnType("uuid")
.HasColumnName("pool_id");
b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
.HasColumnType("jsonb")
.HasColumnName("sensitive_marks");
b.Property<long>("Size")
.HasColumnType("bigint")
.HasColumnName("size");
b.Property<string>("StorageId")
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("storage_id");
b.Property<string>("StorageUrl")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("storage_url");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.Property<Instant?>("UploadedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("uploaded_at");
b.Property<Dictionary<string, object>>("UserMeta")
.HasColumnType("jsonb")
.HasColumnName("user_meta");
b.HasKey("Id")
.HasName("pk_files");
b.HasIndex("BundleId")
.HasDatabaseName("ix_files_bundle_id");
b.HasIndex("PoolId")
.HasDatabaseName("ix_files_pool_id");
b.ToTable("files", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("FileId")
.IsRequired()
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("file_id");
b.Property<string>("Path")
.IsRequired()
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("path");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_file_indexes");
b.HasIndex("FileId")
.HasDatabaseName("ix_file_indexes_file_id");
b.HasIndex("Path", "AccountId")
.HasDatabaseName("ix_file_indexes_path_account_id");
b.ToTable("file_indexes", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<string>("Passcode")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("passcode");
b.Property<string>("Slug")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("slug");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_bundles");
b.HasIndex("Slug")
.IsUnique()
.HasDatabaseName("ix_bundles_slug");
b.ToTable("bundles", (string)null);
});
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
{
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
b.Property<Guid?>("BundleId")
.HasColumnType("uuid")
.HasColumnName("bundle_id");
b.Property<long>("ChunkSize")
.HasColumnType("bigint")
.HasColumnName("chunk_size");
b.Property<int>("ChunksCount")
.HasColumnType("integer")
.HasColumnName("chunks_count");
b.Property<int>("ChunksUploaded")
.HasColumnType("integer")
.HasColumnName("chunks_uploaded");
b.Property<string>("ContentType")
.IsRequired()
.HasMaxLength(128)
.HasColumnType("character varying(128)")
.HasColumnName("content_type");
b.Property<string>("EncryptPassword")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("encrypt_password");
b.Property<string>("FileName")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("file_name");
b.Property<long>("FileSize")
.HasColumnType("bigint")
.HasColumnName("file_size");
b.Property<string>("Hash")
.IsRequired()
.HasColumnType("text")
.HasColumnName("hash");
b.Property<string>("Path")
.HasColumnType("text")
.HasColumnName("path");
b.Property<Guid>("PoolId")
.HasColumnType("uuid")
.HasColumnName("pool_id");
b.PrimitiveCollection<List<int>>("UploadedChunks")
.IsRequired()
.HasColumnType("integer[]")
.HasColumnName("uploaded_chunks");
b.HasDiscriminator().HasValue("PersistentUploadTask");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
.WithMany("References")
.HasForeignKey("FileId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_file_references_files_file_id");
b.Navigation("File");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
.WithMany("Files")
.HasForeignKey("BundleId")
.HasConstraintName("fk_files_bundles_bundle_id");
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
.WithMany()
.HasForeignKey("PoolId")
.HasConstraintName("fk_files_pools_pool_id");
b.Navigation("Bundle");
b.Navigation("Pool");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
.WithMany("FileIndexes")
.HasForeignKey("FileId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_file_indexes_files_file_id");
b.Navigation("File");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.Navigation("FileIndexes");
b.Navigation("References");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
{
b.Navigation("Files");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,66 +0,0 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
#nullable disable
namespace DysonNetwork.Drive.Migrations
{
/// <inheritdoc />
public partial class AddFileIndex : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "path",
table: "tasks",
type: "text",
nullable: true);
migrationBuilder.CreateTable(
name: "file_indexes",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
path = table.Column<string>(type: "character varying(8192)", maxLength: 8192, nullable: false),
file_id = table.Column<string>(type: "character varying(32)", maxLength: 32, nullable: false),
account_id = table.Column<Guid>(type: "uuid", nullable: false),
created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
updated_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
deleted_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("pk_file_indexes", x => x.id);
table.ForeignKey(
name: "fk_file_indexes_files_file_id",
column: x => x.file_id,
principalTable: "files",
principalColumn: "id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "ix_file_indexes_file_id",
table: "file_indexes",
column: "file_id");
migrationBuilder.CreateIndex(
name: "ix_file_indexes_path_account_id",
table: "file_indexes",
columns: new[] { "path", "account_id" });
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "file_indexes");
migrationBuilder.DropColumn(
name: "path",
table: "tasks");
}
}
}

View File

@@ -20,7 +20,7 @@ namespace DysonNetwork.Drive.Migrations
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.10")
.HasAnnotation("ProductVersion", "9.0.7")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
@@ -72,224 +72,7 @@ namespace DysonNetwork.Drive.Migrations
b.ToTable("quota_records", (string)null);
});
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant?>("CompletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("completed_at");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("description");
b.Property<string>("Discriminator")
.IsRequired()
.HasMaxLength(21)
.HasColumnType("character varying(21)")
.HasColumnName("discriminator");
b.Property<string>("ErrorMessage")
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("error_message");
b.Property<long?>("EstimatedDurationSeconds")
.HasColumnType("bigint")
.HasColumnName("estimated_duration_seconds");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<Instant>("LastActivity")
.HasColumnType("timestamp with time zone")
.HasColumnName("last_activity");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("name");
b.Property<Dictionary<string, object>>("Parameters")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parameters");
b.Property<int>("Priority")
.HasColumnType("integer")
.HasColumnName("priority");
b.Property<double>("Progress")
.HasColumnType("double precision")
.HasColumnName("progress");
b.Property<Dictionary<string, object>>("Results")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("results");
b.Property<Instant?>("StartedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("started_at");
b.Property<int>("Status")
.HasColumnType("integer")
.HasColumnName("status");
b.Property<string>("TaskId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)")
.HasColumnName("task_id");
b.Property<int>("Type")
.HasColumnType("integer")
.HasColumnName("type");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_tasks");
b.ToTable("tasks", (string)null);
b.HasDiscriminator().HasValue("PersistentTask");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("FileId")
.IsRequired()
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("file_id");
b.Property<string>("ResourceId")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("resource_id");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.Property<string>("Usage")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("usage");
b.HasKey("Id")
.HasName("pk_file_references");
b.HasIndex("FileId")
.HasDatabaseName("ix_file_references_file_id");
b.ToTable("file_references", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid?>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<BillingConfig>("BillingConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("billing_config");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.IsRequired()
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<bool>("IsHidden")
.HasColumnType("boolean")
.HasColumnName("is_hidden");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<PolicyConfig>("PolicyConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("policy_config");
b.Property<RemoteStorageConfig>("StorageConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("storage_config");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_pools");
b.ToTable("pools", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
{
b.Property<string>("Id")
.HasMaxLength(32)
@@ -403,17 +186,13 @@ namespace DysonNetwork.Drive.Migrations
b.ToTable("files", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
@@ -422,35 +201,42 @@ namespace DysonNetwork.Drive.Migrations
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<Instant?>("ExpiredAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("expired_at");
b.Property<string>("FileId")
.IsRequired()
.HasMaxLength(32)
.HasColumnType("character varying(32)")
.HasColumnName("file_id");
b.Property<string>("Path")
b.Property<string>("ResourceId")
.IsRequired()
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("path");
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("resource_id");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.Property<string>("Usage")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("usage");
b.HasKey("Id")
.HasName("pk_file_indexes");
.HasName("pk_file_references");
b.HasIndex("FileId")
.HasDatabaseName("ix_file_indexes_file_id");
.HasDatabaseName("ix_file_references_file_id");
b.HasIndex("Path", "AccountId")
.HasDatabaseName("ix_file_indexes_path_account_id");
b.ToTable("file_indexes", (string)null);
b.ToTable("file_references", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.FileBundle", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
@@ -509,88 +295,74 @@ namespace DysonNetwork.Drive.Migrations
b.ToTable("bundles", (string)null);
});
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.FilePool", b =>
{
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
b.Property<Guid?>("BundleId")
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("bundle_id");
.HasColumnName("id");
b.Property<long>("ChunkSize")
.HasColumnType("bigint")
.HasColumnName("chunk_size");
b.Property<int>("ChunksCount")
.HasColumnType("integer")
.HasColumnName("chunks_count");
b.Property<int>("ChunksUploaded")
.HasColumnType("integer")
.HasColumnName("chunks_uploaded");
b.Property<string>("ContentType")
.IsRequired()
.HasMaxLength(128)
.HasColumnType("character varying(128)")
.HasColumnName("content_type");
b.Property<string>("EncryptPassword")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("encrypt_password");
b.Property<string>("FileName")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("file_name");
b.Property<long>("FileSize")
.HasColumnType("bigint")
.HasColumnName("file_size");
b.Property<string>("Hash")
.IsRequired()
.HasColumnType("text")
.HasColumnName("hash");
b.Property<string>("Path")
.HasColumnType("text")
.HasColumnName("path");
b.Property<Guid>("PoolId")
b.Property<Guid?>("AccountId")
.HasColumnType("uuid")
.HasColumnName("pool_id");
.HasColumnName("account_id");
b.PrimitiveCollection<List<int>>("UploadedChunks")
b.Property<BillingConfig>("BillingConfig")
.IsRequired()
.HasColumnType("integer[]")
.HasColumnName("uploaded_chunks");
.HasColumnType("jsonb")
.HasColumnName("billing_config");
b.HasDiscriminator().HasValue("PersistentUploadTask");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Description")
.IsRequired()
.HasMaxLength(8192)
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<bool>("IsHidden")
.HasColumnType("boolean")
.HasColumnName("is_hidden");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)")
.HasColumnName("name");
b.Property<PolicyConfig>("PolicyConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("policy_config");
b.Property<RemoteStorageConfig>("StorageConfig")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("storage_config");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_pools");
b.ToTable("pools", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
.WithMany("References")
.HasForeignKey("FileId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_file_references_files_file_id");
b.Navigation("File");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
b.HasOne("DysonNetwork.Drive.Storage.FileBundle", "Bundle")
.WithMany("Files")
.HasForeignKey("BundleId")
.HasConstraintName("fk_files_bundles_bundle_id");
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
b.HasOne("DysonNetwork.Drive.Storage.FilePool", "Pool")
.WithMany()
.HasForeignKey("PoolId")
.HasConstraintName("fk_files_pools_pool_id");
@@ -600,26 +372,24 @@ namespace DysonNetwork.Drive.Migrations
b.Navigation("Pool");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
.WithMany("FileIndexes")
b.HasOne("DysonNetwork.Drive.Storage.CloudFile", "File")
.WithMany("References")
.HasForeignKey("FileId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_file_indexes_files_file_id");
.HasConstraintName("fk_file_references_files_file_id");
b.Navigation("File");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
{
b.Navigation("FileIndexes");
b.Navigation("References");
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
modelBuilder.Entity("DysonNetwork.Drive.Storage.FileBundle", b =>
{
b.Navigation("Files");
});

View File

@@ -4,6 +4,7 @@ using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Http;
using DysonNetwork.Shared.Registry;
using Microsoft.EntityFrameworkCore;
using tusdotnet.Stores;
var builder = WebApplication.CreateBuilder(args);
@@ -15,11 +16,13 @@ builder.ConfigureAppKestrel(builder.Configuration, maxRequestBodySize: long.MaxV
// Add application services
builder.Services.AddAppServices(builder.Configuration);
builder.Services.AddAppRateLimiting();
builder.Services.AddAppAuthentication();
builder.Services.AddDysonAuth();
builder.Services.AddRingService();
builder.Services.AddAccountService();
builder.Services.AddAppFileStorage(builder.Configuration);
builder.Services.AddAppFlushHandlers();
builder.Services.AddAppBusinessServices();
builder.Services.AddAppScheduledJobs();
@@ -40,11 +43,12 @@ using (var scope = app.Services.CreateScope())
await db.Database.MigrateAsync();
}
app.ConfigureAppMiddleware();
var tusDiskStore = app.Services.GetRequiredService<TusDiskStore>();
app.ConfigureAppMiddleware(tusDiskStore);
// Configure gRPC
app.ConfigureGrpcServices();
app.UseSwaggerManifest("DysonNetwork.Drive");
app.UseSwaggerManifest();
app.Run();

View File

@@ -1,14 +1,18 @@
using DysonNetwork.Drive.Storage;
using tusdotnet;
using tusdotnet.Interfaces;
namespace DysonNetwork.Drive.Startup;
public static class ApplicationBuilderExtensions
{
public static WebApplication ConfigureAppMiddleware(this WebApplication app)
public static WebApplication ConfigureAppMiddleware(this WebApplication app, ITusStore tusStore)
{
app.UseAuthorization();
app.MapControllers();
app.MapTus("/api/tus", _ => Task.FromResult(TusService.BuildConfiguration(tusStore, app.Configuration)));
return app;
}
@@ -17,7 +21,6 @@ public static class ApplicationBuilderExtensions
// Map your gRPC services here
app.MapGrpcService<FileServiceGrpc>();
app.MapGrpcService<FileReferenceServiceGrpc>();
app.MapGrpcReflectionService();
return app;
}
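For context on the tus wiring added above: MapTus expects a DefaultTusConfiguration per request. The concrete TusService.BuildConfiguration is not part of this excerpt, so the following is only a minimal sketch, assuming a disk store and a completion hook, of what such a factory can look like with tusdotnet.

// Sketch only; the real TusService.BuildConfiguration is not shown in this diff.
using Microsoft.Extensions.Configuration;
using tusdotnet.Interfaces;
using tusdotnet.Models;
using tusdotnet.Models.Configuration;

public static class TusConfigurationSketch
{
    public static DefaultTusConfiguration Build(ITusStore store, IConfiguration configuration)
    {
        return new DefaultTusConfiguration
        {
            Store = store,                // the TusDiskStore registered by AddAppFileStorage
            UrlPath = "/api/tus",         // must match the pattern given to app.MapTus
            // "Tus:MaxUploadSize" is a hypothetical configuration key used for illustration.
            MaxAllowedUploadSizeInBytesLong = configuration.GetValue<long?>("Tus:MaxUploadSize"),
            Events = new Events
            {
                // Invoked once the final byte of an upload has been stored.
                OnFileCompleteAsync = async ctx =>
                {
                    ITusFile file = await ctx.GetFileAsync();
                    var metadata = await file.GetMetadataAsync(ctx.CancellationToken);
                    // Hand the finished upload off to the application's file pipeline here.
                }
            }
        };
    }
}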

View File

@@ -1,7 +1,5 @@
using System.Text.Json;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Drive.Storage.Model;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Stream;
using FFMpegCore;
@@ -31,15 +29,16 @@ public class BroadcastEventHandler(
[".gif", ".apng", ".avif"];
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
var js = nats.CreateJetStreamContext();
await js.EnsureStreamCreated("account_events", [AccountDeletedEvent.Type]);
var accountEventConsumer = await js.CreateOrUpdateConsumerAsync("account_events",
new ConsumerConfig("drive_account_deleted_handler"), cancellationToken: stoppingToken);
await js.EnsureStreamCreated("file_events", [FileUploadedEvent.Type]);
var fileUploadedConsumer = await js.CreateOrUpdateConsumerAsync("file_events",
new ConsumerConfig("drive_file_uploaded_handler") { MaxDeliver = 3 }, cancellationToken: stoppingToken);
@@ -54,14 +53,13 @@ public class BroadcastEventHandler(
{
await foreach (var msg in consumer.ConsumeAsync<byte[]>(cancellationToken: stoppingToken))
{
var payload =
JsonSerializer.Deserialize<FileUploadedEventPayload>(msg.Data, GrpcTypeHelper.SerializerOptions);
var payload = JsonSerializer.Deserialize<FileUploadedEventPayload>(msg.Data, GrpcTypeHelper.SerializerOptions);
if (payload == null)
{
await msg.AckAsync(cancellationToken: stoppingToken);
continue;
}
try
{
await ProcessAndUploadInBackgroundAsync(
@@ -131,8 +129,8 @@ public class BroadcastEventHandler(
}
}
}
private async Task ProcessAndUploadInBackgroundAsync(
private async Task ProcessAndUploadInBackgroundAsync(
string fileId,
Guid remoteId,
string storageId,
@@ -144,7 +142,6 @@ public class BroadcastEventHandler(
using var scope = serviceProvider.CreateScope();
var fs = scope.ServiceProvider.GetRequiredService<FileService>();
var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
var persistentTaskService = scope.ServiceProvider.GetRequiredService<PersistentTaskService>();
var pool = await fs.GetPoolAsync(remoteId);
if (pool is null) return;
@@ -158,11 +155,6 @@ public class BroadcastEventHandler(
var fileToUpdate = await scopedDb.Files.AsNoTracking().FirstAsync(f => f.Id == fileId);
// Find the upload task associated with this file
var uploadTask = await scopedDb.Tasks
.OfType<PersistentUploadTask>()
.FirstOrDefaultAsync(t => t.FileName == fileToUpdate.Name && t.FileSize == fileToUpdate.Size);
if (fileToUpdate.IsEncrypted)
{
uploads.Add((processingFilePath, string.Empty, contentType, false));
@@ -301,51 +293,5 @@ public class BroadcastEventHandler(
}
await fs._PurgeCacheAsync(fileId);
// Complete the upload task if found
if (uploadTask != null)
{
await persistentTaskService.MarkTaskCompletedAsync(uploadTask.TaskId, new Dictionary<string, object?>
{
{ "FileId", fileId },
{ "FileName", fileToUpdate.Name },
{ "FileInfo", fileToUpdate },
{ "FileSize", fileToUpdate.Size },
{ "MimeType", newMimeType },
{ "HasCompression", hasCompression },
{ "HasThumbnail", hasThumbnail }
});
// Send push notification for large files (>5MB) that took longer to process
if (fileToUpdate.Size > 5 * 1024 * 1024) // 5MB threshold
await SendLargeFileProcessingCompleteNotificationAsync(uploadTask, fileToUpdate);
}
}
private async Task SendLargeFileProcessingCompleteNotificationAsync(PersistentUploadTask task, SnCloudFile file)
{
try
{
var ringService = serviceProvider.GetRequiredService<RingService.RingServiceClient>();
var pushNotification = new PushNotification
{
Topic = "drive.tasks.upload",
Title = "File Processing Complete",
Subtitle = file.Name,
Body = $"Your file '{file.Name}' has finished processing and is now available.",
IsSavable = true
};
await ringService.SendPushNotificationToUserAsync(new SendPushNotificationToUserRequest
{
UserId = task.AccountId.ToString(),
Notification = pushNotification
});
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to send large file processing notification for task {TaskId}", task.TaskId);
}
}
}
}

View File

@@ -22,13 +22,6 @@ public static class ScheduledJobsConfiguration
.ForJob(cloudFileUnusedRecyclingJob)
.WithIdentity("CloudFileUnusedRecyclingTrigger")
.WithCronSchedule("0 0 0 * * ?"));
var persistentTaskCleanupJob = new JobKey("PersistentTaskCleanup");
q.AddJob<PersistentTaskCleanupJob>(opts => opts.WithIdentity(persistentTaskCleanupJob));
q.AddTrigger(opts => opts
.ForJob(persistentTaskCleanupJob)
.WithIdentity("PersistentTaskCleanupTrigger")
.WithCronSchedule("0 0 2 * * ?")); // Run daily at 2 AM
});
services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);

View File

@@ -1,9 +1,11 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using DysonNetwork.Drive.Index;
using System.Threading.RateLimiting;
using DysonNetwork.Shared.Cache;
using Microsoft.AspNetCore.RateLimiting;
using NodaTime;
using NodaTime.Serialization.SystemTextJson;
using tusdotnet.Stores;
namespace DysonNetwork.Drive.Startup;
@@ -12,7 +14,9 @@ public static class ServiceCollectionExtensions
public static IServiceCollection AddAppServices(this IServiceCollection services, IConfiguration configuration)
{
services.AddDbContext<AppDatabase>(); // Assuming you'll have an AppDatabase
services.AddSingleton<IClock>(SystemClock.Instance);
services.AddHttpContextAccessor();
services.AddSingleton<ICacheService, CacheServiceRedis>(); // Uncomment if you have CacheServiceRedis
services.AddHttpClient();
@@ -23,7 +27,9 @@ public static class ServiceCollectionExtensions
options.MaxReceiveMessageSize = 16 * 1024 * 1024; // 16MB
options.MaxSendMessageSize = 16 * 1024 * 1024; // 16MB
});
services.AddGrpcReflection();
// Register gRPC reflection for service discovery
services.AddGrpc();
services.AddControllers().AddJsonOptions(options =>
{
@@ -37,6 +43,19 @@ public static class ServiceCollectionExtensions
return services;
}
public static IServiceCollection AddAppRateLimiting(this IServiceCollection services)
{
services.AddRateLimiter(o => o.AddFixedWindowLimiter(policyName: "fixed", opts =>
{
opts.Window = TimeSpan.FromMinutes(1);
opts.PermitLimit = 120;
opts.QueueLimit = 2;
opts.QueueProcessingOrder = QueueProcessingOrder.OldestFirst;
}));
return services;
}
public static IServiceCollection AddAppAuthentication(this IServiceCollection services)
{
services.AddAuthorization();
@@ -50,17 +69,26 @@ public static class ServiceCollectionExtensions
return services;
}
public static IServiceCollection AddAppFileStorage(this IServiceCollection services, IConfiguration configuration)
{
var tusStorePath = configuration.GetSection("Tus").GetValue<string>("StorePath")!;
Directory.CreateDirectory(tusStorePath);
var tusDiskStore = new TusDiskStore(tusStorePath);
services.AddSingleton(tusDiskStore);
return services;
}
public static IServiceCollection AddAppBusinessServices(this IServiceCollection services)
{
services.AddScoped<Storage.FileService>();
services.AddScoped<Storage.FileReferenceService>();
services.AddScoped<Storage.PersistentTaskService>();
services.AddScoped<FileIndexService>();
services.AddScoped<Billing.UsageService>();
services.AddScoped<Billing.QuotaService>();
services.AddHostedService<BroadcastEventHandler>();
return services;
}
}
}
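The AddAppRateLimiting helper above only registers a fixed-window policy named "fixed" (120 requests per minute, queue depth 2); the policy has no effect until the middleware is enabled and endpoints opt in. A minimal sketch using the standard ASP.NET Core APIs, not code from this repository:

// Sketch of applying the "fixed" policy; not taken from this repository's Program.cs.
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.RateLimiting;

var builder = WebApplication.CreateBuilder(args);
// builder.Services.AddAppRateLimiting();   // registers the "fixed" fixed-window policy
var app = builder.Build();

app.UseRateLimiter();                        // enable the rate-limiting middleware

app.MapGet("/api/ping", () => "pong")
   .RequireRateLimiting("fixed");            // attach the named policy to an endpoint

// Controllers can opt in per class or per action instead:
// [EnableRateLimiting("fixed")]
// public class FileController : ControllerBase { }

app.Run();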

View File

@@ -6,6 +6,7 @@ namespace DysonNetwork.Drive.Storage;
public class CloudFileUnusedRecyclingJob(
AppDatabase db,
FileReferenceService fileRefService,
ILogger<CloudFileUnusedRecyclingJob> logger,
IConfiguration configuration
)
@@ -14,7 +15,7 @@ public class CloudFileUnusedRecyclingJob(
public async Task Execute(IJobExecutionContext context)
{
logger.LogInformation("Cleaning tus cloud files...");
var storePath = configuration["Storage:Uploads"];
var storePath = configuration["Tus:StorePath"];
if (Directory.Exists(storePath))
{
var oneHourAgo = SystemClock.Instance.GetCurrentInstant() - Duration.FromHours(1);
@@ -39,7 +40,6 @@ public class CloudFileUnusedRecyclingJob(
var processedCount = 0;
var markedCount = 0;
var totalFiles = await db.Files
.Where(f => f.FileIndexes.Count == 0)
.Where(f => f.PoolId.HasValue && recyclablePools.Contains(f.PoolId.Value))
.Where(f => !f.IsMarkedRecycle)
.CountAsync();
@@ -80,15 +80,15 @@ public class CloudFileUnusedRecyclingJob(
processedCount += fileBatch.Count;
lastProcessedId = fileBatch.Last();
// Optimized query: Find files that have no references OR all references are expired
// This replaces the memory-intensive approach of loading all references
var filesToMark = await db.Files
.Where(f => fileBatch.Contains(f.Id))
.Where(f => !db.FileReferences.Any(r => r.FileId == f.Id) || // No references at all
!db.FileReferences.Any(r => r.FileId == f.Id && // OR has references but all are expired
(r.ExpiredAt == null || r.ExpiredAt > now)))
.Select(f => f.Id)
.ToListAsync();
// Get all relevant file references for this batch
var fileReferences = await fileRefService.GetReferencesAsync(fileBatch);
// Filter to find files that have no references or all expired references
var filesToMark = fileBatch.Where(fileId =>
!fileReferences.TryGetValue(fileId, out var references) ||
references.Count == 0 ||
references.All(r => r.ExpiredAt.HasValue && r.ExpiredAt.Value <= now)
).ToList();
if (filesToMark.Count > 0)
{
@@ -120,4 +120,4 @@ public class CloudFileUnusedRecyclingJob(
logger.LogInformation("Completed marking {MarkedCount} files for recycling", markedCount);
}
}
}

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Drive.Billing;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
@@ -13,9 +14,9 @@ namespace DysonNetwork.Drive.Storage;
public class FileController(
AppDatabase db,
FileService fs,
QuotaService qs,
IConfiguration configuration,
IWebHostEnvironment env,
FileReferenceService fileReferenceService
IWebHostEnvironment env
) : ControllerBase
{
[HttpGet("{id}")]
@@ -28,197 +29,137 @@ public class FileController(
[FromQuery] string? passcode = null
)
{
var (fileId, fileExtension) = ParseFileId(id);
var file = await fs.GetFileAsync(fileId);
if (file is null) return NotFound("File not found.");
var accessResult = await ValidateFileAccess(file, passcode);
if (accessResult is not null) return accessResult;
// Handle direct storage URL redirect
if (!string.IsNullOrWhiteSpace(file.StorageUrl))
return Redirect(file.StorageUrl);
// Handle files not yet uploaded to remote storage
if (file.UploadedAt is null)
return await ServeLocalFile(file);
// Handle uploaded files
return await ServeRemoteFile(file, fileExtension, download, original, thumbnail, overrideMimeType);
}
private (string fileId, string? extension) ParseFileId(string id)
{
if (!id.Contains('.')) return (id, null);
var parts = id.Split('.');
return (parts.First(), parts.Last());
}
private async Task<ActionResult?> ValidateFileAccess(SnCloudFile file, string? passcode)
{
if (file.Bundle is not null && !file.Bundle.VerifyPasscode(passcode))
return StatusCode(StatusCodes.Status403Forbidden, "The passcode is incorrect.");
return null;
}
private Task<ActionResult> ServeLocalFile(SnCloudFile file)
{
// Try temp storage first
var tempFilePath = Path.Combine(Path.GetTempPath(), file.Id);
if (System.IO.File.Exists(tempFilePath))
        // Keep the file extension so the client can recognize the data type
string? fileExtension = null;
if (id.Contains('.'))
{
if (file.IsEncrypted)
return Task.FromResult<ActionResult>(StatusCode(StatusCodes.Status403Forbidden,
"Encrypted files cannot be accessed before they are processed and stored."));
return Task.FromResult<ActionResult>(PhysicalFile(tempFilePath, file.MimeType ?? "application/octet-stream",
file.Name, enableRangeProcessing: true));
var splitId = id.Split('.');
id = splitId.First();
fileExtension = splitId.Last();
}
// Fallback for tus uploads
var tusStorePath = configuration.GetValue<string>("Storage:Uploads");
if (string.IsNullOrEmpty(tusStorePath))
return Task.FromResult<ActionResult>(StatusCode(StatusCodes.Status400BadRequest,
"File is being processed. Please try again later."));
var tusFilePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
return System.IO.File.Exists(tusFilePath)
? Task.FromResult<ActionResult>(PhysicalFile(tusFilePath, file.MimeType ?? "application/octet-stream",
file.Name, enableRangeProcessing: true))
: Task.FromResult<ActionResult>(StatusCode(StatusCodes.Status400BadRequest,
"File is being processed. Please try again later."));
}
var file = await fs.GetFileAsync(id);
if (file is null) return NotFound("File not found.");
if (file.Bundle is not null && !file.Bundle.VerifyPasscode(passcode))
return StatusCode(StatusCodes.Status403Forbidden, "The passcode is incorrect.");
if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl);
if (file.UploadedAt is null)
{
// File is not yet uploaded to remote storage. Try to serve from local temp storage.
var tempFilePath = Path.Combine(Path.GetTempPath(), file.Id);
if (System.IO.File.Exists(tempFilePath))
{
if (file.IsEncrypted)
{
return StatusCode(StatusCodes.Status403Forbidden, "Encrypted files cannot be accessed before they are processed and stored.");
}
return PhysicalFile(tempFilePath, file.MimeType ?? "application/octet-stream", file.Name, enableRangeProcessing: true);
}
// Fallback for tus uploads that are not processed yet.
var tusStorePath = configuration.GetValue<string>("Tus:StorePath");
if (!string.IsNullOrEmpty(tusStorePath))
{
var tusFilePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
if (System.IO.File.Exists(tusFilePath))
{
return PhysicalFile(tusFilePath, file.MimeType ?? "application/octet-stream", file.Name, enableRangeProcessing: true);
}
}
return StatusCode(StatusCodes.Status400BadRequest, "File is being processed. Please try again later.");
}
private async Task<ActionResult> ServeRemoteFile(
SnCloudFile file,
string? fileExtension,
bool download,
bool original,
bool thumbnail,
string? overrideMimeType
)
{
if (!file.PoolId.HasValue)
return StatusCode(StatusCodes.Status500InternalServerError,
"File is in an inconsistent state: uploaded but no pool ID.");
return StatusCode(StatusCodes.Status500InternalServerError, "File is in an inconsistent state: uploaded but no pool ID.");
var pool = await fs.GetPoolAsync(file.PoolId.Value);
if (pool is null)
            return StatusCode(StatusCodes.Status410Gone, "The pool of the file no longer exists or is not accessible.");
if (!pool.PolicyConfig.AllowAnonymous && HttpContext.Items["CurrentUser"] is not Account)
return Unauthorized();
var dest = pool.StorageConfig;
var fileName = BuildRemoteFileName(file, original, thumbnail);
// Try proxy redirects first
var proxyResult = TryProxyRedirect(file, dest, fileName);
if (proxyResult is not null) return proxyResult;
if (!pool.PolicyConfig.AllowAnonymous)
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return Unauthorized();
// TODO: Provide ability to add access log
// Handle signed URLs
if (dest.EnableSigned)
return await CreateSignedUrl(file, dest, fileName, fileExtension, download, overrideMimeType);
// Fallback to direct S3 endpoint
var protocol = dest.EnableSsl ? "https" : "http";
return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}");
}
private string BuildRemoteFileName(SnCloudFile file, bool original, bool thumbnail)
{
var fileName = string.IsNullOrWhiteSpace(file.StorageId) ? file.Id : file.StorageId;
if (thumbnail)
switch (thumbnail)
{
if (!file.HasThumbnail) throw new InvalidOperationException("Thumbnail not available");
fileName += ".thumbnail";
case true when file.HasThumbnail:
fileName += ".thumbnail";
break;
case true when !file.HasThumbnail:
return NotFound();
}
else if (!original && file.HasCompression)
{
if (!original && file.HasCompression)
fileName += ".compressed";
}
return fileName;
}
private ActionResult? TryProxyRedirect(SnCloudFile file, RemoteStorageConfig dest, string fileName)
{
if (dest.ImageProxy is not null && (file.MimeType?.StartsWith("image/") ?? false))
{
return Redirect(BuildProxyUrl(dest.ImageProxy, fileName));
var proxyUrl = dest.ImageProxy;
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
var fullUri = new Uri(baseUri, fileName);
return Redirect(fullUri.ToString());
}
return dest.AccessProxy is not null ? Redirect(BuildProxyUrl(dest.AccessProxy, fileName)) : null;
}
private static string BuildProxyUrl(string proxyUrl, string fileName)
{
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
var fullUri = new Uri(baseUri, fileName);
return fullUri.ToString();
}
private async Task<ActionResult> CreateSignedUrl(
SnCloudFile file,
RemoteStorageConfig dest,
string fileName,
string? fileExtension,
bool download,
string? overrideMimeType
)
{
var client = fs.CreateMinioClient(dest);
if (client is null)
return BadRequest("Failed to configure client for remote destination, file got an invalid storage remote.");
var headers = BuildSignedUrlHeaders(file, fileExtension, overrideMimeType, download);
var openUrl = await client.PresignedGetObjectAsync(
new PresignedGetObjectArgs()
.WithBucket(dest.Bucket)
.WithObject(fileName)
.WithExpiry(3600)
.WithHeaders(headers)
);
return Redirect(openUrl);
}
private static Dictionary<string, string> BuildSignedUrlHeaders(
SnCloudFile file,
string? fileExtension,
string? overrideMimeType,
bool download
)
{
var headers = new Dictionary<string, string>();
string? contentType = null;
if (fileExtension is not null && MimeTypes.TryGetMimeType(fileExtension, out var mimeType))
if (dest.AccessProxy is not null)
{
contentType = mimeType;
}
else if (overrideMimeType is not null)
{
contentType = overrideMimeType;
}
else if (file.MimeType is not null && !file.MimeType.EndsWith("unknown"))
{
contentType = file.MimeType;
var proxyUrl = dest.AccessProxy;
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
var fullUri = new Uri(baseUri, fileName);
return Redirect(fullUri.ToString());
}
if (contentType is not null)
if (dest.EnableSigned)
{
headers.Add("Response-Content-Type", contentType);
var client = fs.CreateMinioClient(dest);
if (client is null)
return BadRequest(
"Failed to configure client for remote destination, file got an invalid storage remote."
);
var headers = new Dictionary<string, string>();
if (fileExtension is not null)
{
if (MimeTypes.TryGetMimeType(fileExtension, out var mimeType))
headers.Add("Response-Content-Type", mimeType);
}
else if (overrideMimeType is not null)
{
headers.Add("Response-Content-Type", overrideMimeType);
}
else if (file.MimeType is not null && !file.MimeType!.EndsWith("unknown"))
{
headers.Add("Response-Content-Type", file.MimeType);
}
if (download)
{
headers.Add("Response-Content-Disposition", $"attachment; filename=\"{file.Name}\"");
}
var bucket = dest.Bucket;
var openUrl = await client.PresignedGetObjectAsync(
new PresignedGetObjectArgs()
.WithBucket(bucket)
.WithObject(fileName)
.WithExpiry(3600)
.WithHeaders(headers)
);
return Redirect(openUrl);
}
if (download)
{
headers.Add("Response-Content-Disposition", $"attachment; filename=\"{file.Name}\"");
}
return headers;
// Fallback redirect to the S3 endpoint (public read)
var protocol = dest.EnableSsl ? "https" : "http";
        // Use path-style bucket addressing
return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}");
}
[HttpGet("{id}/info")]
@@ -230,59 +171,50 @@ public class FileController(
return file;
}
[HttpGet("{id}/references")]
public async Task<ActionResult<List<Shared.Models.SnCloudFileReference>>> GetFileReferences(string id)
{
var file = await fs.GetFileAsync(id);
if (file is null) return NotFound("File not found.");
// Check if user has access to the file
var accessResult = await ValidateFileAccess(file, null);
if (accessResult is not null) return accessResult;
// Get references using the injected FileReferenceService
var references = await fileReferenceService.GetReferencesAsync(id);
return Ok(references);
}
[Authorize]
[HttpPatch("{id}/name")]
public async Task<ActionResult<SnCloudFile>> UpdateFileName(string id, [FromBody] string name)
{
return await UpdateFileProperty(id, file => file.Name = name);
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == id && f.AccountId == accountId);
if (file is null) return NotFound();
file.Name = name;
await db.SaveChangesAsync();
await fs._PurgeCacheAsync(file.Id);
return file;
}
public class MarkFileRequest
{
public List<Shared.Models.ContentSensitiveMark>? SensitiveMarks { get; set; }
public List<ContentSensitiveMark>? SensitiveMarks { get; set; }
}
[Authorize]
[HttpPut("{id}/marks")]
public async Task<ActionResult<SnCloudFile>> MarkFile(string id, [FromBody] MarkFileRequest request)
{
return await UpdateFileProperty(id, file => file.SensitiveMarks = request.SensitiveMarks);
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == id && f.AccountId == accountId);
if (file is null) return NotFound();
file.SensitiveMarks = request.SensitiveMarks;
await db.SaveChangesAsync();
await fs._PurgeCacheAsync(file.Id);
return file;
}
[Authorize]
[HttpPut("{id}/meta")]
public async Task<ActionResult<SnCloudFile>> UpdateFileMeta(string id, [FromBody] Dictionary<string, object?> meta)
{
return await UpdateFileProperty(id, file => file.UserMeta = meta);
}
private async Task<ActionResult<SnCloudFile>> UpdateFileProperty(string fileId, Action<SnCloudFile> updateAction)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId && f.AccountId == accountId);
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == id && f.AccountId == accountId);
if (file is null) return NotFound();
updateAction(file);
file.UserMeta = meta;
await db.SaveChangesAsync();
await fs._PurgeCacheAsync(file.Id);
return file;
}
@@ -292,40 +224,25 @@ public class FileController(
[FromQuery] Guid? pool,
[FromQuery] bool recycled = false,
[FromQuery] int offset = 0,
[FromQuery] int take = 20,
[FromQuery] string? query = null,
[FromQuery] string order = "date",
[FromQuery] bool orderDesc = true
[FromQuery] int take = 20
)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var filesQuery = db.Files
var query = db.Files
.Where(e => e.IsMarkedRecycle == recycled)
.Where(e => e.AccountId == accountId)
.Include(e => e.Pool)
.OrderByDescending(e => e.CreatedAt)
.AsQueryable();
if (pool.HasValue) filesQuery = filesQuery.Where(e => e.PoolId == pool);
if (pool.HasValue) query = query.Where(e => e.PoolId == pool);
if (!string.IsNullOrWhiteSpace(query))
{
filesQuery = filesQuery.Where(e => e.Name.Contains(query));
}
filesQuery = order.ToLower() switch
{
"date" => orderDesc ? filesQuery.OrderByDescending(e => e.CreatedAt) : filesQuery.OrderBy(e => e.CreatedAt),
"size" => orderDesc ? filesQuery.OrderByDescending(e => e.Size) : filesQuery.OrderBy(e => e.Size),
"name" => orderDesc ? filesQuery.OrderByDescending(e => e.Name) : filesQuery.OrderBy(e => e.Name),
_ => filesQuery.OrderByDescending(e => e.CreatedAt)
};
var total = await filesQuery.CountAsync();
var total = await query.CountAsync();
Response.Headers.Append("X-Total", total.ToString());
var files = await filesQuery
var files = await query
.Skip(offset)
.Take(take)
.ToListAsync();
@@ -333,25 +250,9 @@ public class FileController(
return Ok(files);
}
public class FileBatchDeletionRequest
{
public List<string> FileIds { get; set; } = [];
}
[Authorize]
[HttpPost("batches/delete")]
public async Task<ActionResult> DeleteFileBatch([FromBody] FileBatchDeletionRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var userId = Guid.Parse(currentUser.Id);
var count = await fs.DeleteAccountFileBatchAsync(userId, request.FileIds);
return Ok(new { Count = count });
}
[Authorize]
[HttpDelete("{id}")]
public async Task<ActionResult<SnCloudFile>> DeleteFile(string id)
public async Task<ActionResult> DeleteFile(string id)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var userId = Guid.Parse(currentUser.Id);
@@ -363,9 +264,9 @@ public class FileController(
if (file is null) return NotFound();
await fs.DeleteFileDataAsync(file, force: true);
await fs.DeleteFileAsync(file, skipData: true);
await fs.DeleteFileAsync(file);
return Ok(file);
return NoContent();
}
[Authorize]
@@ -381,10 +282,116 @@ public class FileController(
[Authorize]
[HttpDelete("recycle")]
[AskPermission("files.delete.recycle")]
[RequiredPermission("maintenance", "files.delete.recycle")]
public async Task<ActionResult> DeleteAllRecycledFiles()
{
var count = await fs.DeleteAllRecycledFilesAsync();
return Ok(new { Count = count });
}
}
public class CreateFastFileRequest
{
public string Name { get; set; } = null!;
public long Size { get; set; }
public string Hash { get; set; } = null!;
public string? MimeType { get; set; }
public string? Description { get; set; }
public Dictionary<string, object?>? UserMeta { get; set; }
public Dictionary<string, object?>? FileMeta { get; set; }
public List<ContentSensitiveMark>? SensitiveMarks { get; set; }
public Guid PoolId { get; set; }
}
[Authorize]
[HttpPost("fast")]
[RequiredPermission("global", "files.create")]
public async Task<ActionResult<SnCloudFile>> CreateFastFile([FromBody] CreateFastFileRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var pool = await db.Pools.FirstOrDefaultAsync(p => p.Id == request.PoolId);
if (pool is null) return BadRequest();
if (!currentUser.IsSuperuser && pool.AccountId != accountId)
return StatusCode(403, "You don't have permission to create files in this pool.");
if (!pool.PolicyConfig.EnableFastUpload)
return StatusCode(
403,
"This pool does not allow fast upload"
);
if (pool.PolicyConfig.RequirePrivilege > 0)
{
if (currentUser.PerkSubscription is null)
{
return StatusCode(
403,
$"You need to have join the Stellar Program to use this pool"
);
}
var privilege =
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
if (privilege < pool.PolicyConfig.RequirePrivilege)
{
return StatusCode(
403,
$"You need Stellar Program tier {pool.PolicyConfig.RequirePrivilege} to use this pool, you are tier {privilege}"
);
}
}
if (request.Size > pool.PolicyConfig.MaxFileSize)
{
return StatusCode(
403,
$"File size {request.Size} is larger than the pool's maximum file size {pool.PolicyConfig.MaxFileSize}"
);
}
var (ok, billableUnit, quota) = await qs.IsFileAcceptable(
accountId,
pool.BillingConfig.CostMultiplier ?? 1.0,
request.Size
);
if (!ok)
{
return StatusCode(
403,
$"File size {billableUnit} is larger than the user's quota {quota}"
);
}
await using var transaction = await db.Database.BeginTransactionAsync();
try
{
var file = new SnCloudFile
{
Name = request.Name,
Size = request.Size,
Hash = request.Hash,
MimeType = request.MimeType,
Description = request.Description,
AccountId = accountId,
UserMeta = request.UserMeta,
FileMeta = request.FileMeta,
SensitiveMarks = request.SensitiveMarks,
PoolId = request.PoolId
};
db.Files.Add(file);
await db.SaveChangesAsync();
await fs._PurgeCacheAsync(file.Id);
await transaction.CommitAsync();
file.FastUploadLink = await fs.CreateFastUploadLinkAsync(file);
return file;
}
catch (Exception)
{
await transaction.RollbackAsync();
throw;
}
}
}
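A hedged client-side sketch of the new fast-upload endpoint follows. The host, route prefix ("api/files/fast") and JSON casing are assumptions, since the controller's [Route] attribute and serializer settings are not visible in this excerpt.

// Hypothetical client call; base address and route prefix are assumptions.
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;

var http = new HttpClient { BaseAddress = new Uri("https://drive.example.com") };
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<token>");

var request = new
{
    name = "report.pdf",
    size = 1_048_576L,
    hash = "<hash-of-file>",   // content hash computed by the client
    mimeType = "application/pdf",
    poolId = "00000000-0000-0000-0000-000000000000"
};

// The server validates pool policy, privilege tier and quota before creating the record,
// then returns the SnCloudFile, including a FastUploadLink for uploading the bytes.
var response = await http.PostAsJsonAsync("api/files/fast", request);
response.EnsureSuccessStatusCode();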

View File

@@ -10,59 +10,53 @@ namespace DysonNetwork.Drive.Storage;
public class FileExpirationJob(AppDatabase db, FileService fileService, ILogger<FileExpirationJob> logger) : IJob
{
public async Task Execute(IJobExecutionContext context)
{
{
var now = SystemClock.Instance.GetCurrentInstant();
logger.LogInformation("Running file reference expiration job at {now}", now);
// Delete expired references in bulk and get affected file IDs
var affectedFileIds = await db.FileReferences
// Find all expired references
var expiredReferences = await db.FileReferences
.Where(r => r.ExpiredAt < now && r.ExpiredAt != null)
.Select(r => r.FileId)
.Distinct()
.ToListAsync();
if (!affectedFileIds.Any())
if (!expiredReferences.Any())
{
logger.LogInformation("No expired file references found");
return;
}
logger.LogInformation("Found expired references for {count} files", affectedFileIds.Count);
logger.LogInformation("Found {count} expired file references", expiredReferences.Count);
// Delete expired references in bulk
var deletedReferencesCount = await db.FileReferences
.Where(r => r.ExpiredAt < now && r.ExpiredAt != null)
.ExecuteDeleteAsync();
// Get unique file IDs
var fileIds = expiredReferences.Select(r => r.FileId).Distinct().ToList();
var filesAndReferenceCount = new Dictionary<string, int>();
logger.LogInformation("Deleted {count} expired file references", deletedReferencesCount);
// Delete expired references
db.FileReferences.RemoveRange(expiredReferences);
await db.SaveChangesAsync();
// Find files that now have no remaining references (bulk operation)
var filesToDelete = await db.Files
.Where(f => affectedFileIds.Contains(f.Id))
.Where(f => !db.FileReferences.Any(r => r.FileId == f.Id))
.Select(f => f.Id)
.ToListAsync();
// Check remaining references for each file
foreach (var fileId in fileIds)
{
var remainingReferences = await db.FileReferences
.Where(r => r.FileId == fileId)
.CountAsync();
if (filesToDelete.Any())
{
logger.LogInformation("Deleting {count} files that have no remaining references", filesToDelete.Count);
filesAndReferenceCount[fileId] = remainingReferences;
// Get files for deletion
var files = await db.Files
.Where(f => filesToDelete.Contains(f.Id))
.ToListAsync();
// Delete files and their data in parallel
var deleteTasks = files.Select(f => fileService.DeleteFileAsync(f));
await Task.WhenAll(deleteTasks);
}
// Purge cache for files that still have references
var filesWithRemainingRefs = affectedFileIds.Except(filesToDelete).ToList();
if (filesWithRemainingRefs.Any())
{
var cachePurgeTasks = filesWithRemainingRefs.Select(fileService._PurgeCacheAsync);
await Task.WhenAll(cachePurgeTasks);
// If no references remain, delete the file
if (remainingReferences == 0)
{
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId);
if (file == null) continue;
logger.LogInformation("Deleting file {fileId} as all references have expired", fileId);
await fileService.DeleteFileAsync(file);
}
else
{
// Just purge the cache
await fileService._PurgeCacheAsync(fileId);
}
}
logger.LogInformation("Completed file reference expiration job");

View File

@@ -1,5 +1,4 @@
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using EFCore.BulkExtensions;
using Microsoft.EntityFrameworkCore;
@@ -21,7 +20,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
/// <param name="expiredAt">Optional expiration time for the file</param>
/// <param name="duration">Optional duration after which the file expires (alternative to expiredAt)</param>
/// <returns>The created file reference</returns>
public async Task<SnCloudFileReference> CreateReferenceAsync(
public async Task<CloudFileReference> CreateReferenceAsync(
string fileId,
string usage,
string resourceId,
@@ -34,7 +33,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
if (duration.HasValue)
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
var reference = new SnCloudFileReference
var reference = new CloudFileReference
{
FileId = fileId,
Usage = usage,
@@ -50,7 +49,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
return reference;
}
public async Task<List<SnCloudFileReference>> CreateReferencesAsync(
public async Task<List<CloudFileReference>> CreateReferencesAsync(
List<string> fileId,
string usage,
string resourceId,
@@ -58,15 +57,12 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
Duration? duration = null
)
{
var now = SystemClock.Instance.GetCurrentInstant();
var data = fileId.Select(id => new SnCloudFileReference
var data = fileId.Select(id => new CloudFileReference
{
FileId = id,
Usage = usage,
ResourceId = resourceId,
ExpiredAt = expiredAt ?? now + duration,
CreatedAt = now,
UpdatedAt = now
ExpiredAt = expiredAt ?? SystemClock.Instance.GetCurrentInstant() + duration
}).ToList();
await db.BulkInsertAsync(data);
return data;
@@ -77,11 +73,11 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
/// </summary>
/// <param name="fileId">The ID of the file</param>
/// <returns>A list of all references to the file</returns>
public async Task<List<SnCloudFileReference>> GetReferencesAsync(string fileId)
public async Task<List<CloudFileReference>> GetReferencesAsync(string fileId)
{
var cacheKey = $"{CacheKeyPrefix}list:{fileId}";
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
if (cachedReferences is not null)
return cachedReferences;
@@ -94,45 +90,13 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
return references;
}
public async Task<Dictionary<string, List<SnCloudFileReference>>> GetReferencesAsync(IEnumerable<string> fileIds)
public async Task<Dictionary<string, List<CloudFileReference>>> GetReferencesAsync(IEnumerable<string> fileId)
{
var fileIdList = fileIds.ToList();
var result = new Dictionary<string, List<SnCloudFileReference>>();
// Check cache for each file ID
var uncachedFileIds = new List<string>();
foreach (var fileId in fileIdList)
{
var cacheKey = $"{CacheKeyPrefix}list:{fileId}";
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
if (cachedReferences is not null)
{
result[fileId] = cachedReferences;
}
else
{
uncachedFileIds.Add(fileId);
}
}
// Fetch uncached references from database
if (uncachedFileIds.Any())
{
var dbReferences = await db.FileReferences
.Where(r => uncachedFileIds.Contains(r.FileId))
.GroupBy(r => r.FileId)
.ToDictionaryAsync(r => r.Key, r => r.ToList());
// Cache the results
foreach (var kvp in dbReferences)
{
var cacheKey = $"{CacheKeyPrefix}list:{kvp.Key}";
await cache.SetAsync(cacheKey, kvp.Value, CacheDuration);
result[kvp.Key] = kvp.Value;
}
}
return result;
var references = await db.FileReferences
.Where(r => fileId.Contains(r.FileId))
.GroupBy(r => r.FileId)
.ToDictionaryAsync(r => r.Key, r => r.ToList());
return references;
}
/// <summary>
@@ -162,11 +126,11 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
/// </summary>
/// <param name="resourceId">The ID of the resource</param>
/// <returns>A list of file references associated with the resource</returns>
public async Task<List<SnCloudFileReference>> GetResourceReferencesAsync(string resourceId)
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId)
{
var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}";
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
if (cachedReferences is not null)
return cachedReferences;
@@ -184,21 +148,11 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
/// </summary>
/// <param name="usage">The usage context</param>
/// <returns>A list of file references with the specified usage</returns>
public async Task<List<SnCloudFileReference>> GetUsageReferencesAsync(string usage)
public async Task<List<CloudFileReference>> GetUsageReferencesAsync(string usage)
{
var cacheKey = $"{CacheKeyPrefix}usage:{usage}";
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
if (cachedReferences is not null)
return cachedReferences;
var references = await db.FileReferences
return await db.FileReferences
.Where(r => r.Usage == usage)
.ToListAsync();
await cache.SetAsync(cacheKey, references, CacheDuration);
return references;
}
/// <summary>
@@ -255,9 +209,8 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
public async Task<int> DeleteResourceReferencesBatchAsync(IEnumerable<string> resourceIds, string? usage = null)
{
var resourceIdList = resourceIds.ToList();
var references = await db.FileReferences
.Where(r => resourceIdList.Contains(r.ResourceId))
.Where(r => resourceIds.Contains(r.ResourceId))
.If(usage != null, q => q.Where(q => q.Usage == usage))
.ToListAsync();
@@ -269,9 +222,8 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
db.FileReferences.RemoveRange(references);
var deletedCount = await db.SaveChangesAsync();
// Purge caches for files and resources
// Purge caches
var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList();
tasks.AddRange(resourceIdList.Select(PurgeCacheForResourceAsync));
await Task.WhenAll(tasks);
return deletedCount;
@@ -310,7 +262,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
/// <param name="expiredAt">Optional expiration time for newly added files</param>
/// <param name="duration">Optional duration after which newly added files expire</param>
/// <returns>A list of the updated file references</returns>
public async Task<List<SnCloudFileReference>> UpdateResourceFilesAsync(
public async Task<List<CloudFileReference>> UpdateResourceFilesAsync(
string resourceId,
IEnumerable<string>? newFileIds,
string usage,
@@ -318,7 +270,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
Duration? duration = null)
{
if (newFileIds == null)
return new List<SnCloudFileReference>();
return new List<CloudFileReference>();
var existingReferences = await db.FileReferences
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
@@ -336,7 +288,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
// Files to add
var toAdd = newFileIdsList
.Where(id => !existingFileIds.Contains(id))
.Select(id => new SnCloudFileReference
.Select(id => new CloudFileReference
{
FileId = id,
Usage = usage,
@@ -488,7 +440,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
/// <param name="resourceId">The resource ID</param>
/// <param name="usageType">The usage type</param>
/// <returns>List of file references</returns>
public async Task<List<SnCloudFileReference>> GetResourceReferencesAsync(string resourceId, string usageType)
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId, string usageType)
{
return await db.FileReferences
.Where(r => r.ResourceId == resourceId && r.Usage == usageType)
@@ -521,4 +473,4 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
return await SetReferenceExpirationAsync(referenceId, expiredAt);
}
}
}

View File

@@ -99,74 +99,30 @@ public class FileService(
)
{
var accountId = Guid.Parse(account.Id);
var pool = await ValidateAndGetPoolAsync(filePool);
var bundle = await ValidateAndGetBundleAsync(fileBundleId, accountId);
var finalExpiredAt = CalculateFinalExpiration(expiredAt, pool, bundle);
var (managedTempPath, fileSize, finalContentType) =
await PrepareFileAsync(fileId, filePath, fileName, contentType);
var file = CreateFileObject(fileId, fileName, finalContentType, fileSize, finalExpiredAt, bundle, accountId);
if (!pool.PolicyConfig.NoMetadata)
{
await ExtractMetadataAsync(file, managedTempPath);
}
var (processingPath, isTempFile) =
await ProcessEncryptionAsync(fileId, managedTempPath, encryptPassword, pool, file);
file.Hash = await HashFileAsync(processingPath);
await SaveFileToDatabaseAsync(file);
await PublishFileUploadedEventAsync(file, pool, processingPath, isTempFile);
return file;
}
private async Task<FilePool> ValidateAndGetPoolAsync(string filePool)
{
var pool = await GetPoolAsync(Guid.Parse(filePool));
return pool ?? throw new InvalidOperationException("Pool not found: " + filePool);
}
if (pool is null) throw new InvalidOperationException("Pool not found");
private async Task<SnFileBundle?> ValidateAndGetBundleAsync(string? fileBundleId, Guid accountId)
{
if (fileBundleId is null) return null;
var bundle = await GetBundleAsync(Guid.Parse(fileBundleId), accountId);
return bundle ?? throw new InvalidOperationException("Bundle not found: " + fileBundleId);
}
private static Instant? CalculateFinalExpiration(Instant? expiredAt, FilePool pool, SnFileBundle? bundle)
{
var finalExpiredAt = expiredAt;
// Apply pool expiration policy
if (pool.StorageConfig.Expiration is not null && expiredAt.HasValue)
{
var expectedExpiration = SystemClock.Instance.GetCurrentInstant() - expiredAt.Value;
var effectiveExpiration = pool.StorageConfig.Expiration < expectedExpiration
? pool.StorageConfig.Expiration
: expectedExpiration;
finalExpiredAt = SystemClock.Instance.GetCurrentInstant() + effectiveExpiration;
expiredAt = SystemClock.Instance.GetCurrentInstant() + effectiveExpiration;
}
var bundle = fileBundleId is not null
? await GetBundleAsync(Guid.Parse(fileBundleId), accountId)
: null;
if (fileBundleId is not null && bundle is null)
{
throw new InvalidOperationException("Bundle not found");
}
// Bundle expiration takes precedence
if (bundle?.ExpiredAt != null)
finalExpiredAt = bundle.ExpiredAt.Value;
expiredAt = bundle.ExpiredAt.Value;
return finalExpiredAt;
}
private async Task<(string tempPath, long fileSize, string contentType)> PrepareFileAsync(
string fileId,
string filePath,
string fileName,
string? contentType
)
{
var managedTempPath = Path.Combine(Path.GetTempPath(), fileId);
File.Copy(filePath, managedTempPath, true);
@@ -175,67 +131,49 @@ public class FileService(
var finalContentType = contentType ??
(!fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName));
return (managedTempPath, fileSize, finalContentType);
}
private SnCloudFile CreateFileObject(
string fileId,
string fileName,
string contentType,
long fileSize,
Instant? expiredAt,
SnFileBundle? bundle,
Guid accountId
)
{
return new SnCloudFile
var file = new SnCloudFile
{
Id = fileId,
Name = fileName,
MimeType = contentType,
MimeType = finalContentType,
Size = fileSize,
ExpiredAt = expiredAt,
BundleId = bundle?.Id,
AccountId = accountId,
AccountId = Guid.Parse(account.Id),
};
}
private async Task<(string processingPath, bool isTempFile)> ProcessEncryptionAsync(
string fileId,
string managedTempPath,
string? encryptPassword,
FilePool pool,
SnCloudFile file
)
{
if (string.IsNullOrWhiteSpace(encryptPassword))
return (managedTempPath, true);
if (!pool.PolicyConfig.NoMetadata)
{
await ExtractMetadataAsync(file, managedTempPath);
}
if (!pool.PolicyConfig.AllowEncryption)
throw new InvalidOperationException("Encryption is not allowed in this pool");
string processingPath = managedTempPath;
bool isTempFile = true;
var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted");
FileEncryptor.EncryptFile(managedTempPath, encryptedPath, encryptPassword);
if (!string.IsNullOrWhiteSpace(encryptPassword))
{
if (!pool.PolicyConfig.AllowEncryption)
throw new InvalidOperationException("Encryption is not allowed in this pool");
File.Delete(managedTempPath);
var encryptedPath = Path.Combine(Path.GetTempPath(), $"{fileId}.encrypted");
FileEncryptor.EncryptFile(managedTempPath, encryptedPath, encryptPassword);
file.IsEncrypted = true;
file.MimeType = "application/octet-stream";
file.Size = new FileInfo(encryptedPath).Length;
File.Delete(managedTempPath);
return (encryptedPath, true);
}
processingPath = encryptedPath;
file.IsEncrypted = true;
file.MimeType = "application/octet-stream";
file.Size = new FileInfo(processingPath).Length;
}
file.Hash = await HashFileAsync(processingPath);
private async Task SaveFileToDatabaseAsync(SnCloudFile file)
{
db.Files.Add(file);
await db.SaveChangesAsync();
file.StorageId ??= file.Id;
}
private async Task PublishFileUploadedEventAsync(SnCloudFile file, FilePool pool, string processingPath,
bool isTempFile)
{
file.StorageId ??= file.Id;
var js = nats.CreateJetStreamContext();
await js.PublishAsync(
FileUploadedEvent.Type,
@@ -248,6 +186,8 @@ public class FileService(
isTempFile)
).ToByteArray()
);
return file;
}
private async Task ExtractMetadataAsync(SnCloudFile file, string filePath)
@@ -474,14 +414,13 @@ public class FileService(
return await db.Files.AsNoTracking().FirstAsync(f => f.Id == file.Id);
}
public async Task DeleteFileAsync(SnCloudFile file, bool skipData = false)
public async Task DeleteFileAsync(SnCloudFile file)
{
db.Remove(file);
await db.SaveChangesAsync();
await _PurgeCacheAsync(file.Id);
if (!skipData)
await DeleteFileDataAsync(file);
await DeleteFileDataAsync(file);
}
public async Task DeleteFileDataAsync(SnCloudFile file, bool force = false)
@@ -664,12 +603,9 @@ public class FileService(
}
}
return
[
.. references
.Select(r => cachedFiles.GetValueOrDefault(r.Id))
.Where(f => f != null)
];
return [.. references
.Select(r => cachedFiles.GetValueOrDefault(r.Id))
.Where(f => f != null)];
}
public async Task<int> GetReferenceCountAsync(string fileId)
@@ -718,21 +654,6 @@ public class FileService(
return count;
}
public async Task<int> DeleteAccountFileBatchAsync(Guid accountId, List<string> fileIds)
{
var files = await db.Files
.Where(f => f.AccountId == accountId && fileIds.Contains(f.Id))
.ToListAsync();
var count = files.Count;
var tasks = files.Select(f => DeleteFileDataAsync(f, true));
await Task.WhenAll(tasks);
var fileIdsList = files.Select(f => f.Id).ToList();
await _PurgeCacheRangeAsync(fileIdsList);
db.RemoveRange(files);
await db.SaveChangesAsync();
return count;
}
public async Task<int> DeletePoolRecycledFilesAsync(Guid poolId)
{
var files = await db.Files
@@ -803,4 +724,4 @@ file class UpdatableCloudFile(SnCloudFile file)
.SetProperty(f => f.UserMeta, userMeta)
.SetProperty(f => f.IsMarkedRecycle, IsMarkedRecycle);
}
}
}

View File

@@ -1,17 +1,14 @@
using System.ComponentModel.DataAnnotations;
using System.Text.Json;
using DysonNetwork.Drive.Billing;
using DysonNetwork.Drive.Index;
using DysonNetwork.Drive.Storage.Model;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Http;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using NanoidDotNet;
using NodaTime;
using TaskStatus = DysonNetwork.Drive.Storage.Model.TaskStatus;
namespace DysonNetwork.Drive.Storage;
@@ -23,10 +20,7 @@ public class FileUploadController(
FileService fileService,
AppDatabase db,
PermissionService.PermissionServiceClient permission,
QuotaService quotaService,
PersistentTaskService persistentTaskService,
FileIndexService fileIndexService,
ILogger<FileUploadController> logger
QuotaService quotaService
)
: ControllerBase
{
@@ -39,54 +33,114 @@ public class FileUploadController(
public async Task<IActionResult> CreateUploadTask([FromBody] CreateUploadTaskRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
{
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
}
var permissionCheck = await ValidateUserPermissions(currentUser);
if (permissionCheck is not null) return permissionCheck;
if (!currentUser.IsSuperuser)
{
var allowed = await permission.HasPermissionAsync(new HasPermissionRequest
{ Actor = $"user:{currentUser.Id}", Area = "global", Key = "files.create" });
if (!allowed.HasPermission)
{
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
}
}
request.PoolId ??= Guid.Parse(configuration["Storage:PreferredRemote"]!);
var pool = await fileService.GetPoolAsync(request.PoolId.Value);
if (pool is null)
{
return new ObjectResult(ApiError.NotFound("Pool")) { StatusCode = 404 };
}
var poolValidation = await ValidatePoolAccess(currentUser, pool, request);
if (poolValidation is not null) return poolValidation;
if (pool.PolicyConfig.RequirePrivilege is > 0)
{
var privilege =
currentUser.PerkSubscription is null ? 0 :
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
if (privilege < pool.PolicyConfig.RequirePrivilege)
{
return new ObjectResult(ApiError.Unauthorized(
$"You need Stellar Program tier {pool.PolicyConfig.RequirePrivilege} to use pool {pool.Name}, you are tier {privilege}",
forbidden: true))
{
StatusCode = 403
};
}
}
var policyValidation = ValidatePoolPolicy(pool.PolicyConfig, request);
if (policyValidation is not null) return policyValidation;
var policy = pool.PolicyConfig;
if (!policy.AllowEncryption && !string.IsNullOrEmpty(request.EncryptPassword))
{
return new ObjectResult(ApiError.Unauthorized("File encryption is not allowed in this pool", true))
{ StatusCode = 403 };
}
var quotaValidation = await ValidateQuota(currentUser, pool, request.FileSize);
if (quotaValidation is not null) return quotaValidation;
if (policy.AcceptTypes is { Count: > 0 })
{
if (string.IsNullOrEmpty(request.ContentType))
{
return new ObjectResult(ApiError.Validation(new Dictionary<string, string[]>
{
{ "contentType", new[] { "Content type is required by the pool's policy" } }
}))
{ StatusCode = 400 };
}
EnsureTempDirectoryExists();
var foundMatch = policy.AcceptTypes.Any(acceptType =>
{
if (acceptType.EndsWith("/*", StringComparison.OrdinalIgnoreCase))
{
var type = acceptType[..^2];
return request.ContentType.StartsWith($"{type}/", StringComparison.OrdinalIgnoreCase);
}
var accountId = Guid.Parse(currentUser.Id);
return acceptType.Equals(request.ContentType, StringComparison.OrdinalIgnoreCase);
});
if (!foundMatch)
{
return new ObjectResult(
ApiError.Unauthorized($"Content type {request.ContentType} is not allowed by the pool's policy",
true))
{ StatusCode = 403 };
}
}
if (policy.MaxFileSize is not null && request.FileSize > policy.MaxFileSize)
{
return new ObjectResult(ApiError.Unauthorized(
$"File size {request.FileSize} is larger than the pool's maximum file size {policy.MaxFileSize}",
true))
{
StatusCode = 403
};
}
var (ok, billableUnit, quota) = await quotaService.IsFileAcceptable(
Guid.Parse(currentUser.Id),
pool.BillingConfig.CostMultiplier ?? 1.0,
request.FileSize
);
if (!ok)
{
return new ObjectResult(
ApiError.Unauthorized($"File size {billableUnit} MiB is exceeded the user's quota {quota} MiB",
true))
{ StatusCode = 403 };
}
if (!Directory.Exists(_tempPath))
{
Directory.CreateDirectory(_tempPath);
}
// Check if a file with the same hash already exists
var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == request.Hash);
if (existingFile != null)
{
// Create the file index if a path is provided, even for existing files
if (string.IsNullOrEmpty(request.Path))
return Ok(new CreateUploadTaskResponse
{
FileExists = true,
File = existingFile
});
try
{
await fileIndexService.CreateAsync(request.Path, existingFile.Id, accountId);
logger.LogInformation("Created file index for existing file {FileId} at path {Path}",
existingFile.Id, request.Path);
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to create file index for existing file {FileId} at path {Path}",
existingFile.Id, request.Path);
// Don't fail the request if index creation fails, just log it
}
return Ok(new CreateUploadTaskResponse
{
FileExists = true,
@@ -95,593 +149,130 @@ public class FileUploadController(
}
var taskId = await Nanoid.GenerateAsync();
var taskPath = Path.Combine(_tempPath, taskId);
Directory.CreateDirectory(taskPath);
// Create persistent upload task
var persistentTask = await persistentTaskService.CreateUploadTaskAsync(taskId, request, accountId);
var chunkSize = request.ChunkSize ?? DefaultChunkSize;
var chunksCount = (int)Math.Ceiling((double)request.FileSize / chunkSize);
var task = new UploadTask
{
TaskId = taskId,
FileName = request.FileName,
FileSize = request.FileSize,
ContentType = request.ContentType,
ChunkSize = chunkSize,
ChunksCount = chunksCount,
PoolId = request.PoolId.Value,
BundleId = request.BundleId,
EncryptPassword = request.EncryptPassword,
ExpiredAt = request.ExpiredAt,
Hash = request.Hash,
};
await System.IO.File.WriteAllTextAsync(Path.Combine(taskPath, "task.json"), JsonSerializer.Serialize(task));
return Ok(new CreateUploadTaskResponse
{
FileExists = false,
TaskId = taskId,
ChunkSize = persistentTask.ChunkSize,
ChunksCount = persistentTask.ChunksCount
ChunkSize = chunkSize,
ChunksCount = chunksCount
});
}
private async Task<IActionResult?> ValidateUserPermissions(Account currentUser)
{
if (currentUser.IsSuperuser) return null;
var allowed = await permission.HasPermissionAsync(new HasPermissionRequest
{ Actor = currentUser.Id, Key = "files.create" });
return allowed.HasPermission
? null
: new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
}
private Task<IActionResult?> ValidatePoolAccess(Account currentUser, FilePool pool, CreateUploadTaskRequest request)
{
if (pool.PolicyConfig.RequirePrivilege <= 0) return Task.FromResult<IActionResult?>(null);
var privilege = currentUser.PerkSubscription is null
? 0
: PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
if (privilege < pool.PolicyConfig.RequirePrivilege)
{
return Task.FromResult<IActionResult?>(new ObjectResult(ApiError.Unauthorized(
$"You need Stellar Program tier {pool.PolicyConfig.RequirePrivilege} to use pool {pool.Name}, you are tier {privilege}",
forbidden: true))
{ StatusCode = 403 });
}
return Task.FromResult<IActionResult?>(null);
}
private static IActionResult? ValidatePoolPolicy(PolicyConfig policy, CreateUploadTaskRequest request)
{
if (!policy.AllowEncryption && !string.IsNullOrEmpty(request.EncryptPassword))
{
return new ObjectResult(ApiError.Unauthorized("File encryption is not allowed in this pool", true))
{ StatusCode = 403 };
}
if (policy.AcceptTypes is { Count: > 0 })
{
if (string.IsNullOrEmpty(request.ContentType))
{
return new ObjectResult(ApiError.Validation(new Dictionary<string, string[]>
{
{ "contentType", new[] { "Content type is required by the pool's policy" } }
}))
{ StatusCode = 400 };
}
var foundMatch = policy.AcceptTypes.Any(acceptType =>
{
if (!acceptType.EndsWith("/*", StringComparison.OrdinalIgnoreCase))
return acceptType.Equals(request.ContentType, StringComparison.OrdinalIgnoreCase);
var type = acceptType[..^2];
return request.ContentType.StartsWith($"{type}/", StringComparison.OrdinalIgnoreCase);
});
if (!foundMatch)
{
return new ObjectResult(
ApiError.Unauthorized($"Content type {request.ContentType} is not allowed by the pool's policy",
true))
{ StatusCode = 403 };
}
}
if (policy.MaxFileSize is not null && request.FileSize > policy.MaxFileSize)
{
return new ObjectResult(ApiError.Unauthorized(
$"File size {request.FileSize} is larger than the pool's maximum file size {policy.MaxFileSize}",
true))
{ StatusCode = 403 };
}
return null;
}
private async Task<IActionResult?> ValidateQuota(Account currentUser, FilePool pool, long fileSize)
{
var (ok, billableUnit, quota) = await quotaService.IsFileAcceptable(
Guid.Parse(currentUser.Id),
pool.BillingConfig.CostMultiplier ?? 1.0,
fileSize
);
if (!ok)
{
return new ObjectResult(
ApiError.Unauthorized($"File size {billableUnit} MiB is exceeded the user's quota {quota} MiB",
true))
{ StatusCode = 403 };
}
return null;
}
private void EnsureTempDirectoryExists()
{
if (!Directory.Exists(_tempPath))
{
Directory.CreateDirectory(_tempPath);
}
}
public class UploadChunkRequest
{
[Required] public IFormFile Chunk { get; set; } = null!;
[Required]
public IFormFile Chunk { get; set; } = null!;
}
[HttpPost("chunk/{taskId}/{chunkIndex:int}")]
[HttpPost("chunk/{taskId}/{chunkIndex}")]
[RequestSizeLimit(DefaultChunkSize + 1024 * 1024)] // 6MB to be safe
[RequestFormLimits(MultipartBodyLengthLimit = DefaultChunkSize + 1024 * 1024)]
public async Task<IActionResult> UploadChunk(string taskId, int chunkIndex, [FromForm] UploadChunkRequest request)
{
var chunk = request.Chunk;
// Check if chunk is already uploaded (resumable upload)
if (await persistentTaskService.IsChunkUploadedAsync(taskId, chunkIndex))
{
return Ok(new { message = "Chunk already uploaded" });
}
var taskPath = Path.Combine(_tempPath, taskId);
if (!Directory.Exists(taskPath))
{
Directory.CreateDirectory(taskPath);
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
}
var chunkPath = Path.Combine(taskPath, $"{chunkIndex}.chunk");
await using var stream = new FileStream(chunkPath, FileMode.Create);
await chunk.CopyToAsync(stream);
// Update persistent task progress
await persistentTaskService.UpdateChunkProgressAsync(taskId, chunkIndex);
return Ok();
}
[HttpPost("complete/{taskId}")]
public async Task<IActionResult> CompleteUpload(string taskId)
{
// Get persistent task
var persistentTask = await persistentTaskService.GetUploadTaskAsync(taskId);
if (persistentTask is null)
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
// Verify ownership
if (persistentTask.AccountId != Guid.Parse(currentUser.Id))
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
var taskPath = Path.Combine(_tempPath, taskId);
if (!Directory.Exists(taskPath))
return new ObjectResult(ApiError.NotFound("Upload task directory")) { StatusCode = 404 };
var mergedFilePath = Path.Combine(_tempPath, taskId + ".tmp");
try
{
await MergeChunks(taskId, taskPath, mergedFilePath, persistentTask.ChunksCount, persistentTaskService);
var fileId = await Nanoid.GenerateAsync();
var cloudFile = await fileService.ProcessNewFileAsync(
currentUser,
fileId,
persistentTask.PoolId.ToString(),
persistentTask.BundleId?.ToString(),
mergedFilePath,
persistentTask.FileName,
persistentTask.ContentType,
persistentTask.EncryptPassword,
persistentTask.ExpiredAt
);
// Create the file index if a path is provided
if (!string.IsNullOrEmpty(persistentTask.Path))
{
try
{
var accountId = Guid.Parse(currentUser.Id);
await fileIndexService.CreateAsync(persistentTask.Path, fileId, accountId);
logger.LogInformation("Created file index for file {FileId} at path {Path}", fileId,
persistentTask.Path);
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to create file index for file {FileId} at path {Path}", fileId,
persistentTask.Path);
// Don't fail the upload if index creation fails, just log it
}
}
// Update the task status to "processing" - background processing is now happening
await persistentTaskService.UpdateTaskProgressAsync(taskId, 0.95, "Processing file in background...");
// Send upload completion notification (a file is uploaded, but processing continues)
await persistentTaskService.SendUploadCompletedNotificationAsync(persistentTask, fileId);
return Ok(cloudFile);
}
catch (Exception ex)
{
// Log the actual exception for debugging
logger.LogError(ex, "Failed to complete upload for task {TaskId}. Error: {ErrorMessage}", taskId,
ex.Message);
// Mark task as failed
await persistentTaskService.MarkTaskFailedAsync(taskId);
// Send failure notification
await persistentTaskService.SendUploadFailedNotificationAsync(persistentTask, ex.Message);
await CleanupTempFiles(taskPath, mergedFilePath);
return new ObjectResult(new ApiError
{
Code = "UPLOAD_FAILED",
Message = $"Failed to complete file upload: {ex.Message}",
Status = 500
}) { StatusCode = 500 };
}
finally
{
// Always clean up temp files
await CleanupTempFiles(taskPath, mergedFilePath);
}
}
private static async Task MergeChunks(
string taskId,
string taskPath,
string mergedFilePath,
int chunksCount,
PersistentTaskService persistentTaskService)
{
await using var mergedStream = new FileStream(mergedFilePath, FileMode.Create);
const double baseProgress = 0.8; // Chunk uploads account for the first 80% of overall progress
const double remainingProgress = 0.15; // Remaining 15% progress distributed across chunks
var progressPerChunk = remainingProgress / chunksCount;
for (var i = 0; i < chunksCount; i++)
{
var chunkPath = Path.Combine(taskPath, i + ".chunk");
if (!System.IO.File.Exists(chunkPath))
throw new InvalidOperationException("Chunk " + i + " is missing.");
await using var chunkStream = new FileStream(chunkPath, FileMode.Open);
await chunkStream.CopyToAsync(mergedStream);
// Update progress after each chunk is merged
var currentProgress = baseProgress + progressPerChunk * (i + 1);
await persistentTaskService.UpdateTaskProgressAsync(
taskId,
currentProgress,
"Merging chunks... (" + (i + 1) + "/" + chunksCount + ")"
);
}
}
private static Task CleanupTempFiles(string taskPath, string mergedFilePath)
{
try
{
if (Directory.Exists(taskPath))
Directory.Delete(taskPath, true);
if (System.IO.File.Exists(mergedFilePath))
System.IO.File.Delete(mergedFilePath);
}
catch
{
// Ignore cleanup errors to avoid masking the original exception
}
return Task.CompletedTask;
}
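Putting the endpoints above together, a client creates an upload task, streams each chunk, then calls complete. The sketch below is a minimal client-side flow under two assumptions: the controller's route prefix is /api/files/upload (the [Route] attribute is outside this hunk), and the create endpoint has already returned taskId, chunkSize, and chunksCount as in CreateUploadTaskResponse.
async Task UploadFileAsync(HttpClient http, string path, string taskId, long chunkSize, int chunksCount)
{
    await using var file = File.OpenRead(path);
    var buffer = new byte[chunkSize];
    for (var i = 0; i < chunksCount; i++)
    {
        // The final chunk is usually shorter; only send the bytes actually read.
        var read = await file.ReadAsync(buffer.AsMemory(0, (int)chunkSize));
        using var form = new MultipartFormDataContent
        {
            { new ByteArrayContent(buffer, 0, read), "chunk", "chunk.bin" }
        };
        var resp = await http.PostAsync($"/api/files/upload/chunk/{taskId}/{i}", form);
        resp.EnsureSuccessStatusCode();
    }
    // Ask the server to merge the chunks and hand the result to FileService.
    var complete = await http.PostAsync($"/api/files/upload/complete/{taskId}", null);
    complete.EnsureSuccessStatusCode();
}
Because UploadChunk answers "Chunk already uploaded" for indices already recorded by PersistentTaskService, the same loop can simply be re-run after an interruption to resume the transfer.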
// New endpoints for resumable uploads
[HttpGet("tasks")]
public async Task<IActionResult> GetMyUploadTasks(
[FromQuery] UploadTaskStatus? status = null,
[FromQuery] string? sortBy = "lastActivity",
[FromQuery] bool sortDescending = true,
[FromQuery] int offset = 0,
[FromQuery] int limit = 50
)
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
var tasks = await persistentTaskService.GetUserUploadTasksAsync(accountId, status, sortBy, sortDescending,
offset, limit);
Response.Headers.Append("X-Total", tasks.TotalCount.ToString());
return Ok(tasks.Items.Select(t => new
{
t.TaskId,
t.FileName,
t.FileSize,
t.ContentType,
t.ChunkSize,
t.ChunksCount,
t.ChunksUploaded,
Progress = t.ChunksCount > 0 ? (double)t.ChunksUploaded / t.ChunksCount * 100 : 0,
t.Status,
t.LastActivity,
t.CreatedAt,
t.UpdatedAt,
t.UploadedChunks,
Pool = new { t.PoolId, Name = "Pool Name" }, // Could be expanded to include pool details
Bundle = t.BundleId.HasValue ? new { t.BundleId } : null
}));
}
[HttpGet("progress/{taskId}")]
public async Task<IActionResult> GetUploadProgress(string taskId)
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var task = await persistentTaskService.GetUploadTaskAsync(taskId);
if (task is null)
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
// Verify ownership
if (task.AccountId != Guid.Parse(currentUser.Id))
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
var progress = await persistentTaskService.GetUploadProgressAsync(taskId);
return Ok(new
{
task.TaskId,
task.FileName,
task.FileSize,
task.ChunksCount,
task.ChunksUploaded,
Progress = progress,
task.Status,
task.LastActivity,
task.UploadedChunks
});
}
[HttpGet("resume/{taskId}")]
public async Task<IActionResult> ResumeUploadTask(string taskId)
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var task = await persistentTaskService.GetUploadTaskAsync(taskId);
if (task is null)
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
// Verify ownership
if (task.AccountId != Guid.Parse(currentUser.Id))
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
// Ensure temp directory exists
var taskPath = Path.Combine(_tempPath, taskId);
if (!Directory.Exists(taskPath))
{
Directory.CreateDirectory(taskPath);
}
return Ok(new
{
task.TaskId,
task.FileName,
task.FileSize,
task.ContentType,
task.ChunkSize,
task.ChunksCount,
task.ChunksUploaded,
task.UploadedChunks,
Progress = task.ChunksCount > 0 ? (double)task.ChunksUploaded / task.ChunksCount * 100 : 0
});
}
[HttpDelete("task/{taskId}")]
public async Task<IActionResult> CancelUploadTask(string taskId)
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var task = await persistentTaskService.GetUploadTaskAsync(taskId);
if (task is null)
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
// Verify ownership
if (task.AccountId != Guid.Parse(currentUser.Id))
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
// Mark as failed (cancelled)
await persistentTaskService.MarkTaskFailedAsync(taskId);
// Clean up temp files
var taskPath = Path.Combine(_tempPath, taskId);
await CleanupTempFiles(taskPath, string.Empty);
return Ok(new { message = "Upload task cancelled" });
}
[HttpGet("stats")]
public async Task<IActionResult> GetUploadStats()
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
var stats = await persistentTaskService.GetUserUploadStatsAsync(accountId);
return Ok(new
{
stats.TotalTasks,
stats.InProgressTasks,
stats.CompletedTasks,
stats.FailedTasks,
stats.ExpiredTasks,
stats.TotalUploadedBytes,
stats.AverageProgress,
stats.RecentActivity
});
}
[HttpDelete("tasks/cleanup")]
public async Task<IActionResult> CleanupFailedTasks()
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
var cleanedCount = await persistentTaskService.CleanupUserFailedTasksAsync(accountId);
return Ok(new { message = $"Cleaned up {cleanedCount} failed tasks" });
}
[HttpGet("tasks/recent")]
public async Task<IActionResult> GetRecentTasks([FromQuery] int limit = 10)
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var accountId = Guid.Parse(currentUser.Id);
var tasks = await persistentTaskService.GetRecentUserTasksAsync(accountId, limit);
return Ok(tasks.Select(t => new
{
t.TaskId,
t.FileName,
t.FileSize,
t.ContentType,
Progress = t.ChunksCount > 0 ? (double)t.ChunksUploaded / t.ChunksCount * 100 : 0,
t.Status,
t.LastActivity,
t.CreatedAt
}));
}
[HttpGet("tasks/{taskId}/details")]
public async Task<IActionResult> GetTaskDetails(string taskId)
{
var currentUser = HttpContext.Items["CurrentUser"] as Account;
if (currentUser is null)
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
var task = await persistentTaskService.GetUploadTaskAsync(taskId);
if (task is null)
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
// Verify ownership
if (task.AccountId != Guid.Parse(currentUser.Id))
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
// Get pool information
var pool = await fileService.GetPoolAsync(task.PoolId);
var bundle = task.BundleId.HasValue
? await db.Bundles.FirstOrDefaultAsync(b => b.Id == task.BundleId.Value)
: null;
return Ok(new
{
Task = new
{
task.TaskId,
task.FileName,
task.FileSize,
task.ContentType,
task.ChunkSize,
task.ChunksCount,
task.ChunksUploaded,
Progress = task.ChunksCount > 0 ? (double)task.ChunksUploaded / task.ChunksCount * 100 : 0,
task.Status,
task.LastActivity,
task.CreatedAt,
task.UpdatedAt,
task.ExpiredAt,
task.Hash,
task.UploadedChunks
},
Pool = pool != null
? new
{
pool.Id,
pool.Name,
pool.Description
}
: null,
Bundle = bundle != null
? new
{
bundle.Id,
bundle.Name,
bundle.Description
}
: null,
EstimatedTimeRemaining = CalculateEstimatedTime(task),
UploadSpeed = CalculateUploadSpeed(task)
});
}
private static string? CalculateEstimatedTime(PersistentUploadTask task)
{
if (task.Status != TaskStatus.InProgress || task.ChunksUploaded == 0)
return null;
var elapsed = SystemClock.Instance.GetCurrentInstant() - task.CreatedAt;
var elapsedSeconds = elapsed.TotalSeconds;
var chunksPerSecond = task.ChunksUploaded / elapsedSeconds;
var remainingChunks = task.ChunksCount - task.ChunksUploaded;
if (chunksPerSecond <= 0)
return null;
var remainingSeconds = remainingChunks / chunksPerSecond;
return remainingSeconds switch
{
< 60 => $"{remainingSeconds:F0} seconds",
< 3600 => $"{remainingSeconds / 60:F0} minutes",
_ => $"{remainingSeconds / 3600:F1} hours"
};
}
private static string? CalculateUploadSpeed(PersistentUploadTask task)
{
if (task.ChunksUploaded == 0)
return null;
var elapsed = SystemClock.Instance.GetCurrentInstant() - task.CreatedAt;
var elapsedSeconds = elapsed.TotalSeconds;
var bytesUploaded = task.ChunksUploaded * task.ChunkSize;
var bytesPerSecond = bytesUploaded / elapsedSeconds;
return bytesPerSecond switch
{
< 1024 => $"{bytesPerSecond:F0} B/s",
< 1024 * 1024 => $"{bytesPerSecond / 1024:F0} KB/s",
_ => $"{bytesPerSecond / (1024 * 1024):F1} MB/s"
};
}
}
}
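As a worked example of the two helpers above: a task with a 5 MiB chunk size (5,242,880 bytes) that has uploaded 24 of 44 chunks 60 seconds after creation has transferred 24 × 5,242,880 = 125,829,120 bytes, or 2,097,152 B/s, which CalculateUploadSpeed renders as "2.0 MB/s"; at 0.4 chunks per second, the remaining 20 chunks give CalculateEstimatedTime 50 seconds, reported as "50 seconds".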

View File

@@ -1,670 +1,42 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Google.Protobuf.Collections;
using NodaTime;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text.Json;
namespace DysonNetwork.Drive.Storage.Model;
// File Upload Task Parameters
public class FileUploadParameters
{
public string FileName { get; set; } = string.Empty;
public long FileSize { get; set; }
public string ContentType { get; set; } = string.Empty;
public long ChunkSize { get; set; } = 5242880L;
public int ChunksCount { get; set; }
public int ChunksUploaded { get; set; }
public Guid PoolId { get; set; }
public Guid? BundleId { get; set; }
public string? EncryptPassword { get; set; }
public string Hash { get; set; } = string.Empty;
public List<int> UploadedChunks { get; set; } = [];
public string? Path { get; set; }
}
// File Move Task Parameters
public class FileMoveParameters
{
public List<string> FileIds { get; set; } = [];
public Guid TargetPoolId { get; set; }
public Guid? TargetBundleId { get; set; }
public int FilesProcessed { get; set; }
}
// File Compression Task Parameters
public class FileCompressParameters
{
public List<string> FileIds { get; set; } = [];
public string CompressionFormat { get; set; } = "zip";
public int CompressionLevel { get; set; } = 6;
public string? OutputFileName { get; set; }
public int FilesProcessed { get; set; }
public string? ResultFileId { get; set; }
}
// Bulk Operation Task Parameters
public class BulkOperationParameters
{
public string OperationType { get; set; } = string.Empty;
public List<string> TargetIds { get; set; } = [];
public Dictionary<string, object?> OperationParameters { get; set; } = new();
public int ItemsProcessed { get; set; }
public Dictionary<string, object?>? OperationResults { get; set; }
}
// Storage Migration Task Parameters
public class StorageMigrationParameters
{
public Guid SourcePoolId { get; set; }
public Guid TargetPoolId { get; set; }
public List<string> FileIds { get; set; } = new();
public bool PreserveOriginals { get; set; } = true;
public long TotalBytesToTransfer { get; set; }
public long BytesTransferred { get; set; }
public int FilesMigrated { get; set; }
}
// Helper class for parameter operations using GrpcTypeHelper
public static class ParameterHelper
{
public static T? Typed<T>(Dictionary<string, object?> parameters)
{
var rawParams = GrpcTypeHelper.ConvertObjectToByteString(parameters);
return GrpcTypeHelper.ConvertByteStringToObject<T>(rawParams);
}
public static Dictionary<string, object?> Untyped<T>(T parameters)
{
var rawParams = GrpcTypeHelper.ConvertObjectToByteString(parameters);
return GrpcTypeHelper.ConvertByteStringToObject<Dictionary<string, object?>>(rawParams) ?? [];
}
}
public class CreateUploadTaskRequest
{
public string Hash { get; set; } = null!;
public string FileName { get; set; } = null!;
public long FileSize { get; set; }
public string ContentType { get; set; } = null!;
public Guid? PoolId { get; set; } = null!;
public Guid? BundleId { get; set; }
public string? EncryptPassword { get; set; }
public Instant? ExpiredAt { get; set; }
public long? ChunkSize { get; set; }
public string? Path { get; set; }
}
public class CreateUploadTaskResponse
{
public bool FileExists { get; set; }
public SnCloudFile? File { get; set; }
public string? TaskId { get; set; }
public long? ChunkSize { get; set; }
public int? ChunksCount { get; set; }
}
internal class UploadTask
{
public string TaskId { get; set; } = null!;
public string FileName { get; set; } = null!;
public long FileSize { get; set; }
public string ContentType { get; set; } = null!;
public long ChunkSize { get; set; }
public int ChunksCount { get; set; }
public Guid PoolId { get; set; }
public Guid? BundleId { get; set; }
public string? EncryptPassword { get; set; }
public Instant? ExpiredAt { get; set; }
public string Hash { get; set; } = null!;
}
public class PersistentTask : ModelBase
{
public Guid Id { get; set; } = Guid.NewGuid();
[MaxLength(64)] public string TaskId { get; set; } = null!;
[MaxLength(256)] public string Name { get; set; } = null!;
[MaxLength(1024)] public string? Description { get; set; }
public TaskType Type { get; set; }
public TaskStatus Status { get; set; } = TaskStatus.InProgress;
public Guid AccountId { get; set; }
// Progress tracking (0-100)
public double Progress { get; set; }
// Task-specific parameters stored as JSON
[Column(TypeName = "jsonb")] public Dictionary<string, object?> Parameters { get; set; } = new();
// Task results/output stored as JSON
[Column(TypeName = "jsonb")] public Dictionary<string, object?> Results { get; set; } = new();
[MaxLength(1024)] public string? ErrorMessage { get; set; }
public Instant? StartedAt { get; set; }
public Instant? CompletedAt { get; set; }
public Instant? ExpiredAt { get; set; }
public Instant LastActivity { get; set; }
// Priority (higher = more important)
public int Priority { get; set; } = 0;
// Estimated duration in seconds
public long? EstimatedDurationSeconds { get; set; }
}
// Backward compatibility - UploadTask inherits from PersistentTask
public class PersistentUploadTask : PersistentTask
{
public PersistentUploadTask()
{
Type = TaskType.FileUpload;
Name = "File Upload";
}
// Convenience properties using typed parameters
[NotMapped]
public FileUploadParameters TypedParameters
{
get => ParameterHelper.Typed<FileUploadParameters>(Parameters)!;
set => Parameters = ParameterHelper.Untyped(value);
}
[MaxLength(256)]
public string FileName
{
get => TypedParameters.FileName;
set
{
var parameters = TypedParameters;
parameters.FileName = value;
TypedParameters = parameters;
}
}
public long FileSize
{
get => TypedParameters.FileSize;
set
{
var parameters = TypedParameters;
parameters.FileSize = value;
TypedParameters = parameters;
}
}
[MaxLength(128)]
public string ContentType
{
get => TypedParameters.ContentType;
set
{
var parameters = TypedParameters;
parameters.ContentType = value;
TypedParameters = parameters;
}
}
public long ChunkSize
{
get => TypedParameters.ChunkSize;
set
{
var parameters = TypedParameters;
parameters.ChunkSize = value;
TypedParameters = parameters;
}
}
public int ChunksCount
{
get => TypedParameters.ChunksCount;
set
{
var parameters = TypedParameters;
parameters.ChunksCount = value;
TypedParameters = parameters;
}
}
public int ChunksUploaded
{
get => TypedParameters.ChunksUploaded;
set
{
var parameters = TypedParameters;
parameters.ChunksUploaded = value;
TypedParameters = parameters;
Progress = ChunksCount > 0 ? (double)value / ChunksCount * 100 : 0;
}
}
public Guid PoolId
{
get => TypedParameters.PoolId;
set
{
var parameters = TypedParameters;
parameters.PoolId = value;
TypedParameters = parameters;
}
}
public Guid? BundleId
{
get => TypedParameters.BundleId;
set
{
var parameters = TypedParameters;
parameters.BundleId = value;
TypedParameters = parameters;
}
}
[MaxLength(256)]
public string? EncryptPassword
{
get => TypedParameters.EncryptPassword;
set
{
var parameters = TypedParameters;
parameters.EncryptPassword = value;
TypedParameters = parameters;
}
}
public string Hash
{
get => TypedParameters.Hash;
set
{
var parameters = TypedParameters;
parameters.Hash = value;
TypedParameters = parameters;
}
}
// JSON array of uploaded chunk indices for resumability
public List<int> UploadedChunks
{
get => TypedParameters.UploadedChunks;
set
{
var parameters = TypedParameters;
parameters.UploadedChunks = value;
TypedParameters = parameters;
}
}
public string? Path
{
get => TypedParameters.Path;
set
{
var parameters = TypedParameters;
parameters.Path = value;
TypedParameters = parameters;
}
}
}
public enum TaskType
{
FileUpload,
FileMove,
FileCompress,
FileDecompress,
FileEncrypt,
FileDecrypt,
BulkOperation,
StorageMigration,
FileConversion,
Custom
}
public enum TaskStatus
{
Pending,
InProgress,
Paused,
Completed,
Failed,
Cancelled,
Expired
}
// File Move Task
public class FileMoveTask : PersistentTask
{
public FileMoveTask()
{
Type = TaskType.FileMove;
Name = "Move Files";
}
// Convenience properties using typed parameters
public FileMoveParameters TypedParameters
{
get => ParameterHelper.Typed<FileMoveParameters>(Parameters)!;
set => Parameters = ParameterHelper.Untyped(value);
}
public List<string> FileIds
{
get => TypedParameters.FileIds;
set
{
var parameters = TypedParameters;
parameters.FileIds = value;
TypedParameters = parameters;
}
}
public Guid TargetPoolId
{
get => TypedParameters.TargetPoolId;
set
{
var parameters = TypedParameters;
parameters.TargetPoolId = value;
TypedParameters = parameters;
}
}
public Guid? TargetBundleId
{
get => TypedParameters.TargetBundleId;
set
{
var parameters = TypedParameters;
parameters.TargetBundleId = value;
TypedParameters = parameters;
}
}
public int FilesProcessed
{
get => TypedParameters.FilesProcessed;
set
{
var parameters = TypedParameters;
parameters.FilesProcessed = value;
TypedParameters = parameters;
Progress = FileIds.Count > 0 ? (double)value / FileIds.Count * 100 : 0;
}
}
}
// File Compression Task
public class FileCompressTask : PersistentTask
{
public FileCompressTask()
{
Type = TaskType.FileCompress;
Name = "Compress Files";
}
// Convenience properties using typed parameters
public FileCompressParameters TypedParameters
{
get => ParameterHelper.Typed<FileCompressParameters>(Parameters)!;
set => Parameters = ParameterHelper.Untyped(value);
}
public List<string> FileIds
{
get => TypedParameters.FileIds;
set
{
var parameters = TypedParameters;
parameters.FileIds = value;
TypedParameters = parameters;
}
}
[MaxLength(32)]
public string CompressionFormat
{
get => TypedParameters.CompressionFormat;
set
{
var parameters = TypedParameters;
parameters.CompressionFormat = value;
TypedParameters = parameters;
}
}
public int CompressionLevel
{
get => TypedParameters.CompressionLevel;
set
{
var parameters = TypedParameters;
parameters.CompressionLevel = value;
TypedParameters = parameters;
}
}
public string? OutputFileName
{
get => TypedParameters.OutputFileName;
set
{
var parameters = TypedParameters;
parameters.OutputFileName = value;
TypedParameters = parameters;
}
}
public int FilesProcessed
{
get => TypedParameters.FilesProcessed;
set
{
var parameters = TypedParameters;
parameters.FilesProcessed = value;
TypedParameters = parameters;
Progress = FileIds.Count > 0 ? (double)value / FileIds.Count * 100 : 0;
}
}
public string? ResultFileId
{
get => TypedParameters.ResultFileId;
set
{
var parameters = TypedParameters;
parameters.ResultFileId = value;
TypedParameters = parameters;
}
}
}
// Bulk Operation Task
public class BulkOperationTask : PersistentTask
{
public BulkOperationTask()
{
Type = TaskType.BulkOperation;
Name = "Bulk Operation";
}
// Convenience properties using typed parameters
public BulkOperationParameters TypedParameters
{
get => ParameterHelper.Typed<BulkOperationParameters>(Parameters)!;
set => Parameters = ParameterHelper.Untyped(value);
}
[MaxLength(128)]
public string OperationType
{
get => TypedParameters.OperationType;
set
{
var parameters = TypedParameters;
parameters.OperationType = value;
TypedParameters = parameters;
}
}
public List<string> TargetIds
{
get => TypedParameters.TargetIds;
set
{
var parameters = TypedParameters;
parameters.TargetIds = value;
TypedParameters = parameters;
}
}
[Column(TypeName = "jsonb")]
public Dictionary<string, object?> OperationParameters
{
get => TypedParameters.OperationParameters;
set
{
var parameters = TypedParameters;
parameters.OperationParameters = value;
TypedParameters = parameters;
}
}
public int ItemsProcessed
{
get => TypedParameters.ItemsProcessed;
set
{
var parameters = TypedParameters;
parameters.ItemsProcessed = value;
TypedParameters = parameters;
Progress = TargetIds.Count > 0 ? (double)value / TargetIds.Count * 100 : 0;
}
}
[Column(TypeName = "jsonb")]
public Dictionary<string, object?>? OperationResults
{
get => TypedParameters.OperationResults;
set
{
var parameters = TypedParameters;
parameters.OperationResults = value;
TypedParameters = parameters;
}
}
}
// Storage Migration Task
public class StorageMigrationTask : PersistentTask
{
public StorageMigrationTask()
{
Type = TaskType.StorageMigration;
Name = "Storage Migration";
}
// Convenience properties using typed parameters
public StorageMigrationParameters TypedParameters
{
get => ParameterHelper.Typed<StorageMigrationParameters>(Parameters)!;
set => Parameters = ParameterHelper.Untyped(value);
}
public Guid SourcePoolId
{
get => TypedParameters.SourcePoolId;
set
{
var parameters = TypedParameters;
parameters.SourcePoolId = value;
TypedParameters = parameters;
}
}
public Guid TargetPoolId
{
get => TypedParameters.TargetPoolId;
set
{
var parameters = TypedParameters;
parameters.TargetPoolId = value;
TypedParameters = parameters;
}
}
public List<string> FileIds
{
get => TypedParameters.FileIds;
set
{
var parameters = TypedParameters;
parameters.FileIds = value;
TypedParameters = parameters;
}
}
public bool PreserveOriginals
{
get => TypedParameters.PreserveOriginals;
set
{
var parameters = TypedParameters;
parameters.PreserveOriginals = value;
TypedParameters = parameters;
}
}
public long TotalBytesToTransfer
{
get => TypedParameters.TotalBytesToTransfer;
set
{
var parameters = TypedParameters;
parameters.TotalBytesToTransfer = value;
TypedParameters = parameters;
}
}
public long BytesTransferred
{
get => TypedParameters.BytesTransferred;
set
{
var parameters = TypedParameters;
parameters.BytesTransferred = value;
TypedParameters = parameters;
Progress = TotalBytesToTransfer > 0 ? (double)value / TotalBytesToTransfer * 100 : 0;
}
}
public int FilesMigrated
{
get => TypedParameters.FilesMigrated;
set
{
var parameters = TypedParameters;
parameters.FilesMigrated = value;
TypedParameters = parameters;
}
}
}
// Legacy enum for backward compatibility
public enum UploadTaskStatus
{
InProgress = TaskStatus.InProgress,
Completed = TaskStatus.Completed,
Failed = TaskStatus.Failed,
Expired = TaskStatus.Expired
}
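A short, hypothetical usage sketch of the typed-parameter pattern defined above, assuming ParameterHelper can round-trip an initially empty Parameters dictionary through GrpcTypeHelper; every convenience setter rewrites the jsonb-backed Parameters dictionary and, where relevant, recomputes Progress.
var task = new PersistentUploadTask
{
    TaskId = "demo-task",                 // illustrative values only
    AccountId = Guid.NewGuid(),
    FileName = "backup.tar.gz",
    FileSize = 512L * 1024 * 1024,        // 512 MiB
    ChunkSize = 5 * 1024 * 1024,          // 5 MiB
    ChunksCount = 103
};
// Setting ChunksUploaded goes through TypedParameters and updates Progress (0-100).
task.ChunksUploaded = 51;
Console.WriteLine(task.Progress); // ≈ 49.5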

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,301 @@
using System.Net;
using System.Text;
using System.Text.Json;
using DysonNetwork.Drive.Billing;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using NodaTime;
using tusdotnet.Interfaces;
using tusdotnet.Models;
using tusdotnet.Models.Configuration;
namespace DysonNetwork.Drive.Storage;
public abstract class TusService
{
public static DefaultTusConfiguration BuildConfiguration(
ITusStore store,
IConfiguration configuration
) => new()
{
Store = store,
Events = new Events
{
OnAuthorizeAsync = async eventContext =>
{
if (eventContext.Intent == IntentType.DeleteFile)
{
eventContext.FailRequest(
HttpStatusCode.BadRequest,
"Deleting files from this endpoint was disabled, please refer to the Dyson Network File API."
);
return;
}
var httpContext = eventContext.HttpContext;
if (httpContext.Items["CurrentUser"] is not Account currentUser)
{
eventContext.FailRequest(HttpStatusCode.Unauthorized);
return;
}
if (eventContext.Intent != IntentType.CreateFile) return;
using var scope = httpContext.RequestServices.CreateScope();
if (!currentUser.IsSuperuser)
{
var pm = scope.ServiceProvider.GetRequiredService<PermissionService.PermissionServiceClient>();
var allowed = await pm.HasPermissionAsync(new HasPermissionRequest
{ Actor = $"user:{currentUser.Id}", Area = "global", Key = "files.create" });
if (!allowed.HasPermission)
eventContext.FailRequest(HttpStatusCode.Forbidden);
}
var filePool = httpContext.Request.Headers["X-FilePool"].FirstOrDefault();
if (string.IsNullOrEmpty(filePool)) filePool = configuration["Storage:PreferredRemote"];
if (!Guid.TryParse(filePool, out _))
{
eventContext.FailRequest(HttpStatusCode.BadRequest, "Invalid file pool id");
return;
}
var fs = scope.ServiceProvider.GetRequiredService<FileService>();
var pool = await fs.GetPoolAsync(Guid.Parse(filePool!));
if (pool is null)
{
eventContext.FailRequest(HttpStatusCode.BadRequest, "Pool not found");
return;
}
if (pool.PolicyConfig.RequirePrivilege > 0)
{
if (currentUser.PerkSubscription is null)
{
eventContext.FailRequest(
HttpStatusCode.Forbidden,
$"You need to have join the Stellar Program to use this pool"
);
return;
}
var privilege =
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
if (privilege < pool.PolicyConfig.RequirePrivilege)
{
eventContext.FailRequest(
HttpStatusCode.Forbidden,
$"You need Stellar Program tier {pool.PolicyConfig.RequirePrivilege} to use this pool, you are tier {privilege}"
);
}
}
var bundleId = eventContext.HttpContext.Request.Headers["X-FileBundle"].FirstOrDefault();
if (!string.IsNullOrEmpty(bundleId) && !Guid.TryParse(bundleId, out _))
{
eventContext.FailRequest(HttpStatusCode.BadRequest, "Invalid file bundle id");
}
},
OnFileCompleteAsync = async eventContext =>
{
using var scope = eventContext.HttpContext.RequestServices.CreateScope();
var services = scope.ServiceProvider;
var httpContext = eventContext.HttpContext;
if (httpContext.Items["CurrentUser"] is not Account user) return;
var file = await eventContext.GetFileAsync();
var metadata = await file.GetMetadataAsync(eventContext.CancellationToken);
var fileName = metadata.TryGetValue("filename", out var fn)
? fn.GetString(Encoding.UTF8)
: "uploaded_file";
var contentType = metadata.TryGetValue("content-type", out var ct) ? ct.GetString(Encoding.UTF8) : null;
var filePath = Path.Combine(configuration.GetValue<string>("Tus:StorePath")!, file.Id);
var filePool = httpContext.Request.Headers["X-FilePool"].FirstOrDefault();
var bundleId = eventContext.HttpContext.Request.Headers["X-FileBundle"].FirstOrDefault();
var encryptPassword = httpContext.Request.Headers["X-FilePass"].FirstOrDefault();
if (string.IsNullOrEmpty(filePool))
filePool = configuration["Storage:PreferredRemote"];
Instant? expiredAt = null;
var expiredString = httpContext.Request.Headers["X-FileExpire"].FirstOrDefault();
if (!string.IsNullOrEmpty(expiredString) && int.TryParse(expiredString, out var expired))
expiredAt = Instant.FromUnixTimeSeconds(expired);
try
{
var fileService = services.GetRequiredService<FileService>();
var info = await fileService.ProcessNewFileAsync(
user,
file.Id,
filePool!,
bundleId,
filePath,
fileName,
contentType,
encryptPassword,
expiredAt
);
using var finalScope = eventContext.HttpContext.RequestServices.CreateScope();
var jsonOptions = finalScope.ServiceProvider.GetRequiredService<IOptions<JsonOptions>>().Value
.JsonSerializerOptions;
var infoJson = JsonSerializer.Serialize(info, jsonOptions);
eventContext.HttpContext.Response.Headers.Append("X-FileInfo", infoJson);
}
catch (Exception ex)
{
var logger = services.GetRequiredService<ILogger<TusService>>();
eventContext.HttpContext.Response.StatusCode = StatusCodes.Status400BadRequest;
await eventContext.HttpContext.Response.WriteAsync(ex.Message);
logger.LogError(ex, "Error handling file upload...");
}
},
OnBeforeCreateAsync = async eventContext =>
{
var httpContext = eventContext.HttpContext;
if (httpContext.Items["CurrentUser"] is not Account currentUser)
{
eventContext.FailRequest(HttpStatusCode.Unauthorized);
return;
}
var accountId = Guid.Parse(currentUser.Id);
var poolId = eventContext.HttpContext.Request.Headers["X-FilePool"].FirstOrDefault();
if (string.IsNullOrEmpty(poolId)) poolId = configuration["Storage:PreferredRemote"];
if (!Guid.TryParse(poolId, out _))
{
eventContext.FailRequest(HttpStatusCode.BadRequest, "Invalid file pool id");
return;
}
var bundleId = eventContext.HttpContext.Request.Headers["X-FileBundle"].FirstOrDefault();
if (!string.IsNullOrEmpty(bundleId) && !Guid.TryParse(bundleId, out _))
{
eventContext.FailRequest(HttpStatusCode.BadRequest, "Invalid file bundle id");
return;
}
var metadata = eventContext.Metadata;
var contentType = metadata.TryGetValue("content-type", out var ct) ? ct.GetString(Encoding.UTF8) : null;
var scope = eventContext.HttpContext.RequestServices.CreateScope();
var rejected = false;
var fs = scope.ServiceProvider.GetRequiredService<FileService>();
var pool = await fs.GetPoolAsync(Guid.Parse(poolId!));
if (pool is null)
{
eventContext.FailRequest(HttpStatusCode.BadRequest, "Pool not found");
return;
}
var logger = scope.ServiceProvider.GetRequiredService<ILogger<TusService>>();
// Do the policy check
var policy = pool!.PolicyConfig;
if (!rejected && !pool.PolicyConfig.AllowEncryption)
{
var encryptPassword = eventContext.HttpContext.Request.Headers["X-FilePass"].FirstOrDefault();
if (!string.IsNullOrEmpty(encryptPassword))
{
eventContext.FailRequest(
HttpStatusCode.Forbidden,
"File encryption is not allowed in this pool"
);
rejected = true;
}
}
if (!rejected && policy.AcceptTypes is not null)
{
if (string.IsNullOrEmpty(contentType))
{
eventContext.FailRequest(
HttpStatusCode.BadRequest,
"Content type is required by the pool's policy"
);
rejected = true;
}
else
{
var foundMatch = false;
foreach (var acceptType in policy.AcceptTypes)
{
if (acceptType.EndsWith("/*", StringComparison.OrdinalIgnoreCase))
{
var type = acceptType[..^2];
if (!contentType.StartsWith($"{type}/", StringComparison.OrdinalIgnoreCase)) continue;
foundMatch = true;
break;
}
else if (acceptType.Equals(contentType, StringComparison.OrdinalIgnoreCase))
{
foundMatch = true;
break;
}
}
if (!foundMatch)
{
eventContext.FailRequest(
HttpStatusCode.Forbidden,
$"Content type {contentType} is not allowed by the pool's policy"
);
rejected = true;
}
}
}
if (!rejected && policy.MaxFileSize is not null)
{
if (eventContext.UploadLength > policy.MaxFileSize)
{
eventContext.FailRequest(
HttpStatusCode.Forbidden,
$"File size {eventContext.UploadLength} is larger than the pool's maximum file size {policy.MaxFileSize}"
);
rejected = true;
}
}
if (!rejected)
{
var quotaService = scope.ServiceProvider.GetRequiredService<QuotaService>();
var (ok, billableUnit, quota) = await quotaService.IsFileAcceptable(
accountId,
pool.BillingConfig.CostMultiplier ?? 1.0,
eventContext.UploadLength
);
if (!ok)
{
eventContext.FailRequest(
HttpStatusCode.Forbidden,
$"File size {billableUnit} MiB is exceeded the user's quota {quota} MiB"
);
rejected = true;
}
}
if (rejected)
logger.LogInformation("File rejected #{FileId}", eventContext.FileId);
},
OnCreateCompleteAsync = eventContext =>
{
var directUpload = eventContext.HttpContext.Request.Headers["X-DirectUpload"].FirstOrDefault();
if (!string.IsNullOrEmpty(directUpload)) return Task.CompletedTask;
var gatewayUrl = configuration["GatewayUrl"];
if (gatewayUrl is not null)
eventContext.SetUploadUrl(new Uri(gatewayUrl + "/drive/tus/" + eventContext.FileId));
return Task.CompletedTask;
},
}
};
}
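For reference, a minimal tus 1.0.0 client flow against the configuration above; it assumes the HttpClient's BaseAddress points at the gateway, the store is exposed at /drive/tus (see OnCreateCompleteAsync), and the request already carries a valid bearer token so CurrentUser is populated. The X-FilePool, X-FileBundle, X-FilePass, X-FileExpire, and X-DirectUpload headers are the custom ones read by the events above; everything else is plain tus protocol.
using System.Net.Http.Headers;
using System.Text;

async Task<string?> TusUploadAsync(HttpClient http, byte[] payload, string fileName, Guid poolId)
{
    // Creation request: standard tus headers plus the pool selector read in OnAuthorizeAsync.
    var create = new HttpRequestMessage(HttpMethod.Post, "/drive/tus");
    create.Headers.Add("Tus-Resumable", "1.0.0");
    create.Headers.Add("Upload-Length", payload.Length.ToString());
    create.Headers.Add("Upload-Metadata",
        $"filename {Convert.ToBase64String(Encoding.UTF8.GetBytes(fileName))}," +
        $"content-type {Convert.ToBase64String(Encoding.UTF8.GetBytes("application/octet-stream"))}");
    create.Headers.Add("X-FilePool", poolId.ToString()); // optional; falls back to Storage:PreferredRemote
    var created = await http.SendAsync(create);
    created.EnsureSuccessStatusCode();

    // Single PATCH for a small payload; large uploads would split this into offset-tracked chunks.
    var patch = new HttpRequestMessage(HttpMethod.Patch, created.Headers.Location!)
    {
        Content = new ByteArrayContent(payload)
    };
    patch.Headers.Add("Tus-Resumable", "1.0.0");
    patch.Headers.Add("Upload-Offset", "0");
    patch.Content.Headers.ContentType = new MediaTypeHeaderValue("application/offset+octet-stream");
    var done = await http.SendAsync(patch);
    done.EnsureSuccessStatusCode();

    // OnFileCompleteAsync attaches the processed SnCloudFile as JSON in the X-FileInfo response header.
    return done.Headers.TryGetValues("X-FileInfo", out var info) ? info.FirstOrDefault() : null;
}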

View File

@@ -0,0 +1,20 @@
using DysonNetwork.Shared.Data;
using Microsoft.AspNetCore.Mvc;
namespace DysonNetwork.Drive;
[ApiController]
[Route("/api/version")]
public class VersionController : ControllerBase
{
[HttpGet]
public IActionResult Get()
{
return Ok(new AppVersion
{
Version = ThisAssembly.AssemblyVersion,
Commit = ThisAssembly.GitCommitId,
UpdateDate = ThisAssembly.GitCommitDate
});
}
}

View File

@@ -1,121 +1,125 @@
{
"Debug": true,
"BaseUrl": "http://localhost:5090",
"GatewayUrl": "http://localhost:5094",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"ConnectionStrings": {
"App": "Host=localhost;Port=5432;Database=dyson_drive;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60"
},
"Authentication": {
"Schemes": {
"Bearer": {
"ValidAudiences": [
"http://localhost:5071",
"https://localhost:7099"
],
"ValidIssuer": "solar-network"
}
}
},
"AuthToken": {
"PublicKeyPath": "Keys/PublicKey.pem",
"PrivateKeyPath": "Keys/PrivateKey.pem"
},
"Storage": {
"Uploads": "Uploads",
"PreferredRemote": "c53136a6-9152-4ecb-9f88-43c41438c23e",
"Remote": [
{
"Id": "minio",
"Label": "Minio",
"Region": "auto",
"Bucket": "solar-network-development",
"Endpoint": "localhost:9000",
"SecretId": "littlesheep",
"SecretKey": "password",
"EnabledSigned": true,
"EnableSsl": false
},
{
"Id": "cloudflare",
"Label": "Cloudflare R2",
"Region": "auto",
"Bucket": "solar-network",
"Endpoint": "0a70a6d1b7128888c823359d0008f4e1.r2.cloudflarestorage.com",
"SecretId": "8ff5d06c7b1639829d60bc6838a542e6",
"SecretKey": "fd58158c5201be16d1872c9209d9cf199421dae3c2f9972f94b2305976580d67",
"EnableSigned": true,
"EnableSsl": true
}
]
},
"Captcha": {
"Provider": "cloudflare",
"ApiKey": "0x4AAAAAABCDUdOujj4feOb_",
"ApiSecret": "0x4AAAAAABCDUWABiJQweqlB7tYq-IqIm8U"
},
"Notifications": {
"Topic": "dev.solsynth.solian",
"Endpoint": "http://localhost:8088"
},
"Email": {
"Server": "smtp4dev.orb.local",
"Port": 25,
"UseSsl": false,
"Username": "no-reply@mail.solsynth.dev",
"Password": "password",
"FromAddress": "no-reply@mail.solsynth.dev",
"FromName": "Alphabot",
"SubjectPrefix": "Solar Network"
},
"RealtimeChat": {
"Endpoint": "https://solar-network-im44o8gq.livekit.cloud",
"ApiKey": "APIs6TiL8wj3A4j",
"ApiSecret": "SffxRneIwTnlHPtEf3zicmmv3LUEl7xXael4PvWZrEhE"
},
"GeoIp": {
"DatabasePath": "./Keys/GeoLite2-City.mmdb"
},
"Oidc": {
"Google": {
"ClientId": "961776991058-963m1qin2vtp8fv693b5fdrab5hmpl89.apps.googleusercontent.com",
"ClientSecret": ""
},
"Apple": {
"ClientId": "dev.solsynth.solian",
"TeamId": "W7HPZ53V6B",
"KeyId": "B668YP4KBG",
"PrivateKeyPath": "./Keys/Solarpass.p8"
},
"Microsoft": {
"ClientId": "YOUR_MICROSOFT_CLIENT_ID",
"ClientSecret": "YOUR_MICROSOFT_CLIENT_SECRET",
"DiscoveryEndpoint": "YOUR_MICROSOFT_DISCOVERY_ENDPOINT"
}
},
"Payment": {
"Auth": {
"Afdian": "<token here>"
},
"Subscriptions": {
"Afdian": {
"7d17aae23c9611f0b5705254001e7c00": "solian.stellar.primary",
"7dfae4743c9611f0b3a55254001e7c00": "solian.stellar.nova",
"141713ee3d6211f085b352540025c377": "solian.stellar.supernova"
}
}
},
"Cache": {
"Serializer": "MessagePack"
},
"KnownProxies": [
"127.0.0.1",
"::1"
"Debug": true,
"BaseUrl": "http://localhost:5090",
"GatewayUrl": "http://localhost:5094",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"ConnectionStrings": {
"App": "Host=localhost;Port=5432;Database=dyson_drive;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60"
},
"Authentication": {
"Schemes": {
"Bearer": {
"ValidAudiences": [
"http://localhost:5071",
"https://localhost:7099"
],
"ValidIssuer": "solar-network"
}
}
},
"AuthToken": {
"PublicKeyPath": "Keys/PublicKey.pem",
"PrivateKeyPath": "Keys/PrivateKey.pem"
},
"Tus": {
"StorePath": "Uploads"
},
"Storage": {
"Uploads": "Uploads",
"PreferredRemote": "2adceae3-981a-4564-9b8d-5d71a211c873",
"Remote": [
{
"Id": "minio",
"Label": "Minio",
"Region": "auto",
"Bucket": "solar-network-development",
"Endpoint": "localhost:9000",
"SecretId": "littlesheep",
"SecretKey": "password",
"EnabledSigned": true,
"EnableSsl": false
},
{
"Id": "cloudflare",
"Label": "Cloudflare R2",
"Region": "auto",
"Bucket": "solar-network",
"Endpoint": "0a70a6d1b7128888c823359d0008f4e1.r2.cloudflarestorage.com",
"SecretId": "8ff5d06c7b1639829d60bc6838a542e6",
"SecretKey": "fd58158c5201be16d1872c9209d9cf199421dae3c2f9972f94b2305976580d67",
"EnableSigned": true,
"EnableSsl": true
}
]
},
"Captcha": {
"Provider": "cloudflare",
"ApiKey": "0x4AAAAAABCDUdOujj4feOb_",
"ApiSecret": "0x4AAAAAABCDUWABiJQweqlB7tYq-IqIm8U"
},
"Notifications": {
"Topic": "dev.solsynth.solian",
"Endpoint": "http://localhost:8088"
},
"Email": {
"Server": "smtp4dev.orb.local",
"Port": 25,
"UseSsl": false,
"Username": "no-reply@mail.solsynth.dev",
"Password": "password",
"FromAddress": "no-reply@mail.solsynth.dev",
"FromName": "Alphabot",
"SubjectPrefix": "Solar Network"
},
"RealtimeChat": {
"Endpoint": "https://solar-network-im44o8gq.livekit.cloud",
"ApiKey": "APIs6TiL8wj3A4j",
"ApiSecret": "SffxRneIwTnlHPtEf3zicmmv3LUEl7xXael4PvWZrEhE"
},
"GeoIp": {
"DatabasePath": "./Keys/GeoLite2-City.mmdb"
},
"Oidc": {
"Google": {
"ClientId": "961776991058-963m1qin2vtp8fv693b5fdrab5hmpl89.apps.googleusercontent.com",
"ClientSecret": ""
},
"Apple": {
"ClientId": "dev.solsynth.solian",
"TeamId": "W7HPZ53V6B",
"KeyId": "B668YP4KBG",
"PrivateKeyPath": "./Keys/Solarpass.p8"
},
"Microsoft": {
"ClientId": "YOUR_MICROSOFT_CLIENT_ID",
"ClientSecret": "YOUR_MICROSOFT_CLIENT_SECRET",
"DiscoveryEndpoint": "YOUR_MICROSOFT_DISCOVERY_ENDPOINT"
}
},
"Payment": {
"Auth": {
"Afdian": "<token here>"
},
"Subscriptions": {
"Afdian": {
"7d17aae23c9611f0b5705254001e7c00": "solian.stellar.primary",
"7dfae4743c9611f0b3a55254001e7c00": "solian.stellar.nova",
"141713ee3d6211f085b352540025c377": "solian.stellar.supernova"
}
}
},
"KnownProxies": [
"127.0.0.1",
"::1"
],
"Service": {
"Name": "DysonNetwork.Drive",
"Url": "https://localhost:7092"
}
}

View File

@@ -1,10 +1,10 @@
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Gateway/DysonNetwork.Gateway.csproj", "DysonNetwork.Gateway/"]

View File

@@ -1,17 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.ServiceDiscovery.Yarp" Version="10.0.0" />
<PackageReference Include="Nerdbank.GitVersioning" Version="3.9.50">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.ServiceDiscovery.Yarp" Version="9.4.2" />
<PackageReference Include="Yarp.ReverseProxy" Version="2.3.0" />
</ItemGroup>

View File

@@ -56,7 +56,7 @@ builder.Services.AddRateLimiter(options =>
};
});
var serviceNames = new[] { "ring", "pass", "drive", "sphere", "develop", "insight", "zone" };
var serviceNames = new[] { "ring", "pass", "drive", "sphere", "develop" };
var specialRoutes = new[]
{
@@ -90,6 +90,7 @@ var apiRoutes = serviceNames.Select(serviceName =>
{
var apiPath = serviceName switch
{
"pass" => "/id",
_ => $"/{serviceName}"
};
return new RouteConfig
@@ -122,9 +123,9 @@ var routes = specialRoutes.Concat(apiRoutes).Concat(swaggerRoutes).ToArray();
var clusters = serviceNames.Select(serviceName => new ClusterConfig
{
ClusterId = serviceName,
HealthCheck = new HealthCheckConfig
HealthCheck = new()
{
Active = new ActiveHealthCheckConfig
Active = new()
{
Enabled = true,
Interval = TimeSpan.FromSeconds(10),
@@ -161,6 +162,8 @@ app.UseForwardedHeaders(forwardedHeadersOptions);
app.UseCors();
app.UseRateLimiter();
app.MapReverseProxy().RequireRateLimiting("fixed");
app.MapControllers();

View File

@@ -1,16 +1,13 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Cache": {
"Serializer": "MessagePack"
},
"AllowedHosts": "*",
"SiteUrl": "http://localhost:3000",
"Client": {
"SomeSetting": "SomeValue"
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"SiteUrl": "http://localhost:3000",
"Client": {
"SomeSetting": "SomeValue"
}
}

View File

@@ -1,57 +0,0 @@
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using NodaTime;
namespace DysonNetwork.Insight;
public class AppDatabase(
DbContextOptions<AppDatabase> options,
IConfiguration configuration
) : DbContext(options)
{
public DbSet<SnThinkingSequence> ThinkingSequences { get; set; }
public DbSet<SnThinkingThought> ThinkingThoughts { get; set; }
public DbSet<SnUnpaidAccount> UnpaidAccounts { get; set; }
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
optionsBuilder.UseNpgsql(
configuration.GetConnectionString("App"),
opt => opt
.ConfigureDataSource(optSource => optSource.EnableDynamicJson())
.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery)
.UseNodaTime()
).UseSnakeCaseNamingConvention();
base.OnConfiguring(optionsBuilder);
}
public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
this.ApplyAuditableAndSoftDelete();
return await base.SaveChangesAsync(cancellationToken);
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
modelBuilder.ApplySoftDeleteFilters();
}
}
public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
{
public AppDatabase CreateDbContext(string[] args)
{
var configuration = new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("appsettings.json")
.Build();
var optionsBuilder = new DbContextOptionsBuilder<AppDatabase>();
return new AppDatabase(optionsBuilder.Options, configuration);
}
}

View File

@@ -1,42 +0,0 @@
using DysonNetwork.Insight.Thought;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using DysonNetwork.Shared.Proto;
namespace DysonNetwork.Insight.Controllers;
[ApiController]
[Route("api/billing")]
public class BillingController(AppDatabase db, ThoughtService thoughtService, ILogger<BillingController> logger)
: ControllerBase
{
[HttpGet("status")]
public async Task<IActionResult> GetBillingStatus()
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var isMarked = await db.UnpaidAccounts.AnyAsync(u => u.AccountId == accountId);
return Ok(isMarked ? new { status = "unpaid" } : new { status = "ok" });
}
[HttpPost("retry")]
public async Task<IActionResult> RetryBilling()
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var (success, cost) = await thoughtService.RetryBillingForAccountAsync(accountId, logger);
if (success)
{
return Ok(cost > 0
? new { message = $"Billing retry successful. Billed {cost} points." }
: new { message = "No outstanding payment found." });
}
return BadRequest(new { message = "Billing retry failed. Please check your balance and try again." });
}
}

View File

@@ -1,27 +0,0 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
USER app
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Insight/DysonNetwork.Insight.csproj", "DysonNetwork.Insight/"]
COPY ["DysonNetwork.Shared/DysonNetwork.Shared.csproj", "DysonNetwork.Shared/"]
COPY ["DysonNetwork.Develop/DysonNetwork.Develop.csproj", "DysonNetwork.Develop/"]
RUN dotnet restore "DysonNetwork.Insight/DysonNetwork.Insight.csproj"
COPY . .
WORKDIR "/src/DysonNetwork.Insight"
RUN dotnet build "DysonNetwork.Insight.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "DysonNetwork.Insight.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DysonNetwork.Insight.dll"]

View File

@@ -1,31 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.SemanticKernel" Version="1.67.1" />
<PackageReference Include="Microsoft.SemanticKernel.Connectors.Ollama" Version="1.66.0-alpha" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Web" Version="1.66.0-alpha" />
<PackageReference Include="Quartz" Version="3.15.1" />
<PackageReference Include="Quartz.AspNetCore" Version="3.15.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\DysonNetwork.Shared\DysonNetwork.Shared.csproj" />
</ItemGroup>
<ItemGroup>
<Folder Include="Controllers\" />
</ItemGroup>
</Project>

View File

@@ -1,124 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251025115921_AddThinkingThought")]
partial class AddThinkingThought
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.10")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Content")
.HasColumnType("text")
.HasColumnName("content");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,73 +0,0 @@
using System;
using System.Collections.Generic;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class AddThinkingThought : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "thinking_sequences",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
topic = table.Column<string>(type: "character varying(4096)", maxLength: 4096, nullable: true),
account_id = table.Column<Guid>(type: "uuid", nullable: false),
created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
updated_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
deleted_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("pk_thinking_sequences", x => x.id);
});
migrationBuilder.CreateTable(
name: "thinking_thoughts",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
content = table.Column<string>(type: "text", nullable: true),
files = table.Column<List<SnCloudFileReferenceObject>>(type: "jsonb", nullable: false),
role = table.Column<int>(type: "integer", nullable: false),
sequence_id = table.Column<Guid>(type: "uuid", nullable: false),
created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
updated_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
deleted_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("pk_thinking_thoughts", x => x.id);
table.ForeignKey(
name: "fk_thinking_thoughts_thinking_sequences_sequence_id",
column: x => x.sequence_id,
principalTable: "thinking_sequences",
principalColumn: "id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "ix_thinking_thoughts_sequence_id",
table: "thinking_thoughts",
column: "sequence_id");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "thinking_thoughts");
migrationBuilder.DropTable(
name: "thinking_sequences");
}
}
}

View File

@@ -1,124 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251026045505_AddThinkingChunk")]
partial class AddThinkingChunk
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.10")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Content")
.HasColumnType("text")
.HasColumnName("content");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,24 +0,0 @@
using System.Collections.Generic;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class AddThinkingChunk : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
// The chunk type has been removed, so this migration intentionally does nothing
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
// The chunk type has been removed, so this migration intentionally does nothing
}
}
}

View File

@@ -1,141 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251026134218_AddBilling")]
partial class AddBilling
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.10")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<long>("PaidToken")
.HasColumnType("bigint")
.HasColumnName("paid_token");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<long>("TotalToken")
.HasColumnType("bigint")
.HasColumnName("total_token");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Content")
.HasColumnType("text")
.HasColumnName("content");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<string>("ModelName")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("model_name");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<long>("TokenCount")
.HasColumnType("bigint")
.HasColumnName("token_count");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,62 +0,0 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class AddBilling : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "model_name",
table: "thinking_thoughts",
type: "character varying(4096)",
maxLength: 4096,
nullable: true);
migrationBuilder.AddColumn<long>(
name: "token_count",
table: "thinking_thoughts",
type: "bigint",
nullable: false,
defaultValue: 0L);
migrationBuilder.AddColumn<long>(
name: "paid_token",
table: "thinking_sequences",
type: "bigint",
nullable: false,
defaultValue: 0L);
migrationBuilder.AddColumn<long>(
name: "total_token",
table: "thinking_sequences",
type: "bigint",
nullable: false,
defaultValue: 0L);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "model_name",
table: "thinking_thoughts");
migrationBuilder.DropColumn(
name: "token_count",
table: "thinking_thoughts");
migrationBuilder.DropColumn(
name: "paid_token",
table: "thinking_sequences");
migrationBuilder.DropColumn(
name: "total_token",
table: "thinking_sequences");
}
}
}

View File

@@ -1,142 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251115084746_RefactorThoughtMessage")]
partial class RefactorThoughtMessage
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<long>("PaidToken")
.HasColumnType("bigint")
.HasColumnName("paid_token");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<long>("TotalToken")
.HasColumnType("bigint")
.HasColumnName("total_token");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<string>("ModelName")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("model_name");
b.Property<List<SnThinkingMessagePart>>("Parts")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parts");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<long>("TokenCount")
.HasColumnType("bigint")
.HasColumnName("token_count");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,30 +0,0 @@
using System.Collections.Generic;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class RefactorThoughtMessage : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<List<SnThinkingMessagePart>>(
name: "parts",
table: "thinking_thoughts",
type: "jsonb",
nullable: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "parts",
table: "thinking_thoughts");
}
}
}

View File

@@ -1,142 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251115162347_UpdatedFunctionCallModels")]
partial class UpdatedFunctionCallModels
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<long>("PaidToken")
.HasColumnType("bigint")
.HasColumnName("paid_token");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<long>("TotalToken")
.HasColumnType("bigint")
.HasColumnName("total_token");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<string>("ModelName")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("model_name");
b.Property<List<SnThinkingMessagePart>>("Parts")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parts");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<long>("TokenCount")
.HasColumnType("bigint")
.HasColumnName("token_count");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,22 +0,0 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class UpdatedFunctionCallModels : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
}
}
}

View File

@@ -1,159 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251115165833_AddUnpaidAccounts")]
partial class AddUnpaidAccounts
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<long>("PaidToken")
.HasColumnType("bigint")
.HasColumnName("paid_token");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<long>("TotalToken")
.HasColumnType("bigint")
.HasColumnName("total_token");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<string>("ModelName")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("model_name");
b.Property<List<SnThinkingMessagePart>>("Parts")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parts");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<long>("TokenCount")
.HasColumnType("bigint")
.HasColumnName("token_count");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnUnpaidAccount", b =>
{
b.Property<Guid>("AccountId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<DateTime>("MarkedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("marked_at");
b.HasKey("AccountId")
.HasName("pk_unpaid_accounts");
b.ToTable("unpaid_accounts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,34 +0,0 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class AddUnpaidAccounts : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "unpaid_accounts",
columns: table => new
{
account_id = table.Column<Guid>(type: "uuid", nullable: false),
marked_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("pk_unpaid_accounts", x => x.account_id);
});
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "unpaid_accounts");
}
}
}

View File

@@ -1,163 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
[Migration("20251116123552_SharableThought")]
partial class SharableThought
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<bool>("IsPublic")
.HasColumnType("boolean")
.HasColumnName("is_public");
b.Property<long>("PaidToken")
.HasColumnType("bigint")
.HasColumnName("paid_token");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<long>("TotalToken")
.HasColumnType("bigint")
.HasColumnName("total_token");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<string>("ModelName")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("model_name");
b.Property<List<SnThinkingMessagePart>>("Parts")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parts");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<long>("TokenCount")
.HasColumnType("bigint")
.HasColumnName("token_count");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnUnpaidAccount", b =>
{
b.Property<Guid>("AccountId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<DateTime>("MarkedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("marked_at");
b.HasKey("AccountId")
.HasName("pk_unpaid_accounts");
b.ToTable("unpaid_accounts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,29 +0,0 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
/// <inheritdoc />
public partial class SharableThought : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "is_public",
table: "thinking_sequences",
type: "boolean",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "is_public",
table: "thinking_sequences");
}
}
}

View File

@@ -1,160 +0,0 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Insight;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DysonNetwork.Insight.Migrations
{
[DbContext(typeof(AppDatabase))]
partial class AppDatabaseModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Guid>("AccountId")
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<bool>("IsPublic")
.HasColumnType("boolean")
.HasColumnName("is_public");
b.Property<long>("PaidToken")
.HasColumnType("bigint")
.HasColumnName("paid_token");
b.Property<string>("Topic")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("topic");
b.Property<long>("TotalToken")
.HasColumnType("bigint")
.HasColumnName("total_token");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_sequences");
b.ToTable("thinking_sequences", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<Instant>("CreatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("created_at");
b.Property<Instant?>("DeletedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("deleted_at");
b.Property<List<SnCloudFileReferenceObject>>("Files")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("files");
b.Property<string>("ModelName")
.HasMaxLength(4096)
.HasColumnType("character varying(4096)")
.HasColumnName("model_name");
b.Property<List<SnThinkingMessagePart>>("Parts")
.IsRequired()
.HasColumnType("jsonb")
.HasColumnName("parts");
b.Property<int>("Role")
.HasColumnType("integer")
.HasColumnName("role");
b.Property<Guid>("SequenceId")
.HasColumnType("uuid")
.HasColumnName("sequence_id");
b.Property<long>("TokenCount")
.HasColumnType("bigint")
.HasColumnName("token_count");
b.Property<Instant>("UpdatedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("updated_at");
b.HasKey("Id")
.HasName("pk_thinking_thoughts");
b.HasIndex("SequenceId")
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
b.ToTable("thinking_thoughts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnUnpaidAccount", b =>
{
b.Property<Guid>("AccountId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("account_id");
b.Property<DateTime>("MarkedAt")
.HasColumnType("timestamp with time zone")
.HasColumnName("marked_at");
b.HasKey("AccountId")
.HasName("pk_unpaid_accounts");
b.ToTable("unpaid_accounts", (string)null);
});
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
{
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
.WithMany()
.HasForeignKey("SequenceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired()
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
b.Navigation("Sequence");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,45 +0,0 @@
using DysonNetwork.Insight;
using DysonNetwork.Insight.Startup;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Http;
using DysonNetwork.Shared.Registry;
using Microsoft.EntityFrameworkCore;
var builder = WebApplication.CreateBuilder(args);
builder.AddServiceDefaults();
builder.ConfigureAppKestrel(builder.Configuration);
builder.Services.AddControllers();
builder.Services.AddAppServices();
builder.Services.AddAppAuthentication();
builder.Services.AddAppFlushHandlers();
builder.Services.AddAppBusinessServices();
builder.Services.AddAppScheduledJobs();
builder.Services.AddDysonAuth();
builder.Services.AddAccountService();
builder.Services.AddSphereService();
builder.Services.AddThinkingServices(builder.Configuration);
builder.AddSwaggerManifest(
"DysonNetwork.Insight",
"The insight service in the Solar Network."
);
var app = builder.Build();
app.MapDefaultEndpoints();
using (var scope = app.Services.CreateScope())
{
var db = scope.ServiceProvider.GetRequiredService<AppDatabase>();
await db.Database.MigrateAsync();
}
app.ConfigureAppMiddleware(builder.Configuration);
app.UseSwaggerManifest("DysonNetwork.Insight");
app.Run();

View File

@@ -1,21 +0,0 @@
{
"$schema": "https://json.schemastore.org/launchsettings.json",
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": false,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": false,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}

View File

@@ -1,22 +0,0 @@
using DysonNetwork.Shared.Http;
namespace DysonNetwork.Insight.Startup;
public static class ApplicationConfiguration
{
public static WebApplication ConfigureAppMiddleware(this WebApplication app, IConfiguration configuration)
{
app.MapOpenApi();
app.UseRequestLocalization();
app.ConfigureForwardedHeaders(configuration);
app.UseAuthentication();
app.UseAuthorization();
app.MapControllers();
return app;
}
}

View File

@@ -1,26 +0,0 @@
using DysonNetwork.Insight.Thought;
using Quartz;
namespace DysonNetwork.Insight.Startup;
public static class ScheduledJobsConfiguration
{
public static IServiceCollection AddAppScheduledJobs(this IServiceCollection services)
{
services.AddQuartz(q =>
{
var tokenBillingJob = new JobKey("TokenBilling");
q.AddJob<TokenBillingJob>(opts => opts.WithIdentity(tokenBillingJob));
q.AddTrigger(opts => opts
.ForJob(tokenBillingJob)
.WithIdentity("TokenBillingTrigger")
.WithSimpleSchedule(o => o
.WithIntervalInMinutes(5)
.RepeatForever())
);
});
services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);
return services;
}
}

View File

@@ -1,70 +0,0 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using DysonNetwork.Insight.Thought;
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Registry;
using Microsoft.SemanticKernel;
using NodaTime;
using NodaTime.Serialization.SystemTextJson;
namespace DysonNetwork.Insight.Startup;
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddAppServices(this IServiceCollection services)
{
services.AddDbContext<AppDatabase>();
services.AddHttpContextAccessor();
services.AddHttpClient();
// Register gRPC services
services.AddGrpc(options =>
{
options.EnableDetailedErrors = true; // Will be adjusted in Program.cs
options.MaxReceiveMessageSize = 16 * 1024 * 1024; // 16MB
options.MaxSendMessageSize = 16 * 1024 * 1024; // 16MB
});
services.AddGrpcReflection();
// Configure controllers and JSON serialization (snake_case naming, NodaTime support)
services.AddControllers().AddJsonOptions(options =>
{
options.JsonSerializerOptions.NumberHandling = JsonNumberHandling.AllowNamedFloatingPointLiterals;
options.JsonSerializerOptions.PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower;
options.JsonSerializerOptions.DictionaryKeyPolicy = JsonNamingPolicy.SnakeCaseLower;
options.JsonSerializerOptions.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
});
return services;
}
public static IServiceCollection AddAppAuthentication(this IServiceCollection services)
{
services.AddAuthorization();
return services;
}
public static IServiceCollection AddAppFlushHandlers(this IServiceCollection services)
{
services.AddSingleton<FlushBufferService>();
return services;
}
public static IServiceCollection AddAppBusinessServices(this IServiceCollection services)
{
return services;
}
public static IServiceCollection AddThinkingServices(this IServiceCollection services, IConfiguration configuration)
{
services.AddSingleton<ThoughtProvider>();
services.AddScoped<ThoughtService>();
return services;
}
}

View File

@@ -1,155 +0,0 @@
# Client-Side Guide: Handling the New Message Structure
This document outlines how to update your client application to support the new rich message structure for the thinking/chat feature. The backend now sends structured messages that can include plain text, function calls, and function results, allowing for a more interactive and transparent user experience.
When accessed through the gateway, all response properties are serialized in snake_case.
## 1. Data Models
When you receive a complete message (a "thought"), it will be in the form of an `SnThinkingThought` object. The core of this object is the `Parts` array, which contains the different components of the message.
Here are the primary data models you will be working with, represented here in a TypeScript-like format for clarity:
```typescript
// The main message object from the assistant or user
interface SnThinkingThought {
id: string;
parts: SnThinkingMessagePart[];
role: 'Assistant' /*Value is (0)*/ | 'User' /*Value is (1)*/;
createdAt: string; // ISO 8601 date string
// ... other metadata
}
// A single part of a message
interface SnThinkingMessagePart {
type: ThinkingMessagePartType;
text?: string;
functionCall?: SnFunctionCall;
functionResult?: SnFunctionResult;
}
// Enum for the different part types
enum ThinkingMessagePartType {
Text = 0,
FunctionCall = 1,
FunctionResult = 2,
}
// Represents a function/tool call made by the assistant
interface SnFunctionCall {
id: string;
name: string;
arguments: string; // A JSON string of the arguments
}
// Represents the result of a function call
interface SnFunctionResult {
callId: string; // The ID of the corresponding function call
result: any; // The data returned by the function
isError: boolean;
}
```
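As a concrete illustration, here is a hypothetical assistant thought that mixes text with a tool call. The field values (IDs, arguments, result payload) are made up for this example; the property names follow the interfaces above and arrive in snake_case when going through the gateway.
```typescript
// Hypothetical example only: IDs, arguments and the result payload are illustrative.
const exampleThought: SnThinkingThought = {
  id: '0a1b2c3d-0000-0000-0000-000000000000',
  role: 'Assistant',
  createdAt: '2025-11-16T12:00:00Z',
  parts: [
    { type: ThinkingMessagePartType.Text, text: 'Let me look that post up for you.' },
    {
      type: ThinkingMessagePartType.FunctionCall,
      functionCall: { id: 'call_1', name: 'get_post', arguments: '{"postId":"..."}' },
    },
    {
      type: ThinkingMessagePartType.FunctionResult,
      functionResult: { callId: 'call_1', result: { title: 'Hello Solar Network' }, isError: false },
    },
    { type: ThinkingMessagePartType.Text, text: 'Here is what I found.' },
  ],
};
```
Rendering this thought with the approach in section 3 produces a line of text, a "Using tool: get_post..." indicator, a "Tool finished." indicator, and a closing line of text, in that order.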
## 2. Handling the SSE Stream
The response is streamed using Server-Sent Events (SSE). Your client should listen to this stream and process events as they arrive to build the UI in real-time.
The stream sends different types of messages, identified by a `type` field in the JSON payload.
| Event Type | `data` Payload | Client-Side Action |
| ------------------------ | -------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- |
| `text` | `{ "type": "text", "data": "some text" }` | Append the text content to the current message being displayed. This is the most common event. |
| `function_call_update` | `{ "type": "function_call_update", "data": { ... } }` | This provides real-time updates as the AI decides on a function call. You can use this to show an advanced "thinking" state, but it's optional. The key events to handle are `function_call` and `function_result`. |
| `function_call` | `{ "type": "function_call", "data": SnFunctionCall }` | The AI has committed to using a tool. Display a "Using tool..." indicator. You can show the `name` of the tool for more clarity. |
| `function_result` | `{ "type": "function_result", "data": SnFunctionResult }` | The tool has finished running. You can hide the "thinking" indicator for this tool and optionally display a summary of the result. |
| `topic` | `{ "type": "topic", "data": "A new topic" }` | If this is the first message in a new conversation, this event provides the auto-generated topic title. Update your UI accordingly. |
| `thought` | `{ "type": "thought", "data": SnThinkingThought }` | This is the **final event** in the stream. It contains the complete, persisted message object with all its `Parts`. You should use this final object to replace the incrementally-built message in your state to ensure consistency. |
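Below is a minimal TypeScript sketch of consuming this stream with `fetch` and a `ReadableStream`. It assumes the endpoint is reachable at `/api/thought` (or `/insight/thought` through the gateway), that the request body uses the snake_case field names `user_message` and `sequence_id`, and that every event arrives as a single `data:`, `topic:`, or `thought:` line followed by a blank line, as described above; adjust these details to match your deployment.
```typescript
// Minimal sketch of consuming the thinking stream; see the assumptions in the text above.
async function streamThought(
  userMessage: string,
  sequenceId: string | null,
  onEvent: (type: string, data: unknown) => void,
): Promise<void> {
  const response = await fetch('/api/thought', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ user_message: userMessage, sequence_id: sequenceId }),
  });
  if (!response.ok || !response.body) throw new Error(`Stream failed: ${response.status}`);

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Events are separated by a blank line; keep any trailing partial event in the buffer.
    const events = buffer.split('\n\n');
    buffer = events.pop() ?? '';

    for (const rawEvent of events) {
      const line = rawEvent.trim();
      if (!line) continue;
      // Lines look like "data: {...}", "topic: {...}" or "thought: {...}";
      // the JSON payload itself carries the "type" field used for dispatch.
      const payload = line.slice(line.indexOf(':') + 1).trim();
      try {
        const parsed = JSON.parse(payload);
        onEvent(parsed.type, parsed.data);
      } catch {
        // Ignore fragments that are not valid JSON (e.g. keep-alive comments).
      }
    }
  }
}
```
In the `onEvent` callback, append `text` chunks to the message being built, surface `function_call` / `function_result` as tool indicators, and replace the whole in-progress message with the final `thought` payload to keep client state consistent.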
## 3. Rendering a Message from `SnThinkingThought`
Once you have the final `SnThinkingThought` object (either from the `thought` event in the stream or by fetching conversation history), you can render it by iterating through the `parts` array.
### Pseudocode for Rendering
```javascript
function renderThought(thought: SnThinkingThought) {
const messageContainer = document.createElement('div');
messageContainer.className = `message message-role-${thought.role}`;
// User messages are simple and will only have one text part
if (thought.role === 'User') {
const textPart = thought.parts[0];
messageContainer.innerText = textPart.text;
return messageContainer;
}
// Assistant messages can have multiple parts
let textBuffer = '';
thought.parts.forEach(part => {
switch (part.type) {
case ThinkingMessagePartType.Text:
// Buffer text to combine consecutive text parts
textBuffer += part.text;
break;
case ThinkingMessagePartType.FunctionCall:
// First, render any buffered text
if (textBuffer) {
messageContainer.appendChild(renderText(textBuffer));
textBuffer = '';
}
// Then, render the function call UI component
messageContainer.appendChild(renderFunctionCall(part.functionCall));
break;
case ThinkingMessagePartType.FunctionResult:
// Render buffered text
if (textBuffer) {
messageContainer.appendChild(renderText(textBuffer));
textBuffer = '';
}
// Then, render the function result UI component
messageContainer.appendChild(renderFunctionResult(part.functionResult));
break;
}
});
// Render any remaining text at the end
if (textBuffer) {
messageContainer.appendChild(renderText(textBuffer));
}
return messageContainer;
}
// Helper functions to create UI components
function renderText(text) {
const p = document.createElement('p');
p.innerText = text;
return p;
}
function renderFunctionCall(functionCall) {
const el = document.createElement('div');
el.className = 'function-call-indicator';
el.innerHTML = `<i>Using tool: <strong>${functionCall.name}</strong>...</i>`;
// You could add a button to show functionCall.arguments
return el;
}
function renderFunctionResult(functionResult) {
const el = document.createElement('div');
el.className = 'function-result-indicator';
if (functionResult.isError) {
el.classList.add('error');
el.innerText = 'An error occurred while using the tool.';
} else {
el.innerText = 'Tool finished.';
}
// You could expand this to show a summary of functionResult.result
return el;
}
```
This approach ensures that text and tool-use indicators are rendered inline and in the correct order, providing a clear and accurate representation of the assistant's actions.

View File

@@ -1,29 +0,0 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Microsoft.IdentityModel.Tokens;
using Microsoft.SemanticKernel;
namespace DysonNetwork.Insight.Thought.Plugins;
public class SnAccountKernelPlugin(
AccountService.AccountServiceClient accountClient
)
{
[KernelFunction("get_account")]
public async Task<SnAccount?> GetAccount(string userId)
{
var request = new GetAccountRequest { Id = userId };
var response = await accountClient.GetAccountAsync(request);
if (response is null) return null;
return SnAccount.FromProtoValue(response);
}
[KernelFunction("get_account_by_name")]
public async Task<SnAccount?> GetAccountByName(string username)
{
var request = new LookupAccountBatchRequest();
request.Names.Add(username);
var response = await accountClient.LookupAccountBatchAsync(request);
return response.Accounts.IsNullOrEmpty() ? null : SnAccount.FromProtoValue(response.Accounts[0]);
}
}

View File

@@ -1,98 +0,0 @@
using System.ComponentModel;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Microsoft.SemanticKernel;
using NodaTime;
using NodaTime.Serialization.Protobuf;
using NodaTime.Text;
namespace DysonNetwork.Insight.Thought.Plugins;
public class SnPostKernelPlugin(
PostService.PostServiceClient postClient
)
{
[KernelFunction("get_post")]
public async Task<SnPost?> GetPost(string postId)
{
var request = new GetPostRequest { Id = postId };
var response = await postClient.GetPostAsync(request);
return response is null ? null : SnPost.FromProtoValue(response);
}
[KernelFunction("search_posts")]
[Description("Perform a full-text search in all Solar Network posts.")]
public async Task<List<SnPost>> SearchPostsContent(string contentQuery, int pageSize = 10, int page = 1)
{
var request = new SearchPostsRequest
{
Query = contentQuery,
PageSize = pageSize,
PageToken = ((page - 1) * pageSize).ToString()
};
var response = await postClient.SearchPostsAsync(request);
return response.Posts.Select(SnPost.FromProtoValue).ToList();
}
public class KernelPostListResult
{
public List<SnPost> Posts { get; set; } = [];
public int TotalCount { get; set; }
}
[KernelFunction("list_posts")]
[Description("List all posts on the Solar Network without filters, orderBy can be date or popularity")]
public async Task<KernelPostListResult> ListPosts(
string orderBy = "date",
bool orderDesc = true,
int pageSize = 10,
int page = 1
)
{
var request = new ListPostsRequest
{
OrderBy = orderBy,
OrderDesc = orderDesc,
PageSize = pageSize,
PageToken = ((page - 1) * pageSize).ToString()
};
var response = await postClient.ListPostsAsync(request);
return new KernelPostListResult
{
Posts = response.Posts.Select(SnPost.FromProtoValue).ToList(),
TotalCount = response.TotalSize,
};
}
[KernelFunction("list_posts_within_time")]
[Description(
"List posts in a period of time, the time requires ISO-8601 format, one of the start and end must be provided.")]
public async Task<KernelPostListResult> ListPostsWithinTime(
string? beforeTime,
string? afterTime,
int pageSize = 10,
int page = 1
)
{
var pattern = InstantPattern.General;
Instant? before = !string.IsNullOrWhiteSpace(beforeTime)
? pattern.Parse(beforeTime).TryGetValue(default, out var beforeValue) ? beforeValue : null
: null;
Instant? after = !string.IsNullOrWhiteSpace(afterTime)
? pattern.Parse(afterTime).TryGetValue(default, out var afterValue) ? afterValue : null
: null;
var request = new ListPostsRequest
{
After = after?.ToTimestamp(),
Before = before?.ToTimestamp(),
PageSize = pageSize,
PageToken = ((page - 1) * pageSize).ToString()
};
var response = await postClient.ListPostsAsync(request);
return new KernelPostListResult
{
Posts = response.Posts.Select(SnPost.FromProtoValue).ToList(),
TotalCount = response.TotalSize,
};
}
}

View File

@@ -1,161 +0,0 @@
# DysonNetwork Insight Thought API
The Thought API provides conversational AI capabilities for users of the Solar Network. It allows users to engage in chat-like conversations with an AI assistant powered by Semantic Kernel and connected to various tools.
This service is handled by Insight; when accessed through the Gateway, replace the `/api` prefix with `/insight`.
## Features
- Streaming chat responses using Server-Sent Events (SSE)
- Conversation context management with sequences
- Caching for improved performance
- Authentication required for all operations
## Endpoints
### POST /api/thought
Initiates or continues a chat conversation.
#### Parameters
- `UserMessage` (string, required): The message from the user
- `SequenceId` (Guid, optional): ID of existing conversation sequence. If not provided, a new sequence is created.
#### Response
- Content-Type: `text/event-stream`
- Streaming response with assistant messages
- Status: 401 if not authenticated
- Status: 403 if sequence doesn't belong to user
#### Example Usage
```bash
curl -N -X POST "http://localhost:5000/api/thought" \
-H "Content-Type: application/json" \
-d '{
"UserMessage": "Hello, how can I help with the Solar Network?",
"SequenceId": null
}'
```
### GET /api/thought/sequences
Lists all thinking sequences for the authenticated user.
#### Parameters
- `offset` (int, default 0): Number of sequences to skip for pagination
- `take` (int, default 20): Maximum number of sequences to return
#### Response
- `200 OK`: Array of `SnThinkingSequence`
- `401 Unauthorized`: If not authenticated
- Headers:
- `X-Total`: Total number of sequences before pagination
#### Example Usage
```bash
curl -X GET "http://localhost:5000/api/thought/sequences?take=10"
```
### GET /api/thought/sequences/{sequenceId}
Retrieves all thoughts (messages) in a specific conversation sequence.
#### Parameters
- `sequenceId` (Guid, path): ID of the sequence to retrieve
#### Response
- `200 OK`: Array of `SnThinkingThought` ordered by creation date
- `401 Unauthorized`: If not authenticated
- `404 Not Found`: If sequence doesn't exist or doesn't belong to user
#### Example Usage
```bash
curl -X GET "http://localhost:5000/api/thought/sequences/12345678-1234-1234-1234-123456789abc"
```
## Data Models
### StreamThinkingRequest
```csharp
{
string UserMessage, // Required
Guid? SequenceId // Optional
}
```
### SnThinkingSequence
```csharp
{
Guid Id,
string? Topic,
Guid AccountId
}
```
### SnThinkingThought
```csharp
{
Guid Id,
string? Content,
List<SnCloudFileReferenceObject> Files,
ThinkingThoughtRole Role,
Guid SequenceId,
SnThinkingSequence Sequence
}
```
### ThinkingThoughtRole (enum)
- `Assistant`
- `User`
## Caching
The API uses Redis-based caching for conversation thoughts:
- Thoughts are cached for 10 minutes with group-based invalidation
- Cache is invalidated when new thoughts are added to a sequence
- Improves performance for accessing conversation history
## Authentication
All endpoints require authentication through the current user session. Sequence access is validated against the authenticated user's account ID.
## Error Responses
- `401 Unauthorized`: Authentication required
- `403 Forbidden`: Access denied (sequence ownership)
- `404 Not Found`: Resource not found
## Streaming Details
The POST endpoint returns a stream of assistant responses using Server-Sent Events format. Clients should handle the streaming response and display messages incrementally.
### Streaming Message Format
The streaming response sends several types of JSON messages:
- **Text messages**: `{"type": "text", "data": "..." }`
- **Function calls**: `{"type": "function_call", "data": {...} }` (when AI uses tools)
- **Topic updates**: `{"type": "topic", "data": "..." }` (sent at end if topic was generated)
- **Thought completion**: `{"type": "thought", "data": {...} }` (sent at end with saved thought details)
All streaming chunks during generation use the SSE event format:
```
data: {"type": "...", "data": ...}
```
Final messages (topic and thought) use custom event types:
```
topic: {"type": "topic", "data": "..."}
thought: {"type": "thought", "data": {...}}
```
Clients should parse these JSON messages and handle different types appropriately, such as displaying text in real-time and processing tool calls.
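For illustration only, a complete response stream for a short reply with a freshly generated topic (content made up) might look like this on the wire:
```
data: {"type": "text", "data": "Hello"}

data: {"type": "text", "data": ", how can I help you today?"}

topic: {"type": "topic", "data": "Greeting"}

thought: {"type": "thought", "data": { ...the saved SnThinkingThought... }}
```
When tools are used, additional `function_call` and `function_result` events appear as `data:` lines between the text chunks.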
## Implementation Notes
- Built with ASP.NET Core and Semantic Kernel
- Uses PostgreSQL via Entity Framework Core
- Integrated with Ollama for AI completion
- Caching via Redis

View File

@@ -1,461 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.Diagnostics.CodeAnalysis;
using System.Text;
using System.Text.Json;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Microsoft.AspNetCore.Mvc;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
namespace DysonNetwork.Insight.Thought;
[ApiController]
[Route("/api/thought")]
public class ThoughtController(ThoughtProvider provider, ThoughtService service) : ControllerBase
{
public static readonly List<string> AvailableProposals = ["post_create"];
public class StreamThinkingRequest
{
[Required] public string UserMessage { get; set; } = null!;
public string? ServiceId { get; set; }
public Guid? SequenceId { get; set; }
public List<string>? AttachedPosts { get; set; } = [];
public List<Dictionary<string, dynamic>>? AttachedMessages { get; set; }
public List<string> AcceptProposals { get; set; } = [];
}
public class UpdateSharingRequest
{
public bool IsPublic { get; set; }
}
public class ThoughtServiceInfo
{
public string ServiceId { get; set; } = null!;
public double BillingMultiplier { get; set; }
public int PerkLevel { get; set; }
}
public class ThoughtServicesResponse
{
public string DefaultService { get; set; } = null!;
public IEnumerable<ThoughtServiceInfo> Services { get; set; } = null!;
}
[HttpGet("services")]
[ProducesResponseType(StatusCodes.Status200OK)]
public ActionResult<ThoughtServicesResponse> GetAvailableServices()
{
var services = provider.GetAvailableServicesInfo()
.Select(s => new ThoughtServiceInfo
{
ServiceId = s.ServiceId,
BillingMultiplier = s.BillingMultiplier,
PerkLevel = s.PerkLevel
});
return Ok(new ThoughtServicesResponse
{
DefaultService = provider.GetDefaultServiceId(),
Services = services
});
}
[HttpPost]
[Experimental("SKEXP0110")]
public async Task<ActionResult> Think([FromBody] StreamThinkingRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
if (request.AcceptProposals.Any(e => !AvailableProposals.Contains(e)))
return BadRequest("Request contains unavailable proposal");
var serviceId = provider.GetServiceId(request.ServiceId);
var serviceInfo = provider.GetServiceInfo(serviceId);
if (serviceInfo is null)
{
return BadRequest("Service not found or configured.");
}
if (serviceInfo.PerkLevel > 0 && !currentUser.IsSuperuser)
if (currentUser.PerkSubscription is null ||
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier) <
serviceInfo.PerkLevel)
return StatusCode(403, "Not enough perk level");
var kernel = provider.GetKernel(request.ServiceId);
if (kernel is null)
{
return BadRequest("Service not found or configured.");
}
// Generate a topic if creating a new sequence
string? topic = null;
if (!request.SequenceId.HasValue)
{
// Use AI to summarize a topic from a user message
var summaryHistory = new ChatHistory(
"You are a helpful assistant. Summarize the following user message into a concise topic title (max 100 characters).\n" +
"Direct give the topic you summerized, do not add extra prefix / suffix."
);
summaryHistory.AddUserMessage(request.UserMessage);
var summaryKernel = provider.GetKernel(); // Get default kernel
if (summaryKernel is null)
{
return BadRequest("Default service not found or configured.");
}
var summaryResult = await summaryKernel
.GetRequiredService<IChatCompletionService>()
.GetChatMessageContentAsync(summaryHistory);
topic = summaryResult.Content?[..Math.Min(summaryResult.Content.Length, 4096)];
}
// Handle sequence
var sequence = await service.GetOrCreateSequenceAsync(accountId, request.SequenceId, topic);
if (sequence == null) return Forbid(); // or NotFound
// Save user thought
await service.SaveThoughtAsync(sequence, [
new SnThinkingMessagePart
{
Type = ThinkingMessagePartType.Text,
Text = request.UserMessage
}
], ThinkingThoughtRole.User);
// Build chat history
var chatHistory = new ChatHistory(
"You're a helpful assistant on the Solar Network, a social network.\n" +
"Your name is Sn-chan (or SN 酱 in chinese), a cute sweet heart with passion for almost everything.\n" +
"When you talk to user, you can add some modal particles and emoticons to your response to be cute, but prevent use a lot of emojis." +
"Your creator is @littlesheep, which is also the creator of the Solar Network, if you met some problems you was unable to solve, trying guide the user to ask (DM) the @littlesheep.\n" +
"\n" +
"The ID on the Solar Network is UUID, so mostly hard to read, so do not show ID to user unless user ask to do so or necessary.\n" +
"\n" +
"Your aim is to helping solving questions for the users on the Solar Network.\n" +
"And the Solar Network is the social network platform you live on.\n" +
"When the user asks questions about the Solar Network (also known as SN and Solian), try use the tools you have to get latest and accurate data."
);
chatHistory.AddSystemMessage(
"You can issue some proposals to user, like creating a post. The proposal syntax is like a xml tag, with an attribute indicates which proposal.\n" +
"Depends on the proposal type, the payload (content inside the xml tag) might be different.\n" +
"\n" +
"Example: <proposal type=\"post_create\">...post content...</proposal>\n" +
"\n" +
"Here are some references of the proposals you can issue, but if you want to issue one, make sure the user is accept it.\n" +
"1. post_create: body takes simple string, create post for user." +
"\n" +
$"The user currently accept these proposals: {string.Join(',', request.AcceptProposals)}"
);
chatHistory.AddSystemMessage(
$"The user you're currently talking to is {currentUser.Nick} ({currentUser.Name}), ID is {currentUser.Id}"
);
if (request.AttachedPosts is { Count: > 0 })
{
chatHistory.AddUserMessage(
$"Attached post IDs: {string.Join(',', request.AttachedPosts!)}");
}
if (request.AttachedMessages is { Count: > 0 })
{
chatHistory.AddUserMessage(
$"Attached chat messages data: {JsonSerializer.Serialize(request.AttachedMessages)}");
}
// Add previous thoughts (excluding the current user thought, which is the first one since descending)
var previousThoughts = await service.GetPreviousThoughtsAsync(sequence);
var count = previousThoughts.Count;
for (var i = count - 1; i >= 1; i--) // skip first (the newest, current user)
{
var thought = previousThoughts[i];
var textContent = new StringBuilder();
var functionCalls = new List<FunctionCallContent>();
var functionResults = new List<FunctionResultContent>();
foreach (var part in thought.Parts)
{
switch (part.Type)
{
case ThinkingMessagePartType.Text:
textContent.Append(part.Text);
break;
case ThinkingMessagePartType.FunctionCall:
var arguments = !string.IsNullOrEmpty(part.FunctionCall!.Arguments)
? JsonSerializer.Deserialize<Dictionary<string, object?>>(part.FunctionCall!.Arguments)
: null;
var kernelArgs = arguments is not null ? new KernelArguments(arguments) : null;
functionCalls.Add(new FunctionCallContent(
functionName: part.FunctionCall!.Name,
pluginName: part.FunctionCall.PluginName,
id: part.FunctionCall.Id,
arguments: kernelArgs
));
break;
case ThinkingMessagePartType.FunctionResult:
var resultObject = part.FunctionResult!.Result;
var resultString = resultObject as string ?? JsonSerializer.Serialize(resultObject);
functionResults.Add(new FunctionResultContent(
callId: part.FunctionResult.CallId,
functionName: part.FunctionResult.FunctionName,
pluginName: part.FunctionResult.PluginName,
result: resultString
));
break;
default:
throw new ArgumentOutOfRangeException();
}
}
if (thought.Role == ThinkingThoughtRole.User)
{
chatHistory.AddUserMessage(textContent.ToString());
}
else
{
var assistantMessage = new ChatMessageContent(AuthorRole.Assistant, textContent.ToString());
if (functionCalls.Count > 0)
{
assistantMessage.Items = [];
foreach (var fc in functionCalls)
{
assistantMessage.Items.Add(fc);
}
}
chatHistory.Add(assistantMessage);
if (functionResults.Count <= 0) continue;
foreach (var fr in functionResults)
{
chatHistory.Add(fr.ToChatMessage());
}
}
}
chatHistory.AddUserMessage(request.UserMessage);
// Set response for streaming
Response.Headers.Append("Content-Type", "text/event-stream");
Response.StatusCode = 200;
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
var executionSettings = provider.CreatePromptExecutionSettings(request.ServiceId);
var assistantParts = new List<SnThinkingMessagePart>();
while (true)
{
var textContentBuilder = new StringBuilder();
AuthorRole? authorRole = null;
var functionCallBuilder = new FunctionCallContentBuilder();
await foreach (
var streamingContent in chatCompletionService.GetStreamingChatMessageContentsAsync(
chatHistory, executionSettings, kernel)
)
{
authorRole ??= streamingContent.Role;
if (streamingContent.Content is not null)
{
textContentBuilder.Append(streamingContent.Content);
var messageJson = JsonSerializer.Serialize(new
{ type = "text", data = streamingContent.Content });
await Response.Body.WriteAsync(Encoding.UTF8.GetBytes($"data: {messageJson}\n\n"));
await Response.Body.FlushAsync();
}
functionCallBuilder.Append(streamingContent);
}
var finalMessageText = textContentBuilder.ToString();
if (!string.IsNullOrEmpty(finalMessageText))
{
assistantParts.Add(new SnThinkingMessagePart
{ Type = ThinkingMessagePartType.Text, Text = finalMessageText });
}
var functionCalls = functionCallBuilder.Build()
.Where(fc => !string.IsNullOrEmpty(fc.Id)).ToList();
if (functionCalls.Count == 0)
break;
var assistantMessage = new ChatMessageContent(
authorRole ?? AuthorRole.Assistant,
string.IsNullOrEmpty(finalMessageText) ? null : finalMessageText
);
foreach (var functionCall in functionCalls)
{
assistantMessage.Items.Add(functionCall);
}
chatHistory.Add(assistantMessage);
foreach (var functionCall in functionCalls)
{
var part = new SnThinkingMessagePart
{
Type = ThinkingMessagePartType.FunctionCall,
FunctionCall = new SnFunctionCall
{
Id = functionCall.Id!,
PluginName = functionCall.PluginName,
Name = functionCall.FunctionName,
Arguments = JsonSerializer.Serialize(functionCall.Arguments)
}
};
assistantParts.Add(part);
var messageJson = JsonSerializer.Serialize(new { type = "function_call", data = part.FunctionCall });
await Response.Body.WriteAsync(Encoding.UTF8.GetBytes($"data: {messageJson}\n\n"));
await Response.Body.FlushAsync();
FunctionResultContent resultContent;
try
{
resultContent = await functionCall.InvokeAsync(kernel);
}
catch (Exception ex)
{
resultContent = new FunctionResultContent(functionCall, ex.Message);
}
chatHistory.Add(resultContent.ToChatMessage());
var resultPart = new SnThinkingMessagePart
{
Type = ThinkingMessagePartType.FunctionResult,
FunctionResult = new SnFunctionResult
{
CallId = resultContent.CallId!,
PluginName = resultContent.PluginName,
FunctionName = resultContent.FunctionName,
Result = resultContent.Result!,
IsError = resultContent.Result is Exception
}
};
assistantParts.Add(resultPart);
var resultMessageJson =
JsonSerializer.Serialize(new { type = "function_result", data = resultPart.FunctionResult });
await Response.Body.WriteAsync(Encoding.UTF8.GetBytes($"data: {resultMessageJson}\n\n"));
await Response.Body.FlushAsync();
}
}
// Save assistant thought
var savedThought = await service.SaveThoughtAsync(
sequence,
assistantParts,
ThinkingThoughtRole.Assistant,
serviceId
);
// Write the topic if it was newly set, then the thought object as JSON to the stream
using (var streamBuilder = new MemoryStream())
{
await streamBuilder.WriteAsync("\n\n"u8.ToArray());
if (topic != null)
{
var topicJson = JsonSerializer.Serialize(new { type = "topic", data = sequence.Topic ?? "" });
await streamBuilder.WriteAsync(Encoding.UTF8.GetBytes($"topic: {topicJson}\n\n"));
}
var thoughtJson = JsonSerializer.Serialize(new { type = "thought", data = savedThought },
GrpcTypeHelper.SerializerOptions);
await streamBuilder.WriteAsync(Encoding.UTF8.GetBytes($"thought: {thoughtJson}\n\n"));
var outputBytes = streamBuilder.ToArray();
await Response.Body.WriteAsync(outputBytes);
await Response.Body.FlushAsync();
}
// Return empty result since we're streaming
return new EmptyResult();
}
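
For context, a minimal sketch of how a client could consume the stream emitted above. The host, route, and request body shape are assumptions (they are not visible in this hunk); the "data:", "topic:" and "thought:" prefixes and the { type, data } envelope mirror the controller code.

using System.Net.Http.Json;
using System.Text.Json;

using var http = new HttpClient { BaseAddress = new Uri("https://insight.example.com") }; // assumed host
using var request = new HttpRequestMessage(HttpMethod.Post, "/api/thinking")              // assumed route
{
    Content = JsonContent.Create(new { userMessage = "Hello SN-chan" })                   // assumed body shape
};
using var response = await http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
await using var stream = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(stream);

while (await reader.ReadLineAsync() is { } line)
{
    if (line.Length == 0) continue;                          // events are separated by blank lines
    var idx = line.IndexOf(": ", StringComparison.Ordinal);
    if (idx < 0) continue;
    var prefix = line[..idx];                                 // "data", "topic" or "thought"
    using var json = JsonDocument.Parse(line[(idx + 2)..]);
    var type = json.RootElement.GetProperty("type").GetString();
    Console.WriteLine($"[{prefix}] {type}: {json.RootElement.GetProperty("data")}");
}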
/// <summary>
/// Retrieves a paginated list of thinking sequences for the authenticated user.
/// </summary>
/// <param name="offset">The number of sequences to skip for pagination.</param>
/// <param name="take">The maximum number of sequences to return (default: 20).</param>
/// <returns>
/// Returns an ActionResult containing a list of thinking sequences.
/// Includes an X-Total header with the total count of sequences before pagination.
/// </returns>
[HttpGet("sequences")]
[ProducesResponseType(StatusCodes.Status200OK)]
public async Task<ActionResult<List<SnThinkingSequence>>> ListSequences(
[FromQuery] int offset = 0,
[FromQuery] int take = 20
)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var (totalCount, sequences) = await service.ListSequencesAsync(accountId, offset, take);
Response.Headers["X-Total"] = totalCount.ToString();
return Ok(sequences);
}
[HttpPatch("sequences/{sequenceId:guid}/sharing")]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
[ProducesResponseType(StatusCodes.Status403Forbidden)]
public async Task<ActionResult> UpdateSequenceSharing(Guid sequenceId, [FromBody] UpdateSharingRequest request)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
var sequence = await service.GetSequenceAsync(sequenceId);
if (sequence == null) return NotFound();
if (sequence.AccountId != accountId) return Forbid();
sequence.IsPublic = request.IsPublic;
await service.UpdateSequenceAsync(sequence);
return NoContent();
}
/// <summary>
/// Retrieves the thoughts in a specific thinking sequence.
/// </summary>
/// <param name="sequenceId">The ID of the sequence to retrieve thoughts from.</param>
/// <returns>
/// Returns an ActionResult containing a list of thoughts in the sequence, ordered by creation date.
/// </returns>
[HttpGet("sequences/{sequenceId:guid}")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult<List<SnThinkingThought>>> GetSequenceThoughts(Guid sequenceId)
{
var sequence = await service.GetSequenceAsync(sequenceId);
if (sequence == null) return NotFound();
if (!sequence.IsPublic)
{
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
var accountId = Guid.Parse(currentUser.Id);
if (sequence.AccountId != accountId)
return StatusCode(403);
}
var thoughts = await service.GetPreviousThoughtsAsync(sequence);
return Ok(thoughts);
}
}
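
A short usage sketch for the paginated sequence listing above. The host and the controller's route prefix are assumptions (and authentication is omitted); the offset/take query parameters and the X-Total response header come from the code.

using System.Net.Http.Json;
using DysonNetwork.Shared.Models;

using var http = new HttpClient { BaseAddress = new Uri("https://insight.example.com") }; // assumed host
using var response = await http.GetAsync("/api/thinking/sequences?offset=0&take=20");     // route prefix assumed
response.EnsureSuccessStatusCode();

var total = int.Parse(response.Headers.GetValues("X-Total").First());
var sequences = await response.Content.ReadFromJsonAsync<List<SnThinkingSequence>>() ?? [];
Console.WriteLine($"Loaded {sequences.Count} of {total} sequences.");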

View File

@@ -1,191 +0,0 @@
using System.ClientModel;
using System.Diagnostics.CodeAnalysis;
using DysonNetwork.Insight.Thought.Plugins;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Registry;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI;
using Microsoft.SemanticKernel.Plugins.Web;
using Microsoft.SemanticKernel.Plugins.Web.Bing;
using Microsoft.SemanticKernel.Plugins.Web.Google;
namespace DysonNetwork.Insight.Thought;
public class ThoughtServiceModel
{
public string ServiceId { get; set; } = null!;
public string? Provider { get; set; }
public string? Model { get; set; }
public double BillingMultiplier { get; set; }
public int PerkLevel { get; set; }
}
public class ThoughtProvider
{
private readonly PostService.PostServiceClient _postClient;
private readonly AccountService.AccountServiceClient _accountClient;
private readonly IConfiguration _configuration;
private readonly ILogger<ThoughtProvider> _logger;
private readonly Dictionary<string, Kernel> _kernels = new();
private readonly Dictionary<string, string> _serviceProviders = new();
private readonly Dictionary<string, ThoughtServiceModel> _serviceModels = new();
private readonly string _defaultServiceId;
[Experimental("SKEXP0050")]
public ThoughtProvider(
IConfiguration configuration,
PostService.PostServiceClient postServiceClient,
AccountService.AccountServiceClient accountServiceClient,
ILogger<ThoughtProvider> logger
)
{
_logger = logger;
_postClient = postServiceClient;
_accountClient = accountServiceClient;
_configuration = configuration;
var cfg = configuration.GetSection("Thinking");
_defaultServiceId = cfg.GetValue<string>("DefaultService")!;
var services = cfg.GetSection("Services").GetChildren();
foreach (var service in services)
{
var serviceId = service.Key;
var serviceModel = new ThoughtServiceModel
{
ServiceId = serviceId,
Provider = service.GetValue<string>("Provider"),
Model = service.GetValue<string>("Model"),
BillingMultiplier = service.GetValue<double>("BillingMultiplier", 1.0),
PerkLevel = service.GetValue<int>("PerkLevel", 0)
};
_serviceModels[serviceId] = serviceModel;
var providerType = service.GetValue<string>("Provider")?.ToLower();
if (providerType is null) continue;
var kernel = InitializeThinkingService(service);
InitializeHelperFunctions(kernel);
_kernels[serviceId] = kernel;
_serviceProviders[serviceId] = providerType;
}
}
private Kernel InitializeThinkingService(IConfigurationSection serviceConfig)
{
var providerType = serviceConfig.GetValue<string>("Provider")?.ToLower();
var model = serviceConfig.GetValue<string>("Model");
var endpoint = serviceConfig.GetValue<string>("Endpoint");
var apiKey = serviceConfig.GetValue<string>("ApiKey");
var builder = Kernel.CreateBuilder();
switch (providerType)
{
case "ollama":
builder.AddOllamaChatCompletion(
model!,
new Uri(endpoint ?? "http://localhost:11434/api")
);
break;
case "deepseek":
var client = new OpenAIClient(
new ApiKeyCredential(apiKey!),
new OpenAIClientOptions { Endpoint = new Uri(endpoint ?? "https://api.deepseek.com/v1") }
);
builder.AddOpenAIChatCompletion(model!, client);
break;
default:
throw new InvalidOperationException("Unknown thinking provider: " + providerType);
}
// Add gRPC clients for Thought Plugins
builder.Services.AddServiceDiscoveryCore();
builder.Services.AddServiceDiscovery();
builder.Services.AddAccountService();
builder.Services.AddSphereService();
builder.Plugins.AddFromObject(new SnAccountKernelPlugin(_accountClient));
builder.Plugins.AddFromObject(new SnPostKernelPlugin(_postClient));
return builder.Build();
}
[Experimental("SKEXP0050")]
private void InitializeHelperFunctions(Kernel kernel)
{
// Add web search plugins if configured
var bingApiKey = _configuration.GetValue<string>("Thinking:BingApiKey");
if (!string.IsNullOrEmpty(bingApiKey))
{
var bingConnector = new BingConnector(bingApiKey);
var bing = new WebSearchEnginePlugin(bingConnector);
kernel.ImportPluginFromObject(bing, "bing");
}
var googleApiKey = _configuration.GetValue<string>("Thinking:GoogleApiKey");
var googleCx = _configuration.GetValue<string>("Thinking:GoogleCx");
if (!string.IsNullOrEmpty(googleApiKey) && !string.IsNullOrEmpty(googleCx))
{
var googleConnector = new GoogleConnector(
apiKey: googleApiKey,
searchEngineId: googleCx);
var google = new WebSearchEnginePlugin(googleConnector);
kernel.ImportPluginFromObject(google, "google");
}
}
public Kernel? GetKernel(string? serviceId = null)
{
serviceId ??= _defaultServiceId;
return _kernels.GetValueOrDefault(serviceId);
}
public string GetServiceId(string? serviceId = null)
{
return serviceId ?? _defaultServiceId;
}
public IEnumerable<string> GetAvailableServices()
{
return _kernels.Keys;
}
public IEnumerable<ThoughtServiceModel> GetAvailableServicesInfo()
{
return _serviceModels.Values;
}
public ThoughtServiceModel? GetServiceInfo(string? serviceId)
{
serviceId ??= _defaultServiceId;
return _serviceModels.GetValueOrDefault(serviceId);
}
public string GetDefaultServiceId()
{
return _defaultServiceId;
}
public PromptExecutionSettings CreatePromptExecutionSettings(string? serviceId = null)
{
serviceId ??= _defaultServiceId;
var providerType = _serviceProviders.GetValueOrDefault(serviceId);
return providerType switch
{
"ollama" => new OllamaPromptExecutionSettings
{
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false)
},
"deepseek" => new OpenAIPromptExecutionSettings
{
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false), ModelId = serviceId
},
_ => throw new InvalidOperationException("Unknown provider for service: " + serviceId)
};
}
}
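
A hedged sketch of how GetKernel and CreatePromptExecutionSettings pair up once a ThoughtProvider has been resolved from DI (the registration itself is not part of this hunk). Since the settings disable auto invocation, a real caller would handle tool calls manually, as the controller earlier in this diff does.

using DysonNetwork.Insight.Thought;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

static async Task<string> AskAsync(ThoughtProvider provider, string prompt, string? serviceId = null)
{
    serviceId = provider.GetServiceId(serviceId);                      // falls back to the default service
    var kernel = provider.GetKernel(serviceId)
                 ?? throw new InvalidOperationException($"No kernel configured for '{serviceId}'.");
    var settings = provider.CreatePromptExecutionSettings(serviceId);  // auto tool choice, manual invocation

    var chat = kernel.GetRequiredService<IChatCompletionService>();
    var history = new ChatHistory();
    history.AddUserMessage(prompt);

    // With autoInvoke disabled, the reply may carry tool calls instead of text.
    var reply = await chat.GetChatMessageContentAsync(history, settings, kernel);
    return reply.Content ?? string.Empty;
}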

View File

@@ -1,269 +0,0 @@
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Microsoft.EntityFrameworkCore;
using PaymentService = DysonNetwork.Shared.Proto.PaymentService;
using TransactionType = DysonNetwork.Shared.Proto.TransactionType;
using WalletService = DysonNetwork.Shared.Proto.WalletService;
namespace DysonNetwork.Insight.Thought;
public class ThoughtService(
AppDatabase db,
ICacheService cache,
PaymentService.PaymentServiceClient paymentService
)
{
public async Task<SnThinkingSequence?> GetOrCreateSequenceAsync(
Guid accountId,
Guid? sequenceId,
string? topic = null
)
{
if (sequenceId.HasValue)
{
var seq = await db.ThinkingSequences.FindAsync(sequenceId.Value);
if (seq == null || seq.AccountId != accountId)
return null;
return seq;
}
else
{
var seq = new SnThinkingSequence { AccountId = accountId, Topic = topic };
db.ThinkingSequences.Add(seq);
await db.SaveChangesAsync();
return seq;
}
}
public async Task<SnThinkingSequence?> GetSequenceAsync(Guid sequenceId)
{
return await db.ThinkingSequences.FindAsync(sequenceId);
}
public async Task UpdateSequenceAsync(SnThinkingSequence sequence)
{
db.ThinkingSequences.Update(sequence);
await db.SaveChangesAsync();
}
public async Task<SnThinkingThought> SaveThoughtAsync(
SnThinkingSequence sequence,
List<SnThinkingMessagePart> parts,
ThinkingThoughtRole role,
string? model = null
)
{
// Approximate token count (1 token ≈ 4 characters for GPT-like models)
var totalChars = parts.Sum(part =>
(part.Type == ThinkingMessagePartType.Text ? part.Text?.Length ?? 0 : 0) +
(part.Type == ThinkingMessagePartType.FunctionCall ? part.FunctionCall?.Arguments.Length ?? 0 : 0)
);
var tokenCount = totalChars / 4;
var thought = new SnThinkingThought
{
SequenceId = sequence.Id,
Parts = parts,
Role = role,
TokenCount = tokenCount,
ModelName = model,
};
db.ThinkingThoughts.Add(thought);
// Update sequence total tokens only for assistant responses
if (role == ThinkingThoughtRole.Assistant)
sequence.TotalToken += tokenCount;
await db.SaveChangesAsync();
// Invalidate cache for this sequence's thoughts
await cache.RemoveGroupAsync($"sequence:{sequence.Id}");
return thought;
}
public async Task<List<SnThinkingThought>> GetPreviousThoughtsAsync(SnThinkingSequence sequence)
{
var cacheKey = $"thoughts:{sequence.Id}";
var (found, cachedThoughts) = await cache.GetAsyncWithStatus<List<SnThinkingThought>>(
cacheKey
);
if (found && cachedThoughts != null)
{
return cachedThoughts;
}
var thoughts = await db
.ThinkingThoughts.Where(t => t.SequenceId == sequence.Id)
.OrderByDescending(t => t.CreatedAt)
.ToListAsync();
// Cache for 10 minutes
await cache.SetWithGroupsAsync(
cacheKey,
thoughts,
[$"sequence:{sequence.Id}"],
TimeSpan.FromMinutes(10)
);
return thoughts;
}
public async Task<(int total, List<SnThinkingSequence> sequences)> ListSequencesAsync(
Guid accountId,
int offset,
int take
)
{
var query = db.ThinkingSequences.Where(s => s.AccountId == accountId);
var totalCount = await query.CountAsync();
var sequences = await query
.OrderByDescending(s => s.CreatedAt)
.Skip(offset)
.Take(take)
.ToListAsync();
return (totalCount, sequences);
}
public async Task SettleThoughtBills(ILogger logger)
{
var sequences = await db
.ThinkingSequences.Where(s => s.PaidToken < s.TotalToken)
.ToListAsync();
if (sequences.Count == 0)
{
logger.LogInformation("No unpaid sequences found.");
return;
}
// Group by account
var groupedByAccount = sequences.GroupBy(s => s.AccountId);
foreach (var accountGroup in groupedByAccount)
{
var accountId = accountGroup.Key;
if (await db.UnpaidAccounts.AnyAsync(u => u.AccountId == accountId))
{
logger.LogWarning("Skipping billing for marked account {accountId}", accountId);
continue;
}
var totalUnpaidTokens = accountGroup.Sum(s => s.TotalToken - s.PaidToken);
var cost = (long)Math.Ceiling(totalUnpaidTokens / 10.0);
if (cost == 0)
continue;
try
{
var date = DateTime.Now.ToString("yyyy-MM-dd");
await paymentService.CreateTransactionWithAccountAsync(
new CreateTransactionWithAccountRequest
{
PayerAccountId = accountId.ToString(),
Currency = WalletCurrency.SourcePoint,
Amount = cost.ToString(),
Remarks = $"Wage for SN-chan on {date}",
Type = TransactionType.System,
}
);
// Mark all sequences for this account as paid
foreach (var sequence in accountGroup)
sequence.PaidToken = sequence.TotalToken;
logger.LogInformation(
"Billed {cost} points for account {accountId}",
cost,
accountId
);
}
catch (Exception ex)
{
logger.LogError(ex, "Error billing for account {accountId}", accountId);
if (!await db.UnpaidAccounts.AnyAsync(u => u.AccountId == accountId))
{
db.UnpaidAccounts.Add(new SnUnpaidAccount { AccountId = accountId, MarkedAt = DateTime.UtcNow });
}
}
}
await db.SaveChangesAsync();
}
public async Task<(bool success, long cost)> RetryBillingForAccountAsync(Guid accountId, ILogger logger)
{
var isMarked = await db.UnpaidAccounts.FirstOrDefaultAsync(u => u.AccountId == accountId);
if (isMarked == null)
{
logger.LogInformation("Account {accountId} is not marked for unpaid bills.", accountId);
return (true, 0);
}
var sequences = await db
.ThinkingSequences.Where(s => s.AccountId == accountId && s.PaidToken < s.TotalToken)
.ToListAsync();
if (!sequences.Any())
{
logger.LogInformation("No unpaid sequences found for account {accountId}. Unmarking.", accountId);
db.UnpaidAccounts.Remove(isMarked);
await db.SaveChangesAsync();
return (true, 0);
}
var totalUnpaidTokens = sequences.Sum(s => s.TotalToken - s.PaidToken);
var cost = (long)Math.Ceiling(totalUnpaidTokens / 10.0);
if (cost == 0)
{
logger.LogInformation("Unpaid tokens for {accountId} resulted in zero cost. Marking as paid and unmarking.", accountId);
foreach (var sequence in sequences)
{
sequence.PaidToken = sequence.TotalToken;
}
db.UnpaidAccounts.Remove(isMarked);
await db.SaveChangesAsync();
return (true, 0);
}
try
{
var date = DateTime.Now.ToString("yyyy-MM-dd");
await paymentService.CreateTransactionWithAccountAsync(
new CreateTransactionWithAccountRequest
{
PayerAccountId = accountId.ToString(),
Currency = WalletCurrency.SourcePoint,
Amount = cost.ToString(),
Remarks = $"Wage for SN-chan on {date} (Retry)",
Type = TransactionType.System,
}
);
foreach (var sequence in sequences)
{
sequence.PaidToken = sequence.TotalToken;
}
db.UnpaidAccounts.Remove(isMarked);
logger.LogInformation(
"Successfully billed {cost} points for account {accountId} on retry.",
cost,
accountId
);
await db.SaveChangesAsync();
return (true, cost);
}
catch (Exception ex)
{
logger.LogError(ex, "Error retrying billing for account {accountId}", accountId);
return (false, cost);
}
}
}
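
A quick check of the billing arithmetic used in both settlement paths above (10 tokens per source point, rounded up); the token figure is illustrative only.

var totalUnpaidTokens = 2_345L;                            // illustrative figure
var cost = (long)Math.Ceiling(totalUnpaidTokens / 10.0);   // ceil(234.5) = 235 source points
Console.WriteLine(cost);                                   // 235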

View File

@@ -1,11 +0,0 @@
using Quartz;
namespace DysonNetwork.Insight.Thought;
public class TokenBillingJob(ThoughtService thoughtService, ILogger<TokenBillingJob> logger) : IJob
{
public async Task Execute(IJobExecutionContext context)
{
await thoughtService.SettleThoughtBills(logger);
}
}
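
The job above only delegates to SettleThoughtBills; a minimal registration sketch follows, assuming Quartz hosted-service wiring and a nightly schedule that are not shown in this diff.

using DysonNetwork.Insight.Thought;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Quartz;

// Assumed wiring; the service's real Program.cs is not part of this diff, and the cron is a guess.
var builder = Host.CreateApplicationBuilder(args);
// ThoughtService and its dependencies would also need to be registered for the job to construct.
builder.Services.AddQuartz(q =>
{
    var jobKey = new JobKey(nameof(TokenBillingJob));
    q.AddJob<TokenBillingJob>(o => o.WithIdentity(jobKey));
    q.AddTrigger(t => t
        .ForJob(jobKey)
        .WithCronSchedule("0 0 3 * * ?"));                  // assumed: run daily at 03:00
});
builder.Services.AddQuartzHostedService(o => o.WaitForJobsToComplete = true);
await builder.Build().RunAsync();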

View File

@@ -1,44 +0,0 @@
{
"Debug": true,
"BaseUrl": "http://localhost:5071",
"SiteUrl": "https://solian.app",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"ConnectionStrings": {
"App": "Host=localhost;Port=5432;Database=dyson_insight;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60"
},
"KnownProxies": [
"127.0.0.1",
"::1"
],
"Etcd": {
"Insecure": true
},
"Cache": {
"Serializer": "MessagePack"
},
"Thinking": {
"DefaultService": "deepseek-chat",
"Services": {
"deepseek-chat": {
"Provider": "deepseek",
"Model": "deepseek-chat",
"ApiKey": "sk-",
"BillingMultiplier": 1.0,
"PerkLevel": 0
},
"deepseek-reasoner": {
"Provider": "deepseek",
"Model": "deepseek-reasoner",
"ApiKey": "sk-",
"BillingMultiplier": 1.5,
"PerkLevel": 1
}
}
}
}

View File

@@ -1,5 +1,4 @@
/wwwroot/dist/
**/bin/
**/obj/
**/node_modules/
Mailart/
**/node_modules/

View File

@@ -1,14 +1,10 @@
using System.ComponentModel.DataAnnotations;
using DysonNetwork.Pass.Affiliation;
using DysonNetwork.Pass.Auth;
using DysonNetwork.Pass.Credit;
using DysonNetwork.Pass.Permission;
using DysonNetwork.Pass.Wallet;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.GeoIp;
using DysonNetwork.Shared.Http;
using DysonNetwork.Shared.Models;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using NodaTime;
@@ -24,7 +20,6 @@ public class AccountController(
SubscriptionService subscriptions,
AccountEventService events,
SocialCreditService socialCreditService,
AffiliationSpellService ars,
GeoIpService geo
) : ControllerBase
{
@@ -37,7 +32,7 @@ public class AccountController(
.Include(e => e.Badges)
.Include(e => e.Profile)
.Include(e => e.Contacts.Where(c => c.IsPublic))
.Where(a => EF.Functions.Like(a.Name, name))
.Where(a => a.Name == name)
.FirstOrDefaultAsync();
if (account is null) return NotFound(ApiError.NotFound(name, traceId: HttpContext.TraceIdentifier));
@@ -106,52 +101,6 @@ public class AccountController(
[MaxLength(32)] public string Language { get; set; } = "en-us";
[Required] public string CaptchaToken { get; set; } = string.Empty;
public string? AffiliationSpell { get; set; }
}
public class AccountCreateValidateRequest
{
[MinLength(2)]
[MaxLength(256)]
[RegularExpression(@"^[A-Za-z0-9_-]+$",
ErrorMessage = "Name can only contain letters, numbers, underscores, and hyphens.")
]
public string? Name { get; set; }
[EmailAddress]
[RegularExpression(@"^[^+]+@[^@]+\.[^@]+$", ErrorMessage = "Email address cannot contain '+' symbol.")]
[MaxLength(1024)]
public string? Email { get; set; }
public string? AffiliationSpell { get; set; }
}
[HttpPost("validate")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult<string>> ValidateCreateAccountRequest(
[FromBody] AccountCreateValidateRequest request)
{
if (request.Name is not null)
{
if (await accounts.CheckAccountNameHasTaken(request.Name))
return BadRequest("Account name has already been taken.");
}
if (request.Email is not null)
{
if (await accounts.CheckEmailHasBeenUsed(request.Email))
return BadRequest("Email has already been used.");
}
if (request.AffiliationSpell is not null)
{
if (!await ars.CheckAffiliationSpellHasTaken(request.AffiliationSpell))
return BadRequest("No affiliation spell has been found.");
}
return Ok("Everything seems good.");
}
[HttpPost]
@@ -317,24 +266,4 @@ public class AccountController(
.Take(take)
.ToListAsync();
}
[HttpPost("credits/validate")]
[Authorize]
[AskPermission("credits.validate.perform")]
public async Task<IActionResult> PerformSocialCreditValidation()
{
await socialCreditService.ValidateSocialCredits();
return Ok();
}
[HttpDelete("{name}")]
[Authorize]
[AskPermission("accounts.deletion")]
public async Task<IActionResult> AdminDeleteAccount(string name)
{
var account = await accounts.LookupAccount(name);
if (account is null) return NotFound();
await accounts.DeleteAccount(account);
return Ok();
}
}

View File

@@ -1,7 +1,6 @@
using System.ComponentModel.DataAnnotations;
using DysonNetwork.Pass.Permission;
using DysonNetwork.Pass.Wallet;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Http;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
@@ -83,7 +82,7 @@ public class AccountCurrentController(
[MaxLength(4096)] public string? Bio { get; set; }
public Shared.Models.UsernameColor? UsernameColor { get; set; }
public Instant? Birthday { get; set; }
public List<SnProfileLink>? Links { get; set; }
public List<ProfileLink>? Links { get; set; }
[MaxLength(32)] public string? PictureId { get; set; }
[MaxLength(32)] public string? BackgroundId { get; set; }
@@ -195,7 +194,7 @@ public class AccountCurrentController(
}
[HttpPatch("statuses")]
[AskPermission("accounts.statuses.update")]
[RequiredPermission("global", "accounts.statuses.update")]
public async Task<ActionResult<SnAccountStatus>> UpdateStatus([FromBody] AccountController.StatusRequest request)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser) return Unauthorized();
@@ -229,7 +228,7 @@ public class AccountCurrentController(
}
[HttpPost("statuses")]
[AskPermission("accounts.statuses.create")]
[RequiredPermission("global", "accounts.statuses.create")]
public async Task<ActionResult<SnAccountStatus>> CreateStatus([FromBody] AccountController.StatusRequest request)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser) return Unauthorized();
@@ -560,7 +559,7 @@ public class AccountCurrentController(
[HttpGet("devices")]
[Authorize]
public async Task<ActionResult<List<SnAuthClientWithSessions>>> GetDevices()
public async Task<ActionResult<List<SnAuthClientWithChallenge>>> GetDevices()
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser ||
HttpContext.Items["CurrentSession"] is not SnAuthSession currentSession) return Unauthorized();
@@ -571,41 +570,18 @@ public class AccountCurrentController(
.Where(device => device.AccountId == currentUser.Id)
.ToListAsync();
var sessionDevices = devices.ConvertAll(SnAuthClientWithSessions.FromClient).ToList();
var clientIds = sessionDevices.Select(x => x.Id).ToList();
var challengeDevices = devices.Select(SnAuthClientWithChallenge.FromClient).ToList();
var deviceIds = challengeDevices.Select(x => x.Id).ToList();
var authSessions = await db.AuthSessions
.Where(c => c.ClientId != null && clientIds.Contains(c.ClientId.Value))
.GroupBy(c => c.ClientId!.Value)
.ToDictionaryAsync(c => c.Key, c => c.ToList());
foreach (var dev in sessionDevices)
if (authSessions.TryGetValue(dev.Id, out var challenge))
dev.Sessions = challenge;
var authChallenges = await db.AuthChallenges
.Where(c => c.ClientId != null && deviceIds.Contains(c.ClientId.Value))
.GroupBy(c => c.ClientId)
.ToDictionaryAsync(c => c.Key!.Value, c => c.ToList());
foreach (var challengeDevice in challengeDevices)
if (authChallenges.TryGetValue(challengeDevice.Id, out var challenge))
challengeDevice.Challenges = challenge;
return Ok(sessionDevices);
}
[HttpGet("challenges")]
[Authorize]
public async Task<ActionResult<List<SnAuthChallenge>>> GetChallenges(
[FromQuery] int take = 20,
[FromQuery] int offset = 0
)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser) return Unauthorized();
var query = db.AuthChallenges
.Where(challenge => challenge.AccountId == currentUser.Id)
.OrderByDescending(c => c.CreatedAt);
var total = await query.CountAsync();
Response.Headers.Append("X-Total", total.ToString());
var challenges = await query
.Skip(offset)
.Take(take)
.ToListAsync();
return Ok(challenges);
return Ok(challengeDevices);
}
[HttpGet("sessions")]
@@ -619,8 +595,8 @@ public class AccountCurrentController(
HttpContext.Items["CurrentSession"] is not SnAuthSession currentSession) return Unauthorized();
var query = db.AuthSessions
.OrderByDescending(x => x.LastGrantedAt)
.Include(session => session.Account)
.Include(session => session.Challenge)
.Where(session => session.Account.Id == currentUser.Id);
var total = await query.CountAsync();
@@ -628,6 +604,7 @@ public class AccountCurrentController(
Response.Headers.Append("X-Auth-Session", currentSession.Id.ToString());
var sessions = await query
.OrderByDescending(x => x.LastGrantedAt)
.Skip(offset)
.Take(take)
.ToListAsync();
@@ -711,7 +688,7 @@ public class AccountCurrentController(
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser ||
HttpContext.Items["CurrentSession"] is not SnAuthSession currentSession) return Unauthorized();
var device = await db.AuthClients.FirstOrDefaultAsync(d => d.Id == currentSession.ClientId);
var device = await db.AuthClients.FirstOrDefaultAsync(d => d.Id == currentSession.Challenge.ClientId);
if (device is null) return NotFound();
try

View File

@@ -7,6 +7,7 @@ using DysonNetwork.Shared.Stream;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Localization;
using NATS.Client.Core;
using NATS.Net;
using NodaTime;
using NodaTime.Extensions;
@@ -25,9 +26,8 @@ public class AccountEventService(
{
private static readonly Random Random = new();
private const string StatusCacheKey = "account:status:";
private const string ActivityCacheKey = "account:activities:";
public async Task<bool> GetAccountIsConnected(Guid userId)
private async Task<bool> GetAccountIsConnected(Guid userId)
{
var resp = await pusher.GetWebsocketConnectionStatusAsync(
new GetWebsocketConnectionStatusRequest { UserId = userId.ToString() }
@@ -35,28 +35,12 @@ public class AccountEventService(
return resp.IsConnected;
}
public async Task<Dictionary<string, bool>> GetAccountIsConnectedBatch(List<Guid> userIds)
{
var req = new GetWebsocketConnectionStatusBatchRequest();
req.UsersId.AddRange(userIds.Select(u => u.ToString()));
var resp = await pusher.GetWebsocketConnectionStatusBatchAsync(
req
);
return resp.IsConnected.ToDictionary();
}
public void PurgeStatusCache(Guid userId)
{
var cacheKey = $"{StatusCacheKey}{userId}";
cache.RemoveAsync(cacheKey);
}
public void PurgeActivityCache(Guid userId)
{
var cacheKey = $"{ActivityCacheKey}{userId}";
cache.RemoveAsync(cacheKey);
}
private async Task BroadcastStatusUpdate(SnAccountStatus status)
{
await nats.PublishAsync(
@@ -137,7 +121,7 @@ public class AccountEventService(
}
}
if (cacheMissUserIds.Count == 0) return results;
if (cacheMissUserIds.Count != 0)
{
var now = SystemClock.Instance.GetCurrentInstant();
var statusesFromDb = await db.AccountStatuses
@@ -160,7 +144,7 @@ public class AccountEventService(
}
var usersWithoutStatus = cacheMissUserIds.Except(foundUserIds).ToList();
if (usersWithoutStatus.Count == 0) return results;
if (usersWithoutStatus.Any())
{
foreach (var userId in usersWithoutStatus)
{
@@ -287,7 +271,7 @@ public class AccountEventService(
return backdatedCheckInMonths < 4;
}
private const string CheckInLockKey = "checkin:lock:";
public const string CheckInLockKey = "checkin:lock:";
public async Task<SnCheckInResult> CheckInDaily(SnAccount user, Instant? backdated = null)
{
@@ -307,90 +291,47 @@ public class AccountEventService(
// Now try to acquire the lock properly
await using var lockObj =
await cache.AcquireLockAsync(lockKey, TimeSpan.FromMinutes(1), TimeSpan.FromSeconds(5)) ??
throw new InvalidOperationException("Check-in was in progress.");
await cache.AcquireLockAsync(lockKey, TimeSpan.FromMinutes(1), TimeSpan.FromSeconds(5)) ?? throw new InvalidOperationException("Check-in was in progress.");
var cultureInfo = new CultureInfo(user.Language, false);
CultureInfo.CurrentCulture = cultureInfo;
CultureInfo.CurrentUICulture = cultureInfo;
var accountProfile = await db.AccountProfiles
// Generate 2 positive tips
var positiveIndices = Enumerable.Range(1, FortuneTipCount)
.OrderBy(_ => Random.Next())
.Take(2)
.ToList();
var tips = positiveIndices.Select(index => new CheckInFortuneTip
{
IsPositive = true,
Title = localizer[$"FortuneTipPositiveTitle_{index}"].Value,
Content = localizer[$"FortuneTipPositiveContent_{index}"].Value
}).ToList();
// Generate 2 negative tips
var negativeIndices = Enumerable.Range(1, FortuneTipCount)
.Except(positiveIndices)
.OrderBy(_ => Random.Next())
.Take(2)
.ToList();
tips.AddRange(negativeIndices.Select(index => new CheckInFortuneTip
{
IsPositive = false,
Title = localizer[$"FortuneTipNegativeTitle_{index}"].Value,
Content = localizer[$"FortuneTipNegativeContent_{index}"].Value
}));
// The last level (Special) is excluded from the random roll; it is assigned separately.
var checkInLevel = (CheckInResultLevel)Random.Next(Enum.GetValues<CheckInResultLevel>().Length - 1);
var accountBirthday = await db.AccountProfiles
.Where(x => x.AccountId == user.Id)
.Select(x => new { x.Birthday, x.TimeZone })
.Select(x => x.Birthday)
.FirstOrDefaultAsync();
var accountBirthday = accountProfile?.Birthday;
var now = SystemClock.Instance.GetCurrentInstant();
var userTimeZone = DateTimeZone.Utc;
if (!string.IsNullOrEmpty(accountProfile?.TimeZone))
{
userTimeZone = DateTimeZoneProviders.Tzdb.GetZoneOrNull(accountProfile.TimeZone) ?? DateTimeZone.Utc;
}
var todayInUserTz = now.InZone(userTimeZone).Date;
var birthdayDate = accountBirthday?.InZone(userTimeZone).Date;
var isBirthday = birthdayDate.HasValue &&
birthdayDate.Value.Month == todayInUserTz.Month &&
birthdayDate.Value.Day == todayInUserTz.Day;
List<CheckInFortuneTip> tips;
CheckInResultLevel checkInLevel;
if (isBirthday)
{
// Skip random logic and tips generation for birthday
var now = SystemClock.Instance.GetCurrentInstant().InUtc().Date;
if (accountBirthday.HasValue && accountBirthday.Value.InUtc().Date == now)
checkInLevel = CheckInResultLevel.Special;
tips = [
new CheckInFortuneTip()
{
IsPositive = true,
Title = localizer["FortuneTipSpecialTitle_Birthday"].Value,
Content = localizer["FortuneTipSpecialContent_Birthday", user.Nick].Value,
}
];
}
else
{
// Generate 2 positive tips
var positiveIndices = Enumerable.Range(1, FortuneTipCount)
.OrderBy(_ => Random.Next())
.Take(2)
.ToList();
tips = positiveIndices.Select(index => new CheckInFortuneTip
{
IsPositive = true,
Title = localizer[$"FortuneTipPositiveTitle_{index}"].Value,
Content = localizer[$"FortuneTipPositiveContent_{index}"].Value
}).ToList();
// Generate 2 negative tips
var negativeIndices = Enumerable.Range(1, FortuneTipCount)
.Except(positiveIndices)
.OrderBy(_ => Random.Next())
.Take(2)
.ToList();
tips.AddRange(negativeIndices.Select(index => new CheckInFortuneTip
{
IsPositive = false,
Title = localizer[$"FortuneTipNegativeTitle_{index}"].Value,
Content = localizer[$"FortuneTipNegativeContent_{index}"].Value
}));
// The last level (Special) is excluded from the random roll; it is assigned separately.
// Use weighted random distribution to make all levels reasonably achievable
// Weights: Worst: 10%, Worse: 20%, Normal: 40%, Better: 20%, Best: 10%
var randomValue = Random.Next(100);
checkInLevel = randomValue switch
{
< 10 => CheckInResultLevel.Worst, // 0-9: 10% chance
< 30 => CheckInResultLevel.Worse, // 10-29: 20% chance
< 70 => CheckInResultLevel.Normal, // 30-69: 40% chance
< 90 => CheckInResultLevel.Better, // 70-89: 20% chance
_ => CheckInResultLevel.Best // 90-99: 10% chance
};
}
var result = new SnCheckInResult
{
@@ -476,8 +417,7 @@ public class AccountEventService(
.ToDictionary(g => g.Key, g => g.ToList());
var checkInByDate = checkIn
.GroupBy(c => c.CreatedAt.InUtc().Date)
.ToDictionary(g => g.Key, g => g.OrderByDescending(c => c.CreatedAt).First());
.ToDictionary(c => c.CreatedAt.InUtc().Date);
return dates.Select(date =>
{
@@ -490,222 +430,4 @@ public class AccountEventService(
};
}).ToList();
}
public async Task<List<SnPresenceActivity>> GetActiveActivities(Guid userId)
{
var cacheKey = $"{ActivityCacheKey}{userId}";
var cachedActivities = await cache.GetAsync<List<SnPresenceActivity>>(cacheKey);
if (cachedActivities != null)
{
return cachedActivities;
}
var now = SystemClock.Instance.GetCurrentInstant();
var activities = await db.PresenceActivities
.Where(e => e.AccountId == userId && e.LeaseExpiresAt > now && e.DeletedAt == null)
.ToListAsync();
await cache.SetWithGroupsAsync(cacheKey, activities, [$"{AccountService.AccountCachePrefix}{userId}"],
TimeSpan.FromMinutes(1));
return activities;
}
public async Task<Dictionary<Guid, List<SnPresenceActivity>>> GetActiveActivitiesBatch(List<Guid> userIds)
{
var results = new Dictionary<Guid, List<SnPresenceActivity>>();
var cacheMissUserIds = new List<Guid>();
// Try to get activities from cache first
foreach (var userId in userIds)
{
var cacheKey = $"{ActivityCacheKey}{userId}";
var cachedActivities = await cache.GetAsync<List<SnPresenceActivity>>(cacheKey);
if (cachedActivities != null)
{
results[userId] = cachedActivities;
}
else
{
cacheMissUserIds.Add(userId);
}
}
// If all activities were found in cache, return early
if (cacheMissUserIds.Count == 0) return results;
// Fetch remaining activities from database in a single query
var now = SystemClock.Instance.GetCurrentInstant();
var activitiesFromDb = await db.PresenceActivities
.Where(e => cacheMissUserIds.Contains(e.AccountId) && e.LeaseExpiresAt > now && e.DeletedAt == null)
.ToListAsync();
// Group activities by user ID and update cache
var activitiesByUser = activitiesFromDb
.GroupBy(a => a.AccountId)
.ToDictionary(g => g.Key, g => g.ToList());
foreach (var userId in cacheMissUserIds)
{
var userActivities = activitiesByUser.GetValueOrDefault(userId, new List<SnPresenceActivity>());
results[userId] = userActivities;
// Update cache for this user
var cacheKey = $"{ActivityCacheKey}{userId}";
await cache.SetWithGroupsAsync(cacheKey, userActivities, [$"{AccountService.AccountCachePrefix}{userId}"],
TimeSpan.FromMinutes(1));
}
return results;
}
public async Task<(List<SnPresenceActivity>, int)> GetAllActivities(Guid userId, int offset = 0, int take = 20)
{
var query = db.PresenceActivities
.Where(e => e.AccountId == userId && e.DeletedAt == null);
var totalCount = await query.CountAsync();
var activities = await query
.OrderByDescending(e => e.CreatedAt)
.Skip(offset)
.Take(take)
.ToListAsync();
return (activities, totalCount);
}
public async Task<SnPresenceActivity> SetActivity(SnPresenceActivity activity, int leaseMinutes)
{
if (leaseMinutes is < 1 or > 60)
throw new ArgumentException("Lease minutes must be between 1 and 60");
var now = SystemClock.Instance.GetCurrentInstant();
activity.LeaseMinutes = leaseMinutes;
activity.LeaseExpiresAt = now + Duration.FromMinutes(leaseMinutes);
db.PresenceActivities.Add(activity);
await db.SaveChangesAsync();
PurgeActivityCache(activity.AccountId);
return activity;
}
public async Task<SnPresenceActivity> UpdateActivity(Guid activityId, Guid userId,
Action<SnPresenceActivity> update, int? leaseMinutes = null)
{
var activity = await db.PresenceActivities.FindAsync(activityId);
if (activity == null)
throw new KeyNotFoundException("Activity not found");
if (activity.AccountId != userId)
throw new UnauthorizedAccessException("Activity does not belong to user");
if (leaseMinutes.HasValue)
{
if (leaseMinutes.Value < 1 || leaseMinutes.Value > 60)
throw new ArgumentException("Lease minutes must be between 1 and 60");
activity.LeaseMinutes = leaseMinutes.Value;
activity.LeaseExpiresAt =
SystemClock.Instance.GetCurrentInstant() + Duration.FromMinutes(leaseMinutes.Value);
}
update(activity);
await db.SaveChangesAsync();
PurgeActivityCache(activity.AccountId);
return activity;
}
public async Task<SnPresenceActivity?> UpdateActivityByManualId(
string manualId,
Guid userId,
Action<SnPresenceActivity> update,
int? leaseMinutes = null
)
{
var now = SystemClock.Instance.GetCurrentInstant();
var activity = await db.PresenceActivities.FirstOrDefaultAsync(e =>
e.ManualId == manualId && e.AccountId == userId && e.LeaseExpiresAt > now && e.DeletedAt == null
);
if (activity == null)
return null;
if (leaseMinutes.HasValue)
{
if (leaseMinutes.Value is < 1 or > 60)
throw new ArgumentException("Lease minutes must be between 1 and 60");
activity.LeaseMinutes = leaseMinutes.Value;
activity.LeaseExpiresAt =
SystemClock.Instance.GetCurrentInstant() + Duration.FromMinutes(leaseMinutes.Value);
}
update(activity);
await db.SaveChangesAsync();
PurgeActivityCache(activity.AccountId);
return activity;
}
public async Task<bool> DeleteActivityByManualId(string manualId, Guid userId)
{
var now = SystemClock.Instance.GetCurrentInstant();
var activity = await db.PresenceActivities.FirstOrDefaultAsync(e =>
e.ManualId == manualId && e.AccountId == userId && e.LeaseExpiresAt > now && e.DeletedAt == null
);
if (activity == null) return false;
if (activity.LeaseExpiresAt <= now)
{
activity.DeletedAt = now;
}
else
{
activity.LeaseExpiresAt = now;
}
db.Update(activity);
await db.SaveChangesAsync();
PurgeActivityCache(activity.AccountId);
return true;
}
public async Task<bool> DeleteActivity(Guid activityId, Guid userId)
{
var activity = await db.PresenceActivities.FindAsync(activityId);
if (activity == null) return false;
if (activity.AccountId != userId)
throw new UnauthorizedAccessException("Activity does not belong to user");
var now = SystemClock.Instance.GetCurrentInstant();
if (activity.LeaseExpiresAt <= now)
{
activity.DeletedAt = now;
}
else
{
activity.LeaseExpiresAt = now;
}
db.Update(activity);
await db.SaveChangesAsync();
PurgeActivityCache(activity.AccountId);
return true;
}
/// <summary>
/// Gets all user IDs that have Spotify connections
/// </summary>
public async Task<List<Guid>> GetSpotifyConnectedUsersAsync()
{
return await db.AccountConnections
.Where(c => c.Provider == "spotify" && c.AccessToken != null && c.RefreshToken != null)
.Select(c => c.AccountId)
.Distinct()
.ToListAsync();
}
}

View File

@@ -1,11 +1,8 @@
using System.Globalization;
using DysonNetwork.Pass.Affiliation;
using DysonNetwork.Pass.Auth.OpenId;
using DysonNetwork.Pass.Localization;
using DysonNetwork.Pass.Mailer;
using DysonNetwork.Pass.Resources.Emails;
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Stream;
@@ -25,7 +22,6 @@ public class AccountService(
FileService.FileServiceClient files,
FileReferenceService.FileReferenceServiceClient fileRefs,
AccountUsernameService uname,
AffiliationSpellService ars,
EmailService mailer,
RingService.RingServiceClient pusher,
IStringLocalizer<NotificationResource> localizer,
@@ -56,13 +52,11 @@ public class AccountService(
public async Task<SnAccount?> LookupAccount(string probe)
{
var account = await db.Accounts.Where(a => EF.Functions.ILike(a.Name, probe)).FirstOrDefaultAsync();
var account = await db.Accounts.Where(a => a.Name == probe).FirstOrDefaultAsync();
if (account is not null) return account;
var contact = await db.AccountContacts
.Where(c => c.Type == Shared.Models.AccountContactType.Email ||
c.Type == Shared.Models.AccountContactType.PhoneNumber)
.Where(c => EF.Functions.ILike(c.Content, probe))
.Where(c => c.Content == probe)
.Include(c => c.Account)
.FirstOrDefaultAsync();
return contact?.Account;
@@ -85,17 +79,6 @@ public class AccountService(
return profile?.Level;
}
public async Task<bool> CheckAccountNameHasTaken(string name)
{
return await db.Accounts.AnyAsync(a => EF.Functions.ILike(a.Name, name));
}
public async Task<bool> CheckEmailHasBeenUsed(string email)
{
return await db.AccountContacts.AnyAsync(c =>
c.Type == Shared.Models.AccountContactType.Email && EF.Functions.ILike(c.Content, email));
}
public async Task<SnAccount> CreateAccount(
string name,
string nick,
@@ -103,12 +86,12 @@ public class AccountService(
string? password,
string language = "en-US",
string region = "en",
string? affiliationSpell = null,
bool isEmailVerified = false,
bool isActivated = false
)
{
if (await CheckAccountNameHasTaken(name))
var dupeNameCount = await db.Accounts.Where(a => a.Name == name).CountAsync();
if (dupeNameCount > 0)
throw new InvalidOperationException("Account name has already been taken.");
var dupeEmailCount = await db.AccountContacts
@@ -116,7 +99,7 @@ public class AccountService(
).CountAsync();
if (dupeEmailCount > 0)
throw new InvalidOperationException("Account email has already been used.");
var account = new SnAccount
{
Name = name,
@@ -125,7 +108,7 @@ public class AccountService(
Region = region,
Contacts =
[
new SnAccountContact
new()
{
Type = Shared.Models.AccountContactType.Email,
Content = email,
@@ -147,9 +130,6 @@ public class AccountService(
Profile = new SnAccountProfile()
};
if (affiliationSpell is not null)
await ars.CreateAffiliationResult(affiliationSpell, $"account:{account.Id}");
if (isActivated)
{
account.ActivatedAt = SystemClock.Instance.GetCurrentInstant();
@@ -158,7 +138,7 @@ public class AccountService(
{
db.PermissionGroupMembers.Add(new SnPermissionGroupMember
{
Actor = account.Id.ToString(),
Actor = $"user:{account.Id}",
Group = defaultGroup
});
}
@@ -199,7 +179,10 @@ public class AccountService(
displayName,
userInfo.Email,
null,
isEmailVerified: userInfo.EmailVerified
"en-US",
"en",
userInfo.EmailVerified,
userInfo.EmailVerified
);
}
@@ -289,8 +272,7 @@ public class AccountService(
return isExists;
}
public async Task<SnAccountAuthFactor?> CreateAuthFactor(SnAccount account,
Shared.Models.AccountAuthFactorType type, string? secret)
public async Task<SnAccountAuthFactor?> CreateAuthFactor(SnAccount account, Shared.Models.AccountAuthFactorType type, string? secret)
{
SnAccountAuthFactor? factor = null;
switch (type)
@@ -368,8 +350,7 @@ public class AccountService(
public async Task<SnAccountAuthFactor> EnableAuthFactor(SnAccountAuthFactor factor, string? code)
{
if (factor.EnabledAt is not null) throw new ArgumentException("The factor has been enabled.");
if (factor.Type is Shared.Models.AccountAuthFactorType.Password
or Shared.Models.AccountAuthFactorType.TimedCode)
if (factor.Type is Shared.Models.AccountAuthFactorType.Password or Shared.Models.AccountAuthFactorType.TimedCode)
{
if (code is null || !factor.VerifyPassword(code))
throw new InvalidOperationException(
@@ -466,10 +447,10 @@ public class AccountService(
}
await mailer
.SendTemplatedEmailAsync<FactorCodeEmail, VerificationEmailModel>(
.SendTemplatedEmailAsync<Emails.VerificationEmail, VerificationEmailModel>(
account.Nick,
contact.Content,
emailLocalizer["CodeEmailTitle"],
emailLocalizer["VerificationEmail"],
new VerificationEmailModel
{
Name = account.Name,
@@ -525,7 +506,9 @@ public class AccountService(
private async Task<bool> IsDeviceActive(Guid id)
{
return await db.AuthSessions.AnyAsync(s => s.ClientId == id);
return await db.AuthSessions
.Include(s => s.Challenge)
.AnyAsync(s => s.Challenge.ClientId == id);
}
public async Task<SnAuthClient> UpdateDeviceName(SnAccount account, string deviceId, string label)
@@ -544,7 +527,8 @@ public class AccountService(
public async Task DeleteSession(SnAccount account, Guid sessionId)
{
var session = await db.AuthSessions
.Include(s => s.Client)
.Include(s => s.Challenge)
.ThenInclude(s => s.Client)
.Where(s => s.Id == sessionId && s.AccountId == account.Id)
.FirstOrDefaultAsync();
if (session is null) throw new InvalidOperationException("Session was not found.");
@@ -553,11 +537,11 @@ public class AccountService(
db.AuthSessions.Remove(session);
await db.SaveChangesAsync();
if (session.ClientId.HasValue)
if (session.Challenge.ClientId.HasValue)
{
if (!await IsDeviceActive(session.ClientId.Value))
if (!await IsDeviceActive(session.Challenge.ClientId.Value))
await pusher.UnsubscribePushNotificationsAsync(new UnsubscribePushNotificationsRequest()
{ DeviceId = session.Client!.DeviceId }
{ DeviceId = session.Challenge.Client!.DeviceId }
);
}
@@ -578,13 +562,15 @@ public class AccountService(
);
var sessions = await db.AuthSessions
.Where(s => s.ClientId == device.Id && s.AccountId == account.Id)
.Include(s => s.Challenge)
.Where(s => s.Challenge.ClientId == device.Id && s.AccountId == account.Id)
.ToListAsync();
// The current session should be included in the sessions' list
var now = SystemClock.Instance.GetCurrentInstant();
await db.AuthSessions
.Where(s => s.ClientId == device.Id)
.Include(s => s.Challenge)
.Where(s => s.Challenge.ClientId == device.Id)
.ExecuteUpdateAsync(p => p.SetProperty(s => s.DeletedAt, s => now));
db.AuthClients.Remove(device);
@@ -594,8 +580,7 @@ public class AccountService(
await cache.RemoveAsync($"{AuthService.AuthCachePrefix}{item.Id}");
}
public async Task<SnAccountContact> CreateContactMethod(SnAccount account, Shared.Models.AccountContactType type,
string content)
public async Task<SnAccountContact> CreateContactMethod(SnAccount account, Shared.Models.AccountContactType type, string content)
{
var isExists = await db.AccountContacts
.Where(x => x.AccountId == account.Id && x.Type == type && x.Content == content)
@@ -657,8 +642,7 @@ public class AccountService(
}
}
public async Task<SnAccountContact> SetContactMethodPublic(SnAccount account, SnAccountContact contact,
bool isPublic)
public async Task<SnAccountContact> SetContactMethodPublic(SnAccount account, SnAccountContact contact, bool isPublic)
{
contact.IsPublic = isPublic;
db.AccountContacts.Update(contact);

View File

@@ -12,11 +12,13 @@ public class AccountServiceGrpc(
AccountEventService accountEvents,
RelationshipService relationships,
SubscriptionService subscriptions,
IClock clock,
ILogger<AccountServiceGrpc> logger
)
: Shared.Proto.AccountService.AccountServiceBase
{
private readonly AppDatabase _db = db ?? throw new ArgumentNullException(nameof(db));
private readonly IClock _clock = clock ?? throw new ArgumentNullException(nameof(clock));
private readonly ILogger<AccountServiceGrpc>
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
@@ -24,16 +26,15 @@ public class AccountServiceGrpc(
public override async Task<Shared.Proto.Account> GetAccount(GetAccountRequest request, ServerCallContext context)
{
if (!Guid.TryParse(request.Id, out var accountId))
throw new RpcException(new Status(StatusCode.InvalidArgument, "Invalid account ID format"));
throw new RpcException(new Grpc.Core.Status(StatusCode.InvalidArgument, "Invalid account ID format"));
var account = await _db.Accounts
.AsNoTracking()
.Include(a => a.Profile)
.Include(a => a.Contacts.Where(c => c.IsPublic))
.FirstOrDefaultAsync(a => a.Id == accountId);
if (account == null)
throw new RpcException(new Status(StatusCode.NotFound, $"Account {request.Id} not found"));
throw new RpcException(new Grpc.Core.Status(StatusCode.NotFound, $"Account {request.Id} not found"));
var perk = await subscriptions.GetPerkSubscriptionAsync(account.Id);
account.PerkSubscription = perk?.ToReference();
@@ -265,7 +266,7 @@ public class AccountServiceGrpc(
public override async Task<BoolValue> HasRelationship(GetRelationshipRequest request, ServerCallContext context)
{
bool hasRelationship;
var hasRelationship = false;
if (!request.HasStatus)
hasRelationship = await relationships.HasExistingRelationship(
Guid.Parse(request.AccountId),

View File

@@ -6,7 +6,7 @@ namespace DysonNetwork.Pass.Account;
public class ActionLogService(GeoIpService geo, FlushBufferService fbs)
{
public void CreateActionLog(Guid accountId, string action, Dictionary<string, object> meta)
public void CreateActionLog(Guid accountId, string action, Dictionary<string, object?> meta)
{
var log = new SnActionLog
{
@@ -42,4 +42,4 @@ public class ActionLogService(GeoIpService geo, FlushBufferService fbs)
fbs.Enqueue(log);
}
}
}

View File

@@ -32,8 +32,8 @@ public class ActionLogServiceGrpc : Shared.Proto.ActionLogService.ActionLogServi
try
{
var meta = request.Meta
?.Select(x => new KeyValuePair<string, object>(x.Key, GrpcTypeHelper.ConvertValueToObject(x.Value)))
.ToDictionary() ?? new Dictionary<string, object>();
?.Select(x => new KeyValuePair<string, object?>(x.Key, GrpcTypeHelper.ConvertValueToObject(x.Value)))
.ToDictionary() ?? new Dictionary<string, object?>();
_actionLogService.CreateActionLog(
accountId,
@@ -41,7 +41,6 @@ public class ActionLogServiceGrpc : Shared.Proto.ActionLogService.ActionLogServi
meta
);
await Task.CompletedTask;
return new CreateActionLogResponse();
}
catch (Exception ex)
@@ -112,4 +111,4 @@ public class ActionLogServiceGrpc : Shared.Proto.ActionLogService.ActionLogServi
throw new RpcException(new Grpc.Core.Status(Grpc.Core.StatusCode.Internal, "Failed to list action logs"));
}
}
}
}

View File

@@ -1,55 +0,0 @@
using DysonNetwork.Shared.Models;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DysonNetwork.Pass.Account;
[ApiController]
[Route("/api/friends")]
public class FriendsController(AppDatabase db, RelationshipService rels, AccountEventService events) : ControllerBase
{
public class FriendOverviewItem
{
public SnAccount Account { get; set; } = null!;
public SnAccountStatus Status { get; set; } = null!;
public List<SnPresenceActivity> Activities { get; set; } = [];
}
[HttpGet("overview")]
[Authorize]
public async Task<ActionResult<List<FriendOverviewItem>>> GetOverview([FromQuery] bool includeOffline = false)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser) return Unauthorized();
var friendIds = await rels.ListAccountFriends(currentUser);
// Fetch data in parallel using batch methods for better performance
var accountsTask = db.Accounts
.Where(a => friendIds.Contains(a.Id))
.Include(a => a.Profile)
.ToListAsync();
var statusesTask = events.GetStatuses(friendIds);
var activitiesTask = events.GetActiveActivitiesBatch(friendIds);
// Wait for all data to be fetched
await Task.WhenAll(accountsTask, statusesTask, activitiesTask);
var accounts = accountsTask.Result;
var statuses = statusesTask.Result;
var activities = activitiesTask.Result;
var result = (from account in accounts
let status = statuses.GetValueOrDefault(account.Id)
where includeOffline || status is { IsOnline: true }
let accountActivities = activities.GetValueOrDefault(account.Id, new List<SnPresenceActivity>())
select new FriendOverviewItem
{
Account = account, Status = status ?? new SnAccountStatus { AccountId = account.Id },
Activities = accountActivities
}).ToList();
return Ok(result);
}
}

View File

@@ -1,5 +1,3 @@
using DysonNetwork.Shared.Models;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
@@ -9,31 +7,17 @@ namespace DysonNetwork.Pass.Account;
[Route("/api/spells")]
public class MagicSpellController(AppDatabase db, MagicSpellService sp) : ControllerBase
{
[HttpPost("activation/resend")]
[Authorize]
public async Task<ActionResult> ResendActivationMagicSpell()
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser) return Unauthorized();
var spell = await db.MagicSpells.FirstOrDefaultAsync(s =>
s.Type == MagicSpellType.AccountActivation && s.AccountId == currentUser.Id);
if (spell is null) return BadRequest("Unable to find activation magic spell.");
await sp.NotifyMagicSpell(spell, true);
return Ok();
}
[HttpPost("{spellId:guid}/resend")]
public async Task<ActionResult> ResendMagicSpell(Guid spellId)
{
var spell = db.MagicSpells.FirstOrDefault(x => x.Id == spellId);
if (spell == null)
return NotFound();
await sp.NotifyMagicSpell(spell, true);
return Ok();
}
[HttpGet("{spellWord}")]
public async Task<ActionResult> GetMagicSpell(string spellWord)
{
@@ -54,8 +38,7 @@ public class MagicSpellController(AppDatabase db, MagicSpellService sp) : Contro
}
[HttpPost("{spellWord}/apply")]
public async Task<ActionResult> ApplyMagicSpell([FromRoute] string spellWord,
[FromBody] MagicSpellApplyRequest? request)
public async Task<ActionResult> ApplyMagicSpell([FromRoute] string spellWord, [FromBody] MagicSpellApplyRequest? request)
{
var word = Uri.UnescapeDataString(spellWord);
var spell = await db.MagicSpells
@@ -76,7 +59,6 @@ public class MagicSpellController(AppDatabase db, MagicSpellService sp) : Contro
{
return BadRequest(ex.Message);
}
return Ok();
}
}

View File

@@ -1,7 +1,7 @@
using System.Security.Cryptography;
using System.Text.Json;
using DysonNetwork.Pass.Emails;
using DysonNetwork.Pass.Mailer;
using DysonNetwork.Pass.Resources.Emails;
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
@@ -26,7 +26,6 @@ public class MagicSpellService(
Dictionary<string, object> meta,
Instant? expiredAt = null,
Instant? affectedAt = null,
string? code = null,
bool preventRepeat = false
)
{
@@ -42,7 +41,7 @@ public class MagicSpellService(
return existingSpell;
}
var spellWord = code ?? _GenerateRandomString(128);
var spellWord = _GenerateRandomString(128);
var spell = new SnMagicSpell
{
Spell = spellWord,
@@ -80,7 +79,7 @@ public class MagicSpellService(
.FirstOrDefaultAsync();
if (contact is null) throw new ArgumentException("Account has no contact method that can use");
var link = $"{configuration.GetValue<string>("SiteUrl")}/spells/{Uri.EscapeDataString(spell.Spell)}";
var link = $"{configuration.GetValue<string>("BaseUrl")}/spells/{Uri.EscapeDataString(spell.Spell)}";
logger.LogInformation("Sending magic spell... {Link}", link);
@@ -95,10 +94,10 @@ public class MagicSpellService(
switch (spell.Type)
{
case MagicSpellType.AccountActivation:
await email.SendTemplatedEmailAsync<RegistrationConfirmEmail, LandingEmailModel>(
await email.SendTemplatedEmailAsync<LandingEmail, LandingEmailModel>(
contact.Account.Nick,
contact.Content,
localizer["RegConfirmTitle"],
localizer["EmailLandingTitle"],
new LandingEmailModel
{
Name = contact.Account.Name,
@@ -110,7 +109,7 @@ public class MagicSpellService(
await email.SendTemplatedEmailAsync<AccountDeletionEmail, AccountDeletionEmailModel>(
contact.Account.Nick,
contact.Content,
localizer["AccountDeletionTitle"],
localizer["EmailAccountDeletionTitle"],
new AccountDeletionEmailModel
{
Name = contact.Account.Name,
@@ -122,7 +121,7 @@ public class MagicSpellService(
await email.SendTemplatedEmailAsync<PasswordResetEmail, PasswordResetEmailModel>(
contact.Account.Nick,
contact.Content,
localizer["PasswordResetTitle"],
localizer["EmailPasswordResetTitle"],
new PasswordResetEmailModel
{
Name = contact.Account.Name,
@@ -136,7 +135,7 @@ public class MagicSpellService(
await email.SendTemplatedEmailAsync<ContactVerificationEmail, ContactVerificationEmailModel>(
contact.Account.Nick,
contactMethod!,
localizer["ContractVerificationTitle"],
localizer["EmailContactVerificationTitle"],
new ContactVerificationEmailModel
{
Name = contact.Account.Name,
@@ -194,7 +193,7 @@ public class MagicSpellService(
{
db.PermissionGroupMembers.Add(new SnPermissionGroupMember
{
Actor = account.Id.ToString(),
Actor = $"user:{account.Id}",
Group = defaultGroup
});
}
@@ -252,15 +251,12 @@ public class MagicSpellService(
private static string _GenerateRandomString(int length)
{
const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
var result = new char[length];
using var rng = RandomNumberGenerator.Create();
for (var i = 0; i < length; i++)
{
var bytes = new byte[1];
rng.GetBytes(bytes);
result[i] = chars[bytes[0] % chars.Length];
}
return new string(result);
var randomBytes = new byte[length];
rng.GetBytes(randomBytes);
var base64String = Convert.ToBase64String(randomBytes);
return base64String[..length];
}
}
}
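For reference, a minimal standalone sketch of the two spell-word generation strategies that appear in the hunk above, assuming only the .NET base class library; note that the Base64 variant can emit '+' and '/' in addition to alphanumerics, which is why the spell word is URI-escaped before being placed in the link.

using System;
using System.Security.Cryptography;

internal static class SpellWordSketch
{
    // Earlier approach: pick each character from a fixed alphanumeric alphabet.
    public static string FromAlphabet(int length)
    {
        const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        var buffer = new byte[length];
        RandomNumberGenerator.Fill(buffer);
        var result = new char[length];
        for (var i = 0; i < length; i++)
            result[i] = chars[buffer[i] % chars.Length];
        return new string(result);
    }

    // Newer approach: Base64-encode random bytes and truncate. Base64 of n bytes is
    // 4 * ceil(n / 3) characters, so the slice never runs out of range, but the output
    // is no longer purely alphanumeric.
    public static string FromBase64(int length)
    {
        var randomBytes = new byte[length];
        RandomNumberGenerator.Fill(randomBytes);
        return Convert.ToBase64String(randomBytes)[..length];
    }
}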

View File

@@ -1,260 +0,0 @@
using DysonNetwork.Shared.Models;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DysonNetwork.Pass.Account;
/// <summary>
/// Controller for managing user presence activities with lease-based expiration.
/// Supports both user-defined manual IDs and autogenerated GUIDs for activity management.
/// </summary>
[ApiController]
[Route("/api/activities")]
public class PresenceActivityController(AppDatabase db, AccountEventService service)
: ControllerBase
{
/// <summary>
/// Retrieves active (non-expired) presence activities for the authenticated user.
/// Optionally includes expired activities if includeExpired is true.
/// </summary>
/// <param name="includeExpired">Whether to include expired activities</param>
/// <param name="offset">The number of activities to skip for pagination</param>
/// <param name="take">The maximum number of activities to return</param>
/// <returns>List of presence activities</returns>
[HttpGet]
[ProducesResponseType<List<SnPresenceActivity>>(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status401Unauthorized)]
public async Task<ActionResult<List<SnPresenceActivity>>> GetActivities(
[FromQuery] bool includeExpired = false,
[FromQuery] int offset = 0,
[FromQuery] int take = 20
)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser)
return Unauthorized();
List<SnPresenceActivity> activities;
if (includeExpired)
{
(activities, var total) = await service.GetAllActivities(currentUser.Id, offset, take);
Response.Headers["X-Total"] = total.ToString();
}
else
{
activities = await service.GetActiveActivities(currentUser.Id);
}
return Ok(activities);
}
/// <summary>
/// Retrieves active presence activities for any user account (admin/debugging endpoint).
/// </summary>
/// <returns>List of active presence activities</returns>
[HttpGet("{identifier}")]
[ProducesResponseType<List<SnPresenceActivity>>(StatusCodes.Status200OK)]
public async Task<ActionResult<List<SnPresenceActivity>>> GetActivitiesByAccountId(
string identifier
)
{
var account = Guid.TryParse(identifier, out var identifierGuid)
? await db.Accounts.FirstOrDefaultAsync(a => a.Id == identifierGuid)
: await db.Accounts.FirstOrDefaultAsync(a => a.Name == identifier);
if (account is null)
return NotFound();
var activities = await service.GetActiveActivities(account.Id);
return Ok(activities);
}
/// <summary>
/// Creates or updates a presence activity with lease expiration.
/// If an activity with the same 'manualId' exists, it will be updated.
/// Otherwise, a new activity will be created.
/// </summary>
/// <param name="request">Activity creation or update parameters</param>
/// <returns>The created or updated activity</returns>
[HttpPost]
[ProducesResponseType<SnPresenceActivity>(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status401Unauthorized)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
public async Task<ActionResult<SnPresenceActivity>> SetActivity(
[FromBody] SetActivityRequest request
)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser)
return Unauthorized();
if (!string.IsNullOrWhiteSpace(request.ManualId))
{
var result = await service.UpdateActivityByManualId(
request.ManualId,
currentUser.Id,
activity =>
{
if (request.Type.HasValue)
activity.Type = request.Type.Value;
activity.Title = request.Title;
activity.Subtitle = request.Subtitle;
activity.Caption = request.Caption;
activity.LargeImage = request.LargeImage;
activity.SmallImage = request.SmallImage;
activity.TitleUrl = request.TitleUrl;
activity.SubtitleUrl = request.SubtitleUrl;
activity.Meta = request.Meta;
},
request.LeaseMinutes
);
if (result != null)
{
return Ok(result);
}
}
if (!request.Type.HasValue)
return BadRequest("Type is required when creating a new activity");
var newActivity = new SnPresenceActivity
{
AccountId = currentUser.Id,
Type = request.Type.Value,
ManualId = request.ManualId,
Title = request.Title,
Subtitle = request.Subtitle,
Caption = request.Caption,
LargeImage = request.LargeImage,
SmallImage = request.SmallImage,
TitleUrl = request.TitleUrl,
SubtitleUrl = request.SubtitleUrl,
Meta = request.Meta
};
var createResult = await service.SetActivity(newActivity, request.LeaseMinutes);
return Ok(createResult);
}
/// <summary>
/// Updates an existing presence activity using either its GUID or manual ID.
/// </summary>
/// <param name="id">System-generated GUID of the activity (optional)</param>
/// <param name="request">Update parameters (only provided fields are updated)</param>
/// <returns>The updated activity</returns>
/// <remarks>One of 'id' or 'manualId' must be provided and non-empty.</remarks>
[HttpPut("{id:guid}")]
[ProducesResponseType<SnPresenceActivity>(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status401Unauthorized)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
public async Task<ActionResult<SnPresenceActivity>> UpdateActivity(
[FromRoute] Guid id,
[FromBody] SetActivityRequest request
)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser)
return Unauthorized();
var result = await service.UpdateActivity(
id,
currentUser.Id,
activity =>
{
if (request.Type.HasValue)
activity.Type = request.Type.Value;
if (request.Title != null)
activity.Title = request.Title;
if (request.Subtitle != null)
activity.Subtitle = request.Subtitle;
if (request.Caption != null)
activity.Caption = request.Caption;
if (request.ManualId != null)
activity.ManualId = request.ManualId;
if (request.Meta != null)
activity.Meta = request.Meta;
},
request.LeaseMinutes
);
return Ok(result);
}
/// <summary>
/// Deletes a presence activity using either its GUID or manual ID.
/// </summary>
/// <param name="id">System-generated GUID of the activity (optional)</param>
/// <param name="manualId">User-defined manual ID of the activity (optional)</param>
/// <returns>NoContent on success</returns>
/// <remarks>One of 'id' or 'manualId' must be provided and non-empty. Soft-deletes the activity.</remarks>
[HttpDelete]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status401Unauthorized)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
public async Task<IActionResult> DeleteActivityById(
[FromQuery] string? id,
[FromQuery] string? manualId
)
{
if (HttpContext.Items["CurrentUser"] is not SnAccount currentUser)
return Unauthorized();
if (!string.IsNullOrWhiteSpace(manualId))
{
var deleted = await service.DeleteActivityByManualId(manualId, currentUser.Id);
if (!deleted)
return NotFound();
return NoContent();
}
if (string.IsNullOrWhiteSpace(id) || !Guid.TryParse(id, out var activityGuid))
return BadRequest("Either 'id' (GUID) or 'manualId' must be provided");
{
var deleted = await service.DeleteActivity(activityGuid, currentUser.Id);
if (!deleted)
return NotFound();
return NoContent();
}
}
/// <summary>
/// Request model for creating a new presence activity.
/// </summary>
public class SetActivityRequest
{
/// <summary>The type of presence activity (e.g., Gaming, Music, Workout)</summary>
public PresenceType? Type { get; set; }
/// <summary>User-defined identifier for the activity (optional, for easy reference)</summary>
public string? ManualId { get; set; }
/// <summary>Main title of the activity</summary>
public string? Title { get; set; }
/// <summary>Secondary subtitle of the activity</summary>
public string? Subtitle { get; set; }
/// <summary>Additional caption/description</summary>
public string? Caption { get; set; }
/// <summary>Large image URL or base64 string</summary>
public string? LargeImage { get; set; }
/// <summary>Small image URL or base64 string</summary>
public string? SmallImage { get; set; }
/// <summary>Title URL</summary>
public string? TitleUrl { get; set; }
/// <summary>Subtitle URL</summary>
public string? SubtitleUrl { get; set; }
/// <summary>Extensible metadata dictionary for custom developer data</summary>
public Dictionary<string, object>? Meta { get; set; }
/// <summary>Lease duration in minutes (1-60, default: 5)</summary>
public int LeaseMinutes { get; set; } = 5;
}
}
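A hedged client-side sketch of the create-or-update flow the controller above describes; the base address, token, and field values are placeholders rather than anything defined in this repository.

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;

var http = new HttpClient { BaseAddress = new Uri("https://pass.example.com") };
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<token>");

// POST creates the activity, or updates it in place when an activity with the same
// manualId already exists; the lease keeps it alive for five minutes unless renewed.
var response = await http.PostAsJsonAsync("/api/activities", new
{
    type = 0,               // PresenceType value (placeholder); required only when creating
    manualId = "my-player",
    title = "Song Title",
    subtitle = "Artist Name",
    leaseMinutes = 5
});
response.EnsureSuccessStatusCode();

// Remove it again through the manualId route handled by DeleteActivityById.
await http.DeleteAsync("/api/activities?manualId=my-player");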

View File

@@ -1,21 +0,0 @@
using DysonNetwork.Shared.Models;
namespace DysonNetwork.Pass.Account.Presences;
/// <summary>
/// Interface for presence services that can update user presence activities
/// </summary>
public interface IPresenceService
{
/// <summary>
/// The unique identifier for this presence service (e.g., "spotify", "discord")
/// </summary>
string ServiceId { get; }
/// <summary>
/// Updates presence activities for the specified users
/// </summary>
/// <param name="userIds">The user IDs to update presence for</param>
/// <returns>A task representing the asynchronous operation</returns>
Task UpdatePresencesAsync(IEnumerable<Guid> userIds);
}
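To make the contract concrete, a minimal illustrative implementation is shown below; the class is hypothetical and not part of this change set.

// Relies on implicit usings for System, System.Collections.Generic and System.Threading.Tasks.
namespace DysonNetwork.Pass.Account.Presences;

/// <summary>
/// Hypothetical no-op provider; useful as a template for adding a new presence source.
/// </summary>
public sealed class NullPresenceService : IPresenceService
{
    public string ServiceId => "null";

    // Nothing to fetch, so complete immediately without touching any activities.
    public Task UpdatePresencesAsync(IEnumerable<Guid> userIds) => Task.CompletedTask;
}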

View File

@@ -1,112 +0,0 @@
using Quartz;
namespace DysonNetwork.Pass.Account.Presences;
public class PresenceUpdateJob(
IEnumerable<IPresenceService> presenceServices,
AccountEventService accountEventService,
ILogger<PresenceUpdateJob> logger
) : IJob
{
public async Task Execute(IJobExecutionContext context)
{
// Get the stage parameter from the job data
var stageString = context.MergedJobDataMap.GetString("stage");
if (!Enum.TryParse<PresenceUpdateStage>(stageString, out var stage))
{
logger.LogError("Invalid or missing stage parameter: {Stage}", stageString);
return;
}
logger.LogInformation("Starting presence updates for stage: {Stage}", stage);
try
{
// Get users to update based on the stage
var userIds = await GetUsersForStageAsync(stage);
if (userIds.Count == 0)
{
logger.LogInformation("No users found for stage {Stage}", stage);
return;
}
logger.LogInformation("Found {UserCount} users for stage {Stage}", userIds.Count, stage);
// Update presence for each service
foreach (var presenceService in presenceServices)
{
try
{
await presenceService.UpdatePresencesAsync(userIds);
logger.LogInformation("Updated {ServiceId} presences for {UserCount} users in stage {Stage}",
presenceService.ServiceId, userIds.Count, stage);
}
catch (Exception ex)
{
logger.LogError(ex, "Error updating {ServiceId} presences for stage {Stage}",
presenceService.ServiceId, stage);
}
}
logger.LogInformation("Presence updates completed for stage {Stage}", stage);
}
catch (Exception ex)
{
logger.LogError(ex, "Error occurred during presence updates for stage {Stage}", stage);
}
}
private async Task<List<Guid>> GetUsersForStageAsync(PresenceUpdateStage stage)
{
// Get all users with presence connections
var allUserIds = await GetAllUsersWithPresenceConnectionsAsync();
if (!allUserIds.Any())
{
return new List<Guid>();
}
// Batch fetch online status for all users
var onlineStatuses = await accountEventService.GetAccountIsConnectedBatch(allUserIds);
var filteredUserIds = new List<Guid>();
foreach (var userId in allUserIds)
{
var userIdString = userId.ToString();
var isOnline = onlineStatuses.GetValueOrDefault(userIdString, false);
var activeActivities = await accountEventService.GetActiveActivities(userId);
var hasActivePresence = activeActivities.Any();
var shouldInclude = stage switch
{
PresenceUpdateStage.Active => isOnline && hasActivePresence,
PresenceUpdateStage.Maybe => isOnline && !hasActivePresence,
PresenceUpdateStage.Cold => !isOnline,
_ => false
};
if (shouldInclude)
{
filteredUserIds.Add(userId);
}
}
return filteredUserIds;
}
private async Task<List<Guid>> GetAllUsersWithPresenceConnectionsAsync()
{
// Ideally this would return every user who has a connection to any presence service.
// For now it only covers Spotify connections; extend it as more presence providers are added.
var spotifyUsers = await accountEventService.GetSpotifyConnectedUsersAsync();
return spotifyUsers;
}
}
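The job reads its stage from the merged job data map, so each stage needs its own job and trigger pair; a hedged Quartz registration sketch follows, in which the job keys and intervals are illustrative assumptions rather than values from this change set.

using Quartz;

// Illustrative wiring inside Program.cs; 'builder' is the WebApplicationBuilder.
builder.Services.AddQuartz(q =>
{
    foreach (var (stage, interval) in new[]
             {
                 (PresenceUpdateStage.Active, TimeSpan.FromMinutes(1)),
                 (PresenceUpdateStage.Maybe,  TimeSpan.FromMinutes(5)),
                 (PresenceUpdateStage.Cold,   TimeSpan.FromMinutes(30)),
             })
    {
        var jobKey = new JobKey($"PresenceUpdate-{stage}");
        q.AddJob<PresenceUpdateJob>(job => job
            .WithIdentity(jobKey)
            .UsingJobData("stage", stage.ToString()));
        q.AddTrigger(trigger => trigger
            .ForJob(jobKey)
            .WithSimpleSchedule(s => s.WithInterval(interval).RepeatForever()));
    }
});
builder.Services.AddQuartzHostedService();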

View File

@@ -1,19 +0,0 @@
namespace DysonNetwork.Pass.Account.Presences;
public enum PresenceUpdateStage
{
/// <summary>
/// Active users - online and have active presence activities
/// </summary>
Active,
/// <summary>
/// Maybe active users - online but no active presence activities
/// </summary>
Maybe,
/// <summary>
/// Cold users - offline users
/// </summary>
Cold
}

View File

@@ -1,228 +0,0 @@
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using NodaTime;
using SpotifyAPI.Web;
namespace DysonNetwork.Pass.Account.Presences;
public class SpotifyPresenceService(
AppDatabase db,
Auth.OpenId.SpotifyOidcService spotifyService,
AccountEventService accountEventService,
ILogger<SpotifyPresenceService> logger
) : IPresenceService
{
/// <inheritdoc />
public string ServiceId => "spotify";
/// <inheritdoc />
public async Task UpdatePresencesAsync(IEnumerable<Guid> userIds)
{
var userIdList = userIds.ToList();
var userConnections = await db.AccountConnections
.Where(c => userIdList.Contains(c.AccountId) && c.Provider == "spotify" && c.AccessToken != null && c.RefreshToken != null)
.Include(c => c.Account)
.ToListAsync();
foreach (var connection in userConnections)
await UpdateSpotifyPresenceAsync(connection.Account);
}
/// <summary>
/// Updates the Spotify presence activity for a specific user
/// </summary>
private async Task UpdateSpotifyPresenceAsync(SnAccount account)
{
var connection = await db.AccountConnections
.FirstOrDefaultAsync(c => c.AccountId == account.Id && c.Provider == "spotify");
if (connection?.RefreshToken == null)
{
// No Spotify connection, remove any existing Spotify presence
await RemoveSpotifyPresenceAsync(account.Id);
return;
}
try
{
// Ensure we have a valid access token
var validToken = await spotifyService.GetValidAccessTokenAsync(connection.RefreshToken, connection.AccessToken);
if (string.IsNullOrEmpty(validToken))
{
// Couldn't get a valid token, remove presence
await RemoveSpotifyPresenceAsync(account.Id);
return;
}
// Create Spotify client with the valid token
var spotify = new SpotifyClient(validToken);
// Get currently playing track
var currentlyPlaying = await spotify.Player.GetCurrentlyPlaying(new PlayerCurrentlyPlayingRequest());
if (currentlyPlaying?.Item == null || !currentlyPlaying.IsPlaying)
{
// Nothing playing or paused, remove the presence
await RemoveSpotifyPresenceAsync(account.Id);
return;
}
var presenceActivity = ParseCurrentlyPlayingToPresenceActivity(account.Id, currentlyPlaying);
// Try to update existing activity first
var updatedActivity = await accountEventService.UpdateActivityByManualId(
"spotify",
account.Id,
UpdateActivityWithPresenceData,
5
);
// If update failed (no existing activity), create a new one
if (updatedActivity == null)
await accountEventService.SetActivity(presenceActivity, 5);
// Local function to avoid capturing external variables in lambda
void UpdateActivityWithPresenceData(SnPresenceActivity activity)
{
activity.Type = PresenceType.Music;
activity.Title = presenceActivity.Title;
activity.Subtitle = presenceActivity.Subtitle;
activity.Caption = presenceActivity.Caption;
activity.LargeImage = presenceActivity.LargeImage;
activity.SmallImage = presenceActivity.SmallImage;
activity.TitleUrl = presenceActivity.TitleUrl;
activity.SubtitleUrl = presenceActivity.SubtitleUrl;
activity.Meta = presenceActivity.Meta;
}
}
catch (Exception ex)
{
// On error, remove the presence to avoid stale data
await RemoveSpotifyPresenceAsync(account.Id);
// In a real implementation, you might want to log the error
logger.LogError(ex, "Failed to update Spotify presence for user {UserId}", account.Id);
}
}
/// <summary>
/// Removes the Spotify presence activity for a user
/// </summary>
private async Task RemoveSpotifyPresenceAsync(Guid accountId)
{
await accountEventService.UpdateActivityByManualId(
"spotify",
accountId,
activity =>
{
// Mark it for immediate expiration
activity.LeaseExpiresAt = SystemClock.Instance.GetCurrentInstant();
}
);
}
private static SnPresenceActivity ParseCurrentlyPlayingToPresenceActivity(Guid accountId, CurrentlyPlaying currentlyPlaying)
{
// Cast the item to FullTrack (it should be a track for music presence)
if (currentlyPlaying.Item is not FullTrack track)
{
throw new InvalidOperationException("Currently playing item is not a track");
}
// Get track name
var trackName = track.Name ?? "";
if (string.IsNullOrEmpty(trackName))
{
throw new InvalidOperationException("Track name not available");
}
// Get artists
var artists = track.Artists ?? new List<SimpleArtist>();
var artistNames = artists.Select(a => a.Name).Where(name => !string.IsNullOrEmpty(name)).ToList();
var artistsString = string.Join(", ", artistNames);
// Get artist URLs
var artistsUrls = artists
.Where(a => a.ExternalUrls?.ContainsKey("spotify") == true)
.Select(a => a.ExternalUrls!["spotify"])
.ToList();
// Get album info
var album = track.Album;
var albumName = album?.Name ?? "";
string? albumImageUrl = null;
// Get largest album image
if (album?.Images != null && album.Images.Count > 0)
{
albumImageUrl = album.Images
.OrderByDescending(img => img.Width)
.FirstOrDefault()?.Url;
}
// Get track URL
string? trackUrl = null;
if (track.ExternalUrls?.ContainsKey("spotify") == true)
{
trackUrl = track.ExternalUrls["spotify"];
}
// Get progress and duration
var progressMs = currentlyPlaying.ProgressMs ?? 0;
var durationMs = track.DurationMs;
var progressPercent = durationMs > 0 ? (double)progressMs / durationMs * 100 : 0;
// Get context info
var contextType = currentlyPlaying.Context?.Type;
string? contextUrl = null;
if (currentlyPlaying.Context?.ExternalUrls?.ContainsKey("spotify") == true)
{
contextUrl = currentlyPlaying.Context.ExternalUrls["spotify"];
}
// Build metadata
var meta = new Dictionary<string, object>
{
["track_duration_ms"] = durationMs,
["progress_ms"] = progressMs,
["progress_percent"] = progressPercent,
["spotify_track_url"] = trackUrl,
["updated_at"] = SystemClock.Instance.GetCurrentInstant()
};
// Add track ID
if (!string.IsNullOrEmpty(track.Id))
meta["track_id"] = track.Id;
// Add album ID
if (!string.IsNullOrEmpty(album?.Id))
meta["album_id"] = album.Id;
// Add artist IDs
var artistIds = artists.Select(a => a.Id).Where(id => !string.IsNullOrEmpty(id)).ToArray();
meta["artist_ids"] = artistIds;
// Add context info
if (!string.IsNullOrEmpty(contextType))
meta["context_type"] = contextType;
if (!string.IsNullOrEmpty(contextUrl))
meta["context_url"] = contextUrl;
// Add track properties
meta["is_explicit"] = track.Explicit;
meta["popularity"] = track.Popularity;
return new SnPresenceActivity
{
AccountId = accountId,
Type = PresenceType.Music,
ManualId = "spotify",
Title = trackName,
Subtitle = artistsString,
Caption = albumName,
LargeImage = albumImageUrl,
TitleUrl = trackUrl,
SubtitleUrl = artistsUrls.FirstOrDefault(),
Meta = meta
};
}
}

View File

@@ -1,250 +0,0 @@
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using NodaTime;
using SteamWebAPI2.Interfaces;
using SteamWebAPI2.Utilities;
namespace DysonNetwork.Pass.Account.Presences;
public class SteamPresenceService(
AppDatabase db,
AccountEventService accountEventService,
ILogger<SteamPresenceService> logger,
IConfiguration configuration
) : IPresenceService
{
/// <inheritdoc />
public string ServiceId => "steam";
/// <inheritdoc />
public async Task UpdatePresencesAsync(IEnumerable<Guid> userIds)
{
var userIdList = userIds.ToList();
var steamConnections = await db.AccountConnections
.Where(c => userIdList.Contains(c.AccountId) && c.Provider == "steam")
.Include(c => c.Account)
.ToListAsync();
if (steamConnections.Count == 0)
return;
// Get Steam API key from configuration
var apiKey = configuration["Oidc:Steam:ApiKey"];
if (string.IsNullOrEmpty(apiKey))
{
logger.LogWarning("Steam API key not configured, skipping presence update for {Count} users", steamConnections.Count);
return;
}
try
{
// Create Steam Web API client
var webInterfaceFactory = new SteamWebInterfaceFactory(apiKey);
var steamUserInterface = webInterfaceFactory.CreateSteamWebInterface<SteamUser>();
// Collect all Steam IDs for batch request
var steamIds = steamConnections
.Select(c => ulong.Parse(c.ProvidedIdentifier))
.ToList();
// Make batch API call (Steam supports up to 100 IDs per request)
var playerSummariesResponse = await steamUserInterface.GetPlayerSummariesAsync(steamIds);
var playerSummaries = playerSummariesResponse?.Data != null
? (IEnumerable<dynamic>)playerSummariesResponse.Data
: new List<dynamic>();
// Create a lookup dictionary for quick access
var playerSummaryLookup = playerSummaries
.ToDictionary(ps => ((dynamic)ps).SteamId.ToString(), ps => ps);
// Process each connection
foreach (var connection in steamConnections)
{
if (playerSummaryLookup.TryGetValue(connection.ProvidedIdentifier, out var playerSummaryData))
{
await UpdateSteamPresenceFromDataAsync(connection.Account, playerSummaryData);
}
else
{
// No data for this user, remove any existing presence
await RemoveSteamPresenceAsync(connection.Account.Id);
}
}
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to update Steam presence for {Count} users", steamConnections.Count);
// On batch error, fall back to individual calls to avoid losing all presence data
foreach (var connection in steamConnections)
{
try
{
await UpdateSteamPresenceAsync(connection.Account, connection.ProvidedIdentifier);
}
catch (Exception individualEx)
{
logger.LogError(individualEx, "Failed to update Steam presence for user {UserId}", connection.Account.Id);
await RemoveSteamPresenceAsync(connection.Account.Id);
}
}
}
}
/// <summary>
/// Updates the Steam presence activity for a specific user using pre-fetched player summary data
/// </summary>
private async Task UpdateSteamPresenceFromDataAsync(SnAccount account, dynamic playerSummaryData)
{
if (!string.IsNullOrEmpty(playerSummaryData.PlayingGameId) && !string.IsNullOrEmpty(playerSummaryData.PlayingGameName))
{
// User is playing a game
var presenceActivity = ParsePlayerSummaryToPresenceActivity(account.Id, playerSummaryData);
// Try to update existing activity first
var updatedActivity = await accountEventService.UpdateActivityByManualId(
"steam",
account.Id,
UpdateActivityWithPresenceData,
10
);
// If update failed (no existing activity), create a new one
if (updatedActivity == null)
await accountEventService.SetActivity(presenceActivity, 10);
// Local function to avoid capturing external variables in lambda
void UpdateActivityWithPresenceData(SnPresenceActivity activity)
{
activity.Type = PresenceType.Gaming;
activity.Title = presenceActivity.Title;
activity.Subtitle = presenceActivity.Subtitle;
activity.Caption = presenceActivity.Caption;
activity.LargeImage = presenceActivity.LargeImage;
activity.SmallImage = presenceActivity.SmallImage;
activity.TitleUrl = presenceActivity.TitleUrl;
activity.SubtitleUrl = presenceActivity.SubtitleUrl;
activity.Meta = presenceActivity.Meta;
}
}
else
{
// User is not playing a game, remove any existing Steam presence
await RemoveSteamPresenceAsync(account.Id);
}
}
/// <summary>
/// Updates the Steam presence activity for a specific user (fallback individual API call)
/// </summary>
private async Task UpdateSteamPresenceAsync(SnAccount account, string steamId)
{
try
{
// Get Steam API key from configuration
var apiKey = configuration["Oidc:Steam:ApiKey"];
if (string.IsNullOrEmpty(apiKey))
{
logger.LogWarning("Steam API key not configured, skipping presence update for user {UserId}", account.Id);
return;
}
// Create Steam Web API client
var webInterfaceFactory = new SteamWebInterfaceFactory(apiKey);
var steamUserInterface = webInterfaceFactory.CreateSteamWebInterface<SteamUser>();
// Get player summary
var playerSummaryResponse = await steamUserInterface.GetPlayerSummaryAsync(ulong.Parse(steamId));
var playerSummaryData = playerSummaryResponse.Data;
if (!string.IsNullOrEmpty(playerSummaryData.PlayingGameId) && !string.IsNullOrEmpty(playerSummaryData.PlayingGameName))
{
// User is playing a game
var presenceActivity = ParsePlayerSummaryToPresenceActivity(account.Id, playerSummaryData);
// Try to update existing activity first
var updatedActivity = await accountEventService.UpdateActivityByManualId(
"steam",
account.Id,
UpdateActivityWithPresenceData,
10
);
// If update failed (no existing activity), create a new one
if (updatedActivity == null)
await accountEventService.SetActivity(presenceActivity, 10);
// Local function to avoid capturing external variables in lambda
void UpdateActivityWithPresenceData(SnPresenceActivity activity)
{
activity.Type = PresenceType.Gaming;
activity.Title = presenceActivity.Title;
activity.Subtitle = presenceActivity.Subtitle;
activity.Caption = presenceActivity.Caption;
activity.LargeImage = presenceActivity.LargeImage;
activity.SmallImage = presenceActivity.SmallImage;
activity.TitleUrl = presenceActivity.TitleUrl;
activity.SubtitleUrl = presenceActivity.SubtitleUrl;
activity.Meta = presenceActivity.Meta;
}
}
else
{
// User is not playing a game, remove any existing Steam presence
await RemoveSteamPresenceAsync(account.Id);
}
}
catch (Exception ex)
{
// On error, remove the presence to avoid stale data
await RemoveSteamPresenceAsync(account.Id);
logger.LogError(ex, "Failed to update Steam presence for user {UserId}", account.Id);
}
}
/// <summary>
/// Removes the Steam presence activity for a user
/// </summary>
private async Task RemoveSteamPresenceAsync(Guid accountId)
{
await accountEventService.UpdateActivityByManualId(
"steam",
accountId,
activity =>
{
// Mark it for immediate expiration
activity.LeaseExpiresAt = SystemClock.Instance.GetCurrentInstant();
}
);
}
private static SnPresenceActivity ParsePlayerSummaryToPresenceActivity(Guid accountId, dynamic playerSummary)
{
var gameName = playerSummary.PlayingGameName ?? "Unknown Game";
var gameId = playerSummary.PlayingGameId?.ToString() ?? "";
// Build metadata
var meta = new Dictionary<string, object>
{
["game_id"] = gameId,
["steam_profile_url"] = $"https://steamcommunity.com/profiles/{playerSummary.SteamId}",
["updated_at"] = SystemClock.Instance.GetCurrentInstant()
};
return new SnPresenceActivity
{
AccountId = accountId,
Type = PresenceType.Gaming,
ManualId = "steam",
Title = gameName,
Subtitle = "Playing on Steam",
Caption = null, // Could be game details if available
LargeImage = null, // Could fetch game icon from Steam API if needed
SmallImage = null,
TitleUrl = $"https://store.steampowered.com/app/{gameId}",
SubtitleUrl = $"https://steamcommunity.com/profiles/{playerSummary.SteamId}",
Meta = meta
};
}
}
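Both providers only become visible to PresenceUpdateJob once they are registered under the shared interface; the wiring sketch below is an assumption and is not part of this change set.

using DysonNetwork.Pass.Account.Presences;

// Illustrative Program.cs wiring: every provider registered under IPresenceService is
// injected into PresenceUpdateJob as IEnumerable<IPresenceService>.
builder.Services.AddScoped<IPresenceService, SpotifyPresenceService>();
builder.Services.AddScoped<IPresenceService, SteamPresenceService>();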

Some files were not shown because too many files have changed in this diff.