Compare commits
364 commits: 437f49fb20...master
.env.testing.example (new file, 43 lines)
@@ -0,0 +1,43 @@
# ActivityPub Testing Environment Variables

# Solar Network Configuration
SOLAR_DOMAIN=solar.local
SOLAR_PORT=5000
SOLAR_URL=http://solar.local:5000

# Mastodon (Self-Hosted Test Instance)
MASTODON_DOMAIN=mastodon.local
MASTODON_PORT=3001
MASTODON_STREAMING_PORT=4000
MASTODON_URL=http://mastodon.local:3001

# Database
DB_CONNECTION_STRING=Host=localhost;Port=5432;Database=dyson_network;Username=postgres;Password=postgres

# Test Accounts
SOLAR_TEST_USERNAME=solaruser
MASTODON_TEST_USERNAME=testuser
MASTODON_TEST_PASSWORD=TestPassword123!

# ActivityPub Settings
ACTIVITYPUB_DOMAIN=solar.local
ACTIVITYPUB_ENABLE_FEDERATION=true
ACTIVITYPUB_SIGNATURE_ALGORITHM=rsa-sha256

# HTTP Settings
HTTP_TIMEOUT=30
HTTP_MAX_RETRIES=3

# Logging
LOG_LEVEL=Debug
ACTIVITYPUB_LOG_LEVEL=Trace

# Testing
TEST_SKIP_DATABASE_RESET=false
TEST_SKIP_MASTODON_SETUP=false
TEST_AUTO_ACCEPT_FOLLOWS=false

# Development (only in dev environment)
DEV_DISABLE_SIGNATURE_VERIFICATION=false
DEV_LOG_HTTP_BODIES=false
DEV_DISABLE_CORS=false
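A minimal sketch of how a test fixture might consume these values (the class name and defaults are illustrative; only the variable names come from the file above, and it assumes the .env file has already been exported into the process environment by the test runner):

```csharp
using System;

public sealed class ActivityPubTestConfig
{
    // Reads the variables defined in .env.testing.example from the process environment.
    public string SolarUrl { get; } =
        Environment.GetEnvironmentVariable("SOLAR_URL") ?? "http://solar.local:5000";
    public string MastodonUrl { get; } =
        Environment.GetEnvironmentVariable("MASTODON_URL") ?? "http://mastodon.local:3001";
    public string DbConnectionString { get; } =
        Environment.GetEnvironmentVariable("DB_CONNECTION_STRING") ?? string.Empty;
    public bool FederationEnabled { get; } =
        bool.TryParse(Environment.GetEnvironmentVariable("ACTIVITYPUB_ENABLE_FEDERATION"), out var f) && f;
}
```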
.github/workflows/docker-build.yml (vendored, 4 changed lines)
@@ -27,8 +27,8 @@ jobs:
run: |
files="${{ steps.changed-files.outputs.files }}"
matrix="{\"include\":[]}"
services=("Sphere" "Pass" "Ring" "Drive" "Develop" "Gateway" "Insight")
images=("sphere" "pass" "ring" "drive" "develop" "gateway" "insight")
services=("Sphere" "Pass" "Ring" "Drive" "Develop" "Gateway" "Insight" "Zone" "Messager")
images=("sphere" "pass" "ring" "drive" "develop" "gateway" "insight" "zone" "messager")
changed_services=()

for file in $files; do
@@ -1,613 +0,0 @@
# Wallet Funds API Documentation

## Overview

The Wallet Funds API provides red packet functionality for the DysonNetwork platform, allowing users to create and distribute funds among multiple recipients with expiration and claiming mechanisms.

## Authentication

All endpoints require Bearer token authentication:

```
Authorization: Bearer {jwt_token}
```

## Data Types

### Enums

#### FundSplitType
```typescript
enum FundSplitType {
  Even = 0,    // Equal distribution
  Random = 1   // Lucky draw distribution
}
```

#### FundStatus
```typescript
enum FundStatus {
  Created = 0,            // Fund created, waiting for claims
  PartiallyReceived = 1,  // Some recipients claimed
  FullyReceived = 2,      // All recipients claimed
  Expired = 3,            // Fund expired, unclaimed amounts refunded
  Refunded = 4            // Legacy status
}
```

### Request/Response Models

#### CreateFundRequest
```typescript
interface CreateFundRequest {
  recipientAccountIds: string[];  // UUIDs of recipients
  currency: string;               // e.g., "points", "golds"
  totalAmount: number;            // Total amount to distribute
  splitType: FundSplitType;       // Even or Random
  message?: string;               // Optional message
  expirationHours?: number;       // Optional: hours until expiration (default: 24)
  pinCode: string;                // Required: 6-digit PIN code for security
}
```

#### SnWalletFund
```typescript
interface SnWalletFund {
  id: string;                    // UUID
  currency: string;
  totalAmount: number;
  splitType: FundSplitType;
  status: FundStatus;
  message?: string;
  creatorAccountId: string;      // UUID
  creatorAccount: SnAccount;     // Creator account details (includes profile)
  recipients: SnWalletFundRecipient[];
  expiredAt: string;             // ISO 8601 timestamp
  createdAt: string;             // ISO 8601 timestamp
  updatedAt: string;             // ISO 8601 timestamp
}
```

#### SnWalletFundRecipient
```typescript
interface SnWalletFundRecipient {
  id: string;                    // UUID
  fundId: string;                // UUID
  recipientAccountId: string;    // UUID
  recipientAccount: SnAccount;   // Recipient account details (includes profile)
  amount: number;                // Allocated amount
  isReceived: boolean;
  receivedAt?: string;           // ISO 8601 timestamp (if claimed)
  createdAt: string;             // ISO 8601 timestamp
  updatedAt: string;             // ISO 8601 timestamp
}
```

#### SnWalletTransaction
```typescript
interface SnWalletTransaction {
  id: string;                    // UUID
  payerWalletId?: string;        // UUID (null for system transfers)
  payeeWalletId?: string;        // UUID (null for system transfers)
  currency: string;
  amount: number;
  remarks?: string;
  type: TransactionType;
  createdAt: string;             // ISO 8601 timestamp
  updatedAt: string;             // ISO 8601 timestamp
}
```

#### Error Response
```typescript
interface ErrorResponse {
  type: string;       // Error type
  title: string;      // Error title
  status: number;     // HTTP status code
  detail: string;     // Error details
  instance?: string;  // Request instance
}
```

## API Endpoints

### 1. Create Fund

Creates a new fund (red packet) for distribution among recipients.

**Endpoint:** `POST /api/wallets/funds`

**Request Body:** `CreateFundRequest`

**Response:** `SnWalletFund` (201 Created)

**Example Request:**
```bash
curl -X POST "/api/wallets/funds" \
  -H "Authorization: Bearer {token}" \
  -H "Content-Type: application/json" \
  -d '{
    "recipientAccountIds": [
      "550e8400-e29b-41d4-a716-446655440000",
      "550e8400-e29b-41d4-a716-446655440001",
      "550e8400-e29b-41d4-a716-446655440002"
    ],
    "currency": "points",
    "totalAmount": 100.00,
    "splitType": "Even",
    "message": "Happy New Year! 🎉",
    "expirationHours": 48,
    "pinCode": "123456"
  }'
```

**Example Response:**
```json
{
  "id": "550e8400-e29b-41d4-a716-446655440003",
  "currency": "points",
  "totalAmount": 100.00,
  "splitType": 0,
  "status": 0,
  "message": "Happy New Year! 🎉",
  "creatorAccountId": "550e8400-e29b-41d4-a716-446655440004",
  "creatorAccount": {
    "id": "550e8400-e29b-41d4-a716-446655440004",
    "username": "creator_user"
  },
  "recipients": [
    {
      "id": "550e8400-e29b-41d4-a716-446655440005",
      "fundId": "550e8400-e29b-41d4-a716-446655440003",
      "recipientAccountId": "550e8400-e29b-41d4-a716-446655440000",
      "amount": 33.34,
      "isReceived": false,
      "createdAt": "2025-10-03T22:00:00Z",
      "updatedAt": "2025-10-03T22:00:00Z"
    },
    {
      "id": "550e8400-e29b-41d4-a716-446655440006",
      "fundId": "550e8400-e29b-41d4-a716-446655440003",
      "recipientAccountId": "550e8400-e29b-41d4-a716-446655440001",
      "amount": 33.33,
      "isReceived": false,
      "createdAt": "2025-10-03T22:00:00Z",
      "updatedAt": "2025-10-03T22:00:00Z"
    },
    {
      "id": "550e8400-e29b-41d4-a716-446655440007",
      "fundId": "550e8400-e29b-41d4-a716-446655440003",
      "recipientAccountId": "550e8400-e29b-41d4-a716-446655440002",
      "amount": 33.33,
      "isReceived": false,
      "createdAt": "2025-10-03T22:00:00Z",
      "updatedAt": "2025-10-03T22:00:00Z"
    }
  ],
  "expiredAt": "2025-10-05T22:00:00Z",
  "createdAt": "2025-10-03T22:00:00Z",
  "updatedAt": "2025-10-03T22:00:00Z"
}
```
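For reference, the 33.34 / 33.33 / 33.33 allocation in the response above is what a straightforward even split with two-decimal rounding produces; a small sketch of that logic (illustrative only, not the server implementation):

```csharp
using System;
using System.Collections.Generic;

public static class FundSplit
{
    public static List<decimal> SplitEvenly(decimal totalAmount, int recipientCount)
    {
        // Round each share down to whole cents, then give the leftover cents to the
        // first recipients so the parts sum back to the total (100.00 / 3 -> 33.34, 33.33, 33.33).
        var baseShare = Math.Floor(totalAmount / recipientCount * 100m) / 100m;
        var leftoverCents = (int)((totalAmount - baseShare * recipientCount) * 100m);
        var shares = new List<decimal>();
        for (var i = 0; i < recipientCount; i++)
            shares.Add(baseShare + (i < leftoverCents ? 0.01m : 0m));
        return shares;
    }
}
```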
**Error Responses:**
- `400 Bad Request`: Invalid parameters, insufficient funds, invalid recipients
- `401 Unauthorized`: Missing or invalid authentication
- `403 Forbidden`: Invalid PIN code
- `422 Unprocessable Entity`: Business logic violations

---

### 2. Get Funds

Retrieves funds that the authenticated user is involved in (as creator or recipient).

**Endpoint:** `GET /api/wallets/funds`

**Query Parameters:**
- `offset` (number, optional): Pagination offset (default: 0)
- `take` (number, optional): Number of items to return (default: 20, max: 100)
- `status` (FundStatus, optional): Filter by fund status

**Response:** `SnWalletFund[]` (200 OK)

**Headers:**
- `X-Total`: Total number of funds matching the criteria

**Example Request:**
```bash
curl -X GET "/api/wallets/funds?offset=0&take=10&status=0" \
  -H "Authorization: Bearer {token}"
```

**Example Response:**
```json
[
  {
    "id": "550e8400-e29b-41d4-a716-446655440003",
    "currency": "points",
    "totalAmount": 100.00,
    "splitType": 0,
    "status": 0,
    "message": "Happy New Year! 🎉",
    "creatorAccountId": "550e8400-e29b-41d4-a716-446655440004",
    "creatorAccount": {
      "id": "550e8400-e29b-41d4-a716-446655440004",
      "username": "creator_user"
    },
    "recipients": [
      {
        "id": "550e8400-e29b-41d4-a716-446655440005",
        "fundId": "550e8400-e29b-41d4-a716-446655440003",
        "recipientAccountId": "550e8400-e29b-41d4-a716-446655440000",
        "amount": 33.34,
        "isReceived": false
      }
    ],
    "expiredAt": "2025-10-05T22:00:00Z",
    "createdAt": "2025-10-03T22:00:00Z",
    "updatedAt": "2025-10-03T22:00:00Z"
  }
]
```

**Error Responses:**
- `401 Unauthorized`: Missing or invalid authentication

---

### 3. Get Fund

Retrieves details of a specific fund.

**Endpoint:** `GET /api/wallets/funds/{id}`

**Path Parameters:**
- `id` (string): Fund UUID

**Response:** `SnWalletFund` (200 OK)

**Example Request:**
```bash
curl -X GET "/api/wallets/funds/550e8400-e29b-41d4-a716-446655440003" \
  -H "Authorization: Bearer {token}"
```

**Example Response:** (Same as create fund response)

**Error Responses:**
- `401 Unauthorized`: Missing or invalid authentication
- `403 Forbidden`: User doesn't have permission to view this fund
- `404 Not Found`: Fund not found

---

### 4. Receive Fund

Claims the authenticated user's portion of a fund.

**Endpoint:** `POST /api/wallets/funds/{id}/receive`

**Path Parameters:**
- `id` (string): Fund UUID

**Response:** `SnWalletTransaction` (200 OK)

**Example Request:**
```bash
curl -X POST "/api/wallets/funds/550e8400-e29b-41d4-a716-446655440003/receive" \
  -H "Authorization: Bearer {token}"
```

**Example Response:**
```json
{
  "id": "550e8400-e29b-41d4-a716-446655440008",
  "payerWalletId": null,
  "payeeWalletId": "550e8400-e29b-41d4-a716-446655440009",
  "currency": "points",
  "amount": 33.34,
  "remarks": "Received fund portion from 550e8400-e29b-41d4-a716-446655440004",
  "type": 1,
  "createdAt": "2025-10-03T22:05:00Z",
  "updatedAt": "2025-10-03T22:05:00Z"
}
```

**Error Responses:**
- `400 Bad Request`: Fund expired, already claimed, not a recipient
- `401 Unauthorized`: Missing or invalid authentication
- `404 Not Found`: Fund not found

---

### 5. Get Wallet Overview

Retrieves a summarized overview of wallet transactions grouped by type for graphing/charting purposes.

**Endpoint:** `GET /api/wallets/overview`

**Query Parameters:**
- `startDate` (string, optional): Start date in ISO 8601 format (e.g., "2025-01-01T00:00:00Z")
- `endDate` (string, optional): End date in ISO 8601 format (e.g., "2025-12-31T23:59:59Z")

**Response:** `WalletOverview` (200 OK)

**Example Request:**
```bash
curl -X GET "/api/wallets/overview?startDate=2025-01-01T00:00:00Z&endDate=2025-12-31T23:59:59Z" \
  -H "Authorization: Bearer {token}"
```

**Example Response:**
```json
{
  "accountId": "550e8400-e29b-41d4-a716-446655440000",
  "startDate": "2025-01-01T00:00:00.0000000Z",
  "endDate": "2025-12-31T23:59:59.0000000Z",
  "summary": {
    "System": {
      "type": "System",
      "currencies": {
        "points": {
          "currency": "points",
          "income": 150.00,
          "spending": 0.00,
          "net": 150.00
        }
      }
    },
    "Transfer": {
      "type": "Transfer",
      "currencies": {
        "points": {
          "currency": "points",
          "income": 25.00,
          "spending": 75.00,
          "net": -50.00
        },
        "golds": {
          "currency": "golds",
          "income": 0.00,
          "spending": 10.00,
          "net": -10.00
        }
      }
    },
    "Order": {
      "type": "Order",
      "currencies": {
        "points": {
          "currency": "points",
          "income": 0.00,
          "spending": 200.00,
          "net": -200.00
        }
      }
    }
  },
  "totalIncome": 175.00,
  "totalSpending": 285.00,
  "netTotal": -110.00
}
```

**Response Fields:**
- `accountId`: User's account UUID
- `startDate`/`endDate`: Date range applied (ISO 8601 format)
- `summary`: Object keyed by transaction type
  - `type`: Transaction type name
  - `currencies`: Object keyed by currency code
    - `currency`: Currency name
    - `income`: Total money received
    - `spending`: Total money spent
    - `net`: Income minus spending
- `totalIncome`: Sum of all income across all types/currencies
- `totalSpending`: Sum of all spending across all types/currencies
- `netTotal`: Overall net (totalIncome - totalSpending)

**Error Responses:**
- `401 Unauthorized`: Missing or invalid authentication

## Error Codes

### Common Error Types

#### Validation Errors
```json
{
  "type": "https://tools.ietf.org/html/rfc9110#section-15.5.1",
  "title": "Bad Request",
  "status": 400,
  "detail": "At least one recipient is required",
  "instance": "/api/wallets/funds"
}
```

#### Insufficient Funds
```json
{
  "type": "https://tools.ietf.org/html/rfc9110#section-15.5.1",
  "title": "Bad Request",
  "status": 400,
  "detail": "Insufficient funds",
  "instance": "/api/wallets/funds"
}
```

#### Fund Not Available
```json
{
  "type": "https://tools.ietf.org/html/rfc9110#section-15.5.1",
  "title": "Bad Request",
  "status": 400,
  "detail": "Fund is no longer available",
  "instance": "/api/wallets/funds/550e8400-e29b-41d4-a716-446655440003/receive"
}
```

#### Already Claimed
```json
{
  "type": "https://tools.ietf.org/html/rfc9110#section-15.5.1",
  "title": "Bad Request",
  "status": 400,
  "detail": "You have already received this fund",
  "instance": "/api/wallets/funds/550e8400-e29b-41d4-a716-446655440003/receive"
}
```

## Rate Limiting

- **Create Fund**: 10 requests per minute per user
- **Get Funds**: 60 requests per minute per user
- **Get Fund**: 60 requests per minute per user
- **Receive Fund**: 30 requests per minute per user

## Webhooks/Notifications

The system integrates with the platform's notification system:

- **Fund Created**: Creator receives confirmation
- **Fund Claimed**: Creator receives notification when someone claims
- **Fund Expired**: Creator receives refund notification

## SDK Examples

### JavaScript/TypeScript

```typescript
// Create a fund
const createFund = async (fundData: CreateFundRequest): Promise<SnWalletFund> => {
  const response = await fetch('/api/wallets/funds', {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(fundData)
  });

  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`);
  }

  return response.json();
};

// Get user's funds
const getFunds = async (params?: {
  offset?: number;
  take?: number;
  status?: FundStatus;
}): Promise<SnWalletFund[]> => {
  const queryParams = new URLSearchParams();
  if (params?.offset) queryParams.set('offset', params.offset.toString());
  if (params?.take) queryParams.set('take', params.take.toString());
  if (params?.status !== undefined) queryParams.set('status', params.status.toString());

  const response = await fetch(`/api/wallets/funds?${queryParams}`, {
    headers: {
      'Authorization': `Bearer ${token}`
    }
  });

  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`);
  }

  return response.json();
};

// Claim a fund
const receiveFund = async (fundId: string): Promise<SnWalletTransaction> => {
  const response = await fetch(`/api/wallets/funds/${fundId}/receive`, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${token}`
    }
  });

  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`);
  }

  return response.json();
};
```

### Python

```python
import requests
from typing import List, Optional
from enum import Enum

class FundSplitType(Enum):
    EVEN = 0
    RANDOM = 1

class FundStatus(Enum):
    CREATED = 0
    PARTIALLY_RECEIVED = 1
    FULLY_RECEIVED = 2
    EXPIRED = 3
    REFUNDED = 4

def create_fund(token: str, fund_data: dict) -> dict:
    """Create a new fund"""
    response = requests.post(
        '/api/wallets/funds',
        json=fund_data,
        headers={
            'Authorization': f'Bearer {token}',
            'Content-Type': 'application/json'
        }
    )
    response.raise_for_status()
    return response.json()

def get_funds(
    token: str,
    offset: int = 0,
    take: int = 20,
    status: Optional[FundStatus] = None
) -> List[dict]:
    """Get user's funds"""
    params = {'offset': offset, 'take': take}
    if status is not None:
        params['status'] = status.value

    response = requests.get(
        '/api/wallets/funds',
        params=params,
        headers={'Authorization': f'Bearer {token}'}
    )
    response.raise_for_status()
    return response.json()

def receive_fund(token: str, fund_id: str) -> dict:
    """Claim a fund portion"""
    response = requests.post(
        f'/api/wallets/funds/{fund_id}/receive',
        headers={'Authorization': f'Bearer {token}'}
    )
    response.raise_for_status()
    return response.json()
```

## Changelog

### Version 1.0.0
- Initial release with basic red packet functionality
- Support for even and random split types
- 24-hour expiration with automatic refunds
- RESTful API endpoints
- Comprehensive error handling

## Support

For API support or questions:
- Check the main documentation at `README_WALLET_FUNDS.md`
- Review error messages for specific guidance
- Contact the development team for technical issues
@@ -4,8 +4,8 @@ var builder = DistributedApplication.CreateBuilder(args);

var isDev = builder.Environment.IsDevelopment();

var cache = builder.AddRedis("cache");
var queue = builder.AddNats("queue").WithJetStream();
var cache = builder.AddRedis("Cache");
var queue = builder.AddNats("Queue").WithJetStream();

var ringService = builder.AddProject<Projects.DysonNetwork_Ring>("ring");
var passService = builder.AddProject<Projects.DysonNetwork_Pass>("pass")
@@ -26,11 +26,31 @@ var insightService = builder.AddProject<Projects.DysonNetwork_Insight>("insight"
    .WithReference(ringService)
    .WithReference(sphereService)
    .WithReference(developService);
var zoneService = builder.AddProject<Projects.DysonNetwork_Zone>("zone")
    .WithReference(passService)
    .WithReference(ringService)
    .WithReference(sphereService)
    .WithReference(developService)
    .WithReference(insightService);
var messagerService = builder.AddProject<Projects.DysonNetwork_Messager>("messager")
    .WithReference(passService)
    .WithReference(ringService)
    .WithReference(sphereService)
    .WithReference(developService);

passService.WithReference(developService).WithReference(driveService);

List<IResourceBuilder<ProjectResource>> services =
    [ringService, passService, driveService, sphereService, developService, insightService];
[
    ringService,
    passService,
    driveService,
    sphereService,
    developService,
    insightService,
    zoneService,
    messagerService
];

for (var idx = 0; idx < services.Count; idx++)
{
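On the consuming side, the WithReference(...) wiring above is what makes names such as "pass" or "ring" resolvable through .NET service discovery. A rough sketch of how one service might call another (assumes the Microsoft.Extensions.ServiceDiscovery package that Aspire's service defaults normally register; the client name and URI are illustrative):

```csharp
var builder = WebApplication.CreateBuilder(args);

builder.Services.AddServiceDiscovery();
builder.Services.AddHttpClient("pass", client =>
{
    // "https+http" lets discovery prefer the HTTPS endpoint and fall back to HTTP.
    client.BaseAddress = new Uri("https+http://pass");
})
.AddServiceDiscovery();
```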
@@ -1,9 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
    <Sdk Name="Aspire.AppHost.Sdk" Version="9.5.2" />
    <Sdk Name="Aspire.AppHost.Sdk" Version="13.1.0"/>

    <PropertyGroup>
        <OutputType>Exe</OutputType>
        <TargetFramework>net9.0</TargetFramework>
        <TargetFramework>net10.0</TargetFramework>
        <ImplicitUsings>enable</ImplicitUsings>
        <Nullable>enable</Nullable>
        <UserSecretsId>a68b3195-a00d-40c2-b5ed-d675356b7cde</UserSecretsId>
@@ -11,10 +11,10 @@
    </PropertyGroup>

    <ItemGroup>
        <PackageReference Include="Aspire.Hosting.AppHost" Version="9.5.2" />
        <PackageReference Include="Aspire.Hosting.Docker" Version="9.4.2-preview.1.25428.12" />
        <PackageReference Include="Aspire.Hosting.Nats" Version="9.5.2" />
        <PackageReference Include="Aspire.Hosting.Redis" Version="9.5.2" />
        <PackageReference Include="Aspire.Hosting.AppHost" Version="13.1.0" />
        <PackageReference Include="Aspire.Hosting.Docker" Version="13.0.0-preview.1.25560.3"/>
        <PackageReference Include="Aspire.Hosting.Nats" Version="13.1.0"/>
        <PackageReference Include="Aspire.Hosting.Redis" Version="13.1.0"/>
    </ItemGroup>
    <ItemGroup>
        <ProjectReference Include="..\DysonNetwork.Develop\DysonNetwork.Develop.csproj"/>
@@ -24,5 +24,7 @@
        <ProjectReference Include="..\DysonNetwork.Sphere\DysonNetwork.Sphere.csproj"/>
        <ProjectReference Include="..\DysonNetwork.Gateway\DysonNetwork.Gateway.csproj"/>
        <ProjectReference Include="..\DysonNetwork.Insight\DysonNetwork.Insight.csproj"/>
        <ProjectReference Include="..\DysonNetwork.Zone\DysonNetwork.Zone.csproj"/>
        <ProjectReference Include="..\DysonNetwork.Messager\DysonNetwork.Messager.csproj"/>
    </ItemGroup>
</Project>
@@ -5,7 +5,7 @@
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "https://localhost:17025;http://localhost:15057",
"applicationUrl": "https://localhost:17169;http://localhost:15057",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"DOTNET_ENVIRONMENT": "Development",
DysonNetwork.Control/aspire-manifest.json (new file, 357 lines)
@@ -0,0 +1,357 @@
{
  "$schema": "https://json.schemastore.org/aspire-8.0.json",
  "resources": {
    "cache": {
      "type": "container.v1",
      "connectionString": "{cache.bindings.tcp.host}:{cache.bindings.tcp.port},password={cache-password.value}",
      "image": "docker.io/library/redis:8.2",
      "entrypoint": "/bin/sh",
      "args": [
        "-c",
        "redis-server --requirepass $REDIS_PASSWORD"
      ],
      "env": {
        "REDIS_PASSWORD": "{cache-password.value}"
      },
      "bindings": {
        "tcp": {
          "scheme": "tcp",
          "protocol": "tcp",
          "transport": "tcp",
          "targetPort": 6379
        }
      }
    },
    "queue": {
      "type": "container.v1",
      "connectionString": "nats://nats:{queue-password.value}@{queue.bindings.tcp.host}:{queue.bindings.tcp.port}",
      "image": "docker.io/library/nats:2.11",
      "args": [
        "--user",
        "nats",
        "--pass",
        "{queue-password.value}",
        "-js"
      ],
      "bindings": {
        "tcp": {
          "scheme": "tcp",
          "protocol": "tcp",
          "transport": "tcp",
          "targetPort": 4222
        }
      }
    },
    "ring": {
      "type": "project.v1",
      "path": "../DysonNetwork.Ring/DysonNetwork.Ring.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "8001",
        "HTTPS_PORTS": "{ring.bindings.grpc.targetPort}",
        "ConnectionStrings__cache": "{cache.connectionString}",
        "ConnectionStrings__queue": "{queue.connectionString}",
        "GRPC_PORT": "7002",
        "services__pass__http__0": "{pass.bindings.http.url}",
        "services__pass__grpc__0": "{pass.bindings.grpc.url}",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "ring"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 8001
        },
        "grpc": {
          "scheme": "https",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 7002
        }
      }
    },
    "pass": {
      "type": "project.v1",
      "path": "../DysonNetwork.Pass/DysonNetwork.Pass.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "8002",
        "HTTPS_PORTS": "{pass.bindings.grpc.targetPort}",
        "services__ring__http__0": "{ring.bindings.http.url}",
        "services__ring__grpc__0": "{ring.bindings.grpc.url}",
        "services__develop__http__0": "{develop.bindings.http.url}",
        "services__develop__grpc__0": "{develop.bindings.grpc.url}",
        "services__drive__http__0": "{drive.bindings.http.url}",
        "services__drive__grpc__0": "{drive.bindings.grpc.url}",
        "ConnectionStrings__cache": "{cache.connectionString}",
        "ConnectionStrings__queue": "{queue.connectionString}",
        "GRPC_PORT": "7003",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "pass"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 8002
        },
        "grpc": {
          "scheme": "https",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 7003
        }
      }
    },
    "drive": {
      "type": "project.v1",
      "path": "../DysonNetwork.Drive/DysonNetwork.Drive.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "8003",
        "HTTPS_PORTS": "{drive.bindings.grpc.targetPort}",
        "services__pass__http__0": "{pass.bindings.http.url}",
        "services__pass__grpc__0": "{pass.bindings.grpc.url}",
        "services__ring__http__0": "{ring.bindings.http.url}",
        "services__ring__grpc__0": "{ring.bindings.grpc.url}",
        "ConnectionStrings__cache": "{cache.connectionString}",
        "ConnectionStrings__queue": "{queue.connectionString}",
        "GRPC_PORT": "7004",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "drive"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 8003
        },
        "grpc": {
          "scheme": "https",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 7004
        }
      }
    },
    "sphere": {
      "type": "project.v1",
      "path": "../DysonNetwork.Sphere/DysonNetwork.Sphere.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "8004",
        "HTTPS_PORTS": "{sphere.bindings.grpc.targetPort}",
        "services__pass__http__0": "{pass.bindings.http.url}",
        "services__pass__grpc__0": "{pass.bindings.grpc.url}",
        "services__ring__http__0": "{ring.bindings.http.url}",
        "services__ring__grpc__0": "{ring.bindings.grpc.url}",
        "services__drive__http__0": "{drive.bindings.http.url}",
        "services__drive__grpc__0": "{drive.bindings.grpc.url}",
        "ConnectionStrings__cache": "{cache.connectionString}",
        "ConnectionStrings__queue": "{queue.connectionString}",
        "GRPC_PORT": "7005",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "sphere"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 8004
        },
        "grpc": {
          "scheme": "https",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 7005
        }
      }
    },
    "develop": {
      "type": "project.v1",
      "path": "../DysonNetwork.Develop/DysonNetwork.Develop.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "8005",
        "HTTPS_PORTS": "{develop.bindings.grpc.targetPort}",
        "services__pass__http__0": "{pass.bindings.http.url}",
        "services__pass__grpc__0": "{pass.bindings.grpc.url}",
        "services__ring__http__0": "{ring.bindings.http.url}",
        "services__ring__grpc__0": "{ring.bindings.grpc.url}",
        "services__sphere__http__0": "{sphere.bindings.http.url}",
        "services__sphere__grpc__0": "{sphere.bindings.grpc.url}",
        "ConnectionStrings__cache": "{cache.connectionString}",
        "ConnectionStrings__queue": "{queue.connectionString}",
        "GRPC_PORT": "7006",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "develop"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 8005
        },
        "grpc": {
          "scheme": "https",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 7006
        }
      }
    },
    "insight": {
      "type": "project.v1",
      "path": "../DysonNetwork.Insight/DysonNetwork.Insight.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "8006",
        "HTTPS_PORTS": "{insight.bindings.grpc.targetPort}",
        "services__pass__http__0": "{pass.bindings.http.url}",
        "services__pass__grpc__0": "{pass.bindings.grpc.url}",
        "services__ring__http__0": "{ring.bindings.http.url}",
        "services__ring__grpc__0": "{ring.bindings.grpc.url}",
        "services__sphere__http__0": "{sphere.bindings.http.url}",
        "services__sphere__grpc__0": "{sphere.bindings.grpc.url}",
        "services__develop__http__0": "{develop.bindings.http.url}",
        "services__develop__grpc__0": "{develop.bindings.grpc.url}",
        "ConnectionStrings__cache": "{cache.connectionString}",
        "ConnectionStrings__queue": "{queue.connectionString}",
        "GRPC_PORT": "7007",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "insight"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 8006
        },
        "grpc": {
          "scheme": "https",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 7007
        }
      }
    },
    "gateway": {
      "type": "project.v1",
      "path": "../DysonNetwork.Gateway/DysonNetwork.Gateway.csproj",
      "env": {
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EXCEPTION_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_EMIT_EVENT_LOG_ATTRIBUTES": "true",
        "OTEL_DOTNET_EXPERIMENTAL_OTLP_RETRY": "in_memory",
        "ASPNETCORE_FORWARDEDHEADERS_ENABLED": "true",
        "HTTP_PORTS": "5001",
        "services__ring__http__0": "{ring.bindings.http.url}",
        "services__ring__grpc__0": "{ring.bindings.grpc.url}",
        "services__pass__http__0": "{pass.bindings.http.url}",
        "services__pass__grpc__0": "{pass.bindings.grpc.url}",
        "services__drive__http__0": "{drive.bindings.http.url}",
        "services__drive__grpc__0": "{drive.bindings.grpc.url}",
        "services__sphere__http__0": "{sphere.bindings.http.url}",
        "services__sphere__grpc__0": "{sphere.bindings.grpc.url}",
        "services__develop__http__0": "{develop.bindings.http.url}",
        "services__develop__grpc__0": "{develop.bindings.grpc.url}",
        "services__insight__http__0": "{insight.bindings.http.url}",
        "services__insight__grpc__0": "{insight.bindings.grpc.url}",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "{docker-compose-dashboard.bindings.otlp-grpc.url}",
        "OTEL_EXPORTER_OTLP_PROTOCOL": "grpc",
        "OTEL_SERVICE_NAME": "gateway"
      },
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 5001
        }
      }
    },
    "docker-compose": {
      "error": "This resource does not support generation in the manifest."
    },
    "cache-password": {
      "type": "parameter.v0",
      "value": "{cache-password.inputs.value}",
      "inputs": {
        "value": {
          "type": "string",
          "secret": true,
          "default": {
            "generate": {
              "minLength": 22,
              "special": false
            }
          }
        }
      }
    },
    "queue-password": {
      "type": "parameter.v0",
      "value": "{queue-password.inputs.value}",
      "inputs": {
        "value": {
          "type": "string",
          "secret": true,
          "default": {
            "generate": {
              "minLength": 22,
              "special": false
            }
          }
        }
      }
    },
    "docker-compose-dashboard": {
      "type": "container.v1",
      "image": "mcr.microsoft.com/dotnet/nightly/aspire-dashboard:latest",
      "bindings": {
        "http": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 18888
        },
        "otlp-grpc": {
          "scheme": "http",
          "protocol": "tcp",
          "transport": "http",
          "targetPort": 18889
        }
      }
    }
  }
}
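The env blocks above rely on the standard .NET configuration mapping, where a double underscore in an environment variable name becomes a colon-separated configuration key. For example, in a consuming service:

```csharp
var builder = WebApplication.CreateBuilder(args);

// "ConnectionStrings__cache" and "ConnectionStrings__queue" from the manifest
// surface as ordinary named connection strings.
var redisConnection = builder.Configuration.GetConnectionString("cache");
var natsConnection = builder.Configuration.GetConnectionString("queue");

// Plain keys such as GRPC_PORT are readable directly.
var grpcPort = builder.Configuration["GRPC_PORT"];
```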
@@ -1,3 +1,4 @@
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
@@ -33,36 +34,15 @@ public class AppDatabase(

    public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
    {
        var now = SystemClock.Instance.GetCurrentInstant();

        foreach (var entry in ChangeTracker.Entries<ModelBase>())
        {
            switch (entry.State)
            {
                case EntityState.Added:
                    entry.Entity.CreatedAt = now;
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Modified:
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Deleted:
                    entry.State = EntityState.Modified;
                    entry.Entity.DeletedAt = now;
                    break;
                case EntityState.Detached:
                case EntityState.Unchanged:
                default:
                    break;
            }
        }

        this.ApplyAuditableAndSoftDelete();
        return await base.SaveChangesAsync(cancellationToken);
    }

    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);

        modelBuilder.ApplySoftDeleteFilters();
    }
}
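The refactor replaces the inline auditing and soft-delete code with shared helpers (ApplyAuditableAndSoftDelete, ApplySoftDeleteFilters) from DysonNetwork.Shared.Data. Their implementation is not part of this diff, but based on the removed code they plausibly look something like this (illustrative sketch, not the actual shared library; ModelBase is assumed to carry CreatedAt/UpdatedAt/DeletedAt):

```csharp
using System.Reflection;
using Microsoft.EntityFrameworkCore;
using NodaTime;

public static class AuditableDbContextExtensions
{
    public static void ApplyAuditableAndSoftDelete(this DbContext context)
    {
        var now = SystemClock.Instance.GetCurrentInstant();
        foreach (var entry in context.ChangeTracker.Entries<ModelBase>())
        {
            switch (entry.State)
            {
                case EntityState.Added:
                    entry.Entity.CreatedAt = now;
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Modified:
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Deleted:
                    entry.State = EntityState.Modified; // soft delete: mark instead of removing
                    entry.Entity.DeletedAt = now;
                    break;
            }
        }
    }

    public static void ApplySoftDeleteFilters(this ModelBuilder modelBuilder)
    {
        // Same effect as the removed per-context code: hide soft-deleted rows by default.
        foreach (var entityType in modelBuilder.Model.GetEntityTypes())
        {
            if (!typeof(ModelBase).IsAssignableFrom(entityType.ClrType)) continue;
            typeof(AuditableDbContextExtensions)
                .GetMethod(nameof(SetSoftDeleteFilter), BindingFlags.NonPublic | BindingFlags.Static)!
                .MakeGenericMethod(entityType.ClrType)
                .Invoke(null, new object[] { modelBuilder });
        }
    }

    private static void SetSoftDeleteFilter<TEntity>(ModelBuilder modelBuilder) where TEntity : ModelBase
        => modelBuilder.Entity<TEntity>().HasQueryFilter(e => e.DeletedAt == null);
}
```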
@@ -1,10 +1,15 @@
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    libkrb5-3 \
    libgssapi-krb5-2 \
    && rm -rf /var/lib/apt/lists/*
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081

FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Develop/DysonNetwork.Develop.csproj", "DysonNetwork.Develop/"]
@@ -1,26 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk.Web">

    <PropertyGroup>
        <TargetFramework>net9.0</TargetFramework>
        <TargetFramework>net10.0</TargetFramework>
        <Nullable>enable</Nullable>
        <ImplicitUsings>enable</ImplicitUsings>
        <DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
    </PropertyGroup>

    <ItemGroup>
        <PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
        <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.10" />
        <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.10">
        <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.1" />
        <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.1">
            <PrivateAssets>all</PrivateAssets>
            <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
        </PackageReference>
        <PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
        <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4"/>
        <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
        <PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.6" />
        <PackageReference Include="NodaTime" Version="3.2.2"/>
        <PackageReference Include="NodaTime" Version="3.2.3" />
        <PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0"/>
        <PackageReference Include="Grpc.AspNetCore.Server" Version="2.71.0"/>
        <PackageReference Include="Grpc.AspNetCore.Server" Version="2.76.0" />
    </ItemGroup>

    <ItemGroup>
@@ -69,7 +69,7 @@ public class DeveloperController(

    [HttpPost("{name}/enroll")]
    [Authorize]
    [RequiredPermission("global", "developers.create")]
    [AskPermission("developers.create")]
    public async Task<ActionResult<SnDeveloper>> EnrollDeveloperProgram(string name)
    {
        if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
@@ -8,7 +8,7 @@ namespace DysonNetwork.Develop.Project;

[ApiController]
[Route("/api/developers/{pubName}/projects")]
public class DevProjectController(DevProjectService projectService, DeveloperService developerService) : ControllerBase
public class DevProjectController(DevProjectService ps, DeveloperService ds) : ControllerBase
{
    public record DevProjectRequest(
        [MaxLength(1024)] string? Slug,
@@ -19,20 +19,20 @@ public class DevProjectController(DevProjectService projectService, DeveloperSer
    [HttpGet]
    public async Task<IActionResult> ListProjects([FromRoute] string pubName)
    {
        var developer = await developerService.GetDeveloperByName(pubName);
        var developer = await ds.GetDeveloperByName(pubName);
        if (developer is null) return NotFound();

        var projects = await projectService.GetProjectsByDeveloperAsync(developer.Id);
        var projects = await ps.GetProjectsByDeveloperAsync(developer.Id);
        return Ok(projects);
    }

    [HttpGet("{id:guid}")]
    public async Task<IActionResult> GetProject([FromRoute] string pubName, Guid id)
    {
        var developer = await developerService.GetDeveloperByName(pubName);
        var developer = await ds.GetDeveloperByName(pubName);
        if (developer is null) return NotFound();

        var project = await projectService.GetProjectAsync(id, developer.Id);
        var project = await ps.GetProjectAsync(id, developer.Id);
        if (project is null) return NotFound();

        return Ok(project);
@@ -45,17 +45,17 @@ public class DevProjectController(DevProjectService projectService, DeveloperSer
        if (HttpContext.Items["CurrentUser"] is not Account currentUser)
            return Unauthorized();

        var developer = await developerService.GetDeveloperByName(pubName);
        var developer = await ds.GetDeveloperByName(pubName);
        if (developer is null)
            return NotFound("Developer not found");

        if (!await developerService.IsMemberWithRole(developer.PublisherId, Guid.Parse(currentUser.Id), PublisherMemberRole.Editor))
        if (!await ds.IsMemberWithRole(developer.PublisherId, Guid.Parse(currentUser.Id), PublisherMemberRole.Editor))
            return StatusCode(403, "You must be an editor of the developer to create a project");

        if (string.IsNullOrWhiteSpace(request.Slug) || string.IsNullOrWhiteSpace(request.Name))
            return BadRequest("Slug and Name are required");

        var project = await projectService.CreateProjectAsync(developer, request);
        var project = await ps.CreateProjectAsync(developer, request);
        return CreatedAtAction(
            nameof(GetProject),
            new { pubName, id = project.Id },
@@ -74,12 +74,15 @@ public class DevProjectController(DevProjectService projectService, DeveloperSer
        if (HttpContext.Items["CurrentUser"] is not Account currentUser)
            return Unauthorized();

        var developer = await developerService.GetDeveloperByName(pubName);
        var developer = await ds.GetDeveloperByName(pubName);
        var accountId = Guid.Parse(currentUser.Id);
        if (developer is null || developer.Id != accountId)
            return Forbid();

        var project = await projectService.UpdateProjectAsync(id, developer.Id, request);
        if (developer is null)
            return Forbid();
        if (!await ds.IsMemberWithRole(developer.PublisherId, accountId, PublisherMemberRole.Manager))
            return StatusCode(403, "You must be an manager of the developer to update a project");

        var project = await ps.UpdateProjectAsync(id, developer.Id, request);
        if (project is null)
            return NotFound();

@@ -93,12 +96,14 @@ public class DevProjectController(DevProjectService projectService, DeveloperSer
        if (HttpContext.Items["CurrentUser"] is not Account currentUser)
            return Unauthorized();

        var developer = await developerService.GetDeveloperByName(pubName);
        var developer = await ds.GetDeveloperByName(pubName);
        var accountId = Guid.Parse(currentUser.Id);
        if (developer is null || developer.Id != accountId)
        if (developer is null)
            return Forbid();
        if (!await ds.IsMemberWithRole(developer.PublisherId, accountId, PublisherMemberRole.Manager))
            return StatusCode(403, "You must be an manager of the developer to delete a project");

        var success = await projectService.DeleteProjectAsync(id, developer.Id);
        var success = await ps.DeleteProjectAsync(id, developer.Id);
        if (!success)
            return NotFound();
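A quick client-side call against the project listing route shown above (the base address, developer name, and token are illustrative):

```csharp
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;

var token = "<access token>";
using var http = new HttpClient { BaseAddress = new Uri("https://localhost:5001") };
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token);

// GET /api/developers/{pubName}/projects
var projects = await http.GetFromJsonAsync<List<JsonElement>>("/api/developers/acme/projects");
Console.WriteLine($"Found {projects?.Count ?? 0} projects");
```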
@@ -16,7 +16,7 @@ public static class ApplicationConfiguration

        app.UseAuthentication();
        app.UseAuthorization();
        app.UseMiddleware<PermissionMiddleware>();
        app.UseMiddleware<RemotePermissionMiddleware>();

        app.MapControllers();

@@ -16,9 +16,7 @@ public static class ServiceCollectionExtensions
        services.AddLocalization();

        services.AddDbContext<AppDatabase>();
        services.AddSingleton<IClock>(SystemClock.Instance);
        services.AddHttpContextAccessor();
        services.AddSingleton<ICacheService, CacheServiceRedis>();

        services.AddHttpClient();
@@ -12,11 +12,14 @@
"ConnectionStrings": {
  "App": "Host=localhost;Port=5432;Database=dyson_develop;Username=postgres;Password=postgres;Include Error Detail=True;Maximum Pool Size=20;Connection Idle Lifetime=60"
},
"KnownProxies": ["127.0.0.1", "::1"],
"KnownProxies": [
  "127.0.0.1",
  "::1"
],
"Swagger": {
  "PublicBasePath": "/develop"
},
"Etcd": {
  "Insecure": true
"Cache": {
  "Serializer": "MessagePack"
}
}
@@ -1,12 +1,14 @@
using System.Linq.Expressions;
using System.Reflection;
using DysonNetwork.Drive.Billing;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Drive.Storage.Model;
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore.Query;
using NodaTime;
using Quartz;
using TaskStatus = DysonNetwork.Drive.Storage.Model.TaskStatus;

namespace DysonNetwork.Drive;

@@ -21,7 +23,11 @@ public class AppDatabase(
    public DbSet<QuotaRecord> QuotaRecords { get; set; } = null!;

    public DbSet<SnCloudFile> Files { get; set; } = null!;
    public DbSet<CloudFileReference> FileReferences { get; set; } = null!;
    public DbSet<SnCloudFileReference> FileReferences { get; set; } = null!;
    public DbSet<SnCloudFileIndex> FileIndexes { get; set; }

    public DbSet<PersistentTask> Tasks { get; set; } = null!;
    public DbSet<PersistentUploadTask> UploadTasks { get; set; } = null!; // Backward compatibility

    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
    {
@@ -39,52 +45,12 @@ public class AppDatabase(
    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);

        // Automatically apply soft-delete filter to all entities inheriting BaseModel
        foreach (var entityType in modelBuilder.Model.GetEntityTypes())
        {
            if (!typeof(ModelBase).IsAssignableFrom(entityType.ClrType)) continue;
            var method = typeof(AppDatabase)
                .GetMethod(nameof(SetSoftDeleteFilter),
                    BindingFlags.NonPublic | BindingFlags.Static)!
                .MakeGenericMethod(entityType.ClrType);

            method.Invoke(null, [modelBuilder]);
        }
    }

    private static void SetSoftDeleteFilter<TEntity>(ModelBuilder modelBuilder)
        where TEntity : ModelBase
    {
        modelBuilder.Entity<TEntity>().HasQueryFilter(e => e.DeletedAt == null);
        modelBuilder.ApplySoftDeleteFilters();
    }

    public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
    {
        var now = SystemClock.Instance.GetCurrentInstant();

        foreach (var entry in ChangeTracker.Entries<ModelBase>())
        {
            switch (entry.State)
            {
                case EntityState.Added:
                    entry.Entity.CreatedAt = now;
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Modified:
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Deleted:
                    entry.State = EntityState.Modified;
                    entry.Entity.DeletedAt = now;
                    break;
                case EntityState.Detached:
                case EntityState.Unchanged:
                default:
                    break;
            }
        }

        this.ApplyAuditableAndSoftDelete();
        return await base.SaveChangesAsync(cancellationToken);
    }
}
@@ -136,6 +102,45 @@ public class AppDatabaseRecyclingJob(AppDatabase db, ILogger<AppDatabaseRecyclin
    }
}

public class PersistentTaskCleanupJob(
    IServiceProvider serviceProvider,
    ILogger<PersistentTaskCleanupJob> logger
) : IJob
{
    public async Task Execute(IJobExecutionContext context)
    {
        logger.LogInformation("Cleaning up stale persistent tasks...");

        // Get the PersistentTaskService from DI
        using var scope = serviceProvider.CreateScope();
        var persistentTaskService = scope.ServiceProvider.GetService(typeof(PersistentTaskService));

        if (persistentTaskService is PersistentTaskService service)
        {
            // Clean up tasks for all users (you might want to add user-specific logic here)
            // For now, we'll clean up tasks older than 30 days for all users
            var cutoff = SystemClock.Instance.GetCurrentInstant() - Duration.FromDays(30);
            var tasksToClean = await service.GetUserTasksAsync(
                Guid.Empty, // This would need to be adjusted for multi-user cleanup
                status: TaskStatus.Completed | TaskStatus.Failed | TaskStatus.Cancelled | TaskStatus.Expired
            );

            var cleanedCount = 0;
            foreach (var task in tasksToClean.Items.Where(t => t.UpdatedAt < cutoff))
            {
                await service.CancelTaskAsync(task.TaskId); // Or implement a proper cleanup method
                cleanedCount++;
            }

            logger.LogInformation("Cleaned up {Count} stale persistent tasks", cleanedCount);
        }
        else
        {
            logger.LogWarning("PersistentTaskService not found in DI container");
        }
    }
}

public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
{
    public AppDatabase CreateDbContext(string[] args)
@@ -149,35 +154,3 @@ public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
        return new AppDatabase(optionsBuilder.Options, configuration);
    }
}

public static class OptionalQueryExtensions
{
    public static IQueryable<T> If<T>(
        this IQueryable<T> source,
        bool condition,
        Func<IQueryable<T>, IQueryable<T>> transform
    )
    {
        return condition ? transform(source) : source;
    }

    public static IQueryable<T> If<T, TP>(
        this IIncludableQueryable<T, TP> source,
        bool condition,
        Func<IIncludableQueryable<T, TP>, IQueryable<T>> transform
    )
        where T : class
    {
        return condition ? transform(source) : source;
    }

    public static IQueryable<T> If<T, TP>(
|
||||
this IIncludableQueryable<T, IEnumerable<TP>> source,
|
||||
bool condition,
|
||||
Func<IIncludableQueryable<T, IEnumerable<TP>>, IQueryable<T>> transform
|
||||
)
|
||||
where T : class
|
||||
{
|
||||
return condition ? transform(source) : source;
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
|
||||
WORKDIR /app
|
||||
EXPOSE 8080
|
||||
EXPOSE 8081
|
||||
@@ -20,7 +20,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
USER $APP_UID
|
||||
|
||||
# Stage 2: Build .NET application
|
||||
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
|
||||
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
WORKDIR /src
|
||||
COPY ["DysonNetwork.Drive/DysonNetwork.Drive.csproj", "DysonNetwork.Drive/"]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net9.0</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
|
||||
@@ -10,49 +10,34 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
|
||||
<PackageReference Include="BlurHashSharp.SkiaSharp" Version="1.3.4" />
|
||||
<PackageReference Include="FFMpegCore" Version="5.3.0" />
|
||||
<PackageReference Include="Grpc.AspNetCore.Server" Version="2.71.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.10" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.10">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PackageReference Include="FFMpegCore" Version="5.4.0" />
|
||||
<PackageReference Include="Grpc.AspNetCore.Server" Version="2.76.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.1" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.1">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="MimeKit" Version="4.14.0" />
|
||||
<PackageReference Include="MimeTypes" Version="2.5.2">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Minio" Version="6.0.5" />
|
||||
<PackageReference Include="Minio" Version="7.0.0" />
|
||||
<PackageReference Include="Nanoid" Version="3.1.0" />
|
||||
<PackageReference Include="Nerdbank.GitVersioning" Version="3.8.118">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="NetVips" Version="3.1.0" />
|
||||
<PackageReference Include="NetVips.Native.linux-x64" Version="8.17.2" />
|
||||
<PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.2" />
|
||||
<PackageReference Include="NodaTime" Version="3.2.2" />
|
||||
<PackageReference Include="NetVips.Native.linux-x64" Version="8.17.3" />
|
||||
<PackageReference Include="NetVips.Native.osx-arm64" Version="8.17.3" />
|
||||
<PackageReference Include="NodaTime" Version="3.2.3" />
|
||||
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0" />
|
||||
<PackageReference Include="NodaTime.Serialization.Protobuf" Version="2.0.2" />
|
||||
<PackageReference Include="NodaTime.Serialization.SystemTextJson" Version="1.3.0" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
|
||||
<PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.13.1" />
|
||||
<PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.13.1" />
|
||||
<PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.13.0" />
|
||||
<PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.13.0" />
|
||||
<PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.13.0" />
|
||||
<PackageReference Include="Quartz" Version="3.15.0" />
|
||||
<PackageReference Include="Quartz.AspNetCore" Version="3.15.0" />
|
||||
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.0" />
|
||||
<PackageReference Include="EFCore.BulkExtensions" Version="9.0.2" />
|
||||
<PackageReference Include="EFCore.BulkExtensions.PostgreSql" Version="9.0.2" />
|
||||
<PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
|
||||
<PackageReference Include="SkiaSharp.NativeAssets.Linux" Version="3.119.1" />
|
||||
<PackageReference Include="SkiaSharp.NativeAssets.Linux.NoDependencies" Version="3.119.1" />
|
||||
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.6" />
|
||||
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" Version="9.0.6" />
|
||||
<PackageReference Include="Quartz" Version="3.15.1" />
|
||||
<PackageReference Include="Quartz.AspNetCore" Version="3.15.1" />
|
||||
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
|
||||
<!-- Pin the SkiaSharp version at the 2.88.9 due to the BlurHash need this specific version -->
|
||||
<PackageReference Include="SkiaSharp" Version="2.88.9" />
|
||||
<PackageReference Include="SkiaSharp.NativeAssets.Linux" Version="2.88.9" />
|
||||
<PackageReference Include="SkiaSharp.NativeAssets.Linux.NoDependencies" Version="2.88.9" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
585 DysonNetwork.Drive/Index/FileIndexController.cs (Normal file)
@@ -0,0 +1,585 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using DysonNetwork.Drive.Storage;
|
||||
using DysonNetwork.Shared.Auth;
|
||||
using DysonNetwork.Shared.Http;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Drive.Index;
|
||||
|
||||
[ApiController]
|
||||
[Route("/api/index")]
|
||||
[Authorize]
|
||||
public class FileIndexController(
|
||||
FileIndexService fileIndexService,
|
||||
AppDatabase db,
|
||||
ILogger<FileIndexController> logger
|
||||
) : ControllerBase
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets files in a specific path for the current user
|
||||
/// </summary>
|
||||
/// <param name="path">The path to browse (defaults to root "/")</param>
|
||||
/// <param name="query">Optional query to filter files by name</param>
|
||||
/// <param name="order">The field to order by (date, size, name - defaults to date)</param>
|
||||
/// <param name="orderDesc">Whether to order in descending order (defaults to true)</param>
|
||||
/// <returns>List of files in the specified path</returns>
|
||||
[HttpGet("browse")]
|
||||
public async Task<IActionResult> BrowseFiles(
|
||||
[FromQuery] string path = "/",
|
||||
[FromQuery] string? query = null,
|
||||
[FromQuery] string order = "date",
|
||||
[FromQuery] bool orderDesc = true
|
||||
)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
var fileIndexes = await fileIndexService.GetByPathAsync(accountId, path);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(query))
|
||||
{
|
||||
fileIndexes = fileIndexes
|
||||
.Where(fi => fi.File.Name.Contains(query, StringComparison.OrdinalIgnoreCase))
|
||||
.ToList();
|
||||
}
|
||||
|
||||
// Apply sorting
|
||||
fileIndexes = order.ToLower() switch
|
||||
{
|
||||
"name" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Name).ToList()
|
||||
: fileIndexes.OrderBy(fi => fi.File.Name).ToList(),
|
||||
"size" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Size).ToList()
|
||||
: fileIndexes.OrderBy(fi => fi.File.Size).ToList(),
|
||||
_ => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.CreatedAt).ToList()
|
||||
: fileIndexes.OrderBy(fi => fi.File.CreatedAt).ToList()
|
||||
};
|
||||
|
||||
// Get all file indexes for this account to extract child folders
|
||||
var allFileIndexes = await fileIndexService.GetByAccountIdAsync(accountId);
|
||||
|
||||
// Extract unique child folder paths
|
||||
var childFolders = ExtractChildFolders(allFileIndexes, path);
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
Path = path,
|
||||
Files = fileIndexes,
|
||||
Folders = childFolders,
|
||||
TotalCount = fileIndexes.Count
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to browse files for account {AccountId} at path {Path}", accountId, path);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "BROWSE_FAILED",
|
||||
Message = "Failed to browse files",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extracts unique child folder paths from all file indexes for a given parent path
|
||||
/// </summary>
|
||||
/// <param name="allFileIndexes">All file indexes for the account</param>
|
||||
/// <param name="parentPath">The parent path to find children for</param>
|
||||
/// <returns>List of unique child folder names</returns>
|
||||
private List<string> ExtractChildFolders(List<SnCloudFileIndex> allFileIndexes, string parentPath)
|
||||
{
|
||||
var normalizedParentPath = FileIndexService.NormalizePath(parentPath);
|
||||
var childFolders = new HashSet<string>();
|
||||
|
||||
foreach (var index in allFileIndexes)
|
||||
{
|
||||
var normalizedIndexPath = FileIndexService.NormalizePath(index.Path);
|
||||
|
||||
// Check if this path is a direct child of the parent path
|
||||
if (normalizedIndexPath.StartsWith(normalizedParentPath) &&
|
||||
normalizedIndexPath != normalizedParentPath)
|
||||
{
|
||||
// Remove the parent path prefix to get the relative path
|
||||
var relativePath = normalizedIndexPath.Substring(normalizedParentPath.Length);
|
||||
|
||||
// Extract the first folder name (direct child)
|
||||
var firstSlashIndex = relativePath.IndexOf('/');
|
||||
if (firstSlashIndex > 0)
|
||||
{
|
||||
var folderName = relativePath.Substring(0, firstSlashIndex);
|
||||
childFolders.Add(folderName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return childFolders.OrderBy(f => f).ToList();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all files for the current user (across all paths)
|
||||
/// </summary>
|
||||
/// <param name="query">Optional query to filter files by name</param>
|
||||
/// <param name="order">The field to order by (date, size, name - defaults to date)</param>
|
||||
/// <param name="orderDesc">Whether to order in descending order (defaults to true)</param>
|
||||
/// <returns>List of all files for the user</returns>
|
||||
[HttpGet("all")]
|
||||
public async Task<IActionResult> GetAllFiles(
|
||||
[FromQuery] string? query = null,
|
||||
[FromQuery] string order = "date",
|
||||
[FromQuery] bool orderDesc = true
|
||||
)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
var fileIndexes = await fileIndexService.GetByAccountIdAsync(accountId);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(query))
|
||||
{
|
||||
fileIndexes = fileIndexes
|
||||
.Where(fi => fi.File.Name.Contains(query, StringComparison.OrdinalIgnoreCase))
|
||||
.ToList();
|
||||
}
|
||||
|
||||
// Apply sorting
|
||||
fileIndexes = order.ToLower() switch
|
||||
{
|
||||
"name" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Name).ToList()
|
||||
: fileIndexes.OrderBy(fi => fi.File.Name).ToList(),
|
||||
"size" => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.Size).ToList()
|
||||
: fileIndexes.OrderBy(fi => fi.File.Size).ToList(),
|
||||
_ => orderDesc ? fileIndexes.OrderByDescending(fi => fi.File.CreatedAt).ToList()
|
||||
: fileIndexes.OrderBy(fi => fi.File.CreatedAt).ToList()
|
||||
};
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
Files = fileIndexes,
|
||||
TotalCount = fileIndexes.Count()
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to get all files for account {AccountId}", accountId);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "GET_ALL_FAILED",
|
||||
Message = "Failed to get files",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets files that have not been indexed for the current user.
|
||||
/// </summary>
|
||||
/// <param name="recycled">Shows recycled files or not</param>
|
||||
/// <param name="offset">The number of files to skip</param>
|
||||
/// <param name="take">The number of files to return</param>
|
||||
/// <param name="pool">The pool ID of those files</param>
|
||||
/// <param name="query">Optional query to filter files by name</param>
|
||||
/// <param name="order">The field to order by (date, size, name - defaults to date)</param>
|
||||
/// <param name="orderDesc">Whether to order in descending order (defaults to true)</param>
|
||||
/// <returns>List of unindexed files</returns>
|
||||
[HttpGet("unindexed")]
|
||||
public async Task<IActionResult> GetUnindexedFiles(
|
||||
[FromQuery] Guid? pool,
|
||||
[FromQuery] bool recycled = false,
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int take = 20,
|
||||
[FromQuery] string? query = null,
|
||||
[FromQuery] string order = "date",
|
||||
[FromQuery] bool orderDesc = true
|
||||
)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
var filesQuery = db.Files
|
||||
.Where(f => f.AccountId == accountId
|
||||
&& f.IsMarkedRecycle == recycled
|
||||
&& !db.FileIndexes.Any(fi => fi.FileId == f.Id && fi.AccountId == accountId)
|
||||
)
|
||||
.AsQueryable();
|
||||
|
||||
// Apply sorting
|
||||
filesQuery = order.ToLower() switch
|
||||
{
|
||||
"name" => orderDesc ? filesQuery.OrderByDescending(f => f.Name)
|
||||
: filesQuery.OrderBy(f => f.Name),
|
||||
"size" => orderDesc ? filesQuery.OrderByDescending(f => f.Size)
|
||||
: filesQuery.OrderBy(f => f.Size),
|
||||
_ => orderDesc ? filesQuery.OrderByDescending(f => f.CreatedAt)
|
||||
: filesQuery.OrderBy(f => f.CreatedAt)
|
||||
};
|
||||
|
||||
if (pool.HasValue) filesQuery = filesQuery.Where(f => f.PoolId == pool);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(query))
|
||||
{
|
||||
filesQuery = filesQuery.Where(f => f.Name.Contains(query));
|
||||
}
|
||||
|
||||
var totalCount = await filesQuery.CountAsync();
|
||||
|
||||
Response.Headers.Append("X-Total", totalCount.ToString());
|
||||
|
||||
var unindexedFiles = await filesQuery
|
||||
.Skip(offset)
|
||||
.Take(take)
|
||||
.ToListAsync();
|
||||
|
||||
return Ok(unindexedFiles);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to get unindexed files for account {AccountId}", accountId);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "GET_UNINDEXED_FAILED",
|
||||
Message = "Failed to get unindexed files",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Moves a file to a new path
|
||||
/// </summary>
|
||||
/// <param name="indexId">The file index ID</param>
|
||||
/// <param name="newPath">The new path</param>
|
||||
/// <returns>The updated file index</returns>
|
||||
[HttpPost("move/{indexId}")]
|
||||
public async Task<IActionResult> MoveFile(Guid indexId, [FromBody] MoveFileRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
// Verify ownership
|
||||
var existingIndex = await db.FileIndexes
|
||||
.Include(fi => fi.File)
|
||||
.FirstOrDefaultAsync(fi => fi.Id == indexId && fi.AccountId == accountId);
|
||||
|
||||
if (existingIndex == null)
|
||||
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
|
||||
|
||||
var updatedIndex = await fileIndexService.UpdateAsync(indexId, request.NewPath);
|
||||
|
||||
if (updatedIndex == null)
|
||||
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
updatedIndex.FileId,
|
||||
IndexId = updatedIndex.Id,
|
||||
OldPath = existingIndex.Path,
|
||||
NewPath = updatedIndex.Path,
|
||||
Message = "File moved successfully"
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to move file index {IndexId} for account {AccountId}", indexId, accountId);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "MOVE_FAILED",
|
||||
Message = "Failed to move file",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Removes a file index (does not delete the actual file by default)
|
||||
/// </summary>
|
||||
/// <param name="indexId">The file index ID</param>
|
||||
/// <param name="deleteFile">Whether to also delete the actual file data</param>
|
||||
/// <returns>Success message</returns>
|
||||
[HttpDelete("remove/{indexId}")]
|
||||
public async Task<IActionResult> RemoveFileIndex(Guid indexId, [FromQuery] bool deleteFile = false)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
// Verify ownership
|
||||
var existingIndex = await db.FileIndexes
|
||||
.Include(fi => fi.File)
|
||||
.FirstOrDefaultAsync(fi => fi.Id == indexId && fi.AccountId == accountId);
|
||||
|
||||
if (existingIndex == null)
|
||||
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
|
||||
|
||||
var fileId = existingIndex.FileId;
|
||||
var fileName = existingIndex.File.Name;
|
||||
var filePath = existingIndex.Path;
|
||||
|
||||
// Remove the index
|
||||
var removed = await fileIndexService.RemoveAsync(indexId);
|
||||
|
||||
if (!removed)
|
||||
return new ObjectResult(ApiError.NotFound("File index")) { StatusCode = 404 };
|
||||
|
||||
// Optionally delete the actual file
|
||||
if (!deleteFile)
|
||||
return Ok(new
|
||||
{
|
||||
Message = deleteFile
|
||||
? "File index and file data removed successfully"
|
||||
: "File index removed successfully",
|
||||
FileId = fileId,
|
||||
FileName = fileName,
|
||||
Path = filePath,
|
||||
FileDataDeleted = deleteFile
|
||||
});
|
||||
try
|
||||
{
|
||||
// Check if there are any other indexes for this file
|
||||
var remainingIndexes = await fileIndexService.GetByFileIdAsync(fileId);
|
||||
if (remainingIndexes.Count == 0)
|
||||
{
|
||||
// No other indexes exist, safe to delete the file
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId.ToString());
|
||||
if (file != null)
|
||||
{
|
||||
db.Files.Remove(file);
|
||||
await db.SaveChangesAsync();
|
||||
logger.LogInformation("Deleted file {FileId} ({FileName}) as requested", fileId, fileName);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogWarning(ex, "Failed to delete file {FileId} while removing index", fileId);
|
||||
// Continue even if file deletion fails
|
||||
}
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
Message = deleteFile
|
||||
? "File index and file data removed successfully"
|
||||
: "File index removed successfully",
|
||||
FileId = fileId,
|
||||
FileName = fileName,
|
||||
Path = filePath,
|
||||
FileDataDeleted = deleteFile
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to remove file index {IndexId} for account {AccountId}", indexId, accountId);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "REMOVE_FAILED",
|
||||
Message = "Failed to remove file",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Removes all file indexes in a specific path
|
||||
/// </summary>
|
||||
/// <param name="path">The path to clear</param>
|
||||
/// <param name="deleteFiles">Whether to also delete the actual file data</param>
|
||||
/// <returns>Success message with count of removed items</returns>
|
||||
[HttpDelete("clear-path")]
|
||||
public async Task<IActionResult> ClearPath([FromQuery] string path = "/", [FromQuery] bool deleteFiles = false)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
var removedCount = await fileIndexService.RemoveByPathAsync(accountId, path);
|
||||
|
||||
if (!deleteFiles || removedCount <= 0)
|
||||
return Ok(new
|
||||
{
|
||||
Message = deleteFiles
|
||||
? $"Cleared {removedCount} file indexes from path and deleted orphaned files"
|
||||
: $"Cleared {removedCount} file indexes from path",
|
||||
Path = path,
|
||||
RemovedCount = removedCount,
|
||||
FilesDeleted = deleteFiles
|
||||
});
|
||||
// Get the files that were in this path and check if they have other indexes
|
||||
var filesInPath = await fileIndexService.GetByPathAsync(accountId, path);
|
||||
var fileIdsToCheck = filesInPath.Select(fi => fi.FileId).Distinct().ToList();
|
||||
|
||||
foreach (var fileId in fileIdsToCheck)
|
||||
{
|
||||
var remainingIndexes = await fileIndexService.GetByFileIdAsync(fileId);
|
||||
if (remainingIndexes.Count != 0) continue;
|
||||
// No other indexes exist, safe to delete the file
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId.ToString());
|
||||
if (file == null) continue;
|
||||
db.Files.Remove(file);
|
||||
logger.LogInformation("Deleted orphaned file {FileId} after clearing path {Path}", fileId, path);
|
||||
}
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
Message = deleteFiles
|
||||
? $"Cleared {removedCount} file indexes from path and deleted orphaned files"
|
||||
: $"Cleared {removedCount} file indexes from path",
|
||||
Path = path,
|
||||
RemovedCount = removedCount,
|
||||
FilesDeleted = deleteFiles
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to clear path {Path} for account {AccountId}", path, accountId);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "CLEAR_PATH_FAILED",
|
||||
Message = "Failed to clear path",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new file index (useful for adding existing files to a path)
|
||||
/// </summary>
|
||||
/// <param name="request">The create index request</param>
|
||||
/// <returns>The created file index</returns>
|
||||
[HttpPost("create")]
|
||||
public async Task<IActionResult> CreateFileIndex([FromBody] CreateFileIndexRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
// Verify the file exists and belongs to the user
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == request.FileId);
|
||||
if (file == null)
|
||||
return new ObjectResult(ApiError.NotFound("File")) { StatusCode = 404 };
|
||||
|
||||
if (file.AccountId != accountId)
|
||||
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
|
||||
|
||||
// Check if index already exists for this file and path
|
||||
var existingIndex = await db.FileIndexes
|
||||
.FirstOrDefaultAsync(fi =>
|
||||
fi.FileId == request.FileId && fi.Path == request.Path && fi.AccountId == accountId);
|
||||
|
||||
if (existingIndex != null)
|
||||
return new ObjectResult(ApiError.Validation(new Dictionary<string, string[]>
|
||||
{
|
||||
{ "fileId", ["File index already exists for this path"] }
|
||||
})) { StatusCode = 400 };
|
||||
|
||||
var fileIndex = await fileIndexService.CreateAsync(request.Path, request.FileId, accountId);
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
IndexId = fileIndex.Id,
|
||||
fileIndex.FileId,
|
||||
fileIndex.Path,
|
||||
Message = "File index created successfully"
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to create file index for file {FileId} at path {Path} for account {AccountId}",
|
||||
request.FileId, request.Path, accountId);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "CREATE_INDEX_FAILED",
|
||||
Message = "Failed to create file index",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Searches for files by name or metadata
|
||||
/// </summary>
|
||||
/// <param name="query">The search query</param>
|
||||
/// <param name="path">Optional path to limit search to</param>
|
||||
/// <returns>Matching files</returns>
|
||||
[HttpGet("search")]
|
||||
public async Task<IActionResult> SearchFiles([FromQuery] string query, [FromQuery] string? path = null)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
try
|
||||
{
|
||||
// Build the query with all conditions at once
|
||||
var searchTerm = query.ToLower();
|
||||
var fileIndexes = await db.FileIndexes
|
||||
.Where(fi => fi.AccountId == accountId)
|
||||
.Include(fi => fi.File)
|
||||
.Where(fi =>
|
||||
(string.IsNullOrEmpty(path) || fi.Path == FileIndexService.NormalizePath(path)) &&
|
||||
(fi.File.Name.ToLower().Contains(searchTerm) ||
|
||||
(fi.File.Description != null && fi.File.Description.ToLower().Contains(searchTerm)) ||
|
||||
(fi.File.MimeType != null && fi.File.MimeType.ToLower().Contains(searchTerm))))
|
||||
.ToListAsync();
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
Query = query,
|
||||
Path = path,
|
||||
Results = fileIndexes,
|
||||
TotalCount = fileIndexes.Count()
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to search files for account {AccountId} with query {Query}", accountId, query);
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "SEARCH_FAILED",
|
||||
Message = "Failed to search files",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public class MoveFileRequest
|
||||
{
|
||||
public string NewPath { get; set; } = null!;
|
||||
}
|
||||
|
||||
public class CreateFileIndexRequest
|
||||
{
|
||||
[MaxLength(32)] public string FileId { get; set; } = null!;
|
||||
public string Path { get; set; } = null!;
|
||||
}
|
||||
197 DysonNetwork.Drive/Index/FileIndexService.cs (Normal file)
@@ -0,0 +1,197 @@
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Drive.Index;
|
||||
|
||||
public class FileIndexService(AppDatabase db)
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a new file index entry
|
||||
/// </summary>
|
||||
/// <param name="path">The parent folder path with a trailing slash</param>
|
||||
/// <param name="fileId">The file ID</param>
|
||||
/// <param name="accountId">The account ID</param>
|
||||
/// <returns>The created file index</returns>
|
||||
public async Task<SnCloudFileIndex> CreateAsync(string path, string fileId, Guid accountId)
|
||||
{
|
||||
// Ensure a path has a trailing slash and is query-safe
|
||||
var normalizedPath = NormalizePath(path);
|
||||
|
||||
// Check if a file with the same name already exists in the same path for this account
|
||||
var existingFileIndex = await db.FileIndexes
|
||||
.FirstOrDefaultAsync(fi => fi.AccountId == accountId && fi.Path == normalizedPath && fi.FileId == fileId);
|
||||
|
||||
if (existingFileIndex != null)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"A file with ID '{fileId}' already exists in path '{normalizedPath}' for account '{accountId}'");
|
||||
}
|
||||
|
||||
var fileIndex = new SnCloudFileIndex
|
||||
{
|
||||
Path = normalizedPath,
|
||||
FileId = fileId,
|
||||
AccountId = accountId
|
||||
};
|
||||
|
||||
db.FileIndexes.Add(fileIndex);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return fileIndex;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates an existing file index entry by removing the old one and creating a new one
|
||||
/// </summary>
|
||||
/// <param name="id">The file index ID</param>
|
||||
/// <param name="newPath">The new parent folder path with trailing slash</param>
|
||||
/// <returns>The updated file index</returns>
|
||||
public async Task<SnCloudFileIndex?> UpdateAsync(Guid id, string newPath)
|
||||
{
|
||||
var fileIndex = await db.FileIndexes.FindAsync(id);
|
||||
if (fileIndex == null)
|
||||
return null;
|
||||
|
||||
// Since properties are init-only, we need to remove the old index and create a new one
|
||||
db.FileIndexes.Remove(fileIndex);
|
||||
|
||||
var newFileIndex = new SnCloudFileIndex
|
||||
{
|
||||
Path = NormalizePath(newPath),
|
||||
FileId = fileIndex.FileId,
|
||||
AccountId = fileIndex.AccountId
|
||||
};
|
||||
|
||||
db.FileIndexes.Add(newFileIndex);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return newFileIndex;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Removes a file index entry by ID
|
||||
/// </summary>
|
||||
/// <param name="id">The file index ID</param>
|
||||
/// <returns>True if the index was found and removed, false otherwise</returns>
|
||||
public async Task<bool> RemoveAsync(Guid id)
|
||||
{
|
||||
var fileIndex = await db.FileIndexes.FindAsync(id);
|
||||
if (fileIndex == null)
|
||||
return false;
|
||||
|
||||
db.FileIndexes.Remove(fileIndex);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Removes file index entries by file ID
|
||||
/// </summary>
|
||||
/// <param name="fileId">The file ID</param>
|
||||
/// <returns>The number of indexes removed</returns>
|
||||
public async Task<int> RemoveByFileIdAsync(string fileId)
|
||||
{
|
||||
var indexes = await db.FileIndexes
|
||||
.Where(fi => fi.FileId == fileId)
|
||||
.ToListAsync();
|
||||
|
||||
if (indexes.Count == 0)
|
||||
return 0;
|
||||
|
||||
db.FileIndexes.RemoveRange(indexes);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return indexes.Count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Removes file index entries by account ID and path
|
||||
/// </summary>
|
||||
/// <param name="accountId">The account ID</param>
|
||||
/// <param name="path">The parent folder path</param>
|
||||
/// <returns>The number of indexes removed</returns>
|
||||
public async Task<int> RemoveByPathAsync(Guid accountId, string path)
|
||||
{
|
||||
var normalizedPath = NormalizePath(path);
|
||||
|
||||
var indexes = await db.FileIndexes
|
||||
.Where(fi => fi.AccountId == accountId && fi.Path == normalizedPath)
|
||||
.ToListAsync();
|
||||
|
||||
if (!indexes.Any())
|
||||
return 0;
|
||||
|
||||
db.FileIndexes.RemoveRange(indexes);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
return indexes.Count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets file indexes by account ID and path
|
||||
/// </summary>
|
||||
/// <param name="accountId">The account ID</param>
|
||||
/// <param name="path">The parent folder path</param>
|
||||
/// <returns>List of file indexes</returns>
|
||||
public async Task<List<SnCloudFileIndex>> GetByPathAsync(Guid accountId, string path)
|
||||
{
|
||||
var normalizedPath = NormalizePath(path);
|
||||
|
||||
return await db.FileIndexes
|
||||
.Where(fi => fi.AccountId == accountId && fi.Path == normalizedPath)
|
||||
.Include(fi => fi.File)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets file indexes by file ID
|
||||
/// </summary>
|
||||
/// <param name="fileId">The file ID</param>
|
||||
/// <returns>List of file indexes</returns>
|
||||
public async Task<List<SnCloudFileIndex>> GetByFileIdAsync(string fileId)
|
||||
{
|
||||
return await db.FileIndexes
|
||||
.Where(fi => fi.FileId == fileId)
|
||||
.Include(fi => fi.File)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all file indexes for an account
|
||||
/// </summary>
|
||||
/// <param name="accountId">The account ID</param>
|
||||
/// <returns>List of file indexes</returns>
|
||||
public async Task<List<SnCloudFileIndex>> GetByAccountIdAsync(Guid accountId)
|
||||
{
|
||||
return await db.FileIndexes
|
||||
.Where(fi => fi.AccountId == accountId)
|
||||
.Include(fi => fi.File)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes the path to ensure it has a trailing slash and is query-safe
|
||||
/// </summary>
|
||||
/// <param name="path">The original path</param>
|
||||
/// <returns>The normalized path</returns>
|
||||
public static string NormalizePath(string path)
|
||||
{
|
||||
if (string.IsNullOrEmpty(path))
|
||||
return "/";
|
||||
|
||||
// Ensure the path starts with a slash
|
||||
if (!path.StartsWith('/'))
|
||||
path = "/" + path;
|
||||
|
||||
// Ensure the path ends with a slash (unless it's just the root)
|
||||
if (path != "/" && !path.EndsWith('/'))
|
||||
path += "/";
|
||||
|
||||
// Make path query-safe by removing problematic characters
|
||||
// This is a basic implementation - you might want to add more robust validation
|
||||
path = path.Replace("%", "").Replace("'", "").Replace("\"", "");
|
||||
|
||||
return path;
|
||||
}
|
||||
}
|
||||
341 DysonNetwork.Drive/Index/README.md (Normal file)
@@ -0,0 +1,341 @@
|
||||
# File Indexing System Documentation
|
||||
|
||||
## Overview
|
||||
|
||||
The File Indexing System provides a hierarchical file organization layer on top of the existing file storage system in DysonNetwork Drive. It allows users to organize their files in folders and paths while maintaining the underlying file storage capabilities.
|
||||
|
||||
When accessing these endpoints through the gateway, replace `/api` with `/drive` in the path. All argument names are transformed into snake_case by the gateway; for example, `GET /api/index/clear-path?deleteFiles=true` becomes `GET /drive/index/clear-path?delete_files=true`.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Core Components
|
||||
|
||||
1. **SnCloudFileIndex Model** - Represents the file-to-path mapping
|
||||
2. **FileIndexService** - Business logic for file index operations
|
||||
3. **FileIndexController** - REST API endpoints for file management
|
||||
4. **FileUploadController Integration** - Automatic index creation during upload
|
||||
|
||||
### Database Schema
|
||||
|
||||
```sql
|
||||
-- File Indexes table
|
||||
CREATE TABLE "FileIndexes" (
|
||||
"Id" uuid NOT NULL DEFAULT gen_random_uuid(),
|
||||
"Path" character varying(8192) NOT NULL,
|
||||
"FileId" uuid NOT NULL,
|
||||
"AccountId" uuid NOT NULL,
|
||||
"CreatedAt" timestamp with time zone NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
"UpdatedAt" timestamp with time zone NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
CONSTRAINT "PK_FileIndexes" PRIMARY KEY ("Id"),
|
||||
CONSTRAINT "FK_FileIndexes_Files_FileId" FOREIGN KEY ("FileId") REFERENCES "Files" ("Id") ON DELETE CASCADE,
|
||||
INDEX "IX_FileIndexes_Path_AccountId" ("Path", "AccountId")
|
||||
);
|
||||
```
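
For reference, the entity used by the service and controller looks roughly like this. This is only a sketch inferred from how `SnCloudFileIndex` is used in this change; the real class lives in `DysonNetwork.Shared.Models` and may differ in detail.

```csharp
// Sketch only: property shape inferred from usage in FileIndexService and FileIndexController.
public class SnCloudFileIndex : ModelBase
{
    public Guid Id { get; init; } = Guid.NewGuid();
    public string Path { get; init; } = null!;    // normalized folder path, e.g. "/documents/"
    public string FileId { get; init; } = null!;  // SnCloudFile.Id (max length 32)
    public Guid AccountId { get; init; }
    public SnCloudFile File { get; set; } = null!; // navigation property
}
```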
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### Browse Files
|
||||
**GET** `/api/index/browse?path=/documents/`
|
||||
|
||||
Browse files in a specific path.
|
||||
|
||||
**Query Parameters:**
|
||||
- `path` (optional, default: "/") - The path to browse
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"path": "/documents/",
|
||||
"files": [
|
||||
{
|
||||
"id": "guid",
|
||||
"path": "/documents/",
|
||||
"fileId": "guid",
|
||||
"accountId": "guid",
|
||||
"createdAt": "2024-01-01T00:00:00Z",
|
||||
"updatedAt": "2024-01-01T00:00:00Z",
|
||||
"file": {
|
||||
"id": "string",
|
||||
"name": "document.pdf",
|
||||
"size": 1024,
|
||||
"mimeType": "application/pdf",
|
||||
"hash": "sha256-hash",
|
||||
"uploadedAt": "2024-01-01T00:00:00Z",
|
||||
"expiredAt": null,
|
||||
"hasCompression": false,
|
||||
"hasThumbnail": true,
|
||||
"isEncrypted": false,
|
||||
"description": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"totalCount": 1
|
||||
}
|
||||
```
|
||||
|
||||
### Get All Files
|
||||
**GET** `/api/index/all`
|
||||
|
||||
Get all files for the current user across all paths.
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"files": [
|
||||
// Same structure as browse endpoint
|
||||
],
|
||||
"totalCount": 10
|
||||
}
|
||||
```
|
||||
|
||||
### Move File
|
||||
**POST** `/api/index/move/{indexId}`
|
||||
|
||||
Move a file to a new path.
|
||||
|
||||
**Path Parameters:**
|
||||
- `indexId` - The file index ID
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"newPath": "/archived/"
|
||||
}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"fileId": "guid",
|
||||
"indexId": "guid",
|
||||
"oldPath": "/documents/",
|
||||
"newPath": "/archived/",
|
||||
"message": "File moved successfully"
|
||||
}
|
||||
```
|
||||
|
||||
### Remove File Index
|
||||
**DELETE** `/api/index/remove/{indexId}?deleteFile=false`
|
||||
|
||||
Remove a file index. Optionally delete the actual file data.
|
||||
|
||||
**Path Parameters:**
|
||||
- `indexId` - The file index ID
|
||||
|
||||
**Query Parameters:**
|
||||
- `deleteFile` (optional, default: false) - Whether to also delete the file data
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"message": "File index removed successfully",
|
||||
"fileId": "guid",
|
||||
"fileName": "document.pdf",
|
||||
"path": "/documents/",
|
||||
"fileDataDeleted": false
|
||||
}
|
||||
```
|
||||
|
||||
### Clear Path
|
||||
**DELETE** `/api/index/clear-path?path=/temp/&deleteFiles=false`
|
||||
|
||||
Remove all file indexes in a specific path.
|
||||
|
||||
**Query Parameters:**
|
||||
- `path` (optional, default: "/") - The path to clear
|
||||
- `deleteFiles` (optional, default: false) - Whether to also delete orphaned files
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"message": "Cleared 5 file indexes from path",
|
||||
"path": "/temp/",
|
||||
"removedCount": 5,
|
||||
"filesDeleted": false
|
||||
}
|
||||
```
|
||||
|
||||
### Create File Index
|
||||
**POST** `/api/index/create`
|
||||
|
||||
Create a new file index for an existing file.
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"fileId": "guid",
|
||||
"path": "/documents/"
|
||||
}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"indexId": "guid",
|
||||
"fileId": "guid",
|
||||
"path": "/documents/",
|
||||
"message": "File index created successfully"
|
||||
}
|
||||
```
|
||||
|
||||
### Search Files
|
||||
**GET** `/api/index/search?query=report&path=/documents/`
|
||||
|
||||
Search for files by name or metadata.
|
||||
|
||||
**Query Parameters:**
|
||||
- `query` (required) - The search query
|
||||
- `path` (optional) - Limit search to specific path
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"query": "report",
|
||||
"path": "/documents/",
|
||||
"results": [
|
||||
// Same structure as browse endpoint
|
||||
],
|
||||
"totalCount": 3
|
||||
}
|
||||
```
|
||||
|
||||
## Path Normalization
|
||||
|
||||
The system automatically normalizes paths to ensure consistency (a code sketch follows this list):
|
||||
|
||||
- **Trailing Slash**: All paths end with `/`
|
||||
- **Root Path**: User home folder is represented as `/`
|
||||
- **Query Safety**: Paths are validated to avoid SQL injection
|
||||
- **Examples**:
|
||||
- `/documents/` ✅ (correct)
|
||||
- `/documents` → `/documents/` ✅ (normalized)
|
||||
- `/documents/reports/` ✅ (correct)
|
||||
- `/documents/reports` → `/documents/reports/` ✅ (normalized)
|
||||
|
||||
## File Upload Integration
|
||||
|
||||
When uploading files with the `FileUploadController`, you can specify a path to automatically create file indexes:
|
||||
|
||||
**Create Upload Task Request:**
|
||||
```json
|
||||
{
|
||||
"fileName": "document.pdf",
|
||||
"fileSize": 1024,
|
||||
"contentType": "application/pdf",
|
||||
"hash": "sha256-hash",
|
||||
"path": "/documents/" // New field for file indexing
|
||||
}
|
||||
```
|
||||
|
||||
The system will automatically create a file index when the upload completes successfully.
|
||||
|
||||
## Service Methods
|
||||
|
||||
### FileIndexService
|
||||
|
||||
```csharp
|
||||
public class FileIndexService
|
||||
{
|
||||
// Create a new file index
|
||||
Task<SnCloudFileIndex> CreateAsync(string path, string fileId, Guid accountId);
|
||||
|
||||
// Get files by path
|
||||
Task<List<SnCloudFileIndex>> GetByPathAsync(Guid accountId, string path);
|
||||
|
||||
// Get all files for account
|
||||
Task<List<SnCloudFileIndex>> GetByAccountIdAsync(Guid accountId);
|
||||
|
||||
// Get indexes for specific file
|
||||
Task<List<SnCloudFileIndex>> GetByFileIdAsync(string fileId);
|
||||
|
||||
// Move file to new path
|
||||
Task<SnCloudFileIndex?> UpdateAsync(Guid indexId, string newPath);
|
||||
|
||||
// Remove file index
|
||||
Task<bool> RemoveAsync(Guid indexId);
|
||||
|
||||
// Remove all indexes in path
|
||||
Task<int> RemoveByPathAsync(Guid accountId, string path);
|
||||
|
||||
// Normalize path format
|
||||
public static string NormalizePath(string path);
|
||||
}
|
||||
```
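
A short usage sketch: the `ReportArchiver` class below is hypothetical and only assumes that `FileIndexService` is resolvable from the DI container.

```csharp
using DysonNetwork.Drive.Index;

// Hypothetical consumer; assumes FileIndexService is registered in dependency injection.
public class ReportArchiver(FileIndexService fileIndexService)
{
    public async Task ArchiveAsync(Guid accountId, string fileId)
    {
        // Index an existing file under /archived/ for this account
        var index = await fileIndexService.CreateAsync("/archived/", fileId, accountId);

        // List everything the account now has under /archived/
        var archived = await fileIndexService.GetByPathAsync(accountId, "/archived/");
        Console.WriteLine($"{archived.Count} file(s) under /archived/, latest index {index.Id}");
    }
}
```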
|
||||
|
||||
## Error Handling
|
||||
|
||||
The API returns appropriate HTTP status codes and error messages:
|
||||
|
||||
- **400 Bad Request**: Invalid input parameters
|
||||
- **401 Unauthorized**: User not authenticated
|
||||
- **403 Forbidden**: User lacks permission
|
||||
- **404 Not Found**: Resource not found
|
||||
- **500 Internal Server Error**: Server-side error
|
||||
|
||||
**Error Response Format:**
|
||||
```json
|
||||
{
|
||||
"code": "BROWSE_FAILED",
|
||||
"message": "Failed to browse files",
|
||||
"status": 500
|
||||
}
|
||||
```
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Ownership Verification**: All operations verify that the user owns the file indexes
|
||||
2. **Path Validation**: Paths are normalized and validated
|
||||
3. **Cascade Deletion**: File indexes are automatically removed when files are deleted
|
||||
4. **Safe File Deletion**: Files are only deleted when no other indexes reference them
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Upload File to Specific Path
|
||||
```bash
|
||||
# Create upload task with path
|
||||
curl -X POST /api/files/upload/create \
|
||||
-H "Authorization: Bearer {token}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"fileName": "report.pdf",
|
||||
"fileSize": 2048,
|
||||
"contentType": "application/pdf",
|
||||
"path": "/documents/reports/"
|
||||
}'
|
||||
```
|
||||
|
||||
### Browse Files
|
||||
```bash
|
||||
curl -X GET "/api/index/browse?path=/documents/reports/" \
|
||||
-H "Authorization: Bearer {token}"
|
||||
```
|
||||
|
||||
### Move File
|
||||
```bash
|
||||
curl -X POST "/api/index/move/{indexId}" \
|
||||
-H "Authorization: Bearer {token}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"newPath": "/archived/"}'
|
||||
```
|
||||
|
||||
### Search Files
|
||||
```bash
|
||||
curl -X GET "/api/index/search?query=invoice&path=/documents/" \
|
||||
-H "Authorization: Bearer {token}"
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use Trailing Slashes**: Always include trailing slashes in paths
|
||||
2. **Organize Hierarchically**: Use meaningful folder structures
|
||||
3. **Search Efficiently**: Use the search endpoint instead of client-side filtering
|
||||
4. **Clean Up**: Use the clear-path endpoint for temporary directories
|
||||
5. **Monitor Usage**: Check total file counts for quota management
|
||||
|
||||
## Integration Notes
|
||||
|
||||
- The file indexing system works alongside the existing file storage
|
||||
- Files can exist in multiple paths (hard links); a sketch follows these notes
|
||||
- File deletion is optional and only removes data when safe
|
||||
- The system maintains referential integrity between files and indexes
|
||||
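A minimal sketch of the multi-path ("hard link") behaviour mentioned above. The helper below is hypothetical; it assumes a `FileIndexService` instance, an account ID, and an existing file ID are available.

```csharp
using DysonNetwork.Drive.Index;

// Hypothetical helper demonstrating multi-path indexing of a single file.
public static class HardLinkDemo
{
    public static async Task LinkToTwoPathsAsync(
        FileIndexService fileIndexService, Guid accountId, string fileId)
    {
        // The same file can be indexed under two different paths ("hard links").
        var docs = await fileIndexService.CreateAsync("/documents/", fileId, accountId);
        var shared = await fileIndexService.CreateAsync("/shared/", fileId, accountId);

        // Removing one index leaves the other index and the underlying file intact.
        await fileIndexService.RemoveAsync(docs.Id);
        var remaining = await fileIndexService.GetByFileIdAsync(fileId); // still lists the /shared/ entry
    }
}
```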
567 DysonNetwork.Drive/Migrations/20251108191230_AddPersistentTask.Designer.cs (generated, Normal file)
@@ -0,0 +1,567 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Drive;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Drive.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251108191230_AddPersistentTask")]
|
||||
partial class AddPersistentTask
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.10")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Billing.QuotaRecord", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<long>("Quota")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("quota");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_quota_records");
|
||||
|
||||
b.ToTable("quota_records", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant?>("CompletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("completed_at");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<string>("Discriminator")
|
||||
.IsRequired()
|
||||
.HasMaxLength(21)
|
||||
.HasColumnType("character varying(21)")
|
||||
.HasColumnName("discriminator");
|
||||
|
||||
b.Property<string>("ErrorMessage")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("error_message");
|
||||
|
||||
b.Property<long?>("EstimatedDurationSeconds")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("estimated_duration_seconds");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Instant>("LastActivity")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("last_activity");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Parameters")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parameters");
|
||||
|
||||
b.Property<int>("Priority")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("priority");
|
||||
|
||||
b.Property<double>("Progress")
|
||||
.HasColumnType("double precision")
|
||||
.HasColumnName("progress");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Results")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("results");
|
||||
|
||||
b.Property<Instant?>("StartedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("started_at");
|
||||
|
||||
b.Property<int>("Status")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("status");
|
||||
|
||||
b.Property<string>("TaskId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(64)
|
||||
.HasColumnType("character varying(64)")
|
||||
.HasColumnName("task_id");
|
||||
|
||||
b.Property<int>("Type")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("type");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_tasks");
|
||||
|
||||
b.ToTable("tasks", (string)null);
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentTask");
|
||||
|
||||
b.UseTphMappingStrategy();
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("ResourceId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("resource_id");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<string>("Usage")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("usage");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_references");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_references_file_id");
|
||||
|
||||
b.ToTable("file_references", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid?>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<BillingConfig>("BillingConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("billing_config");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<bool>("IsHidden")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_hidden");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<PolicyConfig>("PolicyConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("policy_config");
|
||||
|
||||
b.Property<RemoteStorageConfig>("StorageConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("storage_config");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_pools");
|
||||
|
||||
b.ToTable("pools", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Property<string>("Id")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("FileMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("file_meta");
|
||||
|
||||
b.Property<bool>("HasCompression")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_compression");
|
||||
|
||||
b.Property<bool>("HasThumbnail")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_thumbnail");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<bool>("IsEncrypted")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_encrypted");
|
||||
|
||||
b.Property<bool>("IsMarkedRecycle")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_marked_recycle");
|
||||
|
||||
b.Property<string>("MimeType")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("mime_type");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Guid?>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("sensitive_marks");
|
||||
|
||||
b.Property<long>("Size")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("size");
|
||||
|
||||
b.Property<string>("StorageId")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("storage_id");
|
||||
|
||||
b.Property<string>("StorageUrl")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("storage_url");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<Instant?>("UploadedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("uploaded_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("UserMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("user_meta");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_files");
|
||||
|
||||
b.HasIndex("BundleId")
|
||||
.HasDatabaseName("ix_files_bundle_id");
|
||||
|
||||
b.HasIndex("PoolId")
|
||||
.HasDatabaseName("ix_files_pool_id");
|
||||
|
||||
b.ToTable("files", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<string>("Passcode")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("passcode");
|
||||
|
||||
b.Property<string>("Slug")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("slug");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_bundles");
|
||||
|
||||
b.HasIndex("Slug")
|
||||
.IsUnique()
|
||||
.HasDatabaseName("ix_bundles_slug");
|
||||
|
||||
b.ToTable("bundles", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
|
||||
{
|
||||
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<long>("ChunkSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("chunk_size");
|
||||
|
||||
b.Property<int>("ChunksCount")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_count");
|
||||
|
||||
b.Property<int>("ChunksUploaded")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_uploaded");
|
||||
|
||||
b.Property<string>("ContentType")
|
||||
.IsRequired()
|
||||
.HasMaxLength(128)
|
||||
.HasColumnType("character varying(128)")
|
||||
.HasColumnName("content_type");
|
||||
|
||||
b.Property<string>("EncryptPassword")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("encrypt_password");
|
||||
|
||||
b.Property<string>("FileName")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("file_name");
|
||||
|
||||
b.Property<long>("FileSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("file_size");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<Guid>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.PrimitiveCollection<List<int>>("UploadedChunks")
|
||||
.IsRequired()
|
||||
.HasColumnType("integer[]")
|
||||
.HasColumnName("uploaded_chunks");
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentUploadTask");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("References")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_references_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
|
||||
.WithMany("Files")
|
||||
.HasForeignKey("BundleId")
|
||||
.HasConstraintName("fk_files_bundles_bundle_id");
|
||||
|
||||
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
|
||||
.WithMany()
|
||||
.HasForeignKey("PoolId")
|
||||
.HasConstraintName("fk_files_pools_pool_id");
|
||||
|
||||
b.Navigation("Bundle");
|
||||
|
||||
b.Navigation("Pool");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Navigation("References");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Navigation("Files");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
using System;
using System.Collections.Generic;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;

#nullable disable

namespace DysonNetwork.Drive.Migrations
{
    /// <inheritdoc />
    public partial class AddPersistentTask : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.CreateTable(
                name: "tasks",
                columns: table => new
                {
                    id = table.Column<Guid>(type: "uuid", nullable: false),
                    task_id = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
                    name = table.Column<string>(type: "character varying(256)", maxLength: 256, nullable: false),
                    description = table.Column<string>(type: "character varying(1024)", maxLength: 1024, nullable: true),
                    type = table.Column<int>(type: "integer", nullable: false),
                    status = table.Column<int>(type: "integer", nullable: false),
                    account_id = table.Column<Guid>(type: "uuid", nullable: false),
                    progress = table.Column<double>(type: "double precision", nullable: false),
                    parameters = table.Column<Dictionary<string, object>>(type: "jsonb", nullable: false),
                    results = table.Column<Dictionary<string, object>>(type: "jsonb", nullable: false),
                    error_message = table.Column<string>(type: "character varying(1024)", maxLength: 1024, nullable: true),
                    started_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
                    completed_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
                    expired_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
                    last_activity = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    priority = table.Column<int>(type: "integer", nullable: false),
                    estimated_duration_seconds = table.Column<long>(type: "bigint", nullable: true),
                    discriminator = table.Column<string>(type: "character varying(21)", maxLength: 21, nullable: false),
                    file_name = table.Column<string>(type: "character varying(256)", maxLength: 256, nullable: true),
                    file_size = table.Column<long>(type: "bigint", nullable: true),
                    content_type = table.Column<string>(type: "character varying(128)", maxLength: 128, nullable: true),
                    chunk_size = table.Column<long>(type: "bigint", nullable: true),
                    chunks_count = table.Column<int>(type: "integer", nullable: true),
                    chunks_uploaded = table.Column<int>(type: "integer", nullable: true),
                    pool_id = table.Column<Guid>(type: "uuid", nullable: true),
                    bundle_id = table.Column<Guid>(type: "uuid", nullable: true),
                    encrypt_password = table.Column<string>(type: "character varying(256)", maxLength: 256, nullable: true),
                    hash = table.Column<string>(type: "text", nullable: true),
                    uploaded_chunks = table.Column<List<int>>(type: "integer[]", nullable: true),
                    created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    updated_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    deleted_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("pk_tasks", x => x.id);
                });
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable(
                name: "tasks");
        }
    }
}
|
||||
632 | DysonNetwork.Drive/Migrations/20251112135535_AddFileIndex.Designer.cs | generated | Normal file
@@ -0,0 +1,632 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Drive;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Drive.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251112135535_AddFileIndex")]
|
||||
partial class AddFileIndex
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.10")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Billing.QuotaRecord", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<long>("Quota")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("quota");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_quota_records");
|
||||
|
||||
b.ToTable("quota_records", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant?>("CompletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("completed_at");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<string>("Discriminator")
|
||||
.IsRequired()
|
||||
.HasMaxLength(21)
|
||||
.HasColumnType("character varying(21)")
|
||||
.HasColumnName("discriminator");
|
||||
|
||||
b.Property<string>("ErrorMessage")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("error_message");
|
||||
|
||||
b.Property<long?>("EstimatedDurationSeconds")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("estimated_duration_seconds");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Instant>("LastActivity")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("last_activity");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Parameters")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parameters");
|
||||
|
||||
b.Property<int>("Priority")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("priority");
|
||||
|
||||
b.Property<double>("Progress")
|
||||
.HasColumnType("double precision")
|
||||
.HasColumnName("progress");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Results")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("results");
|
||||
|
||||
b.Property<Instant?>("StartedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("started_at");
|
||||
|
||||
b.Property<int>("Status")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("status");
|
||||
|
||||
b.Property<string>("TaskId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(64)
|
||||
.HasColumnType("character varying(64)")
|
||||
.HasColumnName("task_id");
|
||||
|
||||
b.Property<int>("Type")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("type");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_tasks");
|
||||
|
||||
b.ToTable("tasks", (string)null);
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentTask");
|
||||
|
||||
b.UseTphMappingStrategy();
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("ResourceId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("resource_id");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<string>("Usage")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("usage");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_references");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_references_file_id");
|
||||
|
||||
b.ToTable("file_references", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid?>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<BillingConfig>("BillingConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("billing_config");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<bool>("IsHidden")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_hidden");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<PolicyConfig>("PolicyConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("policy_config");
|
||||
|
||||
b.Property<RemoteStorageConfig>("StorageConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("storage_config");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_pools");
|
||||
|
||||
b.ToTable("pools", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Property<string>("Id")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("FileMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("file_meta");
|
||||
|
||||
b.Property<bool>("HasCompression")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_compression");
|
||||
|
||||
b.Property<bool>("HasThumbnail")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_thumbnail");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<bool>("IsEncrypted")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_encrypted");
|
||||
|
||||
b.Property<bool>("IsMarkedRecycle")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_marked_recycle");
|
||||
|
||||
b.Property<string>("MimeType")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("mime_type");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Guid?>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("sensitive_marks");
|
||||
|
||||
b.Property<long>("Size")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("size");
|
||||
|
||||
b.Property<string>("StorageId")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("storage_id");
|
||||
|
||||
b.Property<string>("StorageUrl")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("storage_url");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<Instant?>("UploadedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("uploaded_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("UserMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("user_meta");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_files");
|
||||
|
||||
b.HasIndex("BundleId")
|
||||
.HasDatabaseName("ix_files_bundle_id");
|
||||
|
||||
b.HasIndex("PoolId")
|
||||
.HasDatabaseName("ix_files_pool_id");
|
||||
|
||||
b.ToTable("files", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_indexes");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_indexes_file_id");
|
||||
|
||||
b.HasIndex("Path", "AccountId")
|
||||
.HasDatabaseName("ix_file_indexes_path_account_id");
|
||||
|
||||
b.ToTable("file_indexes", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<string>("Passcode")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("passcode");
|
||||
|
||||
b.Property<string>("Slug")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("slug");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_bundles");
|
||||
|
||||
b.HasIndex("Slug")
|
||||
.IsUnique()
|
||||
.HasDatabaseName("ix_bundles_slug");
|
||||
|
||||
b.ToTable("bundles", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
|
||||
{
|
||||
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<long>("ChunkSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("chunk_size");
|
||||
|
||||
b.Property<int>("ChunksCount")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_count");
|
||||
|
||||
b.Property<int>("ChunksUploaded")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_uploaded");
|
||||
|
||||
b.Property<string>("ContentType")
|
||||
.IsRequired()
|
||||
.HasMaxLength(128)
|
||||
.HasColumnType("character varying(128)")
|
||||
.HasColumnName("content_type");
|
||||
|
||||
b.Property<string>("EncryptPassword")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("encrypt_password");
|
||||
|
||||
b.Property<string>("FileName")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("file_name");
|
||||
|
||||
b.Property<long>("FileSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("file_size");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Guid>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.PrimitiveCollection<List<int>>("UploadedChunks")
|
||||
.IsRequired()
|
||||
.HasColumnType("integer[]")
|
||||
.HasColumnName("uploaded_chunks");
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentUploadTask");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.CloudFileReference", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("References")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_references_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
|
||||
.WithMany("Files")
|
||||
.HasForeignKey("BundleId")
|
||||
.HasConstraintName("fk_files_bundles_bundle_id");
|
||||
|
||||
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
|
||||
.WithMany()
|
||||
.HasForeignKey("PoolId")
|
||||
.HasConstraintName("fk_files_pools_pool_id");
|
||||
|
||||
b.Navigation("Bundle");
|
||||
|
||||
b.Navigation("Pool");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("FileIndexes")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_indexes_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Navigation("FileIndexes");
|
||||
|
||||
b.Navigation("References");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Navigation("Files");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
66 | DysonNetwork.Drive/Migrations/20251112135535_AddFileIndex.cs | Normal file
@@ -0,0 +1,66 @@
|
||||
using System;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;

#nullable disable

namespace DysonNetwork.Drive.Migrations
{
    /// <inheritdoc />
    public partial class AddFileIndex : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.AddColumn<string>(
                name: "path",
                table: "tasks",
                type: "text",
                nullable: true);

            migrationBuilder.CreateTable(
                name: "file_indexes",
                columns: table => new
                {
                    id = table.Column<Guid>(type: "uuid", nullable: false),
                    path = table.Column<string>(type: "character varying(8192)", maxLength: 8192, nullable: false),
                    file_id = table.Column<string>(type: "character varying(32)", maxLength: 32, nullable: false),
                    account_id = table.Column<Guid>(type: "uuid", nullable: false),
                    created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    updated_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    deleted_at = table.Column<Instant>(type: "timestamp with time zone", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("pk_file_indexes", x => x.id);
                    table.ForeignKey(
                        name: "fk_file_indexes_files_file_id",
                        column: x => x.file_id,
                        principalTable: "files",
                        principalColumn: "id",
                        onDelete: ReferentialAction.Cascade);
                });

            migrationBuilder.CreateIndex(
                name: "ix_file_indexes_file_id",
                table: "file_indexes",
                column: "file_id");

            migrationBuilder.CreateIndex(
                name: "ix_file_indexes_path_account_id",
                table: "file_indexes",
                columns: new[] { "path", "account_id" });
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable(
                name: "file_indexes");

            migrationBuilder.DropColumn(
                name: "path",
                table: "tasks");
        }
    }
}
|
||||
560 | DysonNetwork.Drive/Migrations/20260101153809_RemoveUploadTask.Designer.cs | generated | Normal file
@@ -0,0 +1,560 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Drive;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Drive.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20260101153809_RemoveUploadTask")]
|
||||
partial class RemoveUploadTask
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "10.0.1")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Billing.QuotaRecord", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<long>("Quota")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("quota");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_quota_records");
|
||||
|
||||
b.ToTable("quota_records", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant?>("CompletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("completed_at");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<string>("ErrorMessage")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("error_message");
|
||||
|
||||
b.Property<long?>("EstimatedDurationSeconds")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("estimated_duration_seconds");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Instant>("LastActivity")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("last_activity");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Parameters")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parameters");
|
||||
|
||||
b.Property<int>("Priority")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("priority");
|
||||
|
||||
b.Property<double>("Progress")
|
||||
.HasColumnType("double precision")
|
||||
.HasColumnName("progress");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Results")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("results");
|
||||
|
||||
b.Property<Instant?>("StartedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("started_at");
|
||||
|
||||
b.Property<int>("Status")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("status");
|
||||
|
||||
b.Property<string>("TaskId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(64)
|
||||
.HasColumnType("character varying(64)")
|
||||
.HasColumnName("task_id");
|
||||
|
||||
b.Property<int>("Type")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("type");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_tasks");
|
||||
|
||||
b.ToTable("tasks", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid?>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<BillingConfig>("BillingConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("billing_config");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<bool>("IsHidden")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_hidden");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<PolicyConfig>("PolicyConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("policy_config");
|
||||
|
||||
b.Property<RemoteStorageConfig>("StorageConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("storage_config");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_pools");
|
||||
|
||||
b.ToTable("pools", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Property<string>("Id")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("FileMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("file_meta");
|
||||
|
||||
b.Property<bool>("HasCompression")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_compression");
|
||||
|
||||
b.Property<bool>("HasThumbnail")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_thumbnail");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<bool>("IsEncrypted")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_encrypted");
|
||||
|
||||
b.Property<bool>("IsMarkedRecycle")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_marked_recycle");
|
||||
|
||||
b.Property<string>("MimeType")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("mime_type");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Guid?>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.PrimitiveCollection<string>("SensitiveMarks")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("sensitive_marks");
|
||||
|
||||
b.Property<long>("Size")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("size");
|
||||
|
||||
b.Property<string>("StorageId")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("storage_id");
|
||||
|
||||
b.Property<string>("StorageUrl")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("storage_url");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<Instant?>("UploadedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("uploaded_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("UserMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("user_meta");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_files");
|
||||
|
||||
b.HasIndex("BundleId")
|
||||
.HasDatabaseName("ix_files_bundle_id");
|
||||
|
||||
b.HasIndex("PoolId")
|
||||
.HasDatabaseName("ix_files_pool_id");
|
||||
|
||||
b.ToTable("files", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_indexes");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_indexes_file_id");
|
||||
|
||||
b.HasIndex("Path", "AccountId")
|
||||
.HasDatabaseName("ix_file_indexes_path_account_id");
|
||||
|
||||
b.ToTable("file_indexes", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("ResourceId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("resource_id");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<string>("Usage")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("usage");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_references");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_references_file_id");
|
||||
|
||||
b.ToTable("file_references", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<string>("Passcode")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("passcode");
|
||||
|
||||
b.Property<string>("Slug")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("slug");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_bundles");
|
||||
|
||||
b.HasIndex("Slug")
|
||||
.IsUnique()
|
||||
.HasDatabaseName("ix_bundles_slug");
|
||||
|
||||
b.ToTable("bundles", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
|
||||
.WithMany("Files")
|
||||
.HasForeignKey("BundleId")
|
||||
.HasConstraintName("fk_files_bundles_bundle_id");
|
||||
|
||||
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
|
||||
.WithMany()
|
||||
.HasForeignKey("PoolId")
|
||||
.HasConstraintName("fk_files_pools_pool_id");
|
||||
|
||||
b.Navigation("Bundle");
|
||||
|
||||
b.Navigation("Pool");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("FileIndexes")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_indexes_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("References")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_references_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Navigation("FileIndexes");
|
||||
|
||||
b.Navigation("References");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Navigation("Files");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
155 | DysonNetwork.Drive/Migrations/20260101153809_RemoveUploadTask.cs | Normal file
@@ -0,0 +1,155 @@
|
||||
using System;
using System.Collections.Generic;
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace DysonNetwork.Drive.Migrations
{
    /// <inheritdoc />
    public partial class RemoveUploadTask : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropColumn(
                name: "bundle_id",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "chunk_size",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "chunks_count",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "chunks_uploaded",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "content_type",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "discriminator",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "encrypt_password",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "file_name",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "file_size",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "hash",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "path",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "pool_id",
                table: "tasks");

            migrationBuilder.DropColumn(
                name: "uploaded_chunks",
                table: "tasks");
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.AddColumn<Guid>(
                name: "bundle_id",
                table: "tasks",
                type: "uuid",
                nullable: true);

            migrationBuilder.AddColumn<long>(
                name: "chunk_size",
                table: "tasks",
                type: "bigint",
                nullable: true);

            migrationBuilder.AddColumn<int>(
                name: "chunks_count",
                table: "tasks",
                type: "integer",
                nullable: true);

            migrationBuilder.AddColumn<int>(
                name: "chunks_uploaded",
                table: "tasks",
                type: "integer",
                nullable: true);

            migrationBuilder.AddColumn<string>(
                name: "content_type",
                table: "tasks",
                type: "character varying(128)",
                maxLength: 128,
                nullable: true);

            migrationBuilder.AddColumn<string>(
                name: "discriminator",
                table: "tasks",
                type: "character varying(21)",
                maxLength: 21,
                nullable: false,
                defaultValue: "");

            migrationBuilder.AddColumn<string>(
                name: "encrypt_password",
                table: "tasks",
                type: "character varying(256)",
                maxLength: 256,
                nullable: true);

            migrationBuilder.AddColumn<string>(
                name: "file_name",
                table: "tasks",
                type: "character varying(256)",
                maxLength: 256,
                nullable: true);

            migrationBuilder.AddColumn<long>(
                name: "file_size",
                table: "tasks",
                type: "bigint",
                nullable: true);

            migrationBuilder.AddColumn<string>(
                name: "hash",
                table: "tasks",
                type: "text",
                nullable: true);

            migrationBuilder.AddColumn<string>(
                name: "path",
                table: "tasks",
                type: "text",
                nullable: true);

            migrationBuilder.AddColumn<Guid>(
                name: "pool_id",
                table: "tasks",
                type: "uuid",
                nullable: true);

            migrationBuilder.AddColumn<List<int>>(
                name: "uploaded_chunks",
                table: "tasks",
                type: "integer[]",
                nullable: true);
        }
    }
}
|
||||
632 | DysonNetwork.Drive/Migrations/20260101154612_RollbackRemoveUploadTask.Designer.cs | generated | Normal file
@@ -0,0 +1,632 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Drive;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Drive.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20260101154612_RollbackRemoveUploadTask")]
|
||||
partial class RollbackRemoveUploadTask
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "10.0.1")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Billing.QuotaRecord", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<long>("Quota")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("quota");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_quota_records");
|
||||
|
||||
b.ToTable("quota_records", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant?>("CompletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("completed_at");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<string>("Discriminator")
|
||||
.IsRequired()
|
||||
.HasMaxLength(21)
|
||||
.HasColumnType("character varying(21)")
|
||||
.HasColumnName("discriminator");
|
||||
|
||||
b.Property<string>("ErrorMessage")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("error_message");
|
||||
|
||||
b.Property<long?>("EstimatedDurationSeconds")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("estimated_duration_seconds");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Instant>("LastActivity")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("last_activity");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Parameters")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parameters");
|
||||
|
||||
b.Property<int>("Priority")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("priority");
|
||||
|
||||
b.Property<double>("Progress")
|
||||
.HasColumnType("double precision")
|
||||
.HasColumnName("progress");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Results")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("results");
|
||||
|
||||
b.Property<Instant?>("StartedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("started_at");
|
||||
|
||||
b.Property<int>("Status")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("status");
|
||||
|
||||
b.Property<string>("TaskId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(64)
|
||||
.HasColumnType("character varying(64)")
|
||||
.HasColumnName("task_id");
|
||||
|
||||
b.Property<int>("Type")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("type");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_tasks");
|
||||
|
||||
b.ToTable("tasks", (string)null);
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentTask");
|
||||
|
||||
b.UseTphMappingStrategy();
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid?>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<BillingConfig>("BillingConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("billing_config");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<bool>("IsHidden")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_hidden");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<PolicyConfig>("PolicyConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("policy_config");
|
||||
|
||||
b.Property<RemoteStorageConfig>("StorageConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("storage_config");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_pools");
|
||||
|
||||
b.ToTable("pools", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Property<string>("Id")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("FileMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("file_meta");
|
||||
|
||||
b.Property<bool>("HasCompression")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_compression");
|
||||
|
||||
b.Property<bool>("HasThumbnail")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("has_thumbnail");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<bool>("IsEncrypted")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_encrypted");
|
||||
|
||||
b.Property<bool>("IsMarkedRecycle")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_marked_recycle");
|
||||
|
||||
b.Property<string>("MimeType")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("mime_type");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Guid?>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.PrimitiveCollection<string>("SensitiveMarks")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("sensitive_marks");
|
||||
|
||||
b.Property<long>("Size")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("size");
|
||||
|
||||
b.Property<string>("StorageId")
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("storage_id");
|
||||
|
||||
b.Property<string>("StorageUrl")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("storage_url");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<Instant?>("UploadedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("uploaded_at");
|
||||
|
||||
b.Property<Dictionary<string, object>>("UserMeta")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("user_meta");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_files");
|
||||
|
||||
b.HasIndex("BundleId")
|
||||
.HasDatabaseName("ix_files_bundle_id");
|
||||
|
||||
b.HasIndex("PoolId")
|
||||
.HasDatabaseName("ix_files_pool_id");
|
||||
|
||||
b.ToTable("files", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_indexes");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_indexes_file_id");
|
||||
|
||||
b.HasIndex("Path", "AccountId")
|
||||
.HasDatabaseName("ix_file_indexes_path_account_id");
|
||||
|
||||
b.ToTable("file_indexes", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("ResourceId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("resource_id");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.Property<string>("Usage")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("usage");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_references");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_references_file_id");
|
||||
|
||||
b.ToTable("file_references", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<string>("Passcode")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("passcode");
|
||||
|
||||
b.Property<string>("Slug")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("slug");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_bundles");
|
||||
|
||||
b.HasIndex("Slug")
|
||||
.IsUnique()
|
||||
.HasDatabaseName("ix_bundles_slug");
|
||||
|
||||
b.ToTable("bundles", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
|
||||
{
|
||||
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<long>("ChunkSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("chunk_size");
|
||||
|
||||
b.Property<int>("ChunksCount")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_count");
|
||||
|
||||
b.Property<int>("ChunksUploaded")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_uploaded");
|
||||
|
||||
b.Property<string>("ContentType")
|
||||
.IsRequired()
|
||||
.HasMaxLength(128)
|
||||
.HasColumnType("character varying(128)")
|
||||
.HasColumnName("content_type");
|
||||
|
||||
b.Property<string>("EncryptPassword")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("encrypt_password");
|
||||
|
||||
b.Property<string>("FileName")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("file_name");
|
||||
|
||||
b.Property<long>("FileSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("file_size");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Guid>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.PrimitiveCollection<List<int>>("UploadedChunks")
|
||||
.IsRequired()
|
||||
.HasColumnType("integer[]")
|
||||
.HasColumnName("uploaded_chunks");
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentUploadTask");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
|
||||
.WithMany("Files")
|
||||
.HasForeignKey("BundleId")
|
||||
.HasConstraintName("fk_files_bundles_bundle_id");
|
||||
|
||||
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
|
||||
.WithMany()
|
||||
.HasForeignKey("PoolId")
|
||||
.HasConstraintName("fk_files_pools_pool_id");
|
||||
|
||||
b.Navigation("Bundle");
|
||||
|
||||
b.Navigation("Pool");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("FileIndexes")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_indexes_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("References")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_references_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Navigation("FileIndexes");
|
||||
|
||||
b.Navigation("References");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Navigation("Files");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,155 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Drive.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class RollbackRemoveUploadTask : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.AddColumn<Guid>(
|
||||
name: "bundle_id",
|
||||
table: "tasks",
|
||||
type: "uuid",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<long>(
|
||||
name: "chunk_size",
|
||||
table: "tasks",
|
||||
type: "bigint",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<int>(
|
||||
name: "chunks_count",
|
||||
table: "tasks",
|
||||
type: "integer",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<int>(
|
||||
name: "chunks_uploaded",
|
||||
table: "tasks",
|
||||
type: "integer",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "content_type",
|
||||
table: "tasks",
|
||||
type: "character varying(128)",
|
||||
maxLength: 128,
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "discriminator",
|
||||
table: "tasks",
|
||||
type: "character varying(21)",
|
||||
maxLength: 21,
|
||||
nullable: false,
|
||||
defaultValue: "");
|
||||
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "encrypt_password",
|
||||
table: "tasks",
|
||||
type: "character varying(256)",
|
||||
maxLength: 256,
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "file_name",
|
||||
table: "tasks",
|
||||
type: "character varying(256)",
|
||||
maxLength: 256,
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<long>(
|
||||
name: "file_size",
|
||||
table: "tasks",
|
||||
type: "bigint",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "hash",
|
||||
table: "tasks",
|
||||
type: "text",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "path",
|
||||
table: "tasks",
|
||||
type: "text",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<Guid>(
|
||||
name: "pool_id",
|
||||
table: "tasks",
|
||||
type: "uuid",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<List<int>>(
|
||||
name: "uploaded_chunks",
|
||||
table: "tasks",
|
||||
type: "integer[]",
|
||||
nullable: true);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropColumn(
|
||||
name: "bundle_id",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "chunk_size",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "chunks_count",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "chunks_uploaded",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "content_type",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "discriminator",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "encrypt_password",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "file_name",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "file_size",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "hash",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "path",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "pool_id",
|
||||
table: "tasks");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "uploaded_chunks",
|
||||
table: "tasks");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -20,7 +20,7 @@ namespace DysonNetwork.Drive.Migrations
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.7")
|
||||
.HasAnnotation("ProductVersion", "10.0.1")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
@@ -72,7 +72,174 @@ namespace DysonNetwork.Drive.Migrations
|
||||
b.ToTable("quota_records", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentTask", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant?>("CompletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("completed_at");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<string>("Discriminator")
|
||||
.IsRequired()
|
||||
.HasMaxLength(21)
|
||||
.HasColumnType("character varying(21)")
|
||||
.HasColumnName("discriminator");
|
||||
|
||||
b.Property<string>("ErrorMessage")
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("error_message");
|
||||
|
||||
b.Property<long?>("EstimatedDurationSeconds")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("estimated_duration_seconds");
|
||||
|
||||
b.Property<Instant?>("ExpiredAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("expired_at");
|
||||
|
||||
b.Property<Instant>("LastActivity")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("last_activity");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Parameters")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parameters");
|
||||
|
||||
b.Property<int>("Priority")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("priority");
|
||||
|
||||
b.Property<double>("Progress")
|
||||
.HasColumnType("double precision")
|
||||
.HasColumnName("progress");
|
||||
|
||||
b.Property<Dictionary<string, object>>("Results")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("results");
|
||||
|
||||
b.Property<Instant?>("StartedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("started_at");
|
||||
|
||||
b.Property<int>("Status")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("status");
|
||||
|
||||
b.Property<string>("TaskId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(64)
|
||||
.HasColumnType("character varying(64)")
|
||||
.HasColumnName("task_id");
|
||||
|
||||
b.Property<int>("Type")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("type");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_tasks");
|
||||
|
||||
b.ToTable("tasks", (string)null);
|
||||
|
||||
b.HasDiscriminator().HasValue("PersistentTask");
|
||||
|
||||
b.UseTphMappingStrategy();
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.FilePool", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid?>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<BillingConfig>("BillingConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("billing_config");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<bool>("IsHidden")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_hidden");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<PolicyConfig>("PolicyConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("policy_config");
|
||||
|
||||
b.Property<RemoteStorageConfig>("StorageConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("storage_config");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_pools");
|
||||
|
||||
b.ToTable("pools", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Property<string>("Id")
|
||||
.HasMaxLength(32)
|
||||
@@ -144,7 +311,7 @@ namespace DysonNetwork.Drive.Migrations
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
|
||||
b.PrimitiveCollection<string>("SensitiveMarks")
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("sensitive_marks");
|
||||
|
||||
@@ -186,7 +353,54 @@ namespace DysonNetwork.Drive.Migrations
|
||||
b.ToTable("files", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("FileId")
|
||||
.IsRequired()
|
||||
.HasMaxLength(32)
|
||||
.HasColumnType("character varying(32)")
|
||||
.HasColumnName("file_id");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_file_indexes");
|
||||
|
||||
b.HasIndex("FileId")
|
||||
.HasDatabaseName("ix_file_indexes_file_id");
|
||||
|
||||
b.HasIndex("Path", "AccountId")
|
||||
.HasDatabaseName("ix_file_indexes_path_account_id");
|
||||
|
||||
b.ToTable("file_indexes", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
@@ -236,7 +450,7 @@ namespace DysonNetwork.Drive.Migrations
|
||||
b.ToTable("file_references", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.FileBundle", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
@@ -295,74 +509,76 @@ namespace DysonNetwork.Drive.Migrations
|
||||
b.ToTable("bundles", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.FilePool", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.Model.PersistentUploadTask", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
b.HasBaseType("DysonNetwork.Drive.Storage.Model.PersistentTask");
|
||||
|
||||
b.Property<Guid?>("BundleId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
.HasColumnName("bundle_id");
|
||||
|
||||
b.Property<Guid?>("AccountId")
|
||||
b.Property<long>("ChunkSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("chunk_size");
|
||||
|
||||
b.Property<int>("ChunksCount")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_count");
|
||||
|
||||
b.Property<int>("ChunksUploaded")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("chunks_uploaded");
|
||||
|
||||
b.Property<string>("ContentType")
|
||||
.IsRequired()
|
||||
.HasMaxLength(128)
|
||||
.HasColumnType("character varying(128)")
|
||||
.HasColumnName("content_type");
|
||||
|
||||
b.Property<string>("EncryptPassword")
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("encrypt_password");
|
||||
|
||||
b.Property<string>("FileName")
|
||||
.IsRequired()
|
||||
.HasMaxLength(256)
|
||||
.HasColumnType("character varying(256)")
|
||||
.HasColumnName("file_name");
|
||||
|
||||
b.Property<long>("FileSize")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("file_size");
|
||||
|
||||
b.Property<string>("Hash")
|
||||
.IsRequired()
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("hash");
|
||||
|
||||
b.Property<string>("Path")
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("path");
|
||||
|
||||
b.Property<Guid>("PoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
.HasColumnName("pool_id");
|
||||
|
||||
b.Property<BillingConfig>("BillingConfig")
|
||||
b.PrimitiveCollection<List<int>>("UploadedChunks")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("billing_config");
|
||||
.HasColumnType("integer[]")
|
||||
.HasColumnName("uploaded_chunks");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.IsRequired()
|
||||
.HasMaxLength(8192)
|
||||
.HasColumnType("character varying(8192)")
|
||||
.HasColumnName("description");
|
||||
|
||||
b.Property<bool>("IsHidden")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_hidden");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasMaxLength(1024)
|
||||
.HasColumnType("character varying(1024)")
|
||||
.HasColumnName("name");
|
||||
|
||||
b.Property<PolicyConfig>("PolicyConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("policy_config");
|
||||
|
||||
b.Property<RemoteStorageConfig>("StorageConfig")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("storage_config");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_pools");
|
||||
|
||||
b.ToTable("pools", (string)null);
|
||||
b.HasDiscriminator().HasValue("PersistentUploadTask");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Drive.Storage.FileBundle", "Bundle")
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnFileBundle", "Bundle")
|
||||
.WithMany("Files")
|
||||
.HasForeignKey("BundleId")
|
||||
.HasConstraintName("fk_files_bundles_bundle_id");
|
||||
|
||||
b.HasOne("DysonNetwork.Drive.Storage.FilePool", "Pool")
|
||||
b.HasOne("DysonNetwork.Shared.Models.FilePool", "Pool")
|
||||
.WithMany()
|
||||
.HasForeignKey("PoolId")
|
||||
.HasConstraintName("fk_files_pools_pool_id");
|
||||
@@ -372,9 +588,21 @@ namespace DysonNetwork.Drive.Migrations
|
||||
b.Navigation("Pool");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileIndex", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Drive.Storage.CloudFile", "File")
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("FileIndexes")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_file_indexes_files_file_id");
|
||||
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFileReference", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnCloudFile", "File")
|
||||
.WithMany("References")
|
||||
.HasForeignKey("FileId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
@@ -384,12 +612,14 @@ namespace DysonNetwork.Drive.Migrations
|
||||
b.Navigation("File");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnCloudFile", b =>
|
||||
{
|
||||
b.Navigation("FileIndexes");
|
||||
|
||||
b.Navigation("References");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Drive.Storage.FileBundle", b =>
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnFileBundle", b =>
|
||||
{
|
||||
b.Navigation("Files");
|
||||
});
|
||||
|
||||
@@ -17,6 +17,7 @@ builder.ConfigureAppKestrel(builder.Configuration, maxRequestBodySize: long.MaxV
builder.Services.AddAppServices(builder.Configuration);
builder.Services.AddAppAuthentication();
builder.Services.AddDysonAuth();
builder.Services.AddRingService();
builder.Services.AddAccountService();

builder.Services.AddAppFlushHandlers();
||||
@@ -1,7 +1,9 @@
using System.Text.Json;
using DysonNetwork.Drive.Storage;
using DysonNetwork.Drive.Storage.Model;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Stream;
using DysonNetwork.Shared.Queue;
using FFMpegCore;
using Microsoft.EntityFrameworkCore;
using NATS.Client.Core;
@@ -29,7 +31,6 @@ public class BroadcastEventHandler(
[".gif", ".apng", ".avif"];

protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
var js = nats.CreateJetStreamContext();
@@ -53,7 +54,8 @@ public class BroadcastEventHandler(
{
await foreach (var msg in consumer.ConsumeAsync<byte[]>(cancellationToken: stoppingToken))
{
var payload = JsonSerializer.Deserialize<FileUploadedEventPayload>(msg.Data, GrpcTypeHelper.SerializerOptions);
var payload =
JsonSerializer.Deserialize<FileUploadedEventPayload>(msg.Data, GrpcTypeHelper.SerializerOptions);
if (payload == null)
{
await msg.AckAsync(cancellationToken: stoppingToken);
@@ -142,6 +144,7 @@ public class BroadcastEventHandler(
using var scope = serviceProvider.CreateScope();
var fs = scope.ServiceProvider.GetRequiredService<FileService>();
var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
var persistentTaskService = scope.ServiceProvider.GetRequiredService<PersistentTaskService>();

var pool = await fs.GetPoolAsync(remoteId);
if (pool is null) return;
@@ -155,6 +158,11 @@ public class BroadcastEventHandler(

var fileToUpdate = await scopedDb.Files.AsNoTracking().FirstAsync(f => f.Id == fileId);

// Find the upload task associated with this file
var uploadTask = await scopedDb.Tasks
.OfType<PersistentUploadTask>()
.FirstOrDefaultAsync(t => t.FileName == fileToUpdate.Name && t.FileSize == fileToUpdate.Size);

if (fileToUpdate.IsEncrypted)
{
uploads.Add((processingFilePath, string.Empty, contentType, false));
@@ -293,5 +301,51 @@ public class BroadcastEventHandler(
}

await fs._PurgeCacheAsync(fileId);

// Complete the upload task if found
if (uploadTask != null)
{
await persistentTaskService.MarkTaskCompletedAsync(uploadTask.TaskId, new Dictionary<string, object?>
{
{ "FileId", fileId },
{ "FileName", fileToUpdate.Name },
{ "FileInfo", fileToUpdate },
{ "FileSize", fileToUpdate.Size },
{ "MimeType", newMimeType },
{ "HasCompression", hasCompression },
{ "HasThumbnail", hasThumbnail }
});

// Send push notification for large files (>5MB) that took longer to process
if (fileToUpdate.Size > 5 * 1024 * 1024) // 5MB threshold
await SendLargeFileProcessingCompleteNotificationAsync(uploadTask, fileToUpdate);
}
}

private async Task SendLargeFileProcessingCompleteNotificationAsync(PersistentUploadTask task, SnCloudFile file)
{
try
{
var ringService = serviceProvider.GetRequiredService<RingService.RingServiceClient>();

var pushNotification = new PushNotification
{
Topic = "drive.tasks.upload",
Title = "File Processing Complete",
Subtitle = file.Name,
Body = $"Your file '{file.Name}' has finished processing and is now available.",
IsSavable = true
};

await ringService.SendPushNotificationToUserAsync(new SendPushNotificationToUserRequest
{
UserId = task.AccountId.ToString(),
Notification = pushNotification
});
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to send large file processing notification for task {TaskId}", task.TaskId);
}
}
}
||||
@@ -22,6 +22,13 @@ public static class ScheduledJobsConfiguration
.ForJob(cloudFileUnusedRecyclingJob)
.WithIdentity("CloudFileUnusedRecyclingTrigger")
.WithCronSchedule("0 0 0 * * ?"));

var persistentTaskCleanupJob = new JobKey("PersistentTaskCleanup");
q.AddJob<PersistentTaskCleanupJob>(opts => opts.WithIdentity(persistentTaskCleanupJob));
q.AddTrigger(opts => opts
.ForJob(persistentTaskCleanupJob)
.WithIdentity("PersistentTaskCleanupTrigger")
.WithCronSchedule("0 0 2 * * ?")); // Run daily at 2 AM
});
services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);

||||
@@ -1,5 +1,6 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using DysonNetwork.Drive.Index;
using DysonNetwork.Shared.Cache;
using NodaTime;
using NodaTime.Serialization.SystemTextJson;
@@ -8,12 +9,12 @@ namespace DysonNetwork.Drive.Startup;

public static class ServiceCollectionExtensions
{
public static IServiceCollection AddAppServices(this IServiceCollection services, IConfiguration configuration)
extension(IServiceCollection services)
{
services.AddDbContext<AppDatabase>(); // Assuming you'll have an AppDatabase
services.AddSingleton<IClock>(SystemClock.Instance);
public IServiceCollection AddAppServices(IConfiguration configuration)
{
services.AddDbContext<AppDatabase>();
services.AddHttpContextAccessor();
services.AddSingleton<ICacheService, CacheServiceRedis>(); // Uncomment if you have CacheServiceRedis

services.AddHttpClient();

@@ -38,23 +39,25 @@ public static class ServiceCollectionExtensions
return services;
}

public static IServiceCollection AddAppAuthentication(this IServiceCollection services)
public IServiceCollection AddAppAuthentication()
{
services.AddAuthorization();
return services;
}

public static IServiceCollection AddAppFlushHandlers(this IServiceCollection services)
public IServiceCollection AddAppFlushHandlers()
{
services.AddSingleton<FlushBufferService>();

return services;
}

public static IServiceCollection AddAppBusinessServices(this IServiceCollection services)
public IServiceCollection AddAppBusinessServices()
{
services.AddScoped<Storage.FileService>();
services.AddScoped<Storage.FileReferenceService>();
services.AddScoped<Storage.PersistentTaskService>();
services.AddScoped<FileIndexService>();
services.AddScoped<Billing.UsageService>();
services.AddScoped<Billing.QuotaService>();

@@ -63,3 +66,4 @@ public static class ServiceCollectionExtensions
return services;
}
}
}
||||
@@ -6,7 +6,6 @@ namespace DysonNetwork.Drive.Storage;

public class CloudFileUnusedRecyclingJob(
AppDatabase db,
FileReferenceService fileRefService,
ILogger<CloudFileUnusedRecyclingJob> logger,
IConfiguration configuration
)
@@ -15,7 +14,7 @@ public class CloudFileUnusedRecyclingJob(
public async Task Execute(IJobExecutionContext context)
{
logger.LogInformation("Cleaning tus cloud files...");
var storePath = configuration["Tus:StorePath"];
var storePath = configuration["Storage:Uploads"];
if (Directory.Exists(storePath))
{
var oneHourAgo = SystemClock.Instance.GetCurrentInstant() - Duration.FromHours(1);
@@ -40,6 +39,7 @@ public class CloudFileUnusedRecyclingJob(
var processedCount = 0;
var markedCount = 0;
var totalFiles = await db.Files
.Where(f => f.FileIndexes.Count == 0)
.Where(f => f.PoolId.HasValue && recyclablePools.Contains(f.PoolId.Value))
.Where(f => !f.IsMarkedRecycle)
.CountAsync();
@@ -80,15 +80,15 @@ public class CloudFileUnusedRecyclingJob(
processedCount += fileBatch.Count;
lastProcessedId = fileBatch.Last();

// Get all relevant file references for this batch
var fileReferences = await fileRefService.GetReferencesAsync(fileBatch);

// Filter to find files that have no references or all expired references
var filesToMark = fileBatch.Where(fileId =>
!fileReferences.TryGetValue(fileId, out var references) ||
references.Count == 0 ||
references.All(r => r.ExpiredAt.HasValue && r.ExpiredAt.Value <= now)
).ToList();
// Optimized query: Find files that have no references OR all references are expired
// This replaces the memory-intensive approach of loading all references
var filesToMark = await db.Files
.Where(f => fileBatch.Contains(f.Id))
.Where(f => !db.FileReferences.Any(r => r.FileId == f.Id) || // No references at all
!db.FileReferences.Any(r => r.FileId == f.Id && // OR has references but all are expired
(r.ExpiredAt == null || r.ExpiredAt > now)))
.Select(f => f.Id)
.ToListAsync();

if (filesToMark.Count > 0)
{
|
||||
@@ -1,4 +1,3 @@
using DysonNetwork.Drive.Billing;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
@@ -14,9 +13,9 @@ namespace DysonNetwork.Drive.Storage;
public class FileController(
AppDatabase db,
FileService fs,
QuotaService qs,
IConfiguration configuration,
IWebHostEnvironment env
IWebHostEnvironment env,
FileReferenceService fileReferenceService
) : ControllerBase
{
[HttpGet("{id}")]
|
||||
@@ -29,114 +28,190 @@ public class FileController(
|
||||
[FromQuery] string? passcode = null
|
||||
)
|
||||
{
|
||||
// Support the file extension for client side data recognize
|
||||
string? fileExtension = null;
|
||||
if (id.Contains('.'))
|
||||
{
|
||||
var splitId = id.Split('.');
|
||||
id = splitId.First();
|
||||
fileExtension = splitId.Last();
|
||||
}
|
||||
|
||||
var file = await fs.GetFileAsync(id);
|
||||
var (fileId, fileExtension) = ParseFileId(id);
|
||||
var file = await fs.GetFileAsync(fileId);
|
||||
if (file is null) return NotFound("File not found.");
|
||||
|
||||
var accessResult = await ValidateFileAccess(file, passcode);
|
||||
if (accessResult is not null) return accessResult;
|
||||
|
||||
// Handle direct storage URL redirect
|
||||
if (!string.IsNullOrWhiteSpace(file.StorageUrl))
|
||||
return Redirect(file.StorageUrl);
|
||||
|
||||
// Handle files not yet uploaded to remote storage
|
||||
if (file.UploadedAt is null)
|
||||
return await ServeLocalFile(file);
|
||||
|
||||
// Handle uploaded files
|
||||
return await ServeRemoteFile(file, fileExtension, download, original, thumbnail, overrideMimeType);
|
||||
}
|
||||
|
||||
private (string fileId, string? extension) ParseFileId(string id)
|
||||
{
|
||||
if (!id.Contains('.')) return (id, null);
|
||||
|
||||
var parts = id.Split('.');
|
||||
return (parts.First(), parts.Last());
|
||||
}
|
||||
|
||||
private async Task<ActionResult?> ValidateFileAccess(SnCloudFile file, string? passcode)
|
||||
{
|
||||
if (file.Bundle is not null && !file.Bundle.VerifyPasscode(passcode))
|
||||
return StatusCode(StatusCodes.Status403Forbidden, "The passcode is incorrect.");
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(file.StorageUrl)) return Redirect(file.StorageUrl);
|
||||
|
||||
if (file.UploadedAt is null)
|
||||
private Task<ActionResult> ServeLocalFile(SnCloudFile file)
|
||||
{
|
||||
// File is not yet uploaded to remote storage. Try to serve from local temp storage.
|
||||
// Try temp storage first
|
||||
var tempFilePath = Path.Combine(Path.GetTempPath(), file.Id);
|
||||
if (System.IO.File.Exists(tempFilePath))
|
||||
{
|
||||
if (file.IsEncrypted)
|
||||
{
|
||||
return StatusCode(StatusCodes.Status403Forbidden, "Encrypted files cannot be accessed before they are processed and stored.");
|
||||
}
|
||||
return PhysicalFile(tempFilePath, file.MimeType ?? "application/octet-stream", file.Name, enableRangeProcessing: true);
|
||||
return Task.FromResult<ActionResult>(StatusCode(StatusCodes.Status403Forbidden,
|
||||
"Encrypted files cannot be accessed before they are processed and stored."));
|
||||
|
||||
return Task.FromResult<ActionResult>(PhysicalFile(tempFilePath, file.MimeType ?? "application/octet-stream",
|
||||
file.Name, enableRangeProcessing: true));
|
||||
}
|
||||
|
||||
// Fallback for tus uploads that are not processed yet.
|
||||
var tusStorePath = configuration.GetValue<string>("Tus:StorePath");
|
||||
if (!string.IsNullOrEmpty(tusStorePath))
|
||||
{
|
||||
// Fallback for tus uploads
|
||||
var tusStorePath = configuration.GetValue<string>("Storage:Uploads");
|
||||
if (string.IsNullOrEmpty(tusStorePath))
|
||||
return Task.FromResult<ActionResult>(StatusCode(StatusCodes.Status400BadRequest,
|
||||
"File is being processed. Please try again later."));
|
||||
var tusFilePath = Path.Combine(env.ContentRootPath, tusStorePath, file.Id);
|
||||
if (System.IO.File.Exists(tusFilePath))
|
||||
return System.IO.File.Exists(tusFilePath)
|
||||
? Task.FromResult<ActionResult>(PhysicalFile(tusFilePath, file.MimeType ?? "application/octet-stream",
|
||||
file.Name, enableRangeProcessing: true))
|
||||
: Task.FromResult<ActionResult>(StatusCode(StatusCodes.Status400BadRequest,
|
||||
"File is being processed. Please try again later."));
|
||||
}
|
||||
|
||||
private async Task<ActionResult> ServeRemoteFile(
|
||||
SnCloudFile file,
|
||||
string? fileExtension,
|
||||
bool download,
|
||||
bool original,
|
||||
bool thumbnail,
|
||||
string? overrideMimeType
|
||||
)
|
||||
{
|
||||
return PhysicalFile(tusFilePath, file.MimeType ?? "application/octet-stream", file.Name, enableRangeProcessing: true);
|
||||
}
|
||||
}
|
||||
|
||||
return StatusCode(StatusCodes.Status400BadRequest, "File is being processed. Please try again later.");
|
||||
}
|
||||
|
||||
if (!file.PoolId.HasValue)
|
||||
return StatusCode(StatusCodes.Status500InternalServerError, "File is in an inconsistent state: uploaded but no pool ID.");
|
||||
return StatusCode(StatusCodes.Status500InternalServerError,
|
||||
"File is in an inconsistent state: uploaded but no pool ID.");
|
||||
|
||||
var pool = await fs.GetPoolAsync(file.PoolId.Value);
|
||||
if (pool is null)
|
||||
return StatusCode(StatusCodes.Status410Gone, "The pool of the file no longer exists or not accessible.");
|
||||
var dest = pool.StorageConfig;
|
||||
|
||||
if (!pool.PolicyConfig.AllowAnonymous)
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
if (!pool.PolicyConfig.AllowAnonymous && HttpContext.Items["CurrentUser"] is not Account)
|
||||
return Unauthorized();
|
||||
// TODO: Provide ability to add access log
|
||||
|
||||
var dest = pool.StorageConfig;
|
||||
var fileName = BuildRemoteFileName(file, original, thumbnail);
|
||||
|
||||
// Try proxy redirects first
|
||||
var proxyResult = TryProxyRedirect(file, dest, fileName);
|
||||
if (proxyResult is not null) return proxyResult;
|
||||
|
||||
// Handle signed URLs
|
||||
if (dest.EnableSigned)
|
||||
return await CreateSignedUrl(file, dest, fileName, fileExtension, download, overrideMimeType);
|
||||
|
||||
// Fallback to direct S3 endpoint
|
||||
var protocol = dest.EnableSsl ? "https" : "http";
|
||||
return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}");
|
||||
}
|
||||
|
||||
private string BuildRemoteFileName(SnCloudFile file, bool original, bool thumbnail)
|
||||
{
|
||||
var fileName = string.IsNullOrWhiteSpace(file.StorageId) ? file.Id : file.StorageId;
|
||||
|
||||
switch (thumbnail)
|
||||
if (thumbnail)
|
||||
{
|
||||
case true when file.HasThumbnail:
|
||||
if (!file.HasThumbnail) throw new InvalidOperationException("Thumbnail not available");
|
||||
fileName += ".thumbnail";
|
||||
break;
|
||||
case true when !file.HasThumbnail:
|
||||
return NotFound();
|
||||
}
|
||||
|
||||
if (!original && file.HasCompression)
|
||||
else if (!original && file.HasCompression)
|
||||
{
|
||||
fileName += ".compressed";
|
||||
}
|
||||
|
||||
return fileName;
|
||||
}
|
||||
|
||||
private ActionResult? TryProxyRedirect(SnCloudFile file, RemoteStorageConfig dest, string fileName)
|
||||
{
|
||||
if (dest.ImageProxy is not null && (file.MimeType?.StartsWith("image/") ?? false))
|
||||
{
|
||||
var proxyUrl = dest.ImageProxy;
|
||||
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
|
||||
var fullUri = new Uri(baseUri, fileName);
|
||||
return Redirect(fullUri.ToString());
|
||||
return Redirect(BuildProxyUrl(dest.ImageProxy, fileName));
|
||||
|
||||
return dest.AccessProxy is not null ? Redirect(BuildProxyUrl(dest.AccessProxy, fileName)) : null;
|
||||
}
|
||||
|
||||
if (dest.AccessProxy is not null)
|
||||
private static string BuildProxyUrl(string proxyUrl, string fileName)
|
||||
{
|
||||
var proxyUrl = dest.AccessProxy;
|
||||
var baseUri = new Uri(proxyUrl.EndsWith('/') ? proxyUrl : $"{proxyUrl}/");
|
||||
var fullUri = new Uri(baseUri, fileName);
|
||||
return Redirect(fullUri.ToString());
|
||||
return fullUri.ToString();
|
||||
}
|
||||
|
||||
if (dest.EnableSigned)
|
||||
private async Task<ActionResult> CreateSignedUrl(
|
||||
SnCloudFile file,
|
||||
RemoteStorageConfig dest,
|
||||
string fileName,
|
||||
string? fileExtension,
|
||||
bool download,
|
||||
string? overrideMimeType
|
||||
)
|
||||
{
|
||||
var client = fs.CreateMinioClient(dest);
|
||||
if (client is null)
|
||||
return BadRequest(
|
||||
"Failed to configure client for remote destination, file got an invalid storage remote."
|
||||
return BadRequest("Failed to configure client for remote destination, file got an invalid storage remote.");
|
||||
|
||||
var headers = BuildSignedUrlHeaders(file, fileExtension, overrideMimeType, download);
|
||||
|
||||
var openUrl = await client.PresignedGetObjectAsync(
|
||||
new PresignedGetObjectArgs()
|
||||
.WithBucket(dest.Bucket)
|
||||
.WithObject(fileName)
|
||||
.WithExpiry(3600)
|
||||
.WithHeaders(headers)
|
||||
);
|
||||
|
||||
var headers = new Dictionary<string, string>();
|
||||
if (fileExtension is not null)
|
||||
if (dest.AccessEndpoint is not null)
|
||||
openUrl = openUrl.Replace($"{dest.Endpoint}/{dest.Bucket}", dest.AccessEndpoint);
|
||||
|
||||
return Redirect(openUrl);
|
||||
}
|
||||
|
||||
private static Dictionary<string, string> BuildSignedUrlHeaders(
|
||||
SnCloudFile file,
|
||||
string? fileExtension,
|
||||
string? overrideMimeType,
|
||||
bool download
|
||||
)
|
||||
{
|
||||
if (MimeTypes.TryGetMimeType(fileExtension, out var mimeType))
|
||||
headers.Add("Response-Content-Type", mimeType);
|
||||
var headers = new Dictionary<string, string>();
|
||||
|
||||
string? contentType = null;
|
||||
if (fileExtension is not null && MimeTypes.TryGetMimeType(fileExtension, out var mimeType))
|
||||
{
|
||||
contentType = mimeType;
|
||||
}
|
||||
else if (overrideMimeType is not null)
|
||||
{
|
||||
headers.Add("Response-Content-Type", overrideMimeType);
|
||||
contentType = overrideMimeType;
|
||||
}
|
||||
else if (file.MimeType is not null && !file.MimeType!.EndsWith("unknown"))
|
||||
else if (file.MimeType is not null && !file.MimeType.EndsWith("unknown"))
|
||||
{
|
||||
headers.Add("Response-Content-Type", file.MimeType);
|
||||
contentType = file.MimeType;
|
||||
}
|
||||
|
||||
if (contentType is not null)
|
||||
{
|
||||
headers.Add("Response-Content-Type", contentType);
|
||||
}
|
||||
|
||||
if (download)
|
||||
@@ -144,22 +219,7 @@ public class FileController(
|
||||
headers.Add("Response-Content-Disposition", $"attachment; filename=\"{file.Name}\"");
|
||||
}
|
||||
|
||||
var bucket = dest.Bucket;
|
||||
var openUrl = await client.PresignedGetObjectAsync(
|
||||
new PresignedGetObjectArgs()
|
||||
.WithBucket(bucket)
|
||||
.WithObject(fileName)
|
||||
.WithExpiry(3600)
|
||||
.WithHeaders(headers)
|
||||
);
|
||||
|
||||
return Redirect(openUrl);
|
||||
}
|
||||
|
||||
// Fallback redirect to the S3 endpoint (public read)
|
||||
var protocol = dest.EnableSsl ? "https" : "http";
|
||||
// Use the path bucket lookup mode
|
||||
return Redirect($"{protocol}://{dest.Endpoint}/{dest.Bucket}/{fileName}");
|
||||
return headers;
|
||||
}
|
||||
|
||||
[HttpGet("{id}/info")]
|
||||
@@ -171,18 +231,26 @@ public class FileController(
|
||||
return file;
|
||||
}
|
||||
|
||||
[HttpGet("{id}/references")]
|
||||
public async Task<ActionResult<List<Shared.Models.SnCloudFileReference>>> GetFileReferences(string id)
|
||||
{
|
||||
var file = await fs.GetFileAsync(id);
|
||||
if (file is null) return NotFound("File not found.");
|
||||
|
||||
// Check if user has access to the file
|
||||
var accessResult = await ValidateFileAccess(file, null);
|
||||
if (accessResult is not null) return accessResult;
|
||||
|
||||
// Get references using the injected FileReferenceService
|
||||
var references = await fileReferenceService.GetReferencesAsync(id);
|
||||
return Ok(references);
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
[HttpPatch("{id}/name")]
|
||||
public async Task<ActionResult<SnCloudFile>> UpdateFileName(string id, [FromBody] string name)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == id && f.AccountId == accountId);
|
||||
if (file is null) return NotFound();
|
||||
file.Name = name;
|
||||
await db.SaveChangesAsync();
|
||||
await fs._PurgeCacheAsync(file.Id);
|
||||
return file;
|
||||
return await UpdateFileProperty(id, file => file.Name = name);
|
||||
}
|
||||
|
||||
public class MarkFileRequest
|
||||
@@ -194,27 +262,28 @@ public class FileController(
|
||||
[HttpPut("{id}/marks")]
|
||||
public async Task<ActionResult<SnCloudFile>> MarkFile(string id, [FromBody] MarkFileRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == id && f.AccountId == accountId);
|
||||
if (file is null) return NotFound();
|
||||
file.SensitiveMarks = request.SensitiveMarks;
|
||||
await db.SaveChangesAsync();
|
||||
await fs._PurgeCacheAsync(file.Id);
|
||||
return file;
|
||||
return await UpdateFileProperty(id, file => file.SensitiveMarks = request.SensitiveMarks);
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
[HttpPut("{id}/meta")]
|
||||
public async Task<ActionResult<SnCloudFile>> UpdateFileMeta(string id, [FromBody] Dictionary<string, object?> meta)
|
||||
{
|
||||
return await UpdateFileProperty(id, file => file.UserMeta = meta);
|
||||
}
|
||||
|
||||
private async Task<ActionResult<SnCloudFile>> UpdateFileProperty(string fileId, Action<SnCloudFile> updateAction)
{
    if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
    var accountId = Guid.Parse(currentUser.Id);
    var file = await db.Files.FirstOrDefaultAsync(f => f.Id == id && f.AccountId == accountId);

    var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId && f.AccountId == accountId);
    if (file is null) return NotFound();
    file.UserMeta = meta;

    updateAction(file);
    await db.SaveChangesAsync();
    await fs._PurgeCacheAsync(file.Id);

    return file;
}
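// A minimal sketch of reusing UpdateFileProperty for another mutation endpoint; the
// route and endpoint below are hypothetical, but the shared helper would handle auth,
// lookup, save, and cache purge exactly as above.
[Authorize]
[HttpPatch("{id}/description")]
public async Task<ActionResult<SnCloudFile>> UpdateFileDescription(string id, [FromBody] string description)
{
    return await UpdateFileProperty(id, file => file.Description = description);
}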
|
||||
|
||||
@@ -224,25 +293,40 @@ public class FileController(
|
||||
[FromQuery] Guid? pool,
|
||||
[FromQuery] bool recycled = false,
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int take = 20
|
||||
[FromQuery] int take = 20,
|
||||
[FromQuery] string? query = null,
|
||||
[FromQuery] string order = "date",
|
||||
[FromQuery] bool orderDesc = true
|
||||
)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var query = db.Files
|
||||
var filesQuery = db.Files
|
||||
.Where(e => e.IsMarkedRecycle == recycled)
|
||||
.Where(e => e.AccountId == accountId)
|
||||
.Include(e => e.Pool)
|
||||
.OrderByDescending(e => e.CreatedAt)
|
||||
.AsQueryable();
|
||||
|
||||
if (pool.HasValue) query = query.Where(e => e.PoolId == pool);
|
||||
if (pool.HasValue) filesQuery = filesQuery.Where(e => e.PoolId == pool);
|
||||
|
||||
var total = await query.CountAsync();
|
||||
if (!string.IsNullOrWhiteSpace(query))
|
||||
{
|
||||
filesQuery = filesQuery.Where(e => e.Name.Contains(query));
|
||||
}
|
||||
|
||||
filesQuery = order.ToLower() switch
|
||||
{
|
||||
"date" => orderDesc ? filesQuery.OrderByDescending(e => e.CreatedAt) : filesQuery.OrderBy(e => e.CreatedAt),
|
||||
"size" => orderDesc ? filesQuery.OrderByDescending(e => e.Size) : filesQuery.OrderBy(e => e.Size),
|
||||
"name" => orderDesc ? filesQuery.OrderByDescending(e => e.Name) : filesQuery.OrderBy(e => e.Name),
|
||||
_ => filesQuery.OrderByDescending(e => e.CreatedAt)
|
||||
};
|
||||
|
||||
var total = await filesQuery.CountAsync();
|
||||
Response.Headers.Append("X-Total", total.ToString());
|
||||
|
||||
var files = await query
|
||||
var files = await filesQuery
|
||||
.Skip(offset)
|
||||
.Take(take)
|
||||
.ToListAsync();
|
||||
@@ -250,9 +334,25 @@ public class FileController(
|
||||
return Ok(files);
|
||||
}
|
||||
|
||||
public class FileBatchDeletionRequest
|
||||
{
|
||||
public List<string> FileIds { get; set; } = [];
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
[HttpPost("batches/delete")]
|
||||
public async Task<ActionResult> DeleteFileBatch([FromBody] FileBatchDeletionRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var userId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var count = await fs.DeleteAccountFileBatchAsync(userId, request.FileIds);
|
||||
return Ok(new { Count = count });
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
[HttpDelete("{id}")]
|
||||
public async Task<ActionResult> DeleteFile(string id)
|
||||
public async Task<ActionResult<SnCloudFile>> DeleteFile(string id)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var userId = Guid.Parse(currentUser.Id);
|
||||
@@ -264,9 +364,9 @@ public class FileController(
|
||||
if (file is null) return NotFound();
|
||||
|
||||
await fs.DeleteFileDataAsync(file, force: true);
|
||||
await fs.DeleteFileAsync(file);
|
||||
await fs.DeleteFileAsync(file, skipData: true);
|
||||
|
||||
return NoContent();
|
||||
return Ok(file);
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
@@ -282,116 +382,10 @@ public class FileController(
|
||||
|
||||
[Authorize]
|
||||
[HttpDelete("recycle")]
|
||||
[RequiredPermission("maintenance", "files.delete.recycle")]
|
||||
[AskPermission("files.delete.recycle")]
|
||||
public async Task<ActionResult> DeleteAllRecycledFiles()
|
||||
{
|
||||
var count = await fs.DeleteAllRecycledFilesAsync();
|
||||
return Ok(new { Count = count });
|
||||
}
|
||||
|
||||
public class CreateFastFileRequest
|
||||
{
|
||||
public string Name { get; set; } = null!;
|
||||
public long Size { get; set; }
|
||||
public string Hash { get; set; } = null!;
|
||||
public string? MimeType { get; set; }
|
||||
public string? Description { get; set; }
|
||||
public Dictionary<string, object?>? UserMeta { get; set; }
|
||||
public Dictionary<string, object?>? FileMeta { get; set; }
|
||||
public List<Shared.Models.ContentSensitiveMark>? SensitiveMarks { get; set; }
|
||||
public Guid PoolId { get; set; }
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
[HttpPost("fast")]
|
||||
[RequiredPermission("global", "files.create")]
|
||||
public async Task<ActionResult<SnCloudFile>> CreateFastFile([FromBody] CreateFastFileRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var pool = await db.Pools.FirstOrDefaultAsync(p => p.Id == request.PoolId);
|
||||
if (pool is null) return BadRequest();
|
||||
if (!currentUser.IsSuperuser && pool.AccountId != accountId)
|
||||
return StatusCode(403, "You don't have permission to create files in this pool.");
|
||||
|
||||
if (!pool.PolicyConfig.EnableFastUpload)
|
||||
return StatusCode(
|
||||
403,
|
||||
"This pool does not allow fast upload"
|
||||
);
|
||||
|
||||
if (pool.PolicyConfig.RequirePrivilege > 0)
|
||||
{
|
||||
if (currentUser.PerkSubscription is null)
|
||||
{
|
||||
return StatusCode(
|
||||
403,
|
||||
$"You need to have join the Stellar Program to use this pool"
|
||||
);
|
||||
}
|
||||
|
||||
var privilege =
|
||||
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
|
||||
if (privilege < pool.PolicyConfig.RequirePrivilege)
|
||||
{
|
||||
return StatusCode(
|
||||
403,
|
||||
$"You need Stellar Program tier {pool.PolicyConfig.RequirePrivilege} to use this pool, you are tier {privilege}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (request.Size > pool.PolicyConfig.MaxFileSize)
|
||||
{
|
||||
return StatusCode(
|
||||
403,
|
||||
$"File size {request.Size} is larger than the pool's maximum file size {pool.PolicyConfig.MaxFileSize}"
|
||||
);
|
||||
}
|
||||
|
||||
var (ok, billableUnit, quota) = await qs.IsFileAcceptable(
|
||||
accountId,
|
||||
pool.BillingConfig.CostMultiplier ?? 1.0,
|
||||
request.Size
|
||||
);
|
||||
if (!ok)
|
||||
{
|
||||
return StatusCode(
|
||||
403,
|
||||
$"File size {billableUnit} is larger than the user's quota {quota}"
|
||||
);
|
||||
}
|
||||
|
||||
await using var transaction = await db.Database.BeginTransactionAsync();
|
||||
try
|
||||
{
|
||||
var file = new SnCloudFile
|
||||
{
|
||||
Name = request.Name,
|
||||
Size = request.Size,
|
||||
Hash = request.Hash,
|
||||
MimeType = request.MimeType,
|
||||
Description = request.Description,
|
||||
AccountId = accountId,
|
||||
UserMeta = request.UserMeta,
|
||||
FileMeta = request.FileMeta,
|
||||
SensitiveMarks = request.SensitiveMarks,
|
||||
PoolId = request.PoolId
|
||||
};
|
||||
db.Files.Add(file);
|
||||
await db.SaveChangesAsync();
|
||||
await fs._PurgeCacheAsync(file.Id);
|
||||
await transaction.CommitAsync();
|
||||
|
||||
file.FastUploadLink = await fs.CreateFastUploadLinkAsync(file);
|
||||
|
||||
return file;
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
await transaction.RollbackAsync();
|
||||
throw;
|
||||
}
|
||||
}
|
||||
}
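// A minimal client-side sketch of calling the fast-upload endpoint above, assuming the
// controller is mounted at /api/files and bearer-token auth; the base address, route
// prefix, token, and ids are assumptions not shown in this hunk. Requires
// System.Net.Http.Json and System.Net.Http.Headers.
using var http = new HttpClient { BaseAddress = new Uri("https://drive.example.com") };
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
var response = await http.PostAsJsonAsync("/api/files/fast", new
{
    name = "report.pdf",
    size = 1_048_576L,
    hash = "9f86d081884c7d65...",                       // pre-computed hash of the content
    mimeType = "application/pdf",
    poolId = "11111111-1111-1111-1111-111111111111"     // a pool with EnableFastUpload
});
response.EnsureSuccessStatusCode();
var created = await response.Content.ReadFromJsonAsync<SnCloudFile>();
// created.FastUploadLink then points at where the actual bytes should be sent.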
|
||||
@@ -14,49 +14,55 @@ public class FileExpirationJob(AppDatabase db, FileService fileService, ILogger<
|
||||
var now = SystemClock.Instance.GetCurrentInstant();
|
||||
logger.LogInformation("Running file reference expiration job at {now}", now);
|
||||
|
||||
// Find all expired references
|
||||
var expiredReferences = await db.FileReferences
|
||||
// Delete expired references in bulk and get affected file IDs
|
||||
var affectedFileIds = await db.FileReferences
|
||||
.Where(r => r.ExpiredAt < now && r.ExpiredAt != null)
|
||||
.Select(r => r.FileId)
|
||||
.Distinct()
|
||||
.ToListAsync();
|
||||
|
||||
if (!expiredReferences.Any())
|
||||
if (!affectedFileIds.Any())
|
||||
{
|
||||
logger.LogInformation("No expired file references found");
|
||||
return;
|
||||
}
|
||||
|
||||
logger.LogInformation("Found {count} expired file references", expiredReferences.Count);
|
||||
logger.LogInformation("Found expired references for {count} files", affectedFileIds.Count);
|
||||
|
||||
// Get unique file IDs
|
||||
var fileIds = expiredReferences.Select(r => r.FileId).Distinct().ToList();
|
||||
var filesAndReferenceCount = new Dictionary<string, int>();
|
||||
// Delete expired references in bulk
|
||||
var deletedReferencesCount = await db.FileReferences
|
||||
.Where(r => r.ExpiredAt < now && r.ExpiredAt != null)
|
||||
.ExecuteDeleteAsync();
|
||||
|
||||
// Delete expired references
|
||||
db.FileReferences.RemoveRange(expiredReferences);
|
||||
await db.SaveChangesAsync();
|
||||
logger.LogInformation("Deleted {count} expired file references", deletedReferencesCount);
|
||||
|
||||
// Check remaining references for each file
|
||||
foreach (var fileId in fileIds)
|
||||
// Find files that now have no remaining references (bulk operation)
|
||||
var filesToDelete = await db.Files
|
||||
.Where(f => affectedFileIds.Contains(f.Id))
|
||||
.Where(f => !db.FileReferences.Any(r => r.FileId == f.Id))
|
||||
.Select(f => f.Id)
|
||||
.ToListAsync();
|
||||
|
||||
if (filesToDelete.Any())
|
||||
{
|
||||
var remainingReferences = await db.FileReferences
|
||||
.Where(r => r.FileId == fileId)
|
||||
.CountAsync();
|
||||
logger.LogInformation("Deleting {count} files that have no remaining references", filesToDelete.Count);
|
||||
|
||||
filesAndReferenceCount[fileId] = remainingReferences;
|
||||
// Get files for deletion
|
||||
var files = await db.Files
|
||||
.Where(f => filesToDelete.Contains(f.Id))
|
||||
.ToListAsync();
|
||||
|
||||
// If no references remain, delete the file
|
||||
if (remainingReferences == 0)
|
||||
{
|
||||
var file = await db.Files.FirstOrDefaultAsync(f => f.Id == fileId);
|
||||
if (file == null) continue;
|
||||
logger.LogInformation("Deleting file {fileId} as all references have expired", fileId);
|
||||
await fileService.DeleteFileAsync(file);
|
||||
// Delete files and their data in parallel
|
||||
var deleteTasks = files.Select(f => fileService.DeleteFileAsync(f));
|
||||
await Task.WhenAll(deleteTasks);
|
||||
}
|
||||
else
|
||||
|
||||
// Purge cache for files that still have references
|
||||
var filesWithRemainingRefs = affectedFileIds.Except(filesToDelete).ToList();
|
||||
if (filesWithRemainingRefs.Any())
|
||||
{
|
||||
// Just purge the cache
|
||||
await fileService._PurgeCacheAsync(fileId);
|
||||
}
|
||||
var cachePurgeTasks = filesWithRemainingRefs.Select(fileService._PurgeCacheAsync);
|
||||
await Task.WhenAll(cachePurgeTasks);
|
||||
}
|
||||
|
||||
logger.LogInformation("Completed file reference expiration job");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using DysonNetwork.Shared.Cache;
|
||||
using DysonNetwork.Shared.Data;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using EFCore.BulkExtensions;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NodaTime;
|
||||
|
||||
@@ -20,7 +20,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
/// <param name="expiredAt">Optional expiration time for the file</param>
|
||||
/// <param name="duration">Optional duration after which the file expires (alternative to expiredAt)</param>
|
||||
/// <returns>The created file reference</returns>
|
||||
public async Task<CloudFileReference> CreateReferenceAsync(
|
||||
public async Task<SnCloudFileReference> CreateReferenceAsync(
|
||||
string fileId,
|
||||
string usage,
|
||||
string resourceId,
|
||||
@@ -33,7 +33,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
if (duration.HasValue)
|
||||
finalExpiration = SystemClock.Instance.GetCurrentInstant() + duration.Value;
|
||||
|
||||
var reference = new CloudFileReference
|
||||
var reference = new SnCloudFileReference
|
||||
{
|
||||
FileId = fileId,
|
||||
Usage = usage,
|
||||
@@ -49,7 +49,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
return reference;
|
||||
}
|
||||
|
||||
public async Task<List<CloudFileReference>> CreateReferencesAsync(
|
||||
public async Task<List<SnCloudFileReference>> CreateReferencesAsync(
|
||||
List<string> fileId,
|
||||
string usage,
|
||||
string resourceId,
|
||||
@@ -57,14 +57,26 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
Duration? duration = null
|
||||
)
|
||||
{
|
||||
var data = fileId.Select(id => new CloudFileReference
|
||||
var now = SystemClock.Instance.GetCurrentInstant();
|
||||
var finalExpiredAt = expiredAt;
|
||||
if (finalExpiredAt == null && duration.HasValue)
|
||||
{
|
||||
finalExpiredAt = now + duration.Value;
|
||||
}
|
||||
|
||||
var data = fileId.Select(id => new SnCloudFileReference
|
||||
{
|
||||
FileId = id,
|
||||
Usage = usage,
|
||||
ResourceId = resourceId,
|
||||
ExpiredAt = expiredAt ?? SystemClock.Instance.GetCurrentInstant() + duration
|
||||
}).ToList();
|
||||
await db.BulkInsertAsync(data);
|
||||
ExpiredAt = finalExpiredAt,
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
})
|
||||
.ToList();
|
||||
|
||||
db.FileReferences.AddRange(data);
|
||||
await db.SaveChangesAsync();
|
||||
return data;
|
||||
}
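// A minimal caller-side sketch of the batch overload above, assuming a post that
// attaches several uploaded files; the usage label and the post variable are
// illustrative. Passing a Duration makes the references expire relative to now.
var refs = await fileReferenceService.CreateReferencesAsync(
    fileIds,                          // List<string> of SnCloudFile ids
    "post.attachment",                // usage (assumed label)
    post.Id.ToString(),               // resourceId
    duration: Duration.FromDays(30)
);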
|
||||
|
||||
@@ -73,11 +85,11 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
/// </summary>
|
||||
/// <param name="fileId">The ID of the file</param>
|
||||
/// <returns>A list of all references to the file</returns>
|
||||
public async Task<List<CloudFileReference>> GetReferencesAsync(string fileId)
|
||||
public async Task<List<SnCloudFileReference>> GetReferencesAsync(string fileId)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}list:{fileId}";
|
||||
|
||||
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
|
||||
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
|
||||
if (cachedReferences is not null)
|
||||
return cachedReferences;
|
||||
|
||||
@@ -90,13 +102,45 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
return references;
|
||||
}
|
||||
|
||||
public async Task<Dictionary<string, List<CloudFileReference>>> GetReferencesAsync(IEnumerable<string> fileId)
|
||||
public async Task<Dictionary<string, List<SnCloudFileReference>>> GetReferencesAsync(IEnumerable<string> fileIds)
|
||||
{
|
||||
var references = await db.FileReferences
|
||||
.Where(r => fileId.Contains(r.FileId))
|
||||
var fileIdList = fileIds.ToList();
|
||||
var result = new Dictionary<string, List<SnCloudFileReference>>();
|
||||
|
||||
// Check cache for each file ID
|
||||
var uncachedFileIds = new List<string>();
|
||||
foreach (var fileId in fileIdList)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}list:{fileId}";
|
||||
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
|
||||
if (cachedReferences is not null)
|
||||
{
|
||||
result[fileId] = cachedReferences;
|
||||
}
|
||||
else
|
||||
{
|
||||
uncachedFileIds.Add(fileId);
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch uncached references from database
|
||||
if (uncachedFileIds.Any())
|
||||
{
|
||||
var dbReferences = await db.FileReferences
|
||||
.Where(r => uncachedFileIds.Contains(r.FileId))
|
||||
.GroupBy(r => r.FileId)
|
||||
.ToDictionaryAsync(r => r.Key, r => r.ToList());
|
||||
return references;
|
||||
|
||||
// Cache the results
|
||||
foreach (var kvp in dbReferences)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}list:{kvp.Key}";
|
||||
await cache.SetAsync(cacheKey, kvp.Value, CacheDuration);
|
||||
result[kvp.Key] = kvp.Value;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -126,11 +170,11 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
/// </summary>
|
||||
/// <param name="resourceId">The ID of the resource</param>
|
||||
/// <returns>A list of file references associated with the resource</returns>
|
||||
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId)
|
||||
public async Task<List<SnCloudFileReference>> GetResourceReferencesAsync(string resourceId)
|
||||
{
|
||||
var cacheKey = $"{CacheKeyPrefix}resource:{resourceId}";
|
||||
|
||||
var cachedReferences = await cache.GetAsync<List<CloudFileReference>>(cacheKey);
|
||||
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
|
||||
if (cachedReferences is not null)
|
||||
return cachedReferences;
|
||||
|
||||
@@ -148,11 +192,21 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
/// </summary>
|
||||
/// <param name="usage">The usage context</param>
|
||||
/// <returns>A list of file references with the specified usage</returns>
|
||||
public async Task<List<CloudFileReference>> GetUsageReferencesAsync(string usage)
|
||||
public async Task<List<SnCloudFileReference>> GetUsageReferencesAsync(string usage)
|
||||
{
|
||||
return await db.FileReferences
|
||||
var cacheKey = $"{CacheKeyPrefix}usage:{usage}";
|
||||
|
||||
var cachedReferences = await cache.GetAsync<List<SnCloudFileReference>>(cacheKey);
|
||||
if (cachedReferences is not null)
|
||||
return cachedReferences;
|
||||
|
||||
var references = await db.FileReferences
|
||||
.Where(r => r.Usage == usage)
|
||||
.ToListAsync();
|
||||
|
||||
await cache.SetAsync(cacheKey, references, CacheDuration);
|
||||
|
||||
return references;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -209,8 +263,9 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
|
||||
public async Task<int> DeleteResourceReferencesBatchAsync(IEnumerable<string> resourceIds, string? usage = null)
|
||||
{
|
||||
var resourceIdList = resourceIds.ToList();
|
||||
var references = await db.FileReferences
|
||||
.Where(r => resourceIds.Contains(r.ResourceId))
|
||||
.Where(r => resourceIdList.Contains(r.ResourceId))
|
||||
.If(usage != null, q => q.Where(q => q.Usage == usage))
|
||||
.ToListAsync();
|
||||
|
||||
@@ -222,8 +277,9 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
db.FileReferences.RemoveRange(references);
|
||||
var deletedCount = await db.SaveChangesAsync();
|
||||
|
||||
// Purge caches
|
||||
// Purge caches for files and resources
|
||||
var tasks = fileIds.Select(fileService._PurgeCacheAsync).ToList();
|
||||
tasks.AddRange(resourceIdList.Select(PurgeCacheForResourceAsync));
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
return deletedCount;
|
||||
@@ -262,7 +318,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
/// <param name="expiredAt">Optional expiration time for newly added files</param>
|
||||
/// <param name="duration">Optional duration after which newly added files expire</param>
|
||||
/// <returns>A list of the updated file references</returns>
|
||||
public async Task<List<CloudFileReference>> UpdateResourceFilesAsync(
|
||||
public async Task<List<SnCloudFileReference>> UpdateResourceFilesAsync(
|
||||
string resourceId,
|
||||
IEnumerable<string>? newFileIds,
|
||||
string usage,
|
||||
@@ -270,7 +326,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
Duration? duration = null)
|
||||
{
|
||||
if (newFileIds == null)
|
||||
return new List<CloudFileReference>();
|
||||
return new List<SnCloudFileReference>();
|
||||
|
||||
var existingReferences = await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId && r.Usage == usage)
|
||||
@@ -288,7 +344,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
// Files to add
|
||||
var toAdd = newFileIdsList
|
||||
.Where(id => !existingFileIds.Contains(id))
|
||||
.Select(id => new CloudFileReference
|
||||
.Select(id => new SnCloudFileReference
|
||||
{
|
||||
FileId = id,
|
||||
Usage = usage,
|
||||
@@ -440,7 +496,7 @@ public class FileReferenceService(AppDatabase db, FileService fileService, ICach
|
||||
/// <param name="resourceId">The resource ID</param>
|
||||
/// <param name="usageType">The usage type</param>
|
||||
/// <returns>List of file references</returns>
|
||||
public async Task<List<CloudFileReference>> GetResourceReferencesAsync(string resourceId, string usageType)
|
||||
public async Task<List<SnCloudFileReference>> GetResourceReferencesAsync(string resourceId, string usageType)
|
||||
{
|
||||
return await db.FileReferences
|
||||
.Where(r => r.ResourceId == resourceId && r.Usage == usageType)
|
||||
|
||||
@@ -99,30 +99,74 @@ public class FileService(
|
||||
)
|
||||
{
|
||||
var accountId = Guid.Parse(account.Id);
|
||||
var pool = await ValidateAndGetPoolAsync(filePool);
|
||||
var bundle = await ValidateAndGetBundleAsync(fileBundleId, accountId);
|
||||
var finalExpiredAt = CalculateFinalExpiration(expiredAt, pool, bundle);
|
||||
|
||||
var (managedTempPath, fileSize, finalContentType) =
|
||||
await PrepareFileAsync(fileId, filePath, fileName, contentType);
|
||||
|
||||
var file = CreateFileObject(fileId, fileName, finalContentType, fileSize, finalExpiredAt, bundle, accountId);
|
||||
|
||||
if (!pool.PolicyConfig.NoMetadata)
|
||||
{
|
||||
await ExtractMetadataAsync(file, managedTempPath);
|
||||
}
|
||||
|
||||
var (processingPath, isTempFile) =
|
||||
await ProcessEncryptionAsync(fileId, managedTempPath, encryptPassword, pool, file);
|
||||
|
||||
file.Hash = await HashFileAsync(processingPath);
|
||||
|
||||
await SaveFileToDatabaseAsync(file);
|
||||
|
||||
await PublishFileUploadedEventAsync(file, pool, processingPath, isTempFile);
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
private async Task<FilePool> ValidateAndGetPoolAsync(string filePool)
|
||||
{
|
||||
var pool = await GetPoolAsync(Guid.Parse(filePool));
|
||||
if (pool is null) throw new InvalidOperationException("Pool not found");
|
||||
return pool ?? throw new InvalidOperationException("Pool not found: " + filePool);
|
||||
}
|
||||
|
||||
private async Task<SnFileBundle?> ValidateAndGetBundleAsync(string? fileBundleId, Guid accountId)
|
||||
{
|
||||
if (fileBundleId is null) return null;
|
||||
|
||||
var bundle = await GetBundleAsync(Guid.Parse(fileBundleId), accountId);
|
||||
return bundle ?? throw new InvalidOperationException("Bundle not found: " + fileBundleId);
|
||||
}
|
||||
|
||||
private static Instant? CalculateFinalExpiration(Instant? expiredAt, FilePool pool, SnFileBundle? bundle)
|
||||
{
|
||||
var finalExpiredAt = expiredAt;
|
||||
|
||||
// Apply pool expiration policy
|
||||
if (pool.StorageConfig.Expiration is not null && expiredAt.HasValue)
|
||||
{
|
||||
var expectedExpiration = SystemClock.Instance.GetCurrentInstant() - expiredAt.Value;
|
||||
var effectiveExpiration = pool.StorageConfig.Expiration < expectedExpiration
|
||||
? pool.StorageConfig.Expiration
|
||||
: expectedExpiration;
|
||||
expiredAt = SystemClock.Instance.GetCurrentInstant() + effectiveExpiration;
|
||||
}
|
||||
|
||||
var bundle = fileBundleId is not null
|
||||
? await GetBundleAsync(Guid.Parse(fileBundleId), accountId)
|
||||
: null;
|
||||
if (fileBundleId is not null && bundle is null)
|
||||
{
|
||||
throw new InvalidOperationException("Bundle not found");
|
||||
finalExpiredAt = SystemClock.Instance.GetCurrentInstant() + effectiveExpiration;
|
||||
}
|
||||
|
||||
// Bundle expiration takes precedence
|
||||
if (bundle?.ExpiredAt != null)
|
||||
expiredAt = bundle.ExpiredAt.Value;
|
||||
finalExpiredAt = bundle.ExpiredAt.Value;
|
||||
|
||||
return finalExpiredAt;
|
||||
}
|
||||
|
||||
private async Task<(string tempPath, long fileSize, string contentType)> PrepareFileAsync(
|
||||
string fileId,
|
||||
string filePath,
|
||||
string fileName,
|
||||
string? contentType
|
||||
)
|
||||
{
|
||||
var managedTempPath = Path.Combine(Path.GetTempPath(), fileId);
|
||||
File.Copy(filePath, managedTempPath, true);
|
||||
|
||||
@@ -131,27 +175,42 @@ public class FileService(
|
||||
var finalContentType = contentType ??
|
||||
(!fileName.Contains('.') ? "application/octet-stream" : MimeTypes.GetMimeType(fileName));
|
||||
|
||||
var file = new SnCloudFile
|
||||
return (managedTempPath, fileSize, finalContentType);
|
||||
}
|
||||
|
||||
private SnCloudFile CreateFileObject(
|
||||
string fileId,
|
||||
string fileName,
|
||||
string contentType,
|
||||
long fileSize,
|
||||
Instant? expiredAt,
|
||||
SnFileBundle? bundle,
|
||||
Guid accountId
|
||||
)
|
||||
{
|
||||
return new SnCloudFile
|
||||
{
|
||||
Id = fileId,
|
||||
Name = fileName,
|
||||
MimeType = finalContentType,
|
||||
MimeType = contentType,
|
||||
Size = fileSize,
|
||||
ExpiredAt = expiredAt,
|
||||
BundleId = bundle?.Id,
|
||||
AccountId = Guid.Parse(account.Id),
|
||||
AccountId = accountId,
|
||||
};
|
||||
|
||||
if (!pool.PolicyConfig.NoMetadata)
|
||||
{
|
||||
await ExtractMetadataAsync(file, managedTempPath);
|
||||
}
|
||||
|
||||
string processingPath = managedTempPath;
|
||||
bool isTempFile = true;
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(encryptPassword))
|
||||
private async Task<(string processingPath, bool isTempFile)> ProcessEncryptionAsync(
|
||||
string fileId,
|
||||
string managedTempPath,
|
||||
string? encryptPassword,
|
||||
FilePool pool,
|
||||
SnCloudFile file
|
||||
)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(encryptPassword))
|
||||
return (managedTempPath, true);
|
||||
|
||||
if (!pool.PolicyConfig.AllowEncryption)
|
||||
throw new InvalidOperationException("Encryption is not allowed in this pool");
|
||||
|
||||
@@ -160,20 +219,23 @@ public class FileService(
|
||||
|
||||
File.Delete(managedTempPath);
|
||||
|
||||
processingPath = encryptedPath;
|
||||
|
||||
file.IsEncrypted = true;
|
||||
file.MimeType = "application/octet-stream";
|
||||
file.Size = new FileInfo(processingPath).Length;
|
||||
file.Size = new FileInfo(encryptedPath).Length;
|
||||
|
||||
return (encryptedPath, true);
|
||||
}
|
||||
|
||||
file.Hash = await HashFileAsync(processingPath);
|
||||
|
||||
private async Task SaveFileToDatabaseAsync(SnCloudFile file)
|
||||
{
|
||||
db.Files.Add(file);
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
file.StorageId ??= file.Id;
|
||||
}
|
||||
|
||||
private async Task PublishFileUploadedEventAsync(SnCloudFile file, FilePool pool, string processingPath,
|
||||
bool isTempFile)
|
||||
{
|
||||
var js = nats.CreateJetStreamContext();
|
||||
await js.PublishAsync(
|
||||
FileUploadedEvent.Type,
|
||||
@@ -186,8 +248,6 @@ public class FileService(
|
||||
isTempFile)
|
||||
).ToByteArray()
|
||||
);
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
private async Task ExtractMetadataAsync(SnCloudFile file, string filePath)
|
||||
@@ -414,12 +474,13 @@ public class FileService(
|
||||
return await db.Files.AsNoTracking().FirstAsync(f => f.Id == file.Id);
|
||||
}
|
||||
|
||||
public async Task DeleteFileAsync(SnCloudFile file)
|
||||
public async Task DeleteFileAsync(SnCloudFile file, bool skipData = false)
|
||||
{
|
||||
db.Remove(file);
|
||||
await db.SaveChangesAsync();
|
||||
await _PurgeCacheAsync(file.Id);
|
||||
|
||||
if (!skipData)
|
||||
await DeleteFileDataAsync(file);
|
||||
}
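// A minimal sketch of the two call patterns this overload enables for a caller holding
// a FileService instance (here `fs`), mirroring the controller's force-delete path above.
await fs.DeleteFileDataAsync(file, force: true);   // remove the stored blob even if other references exist
await fs.DeleteFileAsync(file, skipData: true);    // then drop only the database record

await fs.DeleteFileAsync(file);                    // default behaviour: record and data in one call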
|
||||
|
||||
@@ -603,9 +664,12 @@ public class FileService(
|
||||
}
|
||||
}
|
||||
|
||||
return [.. references
|
||||
return
|
||||
[
|
||||
.. references
|
||||
.Select(r => cachedFiles.GetValueOrDefault(r.Id))
|
||||
.Where(f => f != null)];
|
||||
.Where(f => f != null)
|
||||
];
|
||||
}
|
||||
|
||||
public async Task<int> GetReferenceCountAsync(string fileId)
|
||||
@@ -654,6 +718,21 @@ public class FileService(
|
||||
return count;
|
||||
}
|
||||
|
||||
public async Task<int> DeleteAccountFileBatchAsync(Guid accountId, List<string> fileIds)
|
||||
{
|
||||
var files = await db.Files
|
||||
.Where(f => f.AccountId == accountId && fileIds.Contains(f.Id))
|
||||
.ToListAsync();
|
||||
var count = files.Count;
|
||||
var tasks = files.Select(f => DeleteFileDataAsync(f, true));
|
||||
await Task.WhenAll(tasks);
|
||||
var fileIdsList = files.Select(f => f.Id).ToList();
|
||||
await _PurgeCacheRangeAsync(fileIdsList);
|
||||
db.RemoveRange(files);
|
||||
await db.SaveChangesAsync();
|
||||
return count;
|
||||
}
|
||||
|
||||
public async Task<int> DeletePoolRecycledFilesAsync(Guid poolId)
|
||||
{
|
||||
var files = await db.Files
|
||||
@@ -714,7 +793,7 @@ file class UpdatableCloudFile(SnCloudFile file)
|
||||
public Dictionary<string, object?>? UserMeta { get; set; } = file.UserMeta;
|
||||
public bool IsMarkedRecycle { get; set; } = file.IsMarkedRecycle;
|
||||
|
||||
public Expression<Func<SetPropertyCalls<SnCloudFile>, SetPropertyCalls<SnCloudFile>>> ToSetPropertyCalls()
|
||||
public Action<UpdateSettersBuilder<SnCloudFile>> ToSetPropertyCalls()
|
||||
{
|
||||
var userMeta = UserMeta ?? [];
|
||||
return setter => setter
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.Text.Json;
|
||||
using DysonNetwork.Drive.Billing;
|
||||
using DysonNetwork.Drive.Index;
|
||||
using DysonNetwork.Drive.Storage.Model;
|
||||
using DysonNetwork.Shared.Auth;
|
||||
using DysonNetwork.Shared.Http;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NanoidDotNet;
|
||||
using NodaTime;
|
||||
using TaskStatus = DysonNetwork.Drive.Storage.Model.TaskStatus;
|
||||
|
||||
namespace DysonNetwork.Drive.Storage;
|
||||
|
||||
@@ -20,7 +23,10 @@ public class FileUploadController(
|
||||
FileService fileService,
|
||||
AppDatabase db,
|
||||
PermissionService.PermissionServiceClient permission,
|
||||
QuotaService quotaService
|
||||
QuotaService quotaService,
|
||||
PersistentTaskService persistentTaskService,
|
||||
FileIndexService fileIndexService,
|
||||
ILogger<FileUploadController> logger
|
||||
)
|
||||
: ControllerBase
|
||||
{
|
||||
@@ -33,45 +39,108 @@ public class FileUploadController(
|
||||
public async Task<IActionResult> CreateUploadTask([FromBody] CreateUploadTaskRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
{
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
}
|
||||
|
||||
if (!currentUser.IsSuperuser)
|
||||
{
|
||||
var allowed = await permission.HasPermissionAsync(new HasPermissionRequest
|
||||
{ Actor = $"user:{currentUser.Id}", Area = "global", Key = "files.create" });
|
||||
if (!allowed.HasPermission)
|
||||
{
|
||||
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
|
||||
}
|
||||
}
|
||||
var permissionCheck = await ValidateUserPermissions(currentUser);
|
||||
if (permissionCheck is not null) return permissionCheck;
|
||||
|
||||
request.PoolId ??= Guid.Parse(configuration["Storage:PreferredRemote"]!);
|
||||
|
||||
var pool = await fileService.GetPoolAsync(request.PoolId.Value);
|
||||
if (pool is null)
|
||||
{
|
||||
return new ObjectResult(ApiError.NotFound("Pool")) { StatusCode = 404 };
|
||||
|
||||
var poolValidation = await ValidatePoolAccess(currentUser, pool, request);
|
||||
if (poolValidation is not null) return poolValidation;
|
||||
|
||||
var policyValidation = ValidatePoolPolicy(pool.PolicyConfig, request);
|
||||
if (policyValidation is not null) return policyValidation;
|
||||
|
||||
var quotaValidation = await ValidateQuota(currentUser, pool, request.FileSize);
|
||||
if (quotaValidation is not null) return quotaValidation;
|
||||
|
||||
EnsureTempDirectoryExists();
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
// Check if a file with the same hash already exists
|
||||
var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == request.Hash);
|
||||
if (existingFile != null)
|
||||
{
|
||||
// Create the file index if a path is provided, even for existing files
|
||||
if (string.IsNullOrEmpty(request.Path))
|
||||
return Ok(new CreateUploadTaskResponse
|
||||
{
|
||||
FileExists = true,
|
||||
File = existingFile
|
||||
});
|
||||
try
|
||||
{
|
||||
await fileIndexService.CreateAsync(request.Path, existingFile.Id, accountId);
|
||||
logger.LogInformation("Created file index for existing file {FileId} at path {Path}",
|
||||
existingFile.Id, request.Path);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogWarning(ex, "Failed to create file index for existing file {FileId} at path {Path}",
|
||||
existingFile.Id, request.Path);
|
||||
// Don't fail the request if index creation fails, just log it
|
||||
}
|
||||
|
||||
if (pool.PolicyConfig.RequirePrivilege is > 0)
|
||||
return Ok(new CreateUploadTaskResponse
|
||||
{
|
||||
var privilege =
|
||||
currentUser.PerkSubscription is null ? 0 :
|
||||
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
|
||||
FileExists = true,
|
||||
File = existingFile
|
||||
});
|
||||
}
|
||||
|
||||
var taskId = await Nanoid.GenerateAsync();
|
||||
|
||||
// Create persistent upload task
|
||||
var persistentTask = await persistentTaskService.CreateUploadTaskAsync(taskId, request, accountId);
|
||||
|
||||
return Ok(new CreateUploadTaskResponse
|
||||
{
|
||||
FileExists = false,
|
||||
TaskId = taskId,
|
||||
ChunkSize = persistentTask.ChunkSize,
|
||||
ChunksCount = persistentTask.ChunksCount
|
||||
});
|
||||
}
|
||||
|
||||
private async Task<IActionResult?> ValidateUserPermissions(Account currentUser)
|
||||
{
|
||||
if (currentUser.IsSuperuser) return null;
|
||||
|
||||
var allowed = await permission.HasPermissionAsync(new HasPermissionRequest
|
||||
{ Actor = currentUser.Id, Key = "files.create" });
|
||||
|
||||
return allowed.HasPermission
|
||||
? null
|
||||
: new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
|
||||
}
|
||||
|
||||
private Task<IActionResult?> ValidatePoolAccess(Account currentUser, FilePool pool, CreateUploadTaskRequest request)
|
||||
{
|
||||
if (pool.PolicyConfig.RequirePrivilege <= 0) return Task.FromResult<IActionResult?>(null);
|
||||
|
||||
var privilege = currentUser.PerkSubscription is null
|
||||
? 0
|
||||
: PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier);
|
||||
|
||||
if (privilege < pool.PolicyConfig.RequirePrivilege)
|
||||
{
|
||||
return new ObjectResult(ApiError.Unauthorized(
|
||||
return Task.FromResult<IActionResult?>(new ObjectResult(ApiError.Unauthorized(
|
||||
$"You need Stellar Program tier {pool.PolicyConfig.RequirePrivilege} to use pool {pool.Name}, you are tier {privilege}",
|
||||
forbidden: true))
|
||||
{
|
||||
StatusCode = 403
|
||||
};
|
||||
}
|
||||
{ StatusCode = 403 });
|
||||
}
|
||||
|
||||
var policy = pool.PolicyConfig;
|
||||
return Task.FromResult<IActionResult?>(null);
|
||||
}
|
||||
|
||||
private static IActionResult? ValidatePoolPolicy(PolicyConfig policy, CreateUploadTaskRequest request)
|
||||
{
|
||||
if (!policy.AllowEncryption && !string.IsNullOrEmpty(request.EncryptPassword))
|
||||
{
|
||||
return new ObjectResult(ApiError.Unauthorized("File encryption is not allowed in this pool", true))
|
||||
@@ -91,13 +160,10 @@ public class FileUploadController(
|
||||
|
||||
var foundMatch = policy.AcceptTypes.Any(acceptType =>
|
||||
{
|
||||
if (acceptType.EndsWith("/*", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (!acceptType.EndsWith("/*", StringComparison.OrdinalIgnoreCase))
|
||||
return acceptType.Equals(request.ContentType, StringComparison.OrdinalIgnoreCase);
|
||||
var type = acceptType[..^2];
|
||||
return request.ContentType.StartsWith($"{type}/", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
return acceptType.Equals(request.ContentType, StringComparison.OrdinalIgnoreCase);
|
||||
});
|
||||
|
||||
if (!foundMatch)
|
||||
@@ -114,16 +180,20 @@ public class FileUploadController(
|
||||
return new ObjectResult(ApiError.Unauthorized(
|
||||
$"File size {request.FileSize} is larger than the pool's maximum file size {policy.MaxFileSize}",
|
||||
true))
|
||||
{
|
||||
StatusCode = 403
|
||||
};
|
||||
{ StatusCode = 403 };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private async Task<IActionResult?> ValidateQuota(Account currentUser, FilePool pool, long fileSize)
|
||||
{
|
||||
var (ok, billableUnit, quota) = await quotaService.IsFileAcceptable(
|
||||
Guid.Parse(currentUser.Id),
|
||||
pool.BillingConfig.CostMultiplier ?? 1.0,
|
||||
request.FileSize
|
||||
fileSize
|
||||
);
|
||||
|
||||
if (!ok)
|
||||
{
|
||||
return new ObjectResult(
|
||||
@@ -132,147 +202,486 @@ public class FileUploadController(
|
||||
{ StatusCode = 403 };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private void EnsureTempDirectoryExists()
|
||||
{
|
||||
if (!Directory.Exists(_tempPath))
|
||||
{
|
||||
Directory.CreateDirectory(_tempPath);
|
||||
}
|
||||
|
||||
// Check if a file with the same hash already exists
|
||||
var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == request.Hash);
|
||||
if (existingFile != null)
|
||||
{
|
||||
return Ok(new CreateUploadTaskResponse
|
||||
{
|
||||
FileExists = true,
|
||||
File = existingFile
|
||||
});
|
||||
}
|
||||
|
||||
var taskId = await Nanoid.GenerateAsync();
|
||||
var taskPath = Path.Combine(_tempPath, taskId);
|
||||
Directory.CreateDirectory(taskPath);
|
||||
|
||||
var chunkSize = request.ChunkSize ?? DefaultChunkSize;
|
||||
var chunksCount = (int)Math.Ceiling((double)request.FileSize / chunkSize);
|
||||
|
||||
var task = new UploadTask
|
||||
{
|
||||
TaskId = taskId,
|
||||
FileName = request.FileName,
|
||||
FileSize = request.FileSize,
|
||||
ContentType = request.ContentType,
|
||||
ChunkSize = chunkSize,
|
||||
ChunksCount = chunksCount,
|
||||
PoolId = request.PoolId.Value,
|
||||
BundleId = request.BundleId,
|
||||
EncryptPassword = request.EncryptPassword,
|
||||
ExpiredAt = request.ExpiredAt,
|
||||
Hash = request.Hash,
|
||||
};
|
||||
|
||||
await System.IO.File.WriteAllTextAsync(Path.Combine(taskPath, "task.json"), JsonSerializer.Serialize(task));
|
||||
|
||||
return Ok(new CreateUploadTaskResponse
|
||||
{
|
||||
FileExists = false,
|
||||
TaskId = taskId,
|
||||
ChunkSize = chunkSize,
|
||||
ChunksCount = chunksCount
|
||||
});
|
||||
}
|
||||
|
||||
public class UploadChunkRequest
|
||||
{
|
||||
[Required]
|
||||
public IFormFile Chunk { get; set; } = null!;
|
||||
[Required] public IFormFile Chunk { get; set; } = null!;
|
||||
}
|
||||
|
||||
[HttpPost("chunk/{taskId}/{chunkIndex}")]
|
||||
[HttpPost("chunk/{taskId}/{chunkIndex:int}")]
|
||||
[RequestSizeLimit(DefaultChunkSize + 1024 * 1024)] // 6MB to be safe
|
||||
[RequestFormLimits(MultipartBodyLengthLimit = DefaultChunkSize + 1024 * 1024)]
|
||||
public async Task<IActionResult> UploadChunk(string taskId, int chunkIndex, [FromForm] UploadChunkRequest request)
|
||||
{
|
||||
var chunk = request.Chunk;
|
||||
|
||||
// Check if chunk is already uploaded (resumable upload)
|
||||
if (await persistentTaskService.IsChunkUploadedAsync(taskId, chunkIndex))
|
||||
{
|
||||
return Ok(new { message = "Chunk already uploaded" });
|
||||
}
|
||||
|
||||
var taskPath = Path.Combine(_tempPath, taskId);
|
||||
if (!Directory.Exists(taskPath))
|
||||
{
|
||||
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
|
||||
Directory.CreateDirectory(taskPath);
|
||||
}
|
||||
|
||||
var chunkPath = Path.Combine(taskPath, $"{chunkIndex}.chunk");
|
||||
await using var stream = new FileStream(chunkPath, FileMode.Create);
|
||||
await chunk.CopyToAsync(stream);
|
||||
|
||||
// Update persistent task progress
|
||||
await persistentTaskService.UpdateChunkProgressAsync(taskId, chunkIndex);
|
||||
|
||||
return Ok();
|
||||
}
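// A minimal client-side sketch of the resumable chunk protocol, assuming the upload
// controller is mounted at /api/files/upload and that the create-task response carried
// taskId, chunkSize and chunksCount; the route prefix and the ReadChunk helper are
// assumptions for illustration only.
for (var index = 0; index < chunksCount; index++)
{
    var bytes = ReadChunk(localPath, index, chunkSize);              // hypothetical helper slicing the local file
    using var form = new MultipartFormDataContent
    {
        { new ByteArrayContent(bytes), "Chunk", fileName }
    };
    var resp = await http.PostAsync($"/api/files/upload/chunk/{taskId}/{index}", form);
    resp.EnsureSuccessStatusCode();                                  // re-sending an already stored chunk is a no-op
}
// Once every chunk is stored, ask the server to merge and process the file.
await http.PostAsync($"/api/files/upload/complete/{taskId}", null);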
|
||||
|
||||
[HttpPost("complete/{taskId}")]
|
||||
public async Task<IActionResult> CompleteUpload(string taskId)
|
||||
{
|
||||
// Get persistent task
|
||||
var persistentTask = await persistentTaskService.GetUploadTaskAsync(taskId);
|
||||
if (persistentTask is null)
|
||||
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
|
||||
|
||||
var currentUser = HttpContext.Items["CurrentUser"] as Account;
|
||||
if (currentUser is null)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
// Verify ownership
|
||||
if (persistentTask.AccountId != Guid.Parse(currentUser.Id))
|
||||
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
|
||||
|
||||
var taskPath = Path.Combine(_tempPath, taskId);
|
||||
if (!Directory.Exists(taskPath))
|
||||
{
|
||||
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
|
||||
}
|
||||
|
||||
var taskJsonPath = Path.Combine(taskPath, "task.json");
|
||||
if (!System.IO.File.Exists(taskJsonPath))
|
||||
{
|
||||
return new ObjectResult(ApiError.NotFound("Upload task metadata")) { StatusCode = 404 };
|
||||
}
|
||||
|
||||
var task = JsonSerializer.Deserialize<UploadTask>(await System.IO.File.ReadAllTextAsync(taskJsonPath));
|
||||
if (task == null)
|
||||
{
|
||||
return new ObjectResult(new ApiError { Code = "BAD_REQUEST", Message = "Invalid task metadata.", Status = 400 })
|
||||
{ StatusCode = 400 };
|
||||
}
|
||||
return new ObjectResult(ApiError.NotFound("Upload task directory")) { StatusCode = 404 };
|
||||
|
||||
var mergedFilePath = Path.Combine(_tempPath, taskId + ".tmp");
|
||||
await using (var mergedStream = new FileStream(mergedFilePath, FileMode.Create))
|
||||
{
|
||||
for (var i = 0; i < task.ChunksCount; i++)
|
||||
{
|
||||
var chunkPath = Path.Combine(taskPath, $"{i}.chunk");
|
||||
if (!System.IO.File.Exists(chunkPath))
|
||||
{
|
||||
// Clean up partially uploaded file
|
||||
mergedStream.Close();
|
||||
System.IO.File.Delete(mergedFilePath);
|
||||
Directory.Delete(taskPath, true);
|
||||
return new ObjectResult(new ApiError
|
||||
{ Code = "CHUNK_MISSING", Message = $"Chunk {i} is missing.", Status = 400 })
|
||||
{ StatusCode = 400 };
|
||||
}
|
||||
|
||||
await using var chunkStream = new FileStream(chunkPath, FileMode.Open);
|
||||
await chunkStream.CopyToAsync(mergedStream);
|
||||
}
|
||||
}
|
||||
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
try
|
||||
{
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
}
|
||||
await MergeChunks(taskId, taskPath, mergedFilePath, persistentTask.ChunksCount, persistentTaskService);
|
||||
|
||||
var fileId = await Nanoid.GenerateAsync();
|
||||
|
||||
var cloudFile = await fileService.ProcessNewFileAsync(
|
||||
currentUser,
|
||||
fileId,
|
||||
task.PoolId.ToString(),
|
||||
task.BundleId?.ToString(),
|
||||
persistentTask.PoolId.ToString(),
|
||||
persistentTask.BundleId?.ToString(),
|
||||
mergedFilePath,
|
||||
task.FileName,
|
||||
task.ContentType,
|
||||
task.EncryptPassword,
|
||||
task.ExpiredAt
|
||||
persistentTask.FileName,
|
||||
persistentTask.ContentType,
|
||||
persistentTask.EncryptPassword,
|
||||
persistentTask.ExpiredAt
|
||||
);
|
||||
|
||||
// Clean up
|
||||
Directory.Delete(taskPath, true);
|
||||
System.IO.File.Delete(mergedFilePath);
|
||||
// Create the file index if a path is provided
|
||||
if (!string.IsNullOrEmpty(persistentTask.Path))
|
||||
{
|
||||
try
|
||||
{
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
await fileIndexService.CreateAsync(persistentTask.Path, fileId, accountId);
|
||||
logger.LogInformation("Created file index for file {FileId} at path {Path}", fileId,
|
||||
persistentTask.Path);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogWarning(ex, "Failed to create file index for file {FileId} at path {Path}", fileId,
|
||||
persistentTask.Path);
|
||||
// Don't fail the upload if index creation fails, just log it
|
||||
}
|
||||
}
|
||||
|
||||
// Update the task status to "processing" - background processing is now happening
|
||||
await persistentTaskService.UpdateTaskProgressAsync(taskId, 0.95, "Processing file in background...");
|
||||
|
||||
// Send upload completion notification (a file is uploaded, but processing continues)
|
||||
await persistentTaskService.SendUploadCompletedNotificationAsync(persistentTask, fileId);
|
||||
|
||||
return Ok(cloudFile);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Log the actual exception for debugging
|
||||
logger.LogError(ex, "Failed to complete upload for task {TaskId}. Error: {ErrorMessage}", taskId,
|
||||
ex.Message);
|
||||
|
||||
// Mark task as failed
|
||||
await persistentTaskService.MarkTaskFailedAsync(taskId);
|
||||
|
||||
// Send failure notification
|
||||
await persistentTaskService.SendUploadFailedNotificationAsync(persistentTask, ex.Message);
|
||||
|
||||
await CleanupTempFiles(taskPath, mergedFilePath);
|
||||
|
||||
return new ObjectResult(new ApiError
|
||||
{
|
||||
Code = "UPLOAD_FAILED",
|
||||
Message = $"Failed to complete file upload: {ex.Message}",
|
||||
Status = 500
|
||||
}) { StatusCode = 500 };
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Always clean up temp files
|
||||
await CleanupTempFiles(taskPath, mergedFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task MergeChunks(
|
||||
string taskId,
|
||||
string taskPath,
|
||||
string mergedFilePath,
|
||||
int chunksCount,
|
||||
PersistentTaskService persistentTaskService)
|
||||
{
|
||||
await using var mergedStream = new FileStream(mergedFilePath, FileMode.Create);
|
||||
|
||||
const double baseProgress = 0.8; // Start from 80% (chunk upload is already at 95%)
|
||||
const double remainingProgress = 0.15; // Remaining 15% progress distributed across chunks
|
||||
var progressPerChunk = remainingProgress / chunksCount;
|
||||
|
||||
for (var i = 0; i < chunksCount; i++)
|
||||
{
|
||||
var chunkPath = Path.Combine(taskPath, i + ".chunk");
|
||||
if (!System.IO.File.Exists(chunkPath))
|
||||
throw new InvalidOperationException("Chunk " + i + " is missing.");
|
||||
|
||||
await using var chunkStream = new FileStream(chunkPath, FileMode.Open);
|
||||
await chunkStream.CopyToAsync(mergedStream);
|
||||
|
||||
// Update progress after each chunk is merged
|
||||
var currentProgress = baseProgress + progressPerChunk * (i + 1);
|
||||
await persistentTaskService.UpdateTaskProgressAsync(
|
||||
taskId,
|
||||
currentProgress,
|
||||
"Merging chunks... (" + (i + 1) + "/" + chunksCount + ")"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private static Task CleanupTempFiles(string taskPath, string mergedFilePath)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(taskPath))
|
||||
Directory.Delete(taskPath, true);
|
||||
|
||||
if (System.IO.File.Exists(mergedFilePath))
|
||||
System.IO.File.Delete(mergedFilePath);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup errors to avoid masking the original exception
|
||||
}
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
// New endpoints for resumable uploads
|
||||
|
||||
[HttpGet("tasks")]
|
||||
public async Task<IActionResult> GetMyUploadTasks(
|
||||
[FromQuery] UploadTaskStatus? status = null,
|
||||
[FromQuery] string? sortBy = "lastActivity",
|
||||
[FromQuery] bool sortDescending = true,
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int limit = 50
|
||||
)
|
||||
{
|
||||
var currentUser = HttpContext.Items["CurrentUser"] as Account;
|
||||
if (currentUser is null)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
var tasks = await persistentTaskService.GetUserUploadTasksAsync(accountId, status, sortBy, sortDescending,
|
||||
offset, limit);
|
||||
|
||||
Response.Headers.Append("X-Total", tasks.TotalCount.ToString());
|
||||
|
||||
return Ok(tasks.Items.Select(t => new
|
||||
{
|
||||
t.TaskId,
|
||||
t.FileName,
|
||||
t.FileSize,
|
||||
t.ContentType,
|
||||
t.ChunkSize,
|
||||
t.ChunksCount,
|
||||
t.ChunksUploaded,
|
||||
Progress = t.ChunksCount > 0 ? (double)t.ChunksUploaded / t.ChunksCount * 100 : 0,
|
||||
t.Status,
|
||||
t.LastActivity,
|
||||
t.CreatedAt,
|
||||
t.UpdatedAt,
|
||||
t.UploadedChunks,
|
||||
Pool = new { t.PoolId, Name = "Pool Name" }, // Could be expanded to include pool details
|
||||
Bundle = t.BundleId.HasValue ? new { t.BundleId } : null
|
||||
}));
|
||||
}
|
||||
|
||||
[HttpGet("progress/{taskId}")]
|
||||
public async Task<IActionResult> GetUploadProgress(string taskId)
|
||||
{
|
||||
var currentUser = HttpContext.Items["CurrentUser"] as Account;
|
||||
if (currentUser is null)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var task = await persistentTaskService.GetUploadTaskAsync(taskId);
|
||||
if (task is null)
|
||||
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
|
||||
|
||||
// Verify ownership
|
||||
if (task.AccountId != Guid.Parse(currentUser.Id))
|
||||
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
|
||||
|
||||
var progress = await persistentTaskService.GetUploadProgressAsync(taskId);
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
task.TaskId,
|
||||
task.FileName,
|
||||
task.FileSize,
|
||||
task.ChunksCount,
|
||||
task.ChunksUploaded,
|
||||
Progress = progress,
|
||||
task.Status,
|
||||
task.LastActivity,
|
||||
task.UploadedChunks
|
||||
});
|
||||
}
|
||||
|
||||
[HttpGet("resume/{taskId}")]
|
||||
public async Task<IActionResult> ResumeUploadTask(string taskId)
|
||||
{
|
||||
var currentUser = HttpContext.Items["CurrentUser"] as Account;
|
||||
if (currentUser is null)
|
||||
return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };
|
||||
|
||||
var task = await persistentTaskService.GetUploadTaskAsync(taskId);
|
||||
if (task is null)
|
||||
return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };
|
||||
|
||||
// Verify ownership
|
||||
if (task.AccountId != Guid.Parse(currentUser.Id))
|
||||
return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };
|
||||
|
||||
// Ensure temp directory exists
|
||||
var taskPath = Path.Combine(_tempPath, taskId);
|
||||
if (!Directory.Exists(taskPath))
|
||||
{
|
||||
Directory.CreateDirectory(taskPath);
|
||||
}
|
||||
|
||||
return Ok(new
|
||||
{
|
||||
task.TaskId,
|
||||
task.FileName,
|
||||
task.FileSize,
|
||||
            task.ContentType,
            task.ChunkSize,
            task.ChunksCount,
            task.ChunksUploaded,
            task.UploadedChunks,
            Progress = task.ChunksCount > 0 ? (double)task.ChunksUploaded / task.ChunksCount * 100 : 0
        });
    }

    [HttpDelete("task/{taskId}")]
    public async Task<IActionResult> CancelUploadTask(string taskId)
    {
        var currentUser = HttpContext.Items["CurrentUser"] as Account;
        if (currentUser is null)
            return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };

        var task = await persistentTaskService.GetUploadTaskAsync(taskId);
        if (task is null)
            return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };

        // Verify ownership
        if (task.AccountId != Guid.Parse(currentUser.Id))
            return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };

        // Mark as failed (cancelled)
        await persistentTaskService.MarkTaskFailedAsync(taskId);

        // Clean up temp files
        var taskPath = Path.Combine(_tempPath, taskId);
        await CleanupTempFiles(taskPath, string.Empty);

        return Ok(new { message = "Upload task cancelled" });
    }

    [HttpGet("stats")]
    public async Task<IActionResult> GetUploadStats()
    {
        var currentUser = HttpContext.Items["CurrentUser"] as Account;
        if (currentUser is null)
            return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };

        var accountId = Guid.Parse(currentUser.Id);
        var stats = await persistentTaskService.GetUserUploadStatsAsync(accountId);

        return Ok(new
        {
            stats.TotalTasks,
            stats.InProgressTasks,
            stats.CompletedTasks,
            stats.FailedTasks,
            stats.ExpiredTasks,
            stats.TotalUploadedBytes,
            stats.AverageProgress,
            stats.RecentActivity
        });
    }

    [HttpDelete("tasks/cleanup")]
    public async Task<IActionResult> CleanupFailedTasks()
    {
        var currentUser = HttpContext.Items["CurrentUser"] as Account;
        if (currentUser is null)
            return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };

        var accountId = Guid.Parse(currentUser.Id);
        var cleanedCount = await persistentTaskService.CleanupUserFailedTasksAsync(accountId);

        return Ok(new { message = $"Cleaned up {cleanedCount} failed tasks" });
    }

    [HttpGet("tasks/recent")]
    public async Task<IActionResult> GetRecentTasks([FromQuery] int limit = 10)
    {
        var currentUser = HttpContext.Items["CurrentUser"] as Account;
        if (currentUser is null)
            return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };

        var accountId = Guid.Parse(currentUser.Id);
        var tasks = await persistentTaskService.GetRecentUserTasksAsync(accountId, limit);

        return Ok(tasks.Select(t => new
        {
            t.TaskId,
            t.FileName,
            t.FileSize,
            t.ContentType,
            Progress = t.ChunksCount > 0 ? (double)t.ChunksUploaded / t.ChunksCount * 100 : 0,
            t.Status,
            t.LastActivity,
            t.CreatedAt
        }));
    }

    [HttpGet("tasks/{taskId}/details")]
    public async Task<IActionResult> GetTaskDetails(string taskId)
    {
        var currentUser = HttpContext.Items["CurrentUser"] as Account;
        if (currentUser is null)
            return new ObjectResult(ApiError.Unauthorized()) { StatusCode = 401 };

        var task = await persistentTaskService.GetUploadTaskAsync(taskId);
        if (task is null)
            return new ObjectResult(ApiError.NotFound("Upload task")) { StatusCode = 404 };

        // Verify ownership
        if (task.AccountId != Guid.Parse(currentUser.Id))
            return new ObjectResult(ApiError.Unauthorized(forbidden: true)) { StatusCode = 403 };

        // Get pool information
        var pool = await fileService.GetPoolAsync(task.PoolId);
        var bundle = task.BundleId.HasValue
            ? await db.Bundles.FirstOrDefaultAsync(b => b.Id == task.BundleId.Value)
            : null;

        return Ok(new
        {
            Task = new
            {
                task.TaskId,
                task.FileName,
                task.FileSize,
                task.ContentType,
                task.ChunkSize,
                task.ChunksCount,
                task.ChunksUploaded,
                Progress = task.ChunksCount > 0 ? (double)task.ChunksUploaded / task.ChunksCount * 100 : 0,
                task.Status,
                task.LastActivity,
                task.CreatedAt,
                task.UpdatedAt,
                task.ExpiredAt,
                task.Hash,
                task.UploadedChunks
            },
            Pool = pool != null
                ? new
                {
                    pool.Id,
                    pool.Name,
                    pool.Description
                }
                : null,
            Bundle = bundle != null
                ? new
                {
                    bundle.Id,
                    bundle.Name,
                    bundle.Description
                }
                : null,
            EstimatedTimeRemaining = CalculateEstimatedTime(task),
            UploadSpeed = CalculateUploadSpeed(task)
        });
    }

    private static string? CalculateEstimatedTime(PersistentUploadTask task)
    {
        if (task.Status != TaskStatus.InProgress || task.ChunksUploaded == 0)
            return null;

        var elapsed = NodaTime.SystemClock.Instance.GetCurrentInstant() - task.CreatedAt;
        var elapsedSeconds = elapsed.TotalSeconds;
        var chunksPerSecond = task.ChunksUploaded / elapsedSeconds;
        var remainingChunks = task.ChunksCount - task.ChunksUploaded;

        if (chunksPerSecond <= 0)
            return null;

        var remainingSeconds = remainingChunks / chunksPerSecond;

        return remainingSeconds switch
        {
            < 60 => $"{remainingSeconds:F0} seconds",
            < 3600 => $"{remainingSeconds / 60:F0} minutes",
            _ => $"{remainingSeconds / 3600:F1} hours"
        };
    }

    private static string? CalculateUploadSpeed(PersistentUploadTask task)
    {
        if (task.ChunksUploaded == 0)
            return null;

        var elapsed = SystemClock.Instance.GetCurrentInstant() - task.CreatedAt;
        var elapsedSeconds = elapsed.TotalSeconds;
        var bytesUploaded = task.ChunksUploaded * task.ChunkSize;
        var bytesPerSecond = bytesUploaded / elapsedSeconds;

        return bytesPerSecond switch
        {
            < 1024 => $"{bytesPerSecond:F0} B/s",
            < 1024 * 1024 => $"{bytesPerSecond / 1024:F0} KB/s",
            _ => $"{bytesPerSecond / (1024 * 1024):F1} MB/s"
        };
    }
}
@@ -1,8 +1,88 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Google.Protobuf.Collections;
using NodaTime;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text.Json;

namespace DysonNetwork.Drive.Storage.Model
namespace DysonNetwork.Drive.Storage.Model;

// File Upload Task Parameters
public class FileUploadParameters
{
    public string FileName { get; set; } = string.Empty;
    public long FileSize { get; set; }
    public string ContentType { get; set; } = string.Empty;
    public long ChunkSize { get; set; } = 5242880L;
    public int ChunksCount { get; set; }
    public int ChunksUploaded { get; set; }
    public Guid PoolId { get; set; }
    public Guid? BundleId { get; set; }
    public string? EncryptPassword { get; set; }
    public string Hash { get; set; } = string.Empty;
    public List<int> UploadedChunks { get; set; } = [];
    public string? Path { get; set; }
}

// File Move Task Parameters
public class FileMoveParameters
{
    public List<string> FileIds { get; set; } = [];
    public Guid TargetPoolId { get; set; }
    public Guid? TargetBundleId { get; set; }
    public int FilesProcessed { get; set; }
}

// File Compression Task Parameters
public class FileCompressParameters
{
    public List<string> FileIds { get; set; } = [];
    public string CompressionFormat { get; set; } = "zip";
    public int CompressionLevel { get; set; } = 6;
    public string? OutputFileName { get; set; }
    public int FilesProcessed { get; set; }
    public string? ResultFileId { get; set; }
}

// Bulk Operation Task Parameters
public class BulkOperationParameters
{
    public string OperationType { get; set; } = string.Empty;
    public List<string> TargetIds { get; set; } = [];
    public Dictionary<string, object?> OperationParameters { get; set; } = new();
    public int ItemsProcessed { get; set; }
    public Dictionary<string, object?>? OperationResults { get; set; }
}

// Storage Migration Task Parameters
public class StorageMigrationParameters
{
    public Guid SourcePoolId { get; set; }
    public Guid TargetPoolId { get; set; }
    public List<string> FileIds { get; set; } = new();
    public bool PreserveOriginals { get; set; } = true;
    public long TotalBytesToTransfer { get; set; }
    public long BytesTransferred { get; set; }
    public int FilesMigrated { get; set; }
}

// Helper class for parameter operations using GrpcTypeHelper
public static class ParameterHelper
{
    public static T? Typed<T>(Dictionary<string, object?> parameters)
    {
        var rawParams = GrpcTypeHelper.ConvertObjectToByteString(parameters);
        return GrpcTypeHelper.ConvertByteStringToObject<T>(rawParams);
    }

    public static Dictionary<string, object?> Untyped<T>(T parameters)
    {
        var rawParams = GrpcTypeHelper.ConvertObjectToByteString(parameters);
        return GrpcTypeHelper.ConvertByteStringToObject<Dictionary<string, object?>>(rawParams) ?? [];
    }
}
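The helper above is the bridge between the strongly typed parameter classes and the loose jsonb dictionary stored on PersistentTask. As a minimal, hypothetical usage sketch (assuming GrpcTypeHelper round-trips both directions through the same serialized representation):

// Hypothetical round-trip: typed parameters -> jsonb dictionary -> typed parameters.
var upload = new FileUploadParameters { FileName = "photo.png", FileSize = 1_048_576, ChunksCount = 4 };
Dictionary<string, object?> raw = ParameterHelper.Untyped(upload);                   // shape stored in PersistentTask.Parameters
FileUploadParameters? restored = ParameterHelper.Typed<FileUploadParameters>(raw);   // typed view read back from the dictionary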
public class CreateUploadTaskRequest
{
    public string Hash { get; set; } = null!;
@@ -14,6 +94,7 @@ namespace DysonNetwork.Drive.Storage.Model
    public string? EncryptPassword { get; set; }
    public Instant? ExpiredAt { get; set; }
    public long? ChunkSize { get; set; }
    public string? Path { get; set; }
}

public class CreateUploadTaskResponse
@@ -39,4 +120,551 @@ namespace DysonNetwork.Drive.Storage.Model
    public Instant? ExpiredAt { get; set; }
    public string Hash { get; set; } = null!;
}

public class PersistentTask : ModelBase
{
    public Guid Id { get; set; } = Guid.NewGuid();

    [MaxLength(64)] public string TaskId { get; set; } = null!;

    [MaxLength(256)] public string Name { get; set; } = null!;

    [MaxLength(1024)] public string? Description { get; set; }

    public TaskType Type { get; set; }

    public TaskStatus Status { get; set; } = TaskStatus.InProgress;

    public Guid AccountId { get; set; }

    // Progress tracking (0-100)
    public double Progress { get; set; }

    // Task-specific parameters stored as JSON
    [Column(TypeName = "jsonb")] public Dictionary<string, object?> Parameters { get; set; } = new();

    // Task results/output stored as JSON
    [Column(TypeName = "jsonb")] public Dictionary<string, object?> Results { get; set; } = new();

    [MaxLength(1024)] public string? ErrorMessage { get; set; }

    public Instant? StartedAt { get; set; }
    public Instant? CompletedAt { get; set; }
    public Instant? ExpiredAt { get; set; }

    public Instant LastActivity { get; set; }

    // Priority (higher = more important)
    public int Priority { get; set; } = 0;

    // Estimated duration in seconds
    public long? EstimatedDurationSeconds { get; set; }
}
|
||||
// Backward compatibility - UploadTask inherits from PersistentTask
|
||||
public class PersistentUploadTask : PersistentTask
|
||||
{
|
||||
public PersistentUploadTask()
|
||||
{
|
||||
Type = TaskType.FileUpload;
|
||||
Name = "File Upload";
|
||||
}
|
||||
|
||||
// Convenience properties using typed parameters
|
||||
[NotMapped]
|
||||
public FileUploadParameters TypedParameters
|
||||
{
|
||||
get => ParameterHelper.Typed<FileUploadParameters>(Parameters)!;
|
||||
set => Parameters = ParameterHelper.Untyped(value);
|
||||
}
|
||||
|
||||
[MaxLength(256)]
|
||||
public string FileName
|
||||
{
|
||||
get => TypedParameters.FileName;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FileName = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public long FileSize
|
||||
{
|
||||
get => TypedParameters.FileSize;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FileSize = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
[MaxLength(128)]
|
||||
public string ContentType
|
||||
{
|
||||
get => TypedParameters.ContentType;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.ContentType = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public long ChunkSize
|
||||
{
|
||||
get => TypedParameters.ChunkSize;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.ChunkSize = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public int ChunksCount
|
||||
{
|
||||
get => TypedParameters.ChunksCount;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.ChunksCount = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public int ChunksUploaded
|
||||
{
|
||||
get => TypedParameters.ChunksUploaded;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.ChunksUploaded = value;
|
||||
TypedParameters = parameters;
|
||||
Progress = ChunksCount > 0 ? (double)value / ChunksCount * 100 : 0;
|
||||
}
|
||||
}
|
||||
|
||||
public Guid PoolId
|
||||
{
|
||||
get => TypedParameters.PoolId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.PoolId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public Guid? BundleId
|
||||
{
|
||||
get => TypedParameters.BundleId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.BundleId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
[MaxLength(256)]
|
||||
public string? EncryptPassword
|
||||
{
|
||||
get => TypedParameters.EncryptPassword;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.EncryptPassword = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public string Hash
|
||||
{
|
||||
get => TypedParameters.Hash;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.Hash = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
// JSON array of uploaded chunk indices for resumability
|
||||
public List<int> UploadedChunks
|
||||
{
|
||||
get => TypedParameters.UploadedChunks;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.UploadedChunks = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public string? Path
|
||||
{
|
||||
get => TypedParameters.Path;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.Path = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public enum TaskType
{
    FileUpload,
    FileMove,
    FileCompress,
    FileDecompress,
    FileEncrypt,
    FileDecrypt,
    BulkOperation,
    StorageMigration,
    FileConversion,
    Custom
}

[Flags]
public enum TaskStatus
{
    Pending,
    InProgress,
    Paused,
    Completed,
    Failed,
    Cancelled,
    Expired
}
|
||||
// File Move Task
|
||||
public class FileMoveTask : PersistentTask
|
||||
{
|
||||
public FileMoveTask()
|
||||
{
|
||||
Type = TaskType.FileMove;
|
||||
Name = "Move Files";
|
||||
}
|
||||
|
||||
// Convenience properties using typed parameters
|
||||
public FileMoveParameters TypedParameters
|
||||
{
|
||||
get => ParameterHelper.Typed<FileMoveParameters>(Parameters)!;
|
||||
set => Parameters = ParameterHelper.Untyped(value);
|
||||
}
|
||||
|
||||
public List<string> FileIds
|
||||
{
|
||||
get => TypedParameters.FileIds;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FileIds = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public Guid TargetPoolId
|
||||
{
|
||||
get => TypedParameters.TargetPoolId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.TargetPoolId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public Guid? TargetBundleId
|
||||
{
|
||||
get => TypedParameters.TargetBundleId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.TargetBundleId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public int FilesProcessed
|
||||
{
|
||||
get => TypedParameters.FilesProcessed;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FilesProcessed = value;
|
||||
TypedParameters = parameters;
|
||||
Progress = FileIds.Count > 0 ? (double)value / FileIds.Count * 100 : 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// File Compression Task
|
||||
public class FileCompressTask : PersistentTask
|
||||
{
|
||||
public FileCompressTask()
|
||||
{
|
||||
Type = TaskType.FileCompress;
|
||||
Name = "Compress Files";
|
||||
}
|
||||
|
||||
// Convenience properties using typed parameters
|
||||
public FileCompressParameters TypedParameters
|
||||
{
|
||||
get => ParameterHelper.Typed<FileCompressParameters>(Parameters)!;
|
||||
set => Parameters = ParameterHelper.Untyped(value);
|
||||
}
|
||||
|
||||
public List<string> FileIds
|
||||
{
|
||||
get => TypedParameters.FileIds;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FileIds = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
[MaxLength(32)]
|
||||
public string CompressionFormat
|
||||
{
|
||||
get => TypedParameters.CompressionFormat;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.CompressionFormat = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public int CompressionLevel
|
||||
{
|
||||
get => TypedParameters.CompressionLevel;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.CompressionLevel = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public string? OutputFileName
|
||||
{
|
||||
get => TypedParameters.OutputFileName;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.OutputFileName = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public int FilesProcessed
|
||||
{
|
||||
get => TypedParameters.FilesProcessed;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FilesProcessed = value;
|
||||
TypedParameters = parameters;
|
||||
Progress = FileIds.Count > 0 ? (double)value / FileIds.Count * 100 : 0;
|
||||
}
|
||||
}
|
||||
|
||||
public string? ResultFileId
|
||||
{
|
||||
get => TypedParameters.ResultFileId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.ResultFileId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Bulk Operation Task
|
||||
public class BulkOperationTask : PersistentTask
|
||||
{
|
||||
public BulkOperationTask()
|
||||
{
|
||||
Type = TaskType.BulkOperation;
|
||||
Name = "Bulk Operation";
|
||||
}
|
||||
|
||||
// Convenience properties using typed parameters
|
||||
public BulkOperationParameters TypedParameters
|
||||
{
|
||||
get => ParameterHelper.Typed<BulkOperationParameters>(Parameters)!;
|
||||
set => Parameters = ParameterHelper.Untyped(value);
|
||||
}
|
||||
|
||||
[MaxLength(128)]
|
||||
public string OperationType
|
||||
{
|
||||
get => TypedParameters.OperationType;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.OperationType = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public List<string> TargetIds
|
||||
{
|
||||
get => TypedParameters.TargetIds;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.TargetIds = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
[Column(TypeName = "jsonb")]
|
||||
public Dictionary<string, object?> OperationParameters
|
||||
{
|
||||
get => TypedParameters.OperationParameters;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.OperationParameters = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public int ItemsProcessed
|
||||
{
|
||||
get => TypedParameters.ItemsProcessed;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.ItemsProcessed = value;
|
||||
TypedParameters = parameters;
|
||||
Progress = TargetIds.Count > 0 ? (double)value / TargetIds.Count * 100 : 0;
|
||||
}
|
||||
}
|
||||
|
||||
[Column(TypeName = "jsonb")]
|
||||
public Dictionary<string, object?>? OperationResults
|
||||
{
|
||||
get => TypedParameters.OperationResults;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.OperationResults = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Storage Migration Task
|
||||
public class StorageMigrationTask : PersistentTask
|
||||
{
|
||||
public StorageMigrationTask()
|
||||
{
|
||||
Type = TaskType.StorageMigration;
|
||||
Name = "Storage Migration";
|
||||
}
|
||||
|
||||
// Convenience properties using typed parameters
|
||||
public StorageMigrationParameters TypedParameters
|
||||
{
|
||||
get => ParameterHelper.Typed<StorageMigrationParameters>(Parameters)!;
|
||||
set => Parameters = ParameterHelper.Untyped(value);
|
||||
}
|
||||
|
||||
public Guid SourcePoolId
|
||||
{
|
||||
get => TypedParameters.SourcePoolId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.SourcePoolId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public Guid TargetPoolId
|
||||
{
|
||||
get => TypedParameters.TargetPoolId;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.TargetPoolId = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public List<string> FileIds
|
||||
{
|
||||
get => TypedParameters.FileIds;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FileIds = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public bool PreserveOriginals
|
||||
{
|
||||
get => TypedParameters.PreserveOriginals;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.PreserveOriginals = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public long TotalBytesToTransfer
|
||||
{
|
||||
get => TypedParameters.TotalBytesToTransfer;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.TotalBytesToTransfer = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
|
||||
public long BytesTransferred
|
||||
{
|
||||
get => TypedParameters.BytesTransferred;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.BytesTransferred = value;
|
||||
TypedParameters = parameters;
|
||||
Progress = TotalBytesToTransfer > 0 ? (double)value / TotalBytesToTransfer * 100 : 0;
|
||||
}
|
||||
}
|
||||
|
||||
public int FilesMigrated
|
||||
{
|
||||
get => TypedParameters.FilesMigrated;
|
||||
set
|
||||
{
|
||||
var parameters = TypedParameters;
|
||||
parameters.FilesMigrated = value;
|
||||
TypedParameters = parameters;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Legacy enum for backward compatibility
|
||||
public enum UploadTaskStatus
|
||||
{
|
||||
InProgress = TaskStatus.InProgress,
|
||||
Completed = TaskStatus.Completed,
|
||||
Failed = TaskStatus.Failed,
|
||||
Expired = TaskStatus.Expired
|
||||
}
|
||||
|
||||
DysonNetwork.Drive/Storage/PersistentTaskService.cs (new file, 1143 lines; diff suppressed because it is too large)
@@ -1,20 +0,0 @@
using DysonNetwork.Shared.Data;
using Microsoft.AspNetCore.Mvc;

namespace DysonNetwork.Drive;

[ApiController]
[Route("/api/version")]
public class VersionController : ControllerBase
{
    [HttpGet]
    public IActionResult Get()
    {
        return Ok(new AppVersion
        {
            Version = ThisAssembly.AssemblyVersion,
            Commit = ThisAssembly.GitCommitId,
            UpdateDate = ThisAssembly.GitCommitDate
        });
    }
}
@@ -27,12 +27,9 @@
    "PublicKeyPath": "Keys/PublicKey.pem",
    "PrivateKeyPath": "Keys/PrivateKey.pem"
  },
  "Tus": {
    "StorePath": "Uploads"
  },
  "Storage": {
    "Uploads": "Uploads",
    "PreferredRemote": "2adceae3-981a-4564-9b8d-5d71a211c873",
    "PreferredRemote": "c53136a6-9152-4ecb-9f88-43c41438c23e",
    "Remote": [
      {
        "Id": "minio",
@@ -114,6 +111,9 @@
      }
    }
  },
  "Cache": {
    "Serializer": "MessagePack"
  },
  "KnownProxies": [
    "127.0.0.1",
    "::1"
@@ -1,10 +1,10 @@
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081

FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DysonNetwork.Gateway/DysonNetwork.Gateway.csproj", "DysonNetwork.Gateway/"]
@@ -1,13 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk.Web">

    <PropertyGroup>
        <TargetFramework>net9.0</TargetFramework>
        <TargetFramework>net10.0</TargetFramework>
        <Nullable>enable</Nullable>
        <ImplicitUsings>enable</ImplicitUsings>
    </PropertyGroup>

    <ItemGroup>
        <PackageReference Include="Microsoft.Extensions.ServiceDiscovery.Yarp" Version="9.5.2" />
        <PackageReference Include="Microsoft.Extensions.ServiceDiscovery.Yarp" Version="10.1.0" />
        <PackageReference Include="Nerdbank.GitVersioning" Version="3.9.50">
            <PrivateAssets>all</PrivateAssets>
            <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
        </PackageReference>
        <PackageReference Include="Yarp.ReverseProxy" Version="2.3.0" />
    </ItemGroup>

DysonNetwork.Gateway/Health/GatewayConstant.cs (new file, 25 lines)
@@ -0,0 +1,25 @@
namespace DysonNetwork.Gateway.Health;

public abstract class GatewayConstant
{
    public static readonly string[] ServiceNames =
    [
        "ring",
        "pass",
        "drive",
        "sphere",
        "develop",
        "insight",
        "zone",
        "messager"
    ];

    // Core services: without these, the app as a whole stops functioning.
    public static readonly string[] CoreServiceNames =
    [
        "ring",
        "pass",
        "drive",
        "sphere"
    ];
}
DysonNetwork.Gateway/Health/GatewayHealthAggregator.cs (new file, 60 lines)
@@ -0,0 +1,60 @@
using NodaTime;

namespace DysonNetwork.Gateway.Health;

public class GatewayHealthAggregator(IHttpClientFactory httpClientFactory, GatewayReadinessStore store)
    : BackgroundService
{
    private async Task<ServiceHealthState> CheckService(string serviceName)
    {
        var client = httpClientFactory.CreateClient("health");
        var now = SystemClock.Instance.GetCurrentInstant();

        try
        {
            // Use service discovery to look up the service.
            // The service defaults give every service a /health endpoint that we can use here.
            using var response = await client.GetAsync($"http://{serviceName}/health");

            if (response.IsSuccessStatusCode)
            {
                return new ServiceHealthState(
                    serviceName,
                    true,
                    now,
                    null
                );
            }

            return new ServiceHealthState(
                serviceName,
                false,
                now,
                $"StatusCode: {(int)response.StatusCode}"
            );
        }
        catch (Exception ex)
        {
            return new ServiceHealthState(
                serviceName,
                false,
                now,
                ex.Message
            );
        }
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            foreach (var service in GatewayConstant.ServiceNames)
            {
                var result = await CheckService(service);
                store.Update(result);
            }

            await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken);
        }
    }
}
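CreateClient("health") resolves a named HttpClient; its registration is not part of this diff, so the following is only a hedged sketch of what it could look like (the three-second timeout is an assumption, not taken from the repository):

// Hypothetical registration for the named "health" client. Without it the factory still
// returns a default client, but a short timeout keeps a hung service from stalling the probe loop.
builder.Services.AddHttpClient("health", client => client.Timeout = TimeSpan.FromSeconds(3));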
DysonNetwork.Gateway/Health/GatewayReadinessMiddleware.cs (new file, 35 lines)
@@ -0,0 +1,35 @@
namespace DysonNetwork.Gateway.Health;

using Microsoft.AspNetCore.Http;

public sealed class GatewayReadinessMiddleware(RequestDelegate next)
{
    public async Task InvokeAsync(HttpContext context, GatewayReadinessStore store)
    {
        if (context.Request.Path.StartsWithSegments("/health"))
        {
            await next(context);
            return;
        }

        var readiness = store.Current;

        // Only core services participate in readiness gating
        var notReadyCoreServices = readiness.Services
            .Where(kv => GatewayConstant.CoreServiceNames.Contains(kv.Key))
            .Where(kv => !kv.Value.IsHealthy)
            .Select(kv => kv.Key)
            .ToArray();

        if (notReadyCoreServices.Length > 0)
        {
            context.Response.StatusCode = StatusCodes.Status503ServiceUnavailable;
            var unavailableServices = string.Join(", ", notReadyCoreServices);
            context.Response.Headers["X-NotReady"] = unavailableServices;
            await context.Response.WriteAsync("Solar Network is warming up. Please try again later.");
            return;
        }

        await next(context);
    }
}
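A caller can recognise the warm-up state from the 503 status plus the X-NotReady header set above; the snippet below is a hypothetical client-side check (the gateway URL and endpoint are placeholders):

// Hypothetical consumer of the readiness gate.
using var http = new HttpClient { BaseAddress = new Uri("https://gateway.example.com") };
var response = await http.GetAsync("/api/example");
if (response.StatusCode == System.Net.HttpStatusCode.ServiceUnavailable &&
    response.Headers.TryGetValues("X-NotReady", out var notReady))
{
    Console.WriteLine($"Gateway is warming up; waiting on: {string.Join(", ", notReady)}");
}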
DysonNetwork.Gateway/Health/GatewayReadinessStore.cs (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
using NodaTime;
|
||||
|
||||
namespace DysonNetwork.Gateway.Health;
|
||||
|
||||
public record ServiceHealthState(
|
||||
string ServiceName,
|
||||
bool IsHealthy,
|
||||
Instant LastChecked,
|
||||
string? Error
|
||||
);
|
||||
|
||||
public record GatewayReadinessState(
|
||||
bool IsReady,
|
||||
IReadOnlyDictionary<string, ServiceHealthState> Services,
|
||||
Instant LastUpdated
|
||||
);
|
||||
|
||||
public class GatewayReadinessStore
|
||||
{
|
||||
private readonly Lock _lock = new();
|
||||
|
||||
private readonly Dictionary<string, ServiceHealthState> _services = new();
|
||||
|
||||
public GatewayReadinessState Current { get; private set; } = new(
|
||||
IsReady: false,
|
||||
Services: new Dictionary<string, ServiceHealthState>(),
|
||||
LastUpdated: SystemClock.Instance.GetCurrentInstant()
|
||||
);
|
||||
|
||||
public IReadOnlyCollection<string> ServiceNames => _services.Keys;
|
||||
|
||||
public GatewayReadinessStore()
|
||||
{
|
||||
InitializeServices(GatewayConstant.ServiceNames);
|
||||
}
|
||||
|
||||
private void InitializeServices(IEnumerable<string> serviceNames)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
_services.Clear();
|
||||
|
||||
foreach (var name in serviceNames)
|
||||
{
|
||||
_services[name] = new ServiceHealthState(
|
||||
name,
|
||||
IsHealthy: false,
|
||||
LastChecked: SystemClock.Instance.GetCurrentInstant(),
|
||||
Error: "Not checked yet"
|
||||
);
|
||||
}
|
||||
|
||||
RecalculateLocked();
|
||||
}
|
||||
}
|
||||
|
||||
public void Update(ServiceHealthState state)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
_services[state.ServiceName] = state;
|
||||
RecalculateLocked();
|
||||
}
|
||||
}
|
||||
|
||||
private void RecalculateLocked()
|
||||
{
|
||||
var isReady = _services.Count > 0 && _services.Values.All(s => s.IsHealthy);
|
||||
|
||||
Current = new GatewayReadinessState(
|
||||
IsReady: isReady,
|
||||
Services: new Dictionary<string, ServiceHealthState>(_services),
|
||||
LastUpdated: SystemClock.Instance.GetCurrentInstant()
|
||||
);
|
||||
}
|
||||
}
|
||||
DysonNetwork.Gateway/Health/GatewayStatusController.cs (new file, 14 lines)
@@ -0,0 +1,14 @@
using Microsoft.AspNetCore.Mvc;

namespace DysonNetwork.Gateway.Health;

[ApiController]
[Route("/health")]
public class GatewayStatusController(GatewayReadinessStore readinessStore) : ControllerBase
{
    [HttpGet]
    public ActionResult<GatewayReadinessState> GetHealthStatus()
    {
        return Ok(readinessStore.Current);
    }
}
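Given the snake_case and NodaTime JSON options configured in the gateway's Program.cs, GET /health would serialize the current GatewayReadinessState roughly as below; the values are illustrative only, not captured from a running deployment:

{
  "is_ready": false,
  "services": {
    "ring": { "service_name": "ring", "is_healthy": true, "last_checked": "2025-11-15T08:47:46Z", "error": null },
    "drive": { "service_name": "drive", "is_healthy": false, "last_checked": "2025-11-15T08:47:46Z", "error": "StatusCode: 502" }
  },
  "last_updated": "2025-11-15T08:47:46Z"
}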
@@ -1,7 +1,12 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading.RateLimiting;
|
||||
using DysonNetwork.Gateway.Health;
|
||||
using DysonNetwork.Shared.Http;
|
||||
using Yarp.ReverseProxy.Configuration;
|
||||
using Microsoft.AspNetCore.HttpOverrides;
|
||||
using NodaTime;
|
||||
using NodaTime.Serialization.SystemTextJson;
|
||||
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
|
||||
@@ -9,16 +14,18 @@ builder.AddServiceDefaults();
|
||||
|
||||
builder.ConfigureAppKestrel(builder.Configuration, maxRequestBodySize: long.MaxValue, enableGrpc: false);
|
||||
|
||||
builder.Services.AddSingleton<GatewayReadinessStore>();
|
||||
builder.Services.AddHostedService<GatewayHealthAggregator>();
|
||||
|
||||
builder.Services.AddCors(options =>
|
||||
{
|
||||
options.AddDefaultPolicy(
|
||||
policy =>
|
||||
options.AddDefaultPolicy(policy =>
|
||||
{
|
||||
policy.SetIsOriginAllowed(origin => true)
|
||||
.AllowAnyMethod()
|
||||
.AllowAnyHeader()
|
||||
.AllowCredentials()
|
||||
.WithExposedHeaders("X-Total");
|
||||
.WithExposedHeaders("X-Total", "X-NotReady");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -56,7 +63,6 @@ builder.Services.AddRateLimiter(options =>
|
||||
};
|
||||
});
|
||||
|
||||
var serviceNames = new[] { "ring", "pass", "drive", "sphere", "develop", "insight" };
|
||||
|
||||
var specialRoutes = new[]
|
||||
{
|
||||
@@ -80,13 +86,19 @@ var specialRoutes = new[]
|
||||
},
|
||||
new RouteConfig
|
||||
{
|
||||
RouteId = "drive-tus",
|
||||
ClusterId = "drive",
|
||||
Match = new RouteMatch { Path = "/api/tus" }
|
||||
}
|
||||
RouteId = "sphere-webfinger",
|
||||
ClusterId = "sphere",
|
||||
Match = new RouteMatch { Path = "/.well-known/webfinger" }
|
||||
},
|
||||
new RouteConfig
|
||||
{
|
||||
RouteId = "sphere-activitypub",
|
||||
ClusterId = "sphere",
|
||||
Match = new RouteMatch { Path = "/activitypub/{**catch-all}" }
|
||||
},
|
||||
};
|
||||
|
||||
var apiRoutes = serviceNames.Select(serviceName =>
|
||||
var apiRoutes = GatewayConstant.ServiceNames.Select(serviceName =>
|
||||
{
|
||||
var apiPath = serviceName switch
|
||||
{
|
||||
@@ -105,7 +117,7 @@ var apiRoutes = serviceNames.Select(serviceName =>
|
||||
};
|
||||
});
|
||||
|
||||
var swaggerRoutes = serviceNames.Select(serviceName => new RouteConfig
|
||||
var swaggerRoutes = GatewayConstant.ServiceNames.Select(serviceName => new RouteConfig
|
||||
{
|
||||
RouteId = $"{serviceName}-swagger",
|
||||
ClusterId = serviceName,
|
||||
@@ -119,7 +131,7 @@ var swaggerRoutes = serviceNames.Select(serviceName => new RouteConfig
|
||||
|
||||
var routes = specialRoutes.Concat(apiRoutes).Concat(swaggerRoutes).ToArray();
|
||||
|
||||
var clusters = serviceNames.Select(serviceName => new ClusterConfig
|
||||
var clusters = GatewayConstant.ServiceNames.Select(serviceName => new ClusterConfig
|
||||
{
|
||||
ClusterId = serviceName,
|
||||
HealthCheck = new HealthCheckConfig
|
||||
@@ -131,7 +143,7 @@ var clusters = serviceNames.Select(serviceName => new ClusterConfig
|
||||
Timeout = TimeSpan.FromSeconds(5),
|
||||
Path = "/health"
|
||||
},
|
||||
Passive = new()
|
||||
Passive = new PassiveHealthCheckConfig
|
||||
{
|
||||
Enabled = true
|
||||
}
|
||||
@@ -147,7 +159,14 @@ builder.Services
|
||||
.LoadFromMemory(routes, clusters)
|
||||
.AddServiceDiscoveryDestinationResolver();
|
||||
|
||||
builder.Services.AddControllers();
|
||||
builder.Services.AddControllers().AddJsonOptions(options =>
|
||||
{
|
||||
options.JsonSerializerOptions.NumberHandling = JsonNumberHandling.AllowNamedFloatingPointLiterals;
|
||||
options.JsonSerializerOptions.PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower;
|
||||
options.JsonSerializerOptions.DictionaryKeyPolicy = JsonNamingPolicy.SnakeCaseLower;
|
||||
|
||||
options.JsonSerializerOptions.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
|
||||
});
|
||||
|
||||
var app = builder.Build();
|
||||
|
||||
@@ -155,12 +174,14 @@ var forwardedHeadersOptions = new ForwardedHeadersOptions
|
||||
{
|
||||
ForwardedHeaders = ForwardedHeaders.All
|
||||
};
|
||||
forwardedHeadersOptions.KnownNetworks.Clear();
|
||||
forwardedHeadersOptions.KnownIPNetworks.Clear();
|
||||
forwardedHeadersOptions.KnownProxies.Clear();
|
||||
app.UseForwardedHeaders(forwardedHeadersOptions);
|
||||
|
||||
app.UseCors();
|
||||
|
||||
app.UseMiddleware<GatewayReadinessMiddleware>();
|
||||
|
||||
app.MapReverseProxy().RequireRateLimiting("fixed");
|
||||
|
||||
app.MapControllers();
|
||||
|
||||
@@ -1,10 +1,10 @@
using Microsoft.AspNetCore.Mvc;
using DysonNetwork.Shared.Data;
using Microsoft.AspNetCore.Mvc;

namespace DysonNetwork.Pass;
namespace DysonNetwork.Gateway;

[ApiController]
[Route("/api/version")]
[Route("/version")]
public class VersionController : ControllerBase
{
    [HttpGet]
@@ -5,6 +5,9 @@
      "Microsoft.AspNetCore": "Warning"
    }
  },
  "Cache": {
    "Serializer": "MessagePack"
  },
  "AllowedHosts": "*",
  "SiteUrl": "http://localhost:3000",
  "Client": {
@@ -1,3 +1,4 @@
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
@@ -12,6 +13,11 @@ public class AppDatabase(
{
    public DbSet<SnThinkingSequence> ThinkingSequences { get; set; }
    public DbSet<SnThinkingThought> ThinkingThoughts { get; set; }
    public DbSet<SnUnpaidAccount> UnpaidAccounts { get; set; }

    public DbSet<SnWebArticle> WebArticles { get; set; }
    public DbSet<SnWebFeed> WebFeeds { get; set; }
    public DbSet<SnWebFeedSubscription> WebFeedSubscriptions { get; set; }

    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
    {
@@ -28,36 +34,15 @@ public class AppDatabase(

    public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
    {
        var now = SystemClock.Instance.GetCurrentInstant();

        foreach (var entry in ChangeTracker.Entries<ModelBase>())
        {
            switch (entry.State)
            {
                case EntityState.Added:
                    entry.Entity.CreatedAt = now;
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Modified:
                    entry.Entity.UpdatedAt = now;
                    break;
                case EntityState.Deleted:
                    entry.State = EntityState.Modified;
                    entry.Entity.DeletedAt = now;
                    break;
                case EntityState.Detached:
                case EntityState.Unchanged:
                default:
                    break;
            }
        }

        this.ApplyAuditableAndSoftDelete();
        return await base.SaveChangesAsync(cancellationToken);
    }

    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);

        modelBuilder.ApplySoftDeleteFilters();
    }
}
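The explicit audit/soft-delete switch removed above is replaced by a shared ApplyAuditableAndSoftDelete extension. Its implementation is not shown in this diff; judging from the code it replaces, it presumably looks roughly like this sketch (namespace and exact signature are assumptions):

// Hypothetical sketch of the shared extension (assumed to live in DysonNetwork.Shared.Data):
// stamp audit timestamps and turn hard deletes into soft deletes, mirroring the removed switch.
public static void ApplyAuditableAndSoftDelete(this DbContext db)
{
    var now = SystemClock.Instance.GetCurrentInstant();
    foreach (var entry in db.ChangeTracker.Entries<ModelBase>())
    {
        switch (entry.State)
        {
            case EntityState.Added:
                entry.Entity.CreatedAt = now;
                entry.Entity.UpdatedAt = now;
                break;
            case EntityState.Modified:
                entry.Entity.UpdatedAt = now;
                break;
            case EntityState.Deleted:
                entry.State = EntityState.Modified;
                entry.Entity.DeletedAt = now;
                break;
        }
    }
}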
||||
42
DysonNetwork.Insight/Controllers/BillingController.cs
Normal file
42
DysonNetwork.Insight/Controllers/BillingController.cs
Normal file
@@ -0,0 +1,42 @@
|
||||
using DysonNetwork.Insight.Thought;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
|
||||
namespace DysonNetwork.Insight.Controllers;
|
||||
|
||||
[ApiController]
|
||||
[Route("api/billing")]
|
||||
public class BillingController(AppDatabase db, ThoughtService thoughtService, ILogger<BillingController> logger)
|
||||
: ControllerBase
|
||||
{
|
||||
[HttpGet("status")]
|
||||
public async Task<IActionResult> GetBillingStatus()
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var isMarked = await db.UnpaidAccounts.AnyAsync(u => u.AccountId == accountId);
|
||||
return Ok(isMarked ? new { status = "unpaid" } : new { status = "ok" });
|
||||
}
|
||||
|
||||
[HttpPost("retry")]
|
||||
public async Task<IActionResult> RetryBilling()
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var (success, cost) = await thoughtService.RetryBillingForAccountAsync(accountId, logger);
|
||||
|
||||
if (success)
|
||||
{
|
||||
return Ok(cost > 0
|
||||
? new { message = $"Billing retry successful. Billed {cost} points." }
|
||||
: new { message = "No outstanding payment found." });
|
||||
}
|
||||
|
||||
return BadRequest(new { message = "Billing retry failed. Please check your balance and try again." });
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,15 @@
|
||||
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS base
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends \
|
||||
libkrb5-3 \
|
||||
libgssapi-krb5-2 \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
USER app
|
||||
WORKDIR /app
|
||||
EXPOSE 8080
|
||||
EXPOSE 8081
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
|
||||
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
WORKDIR /src
|
||||
COPY ["DysonNetwork.Insight/DysonNetwork.Insight.csproj", "DysonNetwork.Insight/"]
|
||||
|
||||
@@ -1,23 +1,33 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net9.0</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.10" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.10">
|
||||
<PackageReference Include="AngleSharp" Version="1.4.0" />
|
||||
<PackageReference Include="Google.Protobuf" Version="3.33.2" />
|
||||
<PackageReference Include="Grpc.AspNetCore.Server.ClientFactory" Version="2.76.0" />
|
||||
<PackageReference Include="Grpc.AspNetCore.Server.Reflection" Version="2.76.0" />
|
||||
<PackageReference Include="Grpc.Net.Client" Version="2.76.0" />
|
||||
<PackageReference Include="Grpc.Tools" Version="2.76.0">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.SemanticKernel" Version="1.66.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.1" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.1">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.SemanticKernel" Version="1.68.0" />
|
||||
<PackageReference Include="Microsoft.SemanticKernel.Connectors.Ollama" Version="1.66.0-alpha" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
|
||||
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Web" Version="1.66.0-alpha" />
|
||||
<PackageReference Include="Quartz" Version="3.15.1" />
|
||||
<PackageReference Include="Quartz.AspNetCore" Version="3.15.1" />
|
||||
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
|
||||
<PackageReference Include="System.ServiceModel.Syndication" Version="10.0.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
@@ -28,4 +38,8 @@
|
||||
<Folder Include="Controllers\" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Protobuf Remove="..\DysonNetwork.Shared\Proto\**" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -69,11 +69,6 @@ namespace DysonNetwork.Insight.Migrations
                        .HasColumnType("uuid")
                        .HasColumnName("id");

                    b.Property<List<SnThinkingChunk>>("Chunks")
                        .IsRequired()
                        .HasColumnType("jsonb")
                        .HasColumnName("chunks");

                    b.Property<string>("Content")
                        .HasColumnType("text")
                        .HasColumnName("content");

@@ -12,21 +12,13 @@ namespace DysonNetwork.Insight.Migrations
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.AddColumn<List<SnThinkingChunk>>(
                name: "chunks",
                table: "thinking_thoughts",
                type: "jsonb",
                nullable: false,
                defaultValue: new List<SnThinkingChunk>()
            );
            // The chunk type has been removed, so this migration is now a no-op
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropColumn(
                name: "chunks",
                table: "thinking_thoughts");
            // The chunk type has been removed, so this migration is now a no-op
        }
    }
}
DysonNetwork.Insight/Migrations/20251026134218_AddBilling.Designer.cs (generated, new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Insight;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251026134218_AddBilling")]
|
||||
partial class AddBilling
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.10")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<long>("PaidToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("paid_token");
|
||||
|
||||
b.Property<string>("Topic")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("topic");
|
||||
|
||||
b.Property<long>("TotalToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("total_token");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_sequences");
|
||||
|
||||
b.ToTable("thinking_sequences", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<string>("Content")
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("content");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<List<SnCloudFileReferenceObject>>("Files")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("files");
|
||||
|
||||
b.Property<string>("ModelName")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("model_name");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("role");
|
||||
|
||||
b.Property<Guid>("SequenceId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("sequence_id");
|
||||
|
||||
b.Property<long>("TokenCount")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("token_count");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_thoughts");
|
||||
|
||||
b.HasIndex("SequenceId")
|
||||
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
|
||||
|
||||
b.ToTable("thinking_thoughts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
|
||||
.WithMany()
|
||||
.HasForeignKey("SequenceId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
|
||||
|
||||
b.Navigation("Sequence");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
DysonNetwork.Insight/Migrations/20251026134218_AddBilling.cs (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class AddBilling : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.AddColumn<string>(
|
||||
name: "model_name",
|
||||
table: "thinking_thoughts",
|
||||
type: "character varying(4096)",
|
||||
maxLength: 4096,
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.AddColumn<long>(
|
||||
name: "token_count",
|
||||
table: "thinking_thoughts",
|
||||
type: "bigint",
|
||||
nullable: false,
|
||||
defaultValue: 0L);
|
||||
|
||||
migrationBuilder.AddColumn<long>(
|
||||
name: "paid_token",
|
||||
table: "thinking_sequences",
|
||||
type: "bigint",
|
||||
nullable: false,
|
||||
defaultValue: 0L);
|
||||
|
||||
migrationBuilder.AddColumn<long>(
|
||||
name: "total_token",
|
||||
table: "thinking_sequences",
|
||||
type: "bigint",
|
||||
nullable: false,
|
||||
defaultValue: 0L);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropColumn(
|
||||
name: "model_name",
|
||||
table: "thinking_thoughts");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "token_count",
|
||||
table: "thinking_thoughts");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "paid_token",
|
||||
table: "thinking_sequences");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "total_token",
|
||||
table: "thinking_sequences");
|
||||
}
|
||||
}
|
||||
}
|
||||
DysonNetwork.Insight/Migrations/20251115084746_RefactorThoughtMessage.Designer.cs (generated, new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Insight;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251115084746_RefactorThoughtMessage")]
|
||||
partial class RefactorThoughtMessage
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.11")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<long>("PaidToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("paid_token");
|
||||
|
||||
b.Property<string>("Topic")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("topic");
|
||||
|
||||
b.Property<long>("TotalToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("total_token");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_sequences");
|
||||
|
||||
b.ToTable("thinking_sequences", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<List<SnCloudFileReferenceObject>>("Files")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("files");
|
||||
|
||||
b.Property<string>("ModelName")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("model_name");
|
||||
|
||||
b.Property<List<SnThinkingMessagePart>>("Parts")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parts");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("role");
|
||||
|
||||
b.Property<Guid>("SequenceId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("sequence_id");
|
||||
|
||||
b.Property<long>("TokenCount")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("token_count");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_thoughts");
|
||||
|
||||
b.HasIndex("SequenceId")
|
||||
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
|
||||
|
||||
b.ToTable("thinking_thoughts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
|
||||
.WithMany()
|
||||
.HasForeignKey("SequenceId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
|
||||
|
||||
b.Navigation("Sequence");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
using System.Collections.Generic;
using DysonNetwork.Shared.Models;
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace DysonNetwork.Insight.Migrations
{
    /// <inheritdoc />
    public partial class RefactorThoughtMessage : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.AddColumn<List<SnThinkingMessagePart>>(
                name: "parts",
                table: "thinking_thoughts",
                type: "jsonb",
                nullable: false);
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropColumn(
                name: "parts",
                table: "thinking_thoughts");
        }
    }
}
DysonNetwork.Insight/Migrations/20251115162347_UpdatedFunctionCallModels.Designer.cs (generated, new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Insight;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251115162347_UpdatedFunctionCallModels")]
|
||||
partial class UpdatedFunctionCallModels
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.11")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<long>("PaidToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("paid_token");
|
||||
|
||||
b.Property<string>("Topic")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("topic");
|
||||
|
||||
b.Property<long>("TotalToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("total_token");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_sequences");
|
||||
|
||||
b.ToTable("thinking_sequences", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<List<SnCloudFileReferenceObject>>("Files")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("files");
|
||||
|
||||
b.Property<string>("ModelName")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("model_name");
|
||||
|
||||
b.Property<List<SnThinkingMessagePart>>("Parts")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parts");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("role");
|
||||
|
||||
b.Property<Guid>("SequenceId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("sequence_id");
|
||||
|
||||
b.Property<long>("TokenCount")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("token_count");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_thoughts");
|
||||
|
||||
b.HasIndex("SequenceId")
|
||||
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
|
||||
|
||||
b.ToTable("thinking_thoughts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
|
||||
.WithMany()
|
||||
.HasForeignKey("SequenceId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
|
||||
|
||||
b.Navigation("Sequence");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,23 +2,21 @@
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Pass.Migrations
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class RemoveNetTopo : Migration
|
||||
public partial class UpdatedFunctionCallModels : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.AlterDatabase()
|
||||
.OldAnnotation("Npgsql:PostgresExtension:postgis", ",,");
|
||||
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.AlterDatabase()
|
||||
.Annotation("Npgsql:PostgresExtension:postgis", ",,");
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
159
DysonNetwork.Insight/Migrations/20251115165833_AddUnpaidAccounts.Designer.cs
generated
Normal file
@@ -0,0 +1,159 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Insight;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251115165833_AddUnpaidAccounts")]
|
||||
partial class AddUnpaidAccounts
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.11")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<long>("PaidToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("paid_token");
|
||||
|
||||
b.Property<string>("Topic")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("topic");
|
||||
|
||||
b.Property<long>("TotalToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("total_token");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_sequences");
|
||||
|
||||
b.ToTable("thinking_sequences", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<List<SnCloudFileReferenceObject>>("Files")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("files");
|
||||
|
||||
b.Property<string>("ModelName")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("model_name");
|
||||
|
||||
b.Property<List<SnThinkingMessagePart>>("Parts")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parts");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("role");
|
||||
|
||||
b.Property<Guid>("SequenceId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("sequence_id");
|
||||
|
||||
b.Property<long>("TokenCount")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("token_count");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_thoughts");
|
||||
|
||||
b.HasIndex("SequenceId")
|
||||
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
|
||||
|
||||
b.ToTable("thinking_thoughts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnUnpaidAccount", b =>
|
||||
{
|
||||
b.Property<Guid>("AccountId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<DateTime>("MarkedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("marked_at");
|
||||
|
||||
b.HasKey("AccountId")
|
||||
.HasName("pk_unpaid_accounts");
|
||||
|
||||
b.ToTable("unpaid_accounts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
|
||||
.WithMany()
|
||||
.HasForeignKey("SequenceId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
|
||||
|
||||
b.Navigation("Sequence");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
using System;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class AddUnpaidAccounts : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.CreateTable(
|
||||
name: "unpaid_accounts",
|
||||
columns: table => new
|
||||
{
|
||||
account_id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
marked_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("pk_unpaid_accounts", x => x.account_id);
|
||||
});
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropTable(
|
||||
name: "unpaid_accounts");
|
||||
}
|
||||
}
|
||||
}
|
||||
163
DysonNetwork.Insight/Migrations/20251116123552_SharableThought.Designer.cs
generated
Normal file
@@ -0,0 +1,163 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DysonNetwork.Insight;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
[DbContext(typeof(AppDatabase))]
|
||||
[Migration("20251116123552_SharableThought")]
|
||||
partial class SharableThought
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.11")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingSequence", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Guid>("AccountId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<bool>("IsPublic")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_public");
|
||||
|
||||
b.Property<long>("PaidToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("paid_token");
|
||||
|
||||
b.Property<string>("Topic")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("topic");
|
||||
|
||||
b.Property<long>("TotalToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("total_token");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_sequences");
|
||||
|
||||
b.ToTable("thinking_sequences", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
|
||||
b.Property<Instant?>("DeletedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<List<SnCloudFileReferenceObject>>("Files")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("files");
|
||||
|
||||
b.Property<string>("ModelName")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("model_name");
|
||||
|
||||
b.Property<List<SnThinkingMessagePart>>("Parts")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parts");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("role");
|
||||
|
||||
b.Property<Guid>("SequenceId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("sequence_id");
|
||||
|
||||
b.Property<long>("TokenCount")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("token_count");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
|
||||
b.HasKey("Id")
|
||||
.HasName("pk_thinking_thoughts");
|
||||
|
||||
b.HasIndex("SequenceId")
|
||||
.HasDatabaseName("ix_thinking_thoughts_sequence_id");
|
||||
|
||||
b.ToTable("thinking_thoughts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnUnpaidAccount", b =>
|
||||
{
|
||||
b.Property<Guid>("AccountId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<DateTime>("MarkedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("marked_at");
|
||||
|
||||
b.HasKey("AccountId")
|
||||
.HasName("pk_unpaid_accounts");
|
||||
|
||||
b.ToTable("unpaid_accounts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
|
||||
.WithMany()
|
||||
.HasForeignKey("SequenceId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_thinking_thoughts_thinking_sequences_sequence_id");
|
||||
|
||||
b.Navigation("Sequence");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,17 +2,17 @@
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DysonNetwork.Pass.Migrations
|
||||
namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class AddPublicContact : Migration
|
||||
public partial class SharableThought : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.AddColumn<bool>(
|
||||
name: "is_public",
|
||||
table: "account_contacts",
|
||||
table: "thinking_sequences",
|
||||
type: "boolean",
|
||||
nullable: false,
|
||||
defaultValue: false);
|
||||
@@ -23,7 +23,7 @@ namespace DysonNetwork.Pass.Migrations
|
||||
{
|
||||
migrationBuilder.DropColumn(
|
||||
name: "is_public",
|
||||
table: "account_contacts");
|
||||
table: "thinking_sequences");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -20,7 +20,7 @@ namespace DysonNetwork.Insight.Migrations
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.10")
|
||||
.HasAnnotation("ProductVersion", "9.0.11")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
@@ -44,11 +44,23 @@ namespace DysonNetwork.Insight.Migrations
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("deleted_at");
|
||||
|
||||
b.Property<bool>("IsPublic")
|
||||
.HasColumnType("boolean")
|
||||
.HasColumnName("is_public");
|
||||
|
||||
b.Property<long>("PaidToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("paid_token");
|
||||
|
||||
b.Property<string>("Topic")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("topic");
|
||||
|
||||
b.Property<long>("TotalToken")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("total_token");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
@@ -66,15 +78,6 @@ namespace DysonNetwork.Insight.Migrations
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<List<SnThinkingChunk>>("Chunks")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("chunks");
|
||||
|
||||
b.Property<string>("Content")
|
||||
.HasColumnType("text")
|
||||
.HasColumnName("content");
|
||||
|
||||
b.Property<Instant>("CreatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at");
|
||||
@@ -88,6 +91,16 @@ namespace DysonNetwork.Insight.Migrations
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("files");
|
||||
|
||||
b.Property<string>("ModelName")
|
||||
.HasMaxLength(4096)
|
||||
.HasColumnType("character varying(4096)")
|
||||
.HasColumnName("model_name");
|
||||
|
||||
b.Property<List<SnThinkingMessagePart>>("Parts")
|
||||
.IsRequired()
|
||||
.HasColumnType("jsonb")
|
||||
.HasColumnName("parts");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer")
|
||||
.HasColumnName("role");
|
||||
@@ -96,6 +109,10 @@ namespace DysonNetwork.Insight.Migrations
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("sequence_id");
|
||||
|
||||
b.Property<long>("TokenCount")
|
||||
.HasColumnType("bigint")
|
||||
.HasColumnName("token_count");
|
||||
|
||||
b.Property<Instant>("UpdatedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at");
|
||||
@@ -109,6 +126,23 @@ namespace DysonNetwork.Insight.Migrations
|
||||
b.ToTable("thinking_thoughts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnUnpaidAccount", b =>
|
||||
{
|
||||
b.Property<Guid>("AccountId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("account_id");
|
||||
|
||||
b.Property<DateTime>("MarkedAt")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("marked_at");
|
||||
|
||||
b.HasKey("AccountId")
|
||||
.HasName("pk_unpaid_accounts");
|
||||
|
||||
b.ToTable("unpaid_accounts", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DysonNetwork.Shared.Models.SnThinkingThought", b =>
|
||||
{
|
||||
b.HasOne("DysonNetwork.Shared.Models.SnThinkingSequence", "Sequence")
|
||||
|
||||
@@ -11,11 +11,15 @@ builder.AddServiceDefaults();
|
||||
|
||||
builder.ConfigureAppKestrel(builder.Configuration);
|
||||
|
||||
builder.Services.AddGrpc();
|
||||
builder.Services.AddGrpcReflection();
|
||||
|
||||
builder.Services.AddControllers();
|
||||
builder.Services.AddAppServices();
|
||||
builder.Services.AddAppAuthentication();
|
||||
builder.Services.AddAppFlushHandlers();
|
||||
builder.Services.AddAppBusinessServices();
|
||||
builder.Services.AddAppScheduledJobs();
|
||||
|
||||
builder.Services.AddDysonAuth();
|
||||
builder.Services.AddAccountService();
|
||||
|
||||
33
DysonNetwork.Insight/Reader/ScrapedArticle.cs
Normal file
@@ -0,0 +1,33 @@
|
||||
using DysonNetwork.Shared.Models.Embed;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using EmbedLinkEmbed = DysonNetwork.Shared.Models.Embed.LinkEmbed;
|
||||
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
public class ScrapedArticle
|
||||
{
|
||||
public EmbedLinkEmbed LinkEmbed { get; set; } = null!;
|
||||
public string? Content { get; set; }
|
||||
|
||||
public Shared.Proto.ScrapedArticle ToProtoValue()
|
||||
{
|
||||
var proto = new Shared.Proto.ScrapedArticle
|
||||
{
|
||||
LinkEmbed = LinkEmbed.ToProtoValue()
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(Content))
|
||||
proto.Content = Content;
|
||||
|
||||
return proto;
|
||||
}
|
||||
|
||||
public static ScrapedArticle FromProtoValue(Shared.Proto.ScrapedArticle proto)
|
||||
{
|
||||
return new ScrapedArticle
|
||||
{
|
||||
LinkEmbed = EmbedLinkEmbed.FromProtoValue(proto.LinkEmbed),
|
||||
Content = proto.Content == "" ? null : proto.Content
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
[ApiController]
|
||||
[Route("/api/feeds/articles")]
|
||||
90
DysonNetwork.Insight/Reader/WebArticleGrpcService.cs
Normal file
@@ -0,0 +1,90 @@
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Grpc.Core;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
public class WebArticleGrpcService(AppDatabase db) : WebArticleService.WebArticleServiceBase
|
||||
{
|
||||
public override async Task<GetWebArticleResponse> GetWebArticle(
|
||||
GetWebArticleRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
if (!Guid.TryParse(request.Id, out var id))
|
||||
throw new RpcException(new Status(StatusCode.InvalidArgument, "invalid id"));
|
||||
|
||||
var article = await db.WebArticles
|
||||
.Include(a => a.Feed)
|
||||
.FirstOrDefaultAsync(a => a.Id == id);
|
||||
|
||||
return article == null
|
||||
? throw new RpcException(new Status(StatusCode.NotFound, "article not found"))
|
||||
: new GetWebArticleResponse { Article = article.ToProtoValue() };
|
||||
}
|
||||
|
||||
public override async Task<GetWebArticleBatchResponse> GetWebArticleBatch(
|
||||
GetWebArticleBatchRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
var ids = request.Ids
|
||||
.Where(s => !string.IsNullOrWhiteSpace(s) && Guid.TryParse(s, out _))
|
||||
.Select(Guid.Parse)
|
||||
.ToList();
|
||||
|
||||
if (ids.Count == 0)
|
||||
return new GetWebArticleBatchResponse();
|
||||
|
||||
var articles = await db.WebArticles
|
||||
.Include(a => a.Feed)
|
||||
.Where(a => ids.Contains(a.Id))
|
||||
.ToListAsync();
|
||||
|
||||
var response = new GetWebArticleBatchResponse();
|
||||
response.Articles.AddRange(articles.Select(a => a.ToProtoValue()));
|
||||
return response;
|
||||
}
|
||||
|
||||
public override async Task<ListWebArticlesResponse> ListWebArticles(
|
||||
ListWebArticlesRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
if (!Guid.TryParse(request.FeedId, out var feedId))
|
||||
throw new RpcException(new Status(StatusCode.InvalidArgument, "invalid feed_id"));
|
||||
|
||||
var query = db.WebArticles
|
||||
.Include(a => a.Feed)
|
||||
.Where(a => a.FeedId == feedId);
|
||||
|
||||
var articles = await query.ToListAsync();
|
||||
|
||||
var response = new ListWebArticlesResponse
|
||||
{
|
||||
TotalSize = articles.Count
|
||||
};
|
||||
response.Articles.AddRange(articles.Select(a => a.ToProtoValue()));
|
||||
return response;
|
||||
}
|
||||
|
||||
public override async Task<GetRecentArticlesResponse> GetRecentArticles(
|
||||
GetRecentArticlesRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
var limit = request.Limit > 0 ? request.Limit : 20;
|
||||
|
||||
var articles = await db.WebArticles
|
||||
.Include(a => a.Feed)
|
||||
.OrderByDescending(a => a.PublishedAt ?? DateTime.MinValue)
|
||||
.ThenByDescending(a => a.CreatedAt)
|
||||
.Take(limit)
|
||||
.ToListAsync();
|
||||
|
||||
var response = new GetRecentArticlesResponse();
|
||||
response.Articles.AddRange(articles.Select(a => a.ToProtoValue()));
|
||||
return response;
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,17 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using DysonNetwork.Shared.Registry;
|
||||
using WebFeedConfig = DysonNetwork.Shared.Models.WebFeedConfig;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
[Authorize]
|
||||
[ApiController]
|
||||
[Route("/api/publishers/{pubName}/feeds")]
|
||||
public class WebFeedController(WebFeedService webFeed, Publisher.PublisherService ps) : ControllerBase
|
||||
public class WebFeedController(WebFeedService webFeed, RemotePublisherService ps) : ControllerBase
|
||||
{
|
||||
public record WebFeedRequest(
|
||||
[MaxLength(8192)] string? Url,
|
||||
55
DysonNetwork.Insight/Reader/WebFeedGrpcService.cs
Normal file
@@ -0,0 +1,55 @@
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Grpc.Core;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
public class WebFeedGrpcService(WebFeedService service, AppDatabase db)
|
||||
: Shared.Proto.WebFeedService.WebFeedServiceBase
|
||||
{
|
||||
public override async Task<GetWebFeedResponse> GetWebFeed(
|
||||
GetWebFeedRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
SnWebFeed? feed = null;
|
||||
|
||||
switch (request.IdentifierCase)
|
||||
{
|
||||
case GetWebFeedRequest.IdentifierOneofCase.Id:
|
||||
if (!string.IsNullOrWhiteSpace(request.Id) && Guid.TryParse(request.Id, out var id))
|
||||
feed = await service.GetFeedAsync(id);
|
||||
break;
|
||||
case GetWebFeedRequest.IdentifierOneofCase.Url:
|
||||
feed = await db.WebFeeds.FirstOrDefaultAsync(f => f.Url == request.Url);
|
||||
break;
|
||||
case GetWebFeedRequest.IdentifierOneofCase.None:
|
||||
break;
|
||||
default:
|
||||
throw new ArgumentOutOfRangeException();
|
||||
}
|
||||
|
||||
return feed == null
|
||||
? throw new RpcException(new Status(StatusCode.NotFound, "feed not found"))
|
||||
: new GetWebFeedResponse { Feed = feed.ToProtoValue() };
|
||||
}
|
||||
|
||||
public override async Task<ListWebFeedsResponse> ListWebFeeds(
|
||||
ListWebFeedsRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
if (!Guid.TryParse(request.PublisherId, out var publisherId))
|
||||
throw new RpcException(new Status(StatusCode.InvalidArgument, "invalid publisher_id"));
|
||||
|
||||
var feeds = await service.GetFeedsByPublisherAsync(publisherId);
|
||||
|
||||
var response = new ListWebFeedsResponse
|
||||
{
|
||||
TotalSize = feeds.Count
|
||||
};
|
||||
response.Feeds.AddRange(feeds.Select(f => f.ToProtoValue()));
|
||||
return response;
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,10 @@
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
[ApiController]
|
||||
[Route("/api/feeds")]
|
||||
@@ -39,7 +40,7 @@ public class WebFeedPublicController(
|
||||
return Ok(existingSubscription);
|
||||
|
||||
// Create new subscription
|
||||
var subscription = new WebFeedSubscription
|
||||
var subscription = new SnWebFeedSubscription
|
||||
{
|
||||
FeedId = feedId,
|
||||
AccountId = accountId
|
||||
@@ -83,7 +84,7 @@ public class WebFeedPublicController(
|
||||
/// <returns>Subscription status</returns>
|
||||
[HttpGet("{feedId:guid}/subscription")]
|
||||
[Authorize]
|
||||
public async Task<ActionResult<WebFeedSubscription>> GetSubscriptionStatus(Guid feedId)
|
||||
public async Task<ActionResult<SnWebFeedSubscription>> GetSubscriptionStatus(Guid feedId)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return Unauthorized();
|
||||
@@ -105,7 +106,7 @@ public class WebFeedPublicController(
|
||||
/// <returns>List of subscribed feeds</returns>
|
||||
[HttpGet("subscribed")]
|
||||
[Authorize]
|
||||
public async Task<ActionResult<WebFeed>> GetSubscribedFeeds(
|
||||
public async Task<ActionResult<SnWebFeed>> GetSubscribedFeeds(
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int take = 20
|
||||
)
|
||||
@@ -137,7 +138,7 @@ public class WebFeedPublicController(
|
||||
/// </summary>
|
||||
[HttpGet]
|
||||
[Authorize]
|
||||
public async Task<ActionResult<WebFeed>> GetWebFeedArticles(
|
||||
public async Task<ActionResult<SnWebFeed>> GetWebFeedArticles(
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int take = 20
|
||||
)
|
||||
@@ -174,7 +175,7 @@ public class WebFeedPublicController(
|
||||
/// <returns>Feed metadata</returns>
|
||||
[AllowAnonymous]
|
||||
[HttpGet("{feedId:guid}")]
|
||||
public async Task<ActionResult<WebFeed>> GetFeedById(Guid feedId)
|
||||
public async Task<ActionResult<SnWebFeed>> GetFeedById(Guid feedId)
|
||||
{
|
||||
var feed = await webFeed.GetFeedAsync(feedId);
|
||||
if (feed == null)
|
||||
@@ -192,7 +193,7 @@ public class WebFeedPublicController(
|
||||
/// <returns>List of articles from the feed</returns>
|
||||
[AllowAnonymous]
|
||||
[HttpGet("{feedId:guid}/articles")]
|
||||
public async Task<ActionResult<WebArticle>> GetFeedArticles(
|
||||
public async Task<ActionResult<SnWebArticle>> GetFeedArticles(
|
||||
[FromRoute] Guid feedId,
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int take = 20
|
||||
@@ -224,7 +225,7 @@ public class WebFeedPublicController(
|
||||
/// </summary>
|
||||
[HttpGet("explore")]
|
||||
[Authorize]
|
||||
public async Task<ActionResult<WebFeed>> ExploreFeeds(
|
||||
public async Task<ActionResult<SnWebFeed>> ExploreFeeds(
|
||||
[FromQuery] int offset = 0,
|
||||
[FromQuery] int take = 20,
|
||||
[FromQuery] string? query = null
|
||||
@@ -1,7 +1,8 @@
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Quartz;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
[DisallowConcurrentExecution]
|
||||
public class WebFeedScraperJob(
|
||||
@@ -15,7 +16,7 @@ public class WebFeedScraperJob(
|
||||
{
|
||||
logger.LogInformation("Starting web feed scraper job.");
|
||||
|
||||
var feeds = await database.Set<WebFeed>().ToListAsync(context.CancellationToken);
|
||||
var feeds = await database.Set<SnWebFeed>().ToListAsync(context.CancellationToken);
|
||||
|
||||
foreach (var feed in feeds)
|
||||
{
|
||||
@@ -1,20 +1,21 @@
|
||||
using System.ServiceModel.Syndication;
|
||||
using System.Xml;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Models.Embed;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
public class WebFeedService(
|
||||
AppDatabase database,
|
||||
IHttpClientFactory httpClientFactory,
|
||||
ILogger<WebFeedService> logger,
|
||||
WebReaderService webReaderService
|
||||
WebReaderService readerService
|
||||
)
|
||||
{
|
||||
public async Task<WebFeed> CreateWebFeedAsync(Shared.Models.SnPublisher publisher,
|
||||
WebFeedController.WebFeedRequest request)
|
||||
public async Task<SnWebFeed> CreateWebFeedAsync(SnPublisher publisher, WebFeedController.WebFeedRequest request)
|
||||
{
|
||||
var feed = new WebFeed
|
||||
var feed = new SnWebFeed
|
||||
{
|
||||
Url = request.Url!,
|
||||
Title = request.Title!,
|
||||
@@ -29,7 +30,7 @@ public class WebFeedService(
|
||||
return feed;
|
||||
}
|
||||
|
||||
public async Task<WebFeed?> GetFeedAsync(Guid id, Guid? publisherId = null)
|
||||
public async Task<SnWebFeed?> GetFeedAsync(Guid id, Guid? publisherId = null)
|
||||
{
|
||||
var query = database.WebFeeds
|
||||
.Include(a => a.Publisher)
|
||||
@@ -40,12 +41,12 @@ public class WebFeedService(
|
||||
return await query.FirstOrDefaultAsync();
|
||||
}
|
||||
|
||||
public async Task<List<WebFeed>> GetFeedsByPublisherAsync(Guid publisherId)
|
||||
public async Task<List<SnWebFeed>> GetFeedsByPublisherAsync(Guid publisherId)
|
||||
{
|
||||
return await database.WebFeeds.Where(a => a.PublisherId == publisherId).ToListAsync();
|
||||
}
|
||||
|
||||
public async Task<WebFeed> UpdateFeedAsync(WebFeed feed, WebFeedController.WebFeedRequest request)
|
||||
public async Task<SnWebFeed> UpdateFeedAsync(SnWebFeed feed, WebFeedController.WebFeedRequest request)
|
||||
{
|
||||
if (request.Url is not null)
|
||||
feed.Url = request.Url;
|
||||
@@ -76,7 +77,7 @@ public class WebFeedService(
|
||||
return true;
|
||||
}
|
||||
|
||||
public async Task ScrapeFeedAsync(WebFeed feed, CancellationToken cancellationToken = default)
|
||||
public async Task ScrapeFeedAsync(SnWebFeed feed, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var httpClient = httpClientFactory.CreateClient();
|
||||
var response = await httpClient.GetAsync(feed.Url, cancellationToken);
|
||||
@@ -98,7 +99,7 @@ public class WebFeedService(
|
||||
if (string.IsNullOrEmpty(itemUrl))
|
||||
continue;
|
||||
|
||||
var articleExists = await database.Set<WebArticle>()
|
||||
var articleExists = await database.Set<SnWebArticle>()
|
||||
.AnyAsync(a => a.FeedId == feed.Id && a.Url == itemUrl, cancellationToken);
|
||||
|
||||
if (articleExists)
|
||||
@@ -109,17 +110,17 @@ public class WebFeedService(
|
||||
|
||||
if (feed.Config.ScrapPage)
|
||||
{
|
||||
var scrapedArticle = await webReaderService.ScrapeArticleAsync(itemUrl, cancellationToken);
|
||||
var scrapedArticle = await readerService.ScrapeArticleAsync(itemUrl, cancellationToken);
|
||||
preview = scrapedArticle.LinkEmbed;
|
||||
if (scrapedArticle.Content is not null)
|
||||
content = scrapedArticle.Content;
|
||||
}
|
||||
else
|
||||
{
|
||||
preview = await webReaderService.GetLinkPreviewAsync(itemUrl, cancellationToken);
|
||||
preview = await readerService.GetLinkPreviewAsync(itemUrl, cancellationToken);
|
||||
}
|
||||
|
||||
var newArticle = new WebArticle
|
||||
var newArticle = new SnWebArticle
|
||||
{
|
||||
FeedId = feed.Id,
|
||||
Title = item.Title.Text,
|
||||
@@ -1,9 +1,10 @@
|
||||
using DysonNetwork.Shared.Auth;
|
||||
using DysonNetwork.Shared.Models.Embed;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.RateLimiting;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
/// <summary>
|
||||
/// Controller for web scraping and link preview services
|
||||
@@ -59,7 +60,7 @@ public class WebReaderController(WebReaderService reader, ILogger<WebReaderContr
|
||||
/// </summary>
|
||||
[HttpDelete("link/cache")]
|
||||
[Authorize]
|
||||
[RequiredPermission("maintenance", "cache.scrap")]
|
||||
[AskPermission("cache.scrap")]
|
||||
public async Task<IActionResult> InvalidateCache([FromQuery] string url)
|
||||
{
|
||||
if (string.IsNullOrEmpty(url))
|
||||
@@ -76,7 +77,7 @@ public class WebReaderController(WebReaderService reader, ILogger<WebReaderContr
|
||||
/// </summary>
|
||||
[HttpDelete("cache/all")]
|
||||
[Authorize]
|
||||
[RequiredPermission("maintenance", "cache.scrap")]
|
||||
[AskPermission("cache.scrap")]
|
||||
public async Task<IActionResult> InvalidateAllCache()
|
||||
{
|
||||
await reader.InvalidateAllCachedPreviewsAsync();
|
||||
@@ -1,4 +1,4 @@
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
/// <summary>
|
||||
/// Exception thrown when an error occurs during web reading operations
|
||||
49
DysonNetwork.Insight/Reader/WebReaderGrpcService.cs
Normal file
@@ -0,0 +1,49 @@
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Grpc.Core;
|
||||
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
public class WebReaderGrpcService(WebReaderService service) : Shared.Proto.WebReaderService.WebReaderServiceBase
|
||||
{
|
||||
public override async Task<ScrapeArticleResponse> ScrapeArticle(
|
||||
ScrapeArticleRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(request.Url))
|
||||
throw new RpcException(new Status(StatusCode.InvalidArgument, "url is required"));
|
||||
|
||||
var scrapedArticle = await service.ScrapeArticleAsync(request.Url, context.CancellationToken);
|
||||
return new ScrapeArticleResponse { Article = scrapedArticle.ToProtoValue() };
|
||||
}
|
||||
|
||||
public override async Task<GetLinkPreviewResponse> GetLinkPreview(
|
||||
GetLinkPreviewRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(request.Url))
|
||||
throw new RpcException(new Status(StatusCode.InvalidArgument, "url is required"));
|
||||
|
||||
var linkEmbed = await service.GetLinkPreviewAsync(
|
||||
request.Url,
|
||||
context.CancellationToken,
|
||||
bypassCache: request.BypassCache
|
||||
);
|
||||
|
||||
return new GetLinkPreviewResponse { Preview = linkEmbed.ToProtoValue() };
|
||||
}
|
||||
|
||||
public override async Task<InvalidateLinkPreviewCacheResponse> InvalidateLinkPreviewCache(
|
||||
InvalidateLinkPreviewCacheRequest request,
|
||||
ServerCallContext context
|
||||
)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(request.Url))
|
||||
throw new RpcException(new Status(StatusCode.InvalidArgument, "url is required"));
|
||||
|
||||
await service.InvalidateCacheForUrlAsync(request.Url);
|
||||
|
||||
return new InvalidateLinkPreviewCacheResponse { Success = true };
|
||||
}
|
||||
}
|
||||
@@ -2,9 +2,10 @@ using System.Globalization;
|
||||
using AngleSharp;
|
||||
using AngleSharp.Dom;
|
||||
using DysonNetwork.Shared.Cache;
|
||||
using DysonNetwork.Shared.Models.Embed;
|
||||
using HtmlAgilityPack;
|
||||
|
||||
namespace DysonNetwork.Sphere.WebReader;
|
||||
namespace DysonNetwork.Insight.Reader;
|
||||
|
||||
/// <summary>
|
||||
/// The service aims to provide scraping services for the Solar Network.
|
||||
@@ -1,3 +1,4 @@
|
||||
using DysonNetwork.Insight.Reader;
|
||||
using DysonNetwork.Shared.Http;
|
||||
|
||||
namespace DysonNetwork.Insight.Startup;
|
||||
@@ -17,6 +18,11 @@ public static class ApplicationConfiguration
|
||||
|
||||
app.MapControllers();
|
||||
|
||||
app.MapGrpcService<WebReaderGrpcService>();
|
||||
app.MapGrpcService<WebArticleGrpcService>();
|
||||
app.MapGrpcService<WebFeedGrpcService>();
|
||||
app.MapGrpcReflectionService();
|
||||
|
||||
return app;
|
||||
}
|
||||
}
|
||||
|
||||
34
DysonNetwork.Insight/Startup/ScheduledJobsConfiguration.cs
Normal file
@@ -0,0 +1,34 @@
|
||||
using DysonNetwork.Insight.Reader;
|
||||
using DysonNetwork.Insight.Thought;
|
||||
using Quartz;
|
||||
|
||||
namespace DysonNetwork.Insight.Startup;
|
||||
|
||||
public static class ScheduledJobsConfiguration
|
||||
{
|
||||
public static IServiceCollection AddAppScheduledJobs(this IServiceCollection services)
|
||||
{
|
||||
services.AddQuartz(q =>
|
||||
{
|
||||
var tokenBillingJob = new JobKey("TokenBilling");
|
||||
q.AddJob<TokenBillingJob>(opts => opts.WithIdentity(tokenBillingJob));
|
||||
q.AddTrigger(opts => opts
|
||||
.ForJob(tokenBillingJob)
|
||||
.WithIdentity("TokenBillingTrigger")
|
||||
.WithSimpleSchedule(o => o
|
||||
.WithIntervalInMinutes(5)
|
||||
.RepeatForever())
|
||||
);
|
||||
|
||||
q.AddJob<WebFeedScraperJob>(opts => opts.WithIdentity("WebFeedScraper").StoreDurably());
|
||||
q.AddTrigger(opts => opts
|
||||
.ForJob("WebFeedScraper")
|
||||
.WithIdentity("WebFeedScraperTrigger")
|
||||
.WithCronSchedule("0 0 0 * * ?")
|
||||
);
|
||||
});
|
||||
services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);
|
||||
|
||||
return services;
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@ using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using DysonNetwork.Insight.Thought;
|
||||
using DysonNetwork.Shared.Cache;
|
||||
using DysonNetwork.Shared.Registry;
|
||||
using Microsoft.SemanticKernel;
|
||||
using NodaTime;
|
||||
using NodaTime.Serialization.SystemTextJson;
|
||||
@@ -13,9 +14,7 @@ public static class ServiceCollectionExtensions
|
||||
public static IServiceCollection AddAppServices(this IServiceCollection services)
|
||||
{
|
||||
services.AddDbContext<AppDatabase>();
|
||||
services.AddSingleton<IClock>(SystemClock.Instance);
|
||||
services.AddHttpContextAccessor();
|
||||
services.AddSingleton<ICacheService, CacheServiceRedis>();
|
||||
|
||||
services.AddHttpClient();
|
||||
|
||||
|
||||
155
DysonNetwork.Insight/Thought/CLIENT_UPDATE_GUIDE.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Client-Side Guide: Handling the New Message Structure
|
||||
|
||||
This document outlines how to update your client application to support the new rich message structure for the thinking/chat feature. The backend now sends structured messages that can include plain text, function calls, and function results, allowing for a more interactive and transparent user experience.
|
||||
|
||||
When accessed through the gateway, all response fields are serialized in snake_case.
|
||||
|
||||
## 1. Data Models
|
||||
|
||||
When you receive a complete message (a "thought"), it will be in the form of an `SnThinkingThought` object. The core of this object is the `Parts` array, which contains the different components of the message.
|
||||
|
||||
Here are the primary data models you will be working with, represented here in a TypeScript-like format for clarity:
|
||||
|
||||
```typescript
|
||||
// The main message object from the assistant or user
|
||||
interface SnThinkingThought {
|
||||
id: string;
|
||||
parts: SnThinkingMessagePart[];
|
||||
role: 'Assistant' /*Value is (0)*/ | 'User' /*Value is (1)*/;
|
||||
createdAt: string; // ISO 8601 date string
|
||||
// ... other metadata
|
||||
}
|
||||
|
||||
// A single part of a message
|
||||
interface SnThinkingMessagePart {
|
||||
type: ThinkingMessagePartType;
|
||||
text?: string;
|
||||
functionCall?: SnFunctionCall;
|
||||
functionResult?: SnFunctionResult;
|
||||
}
|
||||
|
||||
// Enum for the different part types
|
||||
enum ThinkingMessagePartType {
|
||||
Text = 0,
|
||||
FunctionCall = 1,
|
||||
FunctionResult = 2,
|
||||
}
|
||||
|
||||
// Represents a function/tool call made by the assistant
|
||||
interface SnFunctionCall {
|
||||
id: string;
|
||||
name: string;
|
||||
arguments: string; // A JSON string of the arguments
|
||||
}
|
||||
|
||||
// Represents the result of a function call
|
||||
interface SnFunctionResult {
|
||||
callId: string; // The ID of the corresponding function call
|
||||
result: any; // The data returned by the function
|
||||
isError: boolean;
|
||||
}
|
||||
```
|
||||
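
Because the gateway serializes these objects with snake_case keys, you will usually want a small normalization step before handing payloads to your UI code. The following is a minimal sketch; the raw key names (`function_call`, `call_id`, `is_error`, and so on) are assumptions derived from the models above, so verify them against the payloads your gateway actually emits.

```typescript
// Minimal normalization sketch: map a snake_case gateway payload onto the
// camelCase models above. The raw key names are assumptions based on the
// interfaces in this guide; adjust them to your actual responses.
function normalizePart(raw: any): SnThinkingMessagePart {
  return {
    type: raw.type,
    text: raw.text ?? undefined,
    functionCall: raw.function_call
      ? {
          id: raw.function_call.id,
          name: raw.function_call.name,
          arguments: raw.function_call.arguments,
        }
      : undefined,
    functionResult: raw.function_result
      ? {
          callId: raw.function_result.call_id,
          result: raw.function_result.result,
          isError: raw.function_result.is_error,
        }
      : undefined,
  };
}

function normalizeThought(raw: any): SnThinkingThought {
  return {
    id: raw.id,
    role: raw.role, // numeric roles (0/1) may need mapping to 'Assistant' / 'User'
    createdAt: raw.created_at,
    parts: (raw.parts ?? []).map(normalizePart),
  };
}
```
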
|
||||
## 2. Handling the SSE Stream
|
||||
|
||||
The response is streamed using Server-Sent Events (SSE). Your client should listen to this stream and process events as they arrive to build the UI in real-time.
|
||||
|
||||
The stream sends different types of messages, identified by a `type` field in the JSON payload.
|
||||
|
||||
| Event Type | `data` Payload | Client-Side Action |
|
||||
| ------------------------ | -------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `text` | `{ "type": "text", "data": "some text" }` | Append the text content to the current message being displayed. This is the most common event. |
|
||||
| `function_call_update` | `{ "type": "function_call_update", "data": { ... } }` | This provides real-time updates as the AI decides on a function call. You can use this to show an advanced "thinking" state, but it's optional. The key events to handle are `function_call` and `function_result`. |
|
||||
| `function_call` | `{ "type": "function_call", "data": SnFunctionCall }` | The AI has committed to using a tool. Display a "Using tool..." indicator. You can show the `name` of the tool for more clarity. |
|
||||
| `function_result` | `{ "type": "function_result", "data": SnFunctionResult }` | The tool has finished running. You can hide the "thinking" indicator for this tool and optionally display a summary of the result. |
|
||||
| `topic` | `{ "type": "topic", "data": "A new topic" }` | If this is the first message in a new conversation, this event provides the auto-generated topic title. Update your UI accordingly. |
|
||||
| `thought` | `{ "type": "thought", "data": SnThinkingThought }` | This is the **final event** in the stream. It contains the complete, persisted message object with all its `Parts`. You should use this final object to replace the incrementally-built message in your state to ensure consistency. |
|
||||
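
Putting the pieces together, the sketch below shows one way to consume the stream with `fetch` and a `ReadableStream` reader, dispatching on the `type` field. The endpoint path, request body shape, and handler interface are illustrative assumptions rather than a prescribed client API; adapt them to your gateway routes and UI layer.

```typescript
// Handlers the UI layer supplies; these names are placeholders.
interface StreamHandlers {
  onText(text: string): void;
  onFunctionCall(call: SnFunctionCall): void;
  onFunctionResult(result: SnFunctionResult): void;
  onTopic(topic: string): void;
  onThought(thought: SnThinkingThought): void; // final, persisted message
}

// Sketch: POST the user message and read the SSE-style stream incrementally.
async function streamThought(
  userMessage: string,
  handlers: StreamHandlers,
  sequenceId?: string,
): Promise<void> {
  const response = await fetch('/api/thought', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ user_message: userMessage, sequence_id: sequenceId }),
  });

  const reader = response.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // SSE events are separated by a blank line; keep any trailing partial event.
    const events = buffer.split('\n\n');
    buffer = events.pop() ?? '';

    for (const rawEvent of events) {
      const dataLine = rawEvent.split('\n').find((l) => l.startsWith('data:'));
      if (!dataLine) continue;
      const payload = JSON.parse(dataLine.slice('data:'.length).trim());

      switch (payload.type) {
        case 'text':
          handlers.onText(payload.data);
          break;
        case 'function_call':
          handlers.onFunctionCall(payload.data);
          break;
        case 'function_result':
          handlers.onFunctionResult(payload.data);
          break;
        case 'topic':
          handlers.onTopic(payload.data);
          break;
        case 'thought':
          // payload.data is the raw object; normalize snake_case keys (section 1) before use.
          handlers.onThought(payload.data);
          break;
        // 'function_call_update' and unknown events can be safely ignored.
      }
    }
  }
}
```
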
|
||||
## 3. Rendering a Message from `SnThinkingThought`
|
||||
|
||||
Once you have the final `SnThinkingThought` object (either from the `thought` event in the stream or by fetching conversation history), you can render it by iterating through the `parts` array.
|
||||
|
||||
### Pseudocode for Rendering
|
||||
|
||||
```javascript
|
||||
function renderThought(thought: SnThinkingThought) {
|
||||
const messageContainer = document.createElement('div');
|
||||
messageContainer.className = `message message-role-${thought.role}`;
|
||||
|
||||
// User messages are simple and will only have one text part
|
||||
if (thought.role === 'User') {
|
||||
const textPart = thought.parts[0];
|
||||
messageContainer.innerText = textPart.text;
|
||||
return messageContainer;
|
||||
}
|
||||
|
||||
// Assistant messages can have multiple parts
|
||||
let textBuffer = '';
|
||||
|
||||
thought.parts.forEach(part => {
|
||||
switch (part.type) {
|
||||
case ThinkingMessagePartType.Text:
|
||||
// Buffer text to combine consecutive text parts
|
||||
textBuffer += part.text;
|
||||
break;
|
||||
|
||||
case ThinkingMessagePartType.FunctionCall:
|
||||
// First, render any buffered text
|
||||
if (textBuffer) {
|
||||
messageContainer.appendChild(renderText(textBuffer));
|
||||
textBuffer = '';
|
||||
}
|
||||
// Then, render the function call UI component
|
||||
messageContainer.appendChild(renderFunctionCall(part.functionCall));
|
||||
break;
|
||||
|
||||
case ThinkingMessagePartType.FunctionResult:
|
||||
// Render buffered text
|
||||
if (textBuffer) {
|
||||
messageContainer.appendChild(renderText(textBuffer));
|
||||
textBuffer = '';
|
||||
}
|
||||
// Then, render the function result UI component
|
||||
messageContainer.appendChild(renderFunctionResult(part.functionResult));
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// Render any remaining text at the end
|
||||
if (textBuffer) {
|
||||
messageContainer.appendChild(renderText(textBuffer));
|
||||
}
|
||||
|
||||
return messageContainer;
|
||||
}
|
||||
|
||||
// Helper functions to create UI components
|
||||
function renderText(text) {
|
||||
const p = document.createElement('p');
|
||||
p.innerText = text;
|
||||
return p;
|
||||
}
|
||||
|
||||
function renderFunctionCall(functionCall) {
|
||||
const el = document.createElement('div');
|
||||
el.className = 'function-call-indicator';
|
||||
el.innerHTML = `<i>Using tool: <strong>${functionCall.name}</strong>...</i>`;
|
||||
// You could add a button to show functionCall.arguments
|
||||
return el;
|
||||
}
|
||||
|
||||
function renderFunctionResult(functionResult) {
|
||||
const el = document.createElement('div');
|
||||
el.className = 'function-result-indicator';
|
||||
if (functionResult.isError) {
|
||||
el.classList.add('error');
|
||||
el.innerText = 'An error occurred while using the tool.';
|
||||
} else {
|
||||
el.innerText = 'Tool finished.';
|
||||
}
|
||||
// You could expand this to show a summary of functionResult.result
|
||||
return el;
|
||||
}
|
||||
```
|
||||
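
If you want to surface the arguments of a tool call (for example behind a "show details" toggle, as suggested above), remember that `arguments` is a JSON string and may not always parse cleanly. A small defensive helper, written as a sketch, is shown below.

```typescript
// Sketch: safely parse the JSON-encoded arguments of a function call for display.
// Falls back to the raw string when the payload is not valid JSON.
function formatFunctionArguments(call: SnFunctionCall): string {
  try {
    const parsed = JSON.parse(call.arguments);
    return JSON.stringify(parsed, null, 2); // pretty-print for a details panel
  } catch {
    return call.arguments; // show the raw string if parsing fails
  }
}
```
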
|
||||
This approach ensures that text and tool-use indicators are rendered inline and in the correct order, providing a clear and accurate representation of the assistant's actions.
|
||||
@@ -0,0 +1,28 @@
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.SemanticKernel;
|
||||
|
||||
namespace DysonNetwork.Insight.Thought.Plugins;
|
||||
|
||||
public class SnAccountKernelPlugin(
|
||||
AccountService.AccountServiceClient accountClient
|
||||
)
|
||||
{
|
||||
[KernelFunction("get_account")]
|
||||
public async Task<SnAccount?> GetAccount(string userId)
|
||||
{
|
||||
var request = new GetAccountRequest { Id = userId };
|
||||
var response = await accountClient.GetAccountAsync(request);
|
||||
if (response is null) return null;
|
||||
return SnAccount.FromProtoValue(response);
|
||||
}
|
||||
|
||||
[KernelFunction("get_account_by_name")]
|
||||
public async Task<SnAccount?> GetAccountByName(string username)
|
||||
{
|
||||
var request = new LookupAccountBatchRequest();
|
||||
request.Names.Add(username);
|
||||
var response = await accountClient.LookupAccountBatchAsync(request);
|
||||
return response.Accounts.Count == 0 ? null : SnAccount.FromProtoValue(response.Accounts[0]);
|
||||
}
|
||||
}
|
||||
98
DysonNetwork.Insight/Thought/Plugins/SnPostKernelPlugin.cs
Normal file
@@ -0,0 +1,98 @@
|
||||
using System.ComponentModel;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.SemanticKernel;
|
||||
using NodaTime;
|
||||
using NodaTime.Serialization.Protobuf;
|
||||
using NodaTime.Text;
|
||||
|
||||
namespace DysonNetwork.Insight.Thought.Plugins;
|
||||
|
||||
public class SnPostKernelPlugin(
|
||||
PostService.PostServiceClient postClient
|
||||
)
|
||||
{
|
||||
[KernelFunction("get_post")]
|
||||
public async Task<SnPost?> GetPost(string postId)
|
||||
{
|
||||
var request = new GetPostRequest { Id = postId };
|
||||
var response = await postClient.GetPostAsync(request);
|
||||
return response is null ? null : SnPost.FromProtoValue(response);
|
||||
}
|
||||
|
||||
[KernelFunction("search_posts")]
|
||||
[Description("Perform a full-text search in all Solar Network posts.")]
|
||||
public async Task<List<SnPost>> SearchPostsContent(string contentQuery, int pageSize = 10, int page = 1)
|
||||
{
|
||||
var request = new SearchPostsRequest
|
||||
{
|
||||
Query = contentQuery,
|
||||
PageSize = pageSize,
|
||||
PageToken = ((page - 1) * pageSize).ToString()
|
||||
};
|
||||
var response = await postClient.SearchPostsAsync(request);
|
||||
return response.Posts.Select(SnPost.FromProtoValue).ToList();
|
||||
}
|
||||
|
||||
public class KernelPostListResult
|
||||
{
|
||||
public List<SnPost> Posts { get; set; } = [];
|
||||
public int TotalCount { get; set; }
|
||||
}
|
||||
|
||||
[KernelFunction("list_posts")]
|
||||
[Description("List all posts on the Solar Network without filters, orderBy can be date or popularity")]
|
||||
public async Task<KernelPostListResult> ListPosts(
|
||||
string orderBy = "date",
|
||||
bool orderDesc = true,
|
||||
int pageSize = 10,
|
||||
int page = 1
|
||||
)
|
||||
{
|
||||
var request = new ListPostsRequest
|
||||
{
|
||||
OrderBy = orderBy,
|
||||
OrderDesc = orderDesc,
|
||||
PageSize = pageSize,
|
||||
PageToken = ((page - 1) * pageSize).ToString()
|
||||
};
|
||||
var response = await postClient.ListPostsAsync(request);
|
||||
return new KernelPostListResult
|
||||
{
|
||||
Posts = response.Posts.Select(SnPost.FromProtoValue).ToList(),
|
||||
TotalCount = response.TotalSize,
|
||||
};
|
||||
}
|
||||
|
||||
[KernelFunction("list_posts_within_time")]
|
||||
[Description(
|
||||
"List posts in a period of time, the time requires ISO-8601 format, one of the start and end must be provided.")]
|
||||
public async Task<KernelPostListResult> ListPostsWithinTime(
|
||||
string? beforeTime,
|
||||
string? afterTime,
|
||||
int pageSize = 10,
|
||||
int page = 1
|
||||
)
|
||||
{
|
||||
var pattern = InstantPattern.General;
|
||||
Instant? before = !string.IsNullOrWhiteSpace(beforeTime)
|
||||
? pattern.Parse(beforeTime).TryGetValue(default, out var beforeValue) ? beforeValue : null
|
||||
: null;
|
||||
Instant? after = !string.IsNullOrWhiteSpace(afterTime)
|
||||
? pattern.Parse(afterTime).TryGetValue(default, out var afterValue) ? afterValue : null
|
||||
: null;
|
||||
var request = new ListPostsRequest
|
||||
{
|
||||
After = after?.ToTimestamp(),
|
||||
Before = before?.ToTimestamp(),
|
||||
PageSize = pageSize,
|
||||
PageToken = ((page - 1) * pageSize).ToString()
|
||||
};
|
||||
var response = await postClient.ListPostsAsync(request);
|
||||
return new KernelPostListResult
|
||||
{
|
||||
Posts = response.Posts.Select(SnPost.FromProtoValue).ToList(),
|
||||
TotalCount = response.TotalSize,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,13 @@
|
||||
using System.Collections.Generic;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using DysonNetwork.Shared.Auth;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.SemanticKernel;
|
||||
using Microsoft.SemanticKernel.ChatCompletion;
|
||||
using Microsoft.SemanticKernel.Connectors.Ollama;
|
||||
|
||||
namespace DysonNetwork.Insight.Thought;
|
||||
|
||||
@@ -17,10 +15,53 @@ namespace DysonNetwork.Insight.Thought;
|
||||
[Route("/api/thought")]
|
||||
public class ThoughtController(ThoughtProvider provider, ThoughtService service) : ControllerBase
|
||||
{
|
||||
public static readonly List<string> AvailableProposals = ["post_create"];
|
||||
|
||||
public class StreamThinkingRequest
|
||||
{
|
||||
[Required] public string UserMessage { get; set; } = null!;
|
||||
public string? ServiceId { get; set; }
|
||||
public Guid? SequenceId { get; set; }
|
||||
public List<string>? AttachedPosts { get; set; } = [];
|
||||
public List<Dictionary<string, dynamic>>? AttachedMessages { get; set; }
|
||||
public List<string> AcceptProposals { get; set; } = [];
|
||||
}
|
||||
|
||||
public class UpdateSharingRequest
|
||||
{
|
||||
public bool IsPublic { get; set; }
|
||||
}
|
||||
|
||||
public class ThoughtServiceInfo
|
||||
{
|
||||
public string ServiceId { get; set; } = null!;
|
||||
public double BillingMultiplier { get; set; }
|
||||
public int PerkLevel { get; set; }
|
||||
}
|
||||
|
||||
public class ThoughtServicesResponse
|
||||
{
|
||||
public string DefaultService { get; set; } = null!;
|
||||
public IEnumerable<ThoughtServiceInfo> Services { get; set; } = null!;
|
||||
}
|
||||
|
||||
[HttpGet("services")]
|
||||
[ProducesResponseType(StatusCodes.Status200OK)]
|
||||
public ActionResult<ThoughtServicesResponse> GetAvailableServices()
|
||||
{
|
||||
var services = provider.GetAvailableServicesInfo()
|
||||
.Select(s => new ThoughtServiceInfo
|
||||
{
|
||||
ServiceId = s.ServiceId,
|
||||
BillingMultiplier = s.BillingMultiplier,
|
||||
PerkLevel = s.PerkLevel
|
||||
});
|
||||
|
||||
return Ok(new ThoughtServicesResponse
|
||||
{
|
||||
DefaultService = provider.GetDefaultServiceId(),
|
||||
Services = services
|
||||
});
|
||||
}
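For orientation only, here is one way a client might call the services endpoint above; the base address and the System.Net.Http.Json helpers are assumptions, not part of this diff:

using System;
using System.Net.Http;
using System.Net.Http.Json;
using DysonNetwork.Insight.Thought;

var http = new HttpClient { BaseAddress = new Uri("https://example.solar-network.dev") };
var info = await http.GetFromJsonAsync<ThoughtController.ThoughtServicesResponse>("/api/thought/services");
if (info is not null)
{
    Console.WriteLine($"Default service: {info.DefaultService}");
    foreach (var s in info.Services)
        Console.WriteLine($"{s.ServiceId}: billing x{s.BillingMultiplier}, perk level {s.PerkLevel}");
}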
|
||||
|
||||
[HttpPost]
|
||||
@@ -30,6 +71,28 @@ public class ThoughtController(ThoughtProvider provider, ThoughtService service)
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
if (request.AcceptProposals.Any(e => !AvailableProposals.Contains(e)))
|
||||
return BadRequest("Request contains unavailable proposal");
|
||||
|
||||
var serviceId = provider.GetServiceId(request.ServiceId);
|
||||
var serviceInfo = provider.GetServiceInfo(serviceId);
|
||||
if (serviceInfo is null)
|
||||
{
|
||||
return BadRequest("Service not found or configured.");
|
||||
}
|
||||
|
||||
if (serviceInfo.PerkLevel > 0 && !currentUser.IsSuperuser)
|
||||
if (currentUser.PerkSubscription is null ||
|
||||
PerkSubscriptionPrivilege.GetPrivilegeFromIdentifier(currentUser.PerkSubscription.Identifier) <
|
||||
serviceInfo.PerkLevel)
|
||||
return StatusCode(403, "Not enough perk level");
|
||||
|
||||
var kernel = provider.GetKernel(request.ServiceId);
|
||||
if (kernel is null)
|
||||
{
|
||||
return BadRequest("Service not found or configured.");
|
||||
}
|
||||
|
||||
// Generate a topic if creating a new sequence
|
||||
string? topic = null;
|
||||
if (!request.SequenceId.HasValue)
|
||||
@@ -41,7 +104,13 @@ public class ThoughtController(ThoughtProvider provider, ThoughtService service)
|
||||
);
|
||||
summaryHistory.AddUserMessage(request.UserMessage);
|
||||
|
||||
var summaryResult = await provider.Kernel
|
||||
var summaryKernel = provider.GetKernel(); // Get default kernel
|
||||
if (summaryKernel is null)
|
||||
{
|
||||
return BadRequest("Default service not found or configured.");
|
||||
}
|
||||
|
||||
var summaryResult = await summaryKernel
|
||||
.GetRequiredService<IChatCompletionService>()
|
||||
.GetChatMessageContentAsync(summaryHistory);
|
||||
|
||||
@@ -53,14 +122,20 @@ public class ThoughtController(ThoughtProvider provider, ThoughtService service)
|
||||
if (sequence == null) return Forbid(); // or NotFound
|
||||
|
||||
// Save user thought
|
||||
await service.SaveThoughtAsync(sequence, request.UserMessage, ThinkingThoughtRole.User);
|
||||
await service.SaveThoughtAsync(sequence, [
|
||||
new SnThinkingMessagePart
|
||||
{
|
||||
Type = ThinkingMessagePartType.Text,
|
||||
Text = request.UserMessage
|
||||
}
|
||||
], ThinkingThoughtRole.User);
|
||||
|
||||
// Build chat history
|
||||
var chatHistory = new ChatHistory(
|
||||
"You're a helpful assistant on the Solar Network, a social network.\n" +
|
||||
"Your name is Sn-chan (or SN 酱 in chinese), a cute sweet heart with passion for almost everything.\n" +
|
||||
"When you talk to user, you can add some modal particles and emoticons to your response to be cute, but prevent use a lot of emojis." +
|
||||
"Your father (creator) is @littlesheep. (prefer calling him 父亲 in chinese)\n" +
|
||||
"Your creator is @littlesheep, which is also the creator of the Solar Network, if you met some problems you was unable to solve, trying guide the user to ask (DM) the @littlesheep.\n" +
|
||||
"\n" +
|
||||
"The ID on the Solar Network is UUID, so mostly hard to read, so do not show ID to user unless user ask to do so or necessary.\n" +
|
||||
"\n" +
|
||||
@@ -69,99 +144,223 @@ public class ThoughtController(ThoughtProvider provider, ThoughtService service)
|
||||
"When the user asks questions about the Solar Network (also known as SN and Solian), try use the tools you have to get latest and accurate data."
|
||||
);
|
||||
|
||||
chatHistory.AddSystemMessage(
|
||||
"You can issue some proposals to user, like creating a post. The proposal syntax is like a xml tag, with an attribute indicates which proposal.\n" +
|
||||
"Depends on the proposal type, the payload (content inside the xml tag) might be different.\n" +
|
||||
"\n" +
|
||||
"Example: <proposal type=\"post_create\">...post content...</proposal>\n" +
|
||||
"\n" +
|
||||
"Here are some references of the proposals you can issue, but if you want to issue one, make sure the user is accept it.\n" +
|
||||
"1. post_create: body takes simple string, create post for user." +
|
||||
"\n" +
|
||||
$"The user currently accept these proposals: {string.Join(',', request.AcceptProposals)}"
|
||||
);
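Because the proposal format is defined only by this prompt, a consuming client has to recognise the tag in the assistant's output itself. One possible extraction helper, sketched here purely for illustration (the helper name and regex are assumptions, not part of the controller):

using System.Collections.Generic;
using System.Text.RegularExpressions;

static IEnumerable<(string Type, string Payload)> ExtractProposals(string assistantText)
{
    // Matches <proposal type="...">...</proposal> as described in the system message above.
    const string pattern = "<proposal type=\"(?<type>[^\"]+)\">(?<payload>.*?)</proposal>";
    foreach (Match m in Regex.Matches(assistantText, pattern, RegexOptions.Singleline))
        yield return (m.Groups["type"].Value, m.Groups["payload"].Value);
}

// ExtractProposals("<proposal type=\"post_create\">Hello Solar Network!</proposal>")
// yields ("post_create", "Hello Solar Network!").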
|
||||
|
||||
chatHistory.AddSystemMessage(
|
||||
$"The user you're currently talking to is {currentUser.Nick} ({currentUser.Name}), ID is {currentUser.Id}"
|
||||
);
|
||||
|
||||
if (request.AttachedPosts is { Count: > 0 })
|
||||
{
|
||||
chatHistory.AddUserMessage(
|
||||
$"Attached post IDs: {string.Join(',', request.AttachedPosts!)}");
|
||||
}
|
||||
|
||||
if (request.AttachedMessages is { Count: > 0 })
|
||||
{
|
||||
chatHistory.AddUserMessage(
|
||||
$"Attached chat messages data: {JsonSerializer.Serialize(request.AttachedMessages)}");
|
||||
}
|
||||
|
||||
// Add previous thoughts (excluding the current user thought, which is the first one since descending)
|
||||
var previousThoughts = await service.GetPreviousThoughtsAsync(sequence);
|
||||
var count = previousThoughts.Count;
|
||||
for (var i = 1; i < count; i++) // skip first (the newest, current user)
|
||||
for (var i = count - 1; i >= 1; i--) // skip first (the newest, current user)
|
||||
{
|
||||
var thought = previousThoughts[i];
|
||||
switch (thought.Role)
|
||||
var textContent = new StringBuilder();
|
||||
var functionCalls = new List<FunctionCallContent>();
|
||||
var functionResults = new List<FunctionResultContent>();
|
||||
|
||||
foreach (var part in thought.Parts)
|
||||
{
|
||||
case ThinkingThoughtRole.User:
|
||||
chatHistory.AddUserMessage(thought.Content ?? "");
|
||||
switch (part.Type)
|
||||
{
|
||||
case ThinkingMessagePartType.Text:
|
||||
textContent.Append(part.Text);
|
||||
break;
|
||||
case ThinkingThoughtRole.Assistant:
|
||||
chatHistory.AddAssistantMessage(thought.Content ?? "");
|
||||
case ThinkingMessagePartType.FunctionCall:
|
||||
var arguments = !string.IsNullOrEmpty(part.FunctionCall!.Arguments)
|
||||
? JsonSerializer.Deserialize<Dictionary<string, object?>>(part.FunctionCall!.Arguments)
|
||||
: null;
|
||||
var kernelArgs = arguments is not null ? new KernelArguments(arguments) : null;
|
||||
|
||||
functionCalls.Add(new FunctionCallContent(
|
||||
functionName: part.FunctionCall!.Name,
|
||||
pluginName: part.FunctionCall.PluginName,
|
||||
id: part.FunctionCall.Id,
|
||||
arguments: kernelArgs
|
||||
));
|
||||
break;
|
||||
case ThinkingMessagePartType.FunctionResult:
|
||||
var resultObject = part.FunctionResult!.Result;
|
||||
var resultString = resultObject as string ?? JsonSerializer.Serialize(resultObject);
|
||||
functionResults.Add(new FunctionResultContent(
|
||||
callId: part.FunctionResult.CallId,
|
||||
functionName: part.FunctionResult.FunctionName,
|
||||
pluginName: part.FunctionResult.PluginName,
|
||||
result: resultString
|
||||
));
|
||||
break;
|
||||
default:
|
||||
throw new ArgumentOutOfRangeException();
|
||||
}
|
||||
}
|
||||
|
||||
if (thought.Role == ThinkingThoughtRole.User)
|
||||
{
|
||||
chatHistory.AddUserMessage(textContent.ToString());
|
||||
}
|
||||
else
|
||||
{
|
||||
var assistantMessage = new ChatMessageContent(AuthorRole.Assistant, textContent.ToString());
|
||||
if (functionCalls.Count > 0)
|
||||
{
|
||||
assistantMessage.Items = [];
|
||||
foreach (var fc in functionCalls)
|
||||
{
|
||||
assistantMessage.Items.Add(fc);
|
||||
}
|
||||
}
|
||||
|
||||
chatHistory.Add(assistantMessage);
|
||||
|
||||
if (functionResults.Count <= 0) continue;
|
||||
foreach (var fr in functionResults)
|
||||
{
|
||||
chatHistory.Add(fr.ToChatMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
chatHistory.AddUserMessage(request.UserMessage);
|
||||
|
||||
// Set response for streaming
|
||||
Response.Headers.Append("Content-Type", "text/event-stream");
|
||||
Response.StatusCode = 200;
|
||||
|
||||
var kernel = provider.Kernel;
|
||||
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
|
||||
var executionSettings = provider.CreatePromptExecutionSettings(request.ServiceId);
|
||||
|
||||
// Kick off streaming generation
|
||||
var accumulatedContent = new StringBuilder();
|
||||
var thinkingChunks = new List<SnThinkingChunk>();
|
||||
await foreach (var chunk in chatCompletionService.GetStreamingChatMessageContentsAsync(
|
||||
chatHistory,
|
||||
provider.CreatePromptExecutionSettings(),
|
||||
kernel: kernel
|
||||
))
|
||||
{
|
||||
// Process each item in the chunk for detailed streaming
|
||||
foreach (var item in chunk.Items)
|
||||
{
|
||||
var streamingChunk = item switch
|
||||
{
|
||||
StreamingTextContent textContent => new SnThinkingChunk
|
||||
{ Type = StreamingContentType.Text, Data = new() { ["text"] = textContent.Text ?? "" } },
|
||||
StreamingReasoningContent reasoningContent => new SnThinkingChunk
|
||||
{
|
||||
Type = StreamingContentType.Reasoning, Data = new() { ["text"] = reasoningContent.Text ?? "" }
|
||||
},
|
||||
StreamingFunctionCallUpdateContent functionCall => new SnThinkingChunk
|
||||
{
|
||||
Type = StreamingContentType.FunctionCall,
|
||||
Data = JsonSerializer.Deserialize<Dictionary<string, object>>(
|
||||
JsonSerializer.Serialize(functionCall)) ?? new()
|
||||
},
|
||||
_ => new SnThinkingChunk
|
||||
{
|
||||
Type = StreamingContentType.Unknown, Data = new() { ["data"] = JsonSerializer.Serialize(item) }
|
||||
}
|
||||
};
|
||||
thinkingChunks.Add(streamingChunk);
|
||||
var assistantParts = new List<SnThinkingMessagePart>();
|
||||
|
||||
var messageJson = item switch
|
||||
while (true)
|
||||
{
|
||||
StreamingTextContent textContent =>
|
||||
JsonSerializer.Serialize(new { type = "text", data = textContent.Text ?? "" }),
|
||||
StreamingReasoningContent reasoningContent =>
|
||||
JsonSerializer.Serialize(new { type = "reasoning", data = reasoningContent.Text ?? "" }),
|
||||
StreamingFunctionCallUpdateContent functionCall =>
|
||||
JsonSerializer.Serialize(new { type = "function_call", data = functionCall }),
|
||||
_ =>
|
||||
JsonSerializer.Serialize(new { type = "unknown", data = item })
|
||||
};
|
||||
var textContentBuilder = new StringBuilder();
|
||||
AuthorRole? authorRole = null;
|
||||
var functionCallBuilder = new FunctionCallContentBuilder();
|
||||
|
||||
// Write a structured JSON message to the HTTP response as SSE
|
||||
var messageBytes = Encoding.UTF8.GetBytes($"data: {messageJson}\n\n");
|
||||
await Response.Body.WriteAsync(messageBytes);
|
||||
await foreach (
|
||||
var streamingContent in chatCompletionService.GetStreamingChatMessageContentsAsync(
|
||||
chatHistory, executionSettings, kernel)
|
||||
)
|
||||
{
|
||||
authorRole ??= streamingContent.Role;
|
||||
|
||||
if (streamingContent.Content is not null)
|
||||
{
|
||||
textContentBuilder.Append(streamingContent.Content);
|
||||
var messageJson = JsonSerializer.Serialize(new
|
||||
{ type = "text", data = streamingContent.Content });
|
||||
await Response.Body.WriteAsync(Encoding.UTF8.GetBytes($"data: {messageJson}\n\n"));
|
||||
await Response.Body.FlushAsync();
|
||||
}
|
||||
|
||||
// Accumulate content for saving (only text content)
|
||||
accumulatedContent.Append(chunk.Content ?? "");
|
||||
functionCallBuilder.Append(streamingContent);
|
||||
}
|
||||
|
||||
var finalMessageText = textContentBuilder.ToString();
|
||||
if (!string.IsNullOrEmpty(finalMessageText))
|
||||
{
|
||||
assistantParts.Add(new SnThinkingMessagePart
|
||||
{ Type = ThinkingMessagePartType.Text, Text = finalMessageText });
|
||||
}
|
||||
|
||||
var functionCalls = functionCallBuilder.Build()
|
||||
.Where(fc => !string.IsNullOrEmpty(fc.Id)).ToList();
|
||||
|
||||
if (functionCalls.Count == 0)
|
||||
break;
|
||||
|
||||
var assistantMessage = new ChatMessageContent(
|
||||
authorRole ?? AuthorRole.Assistant,
|
||||
string.IsNullOrEmpty(finalMessageText) ? null : finalMessageText
|
||||
);
|
||||
foreach (var functionCall in functionCalls)
|
||||
{
|
||||
assistantMessage.Items.Add(functionCall);
|
||||
}
|
||||
|
||||
chatHistory.Add(assistantMessage);
|
||||
|
||||
foreach (var functionCall in functionCalls)
|
||||
{
|
||||
var part = new SnThinkingMessagePart
|
||||
{
|
||||
Type = ThinkingMessagePartType.FunctionCall,
|
||||
FunctionCall = new SnFunctionCall
|
||||
{
|
||||
Id = functionCall.Id!,
|
||||
PluginName = functionCall.PluginName,
|
||||
Name = functionCall.FunctionName,
|
||||
Arguments = JsonSerializer.Serialize(functionCall.Arguments)
|
||||
}
|
||||
};
|
||||
assistantParts.Add(part);
|
||||
|
||||
var messageJson = JsonSerializer.Serialize(new { type = "function_call", data = part.FunctionCall });
|
||||
await Response.Body.WriteAsync(Encoding.UTF8.GetBytes($"data: {messageJson}\n\n"));
|
||||
await Response.Body.FlushAsync();
|
||||
|
||||
FunctionResultContent resultContent;
|
||||
try
|
||||
{
|
||||
resultContent = await functionCall.InvokeAsync(kernel);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
resultContent = new FunctionResultContent(functionCall.Id!, ex.Message);
|
||||
}
|
||||
|
||||
chatHistory.Add(resultContent.ToChatMessage());
|
||||
|
||||
var resultPart = new SnThinkingMessagePart
|
||||
{
|
||||
Type = ThinkingMessagePartType.FunctionResult,
|
||||
FunctionResult = new SnFunctionResult
|
||||
{
|
||||
CallId = resultContent.CallId!,
|
||||
PluginName = resultContent.PluginName,
|
||||
FunctionName = resultContent.FunctionName,
|
||||
Result = resultContent.Result!,
|
||||
IsError = resultContent.Result is Exception
|
||||
}
|
||||
};
|
||||
assistantParts.Add(resultPart);
|
||||
|
||||
var resultMessageJson =
|
||||
JsonSerializer.Serialize(new { type = "function_result", data = resultPart.FunctionResult });
|
||||
await Response.Body.WriteAsync(Encoding.UTF8.GetBytes($"data: {resultMessageJson}\n\n"));
|
||||
await Response.Body.FlushAsync();
|
||||
}
|
||||
}
|
||||
|
||||
// Save assistant thought
|
||||
var savedThought = await service.SaveThoughtAsync(
|
||||
sequence,
|
||||
accumulatedContent.ToString(),
|
||||
assistantParts,
|
||||
ThinkingThoughtRole.Assistant,
|
||||
thinkingChunks
|
||||
serviceId
|
||||
);
|
||||
|
||||
// Write the topic if it was newly set, then the thought object as JSON to the stream
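The endpoint streams its work as server-sent events, one data: line per JSON frame with a type of text, reasoning, function_call or function_result. Purely as an illustration (the endpoint path and frame shape come from this diff; the host, authentication handling and helper code are assumptions), a consumer might read the stream like this:

using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;

var http = new HttpClient { BaseAddress = new Uri("https://example.solar-network.dev") };
using var request = new HttpRequestMessage(HttpMethod.Post, "/api/thought")
{
    Content = JsonContent.Create(new { userMessage = "Hi Sn-chan!", acceptProposals = new[] { "post_create" } })
};
using var response = await http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
using var reader = new StreamReader(await response.Content.ReadAsStreamAsync());

while (await reader.ReadLineAsync() is { } line)
{
    if (!line.StartsWith("data: ")) continue; // blank lines separate SSE frames
    using var frame = JsonDocument.Parse(line["data: ".Length..]);
    var type = frame.RootElement.GetProperty("type").GetString();
    Console.WriteLine($"[{type}] {frame.RootElement.GetProperty("data")}");
}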
|
||||
@@ -212,6 +411,25 @@ public class ThoughtController(ThoughtProvider provider, ThoughtService service)
|
||||
return Ok(sequences);
|
||||
}
|
||||
|
||||
[HttpPatch("sequences/{sequenceId:guid}/sharing")]
|
||||
[ProducesResponseType(StatusCodes.Status204NoContent)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
[ProducesResponseType(StatusCodes.Status403Forbidden)]
|
||||
public async Task<ActionResult> UpdateSequenceSharing(Guid sequenceId, [FromBody] UpdateSharingRequest request)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var sequence = await service.GetSequenceAsync(sequenceId);
|
||||
if (sequence == null) return NotFound();
|
||||
if (sequence.AccountId != accountId) return Forbid();
|
||||
|
||||
sequence.IsPublic = request.IsPublic;
|
||||
await service.UpdateSequenceAsync(sequence);
|
||||
|
||||
return NoContent();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves the thoughts in a specific thinking sequence.
|
||||
/// </summary>
|
||||
@@ -223,12 +441,18 @@ public class ThoughtController(ThoughtProvider provider, ThoughtService service)
|
||||
[ProducesResponseType(StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<List<SnThinkingThought>>> GetSequenceThoughts(Guid sequenceId)
|
||||
{
|
||||
var sequence = await service.GetSequenceAsync(sequenceId);
|
||||
if (sequence == null) return NotFound();
|
||||
|
||||
if (!sequence.IsPublic)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
var sequence = await service.GetOrCreateSequenceAsync(accountId, sequenceId);
|
||||
if (sequence == null) return NotFound();
|
||||
if (sequence.AccountId != accountId)
|
||||
return StatusCode(403);
|
||||
}
|
||||
|
||||
var thoughts = await service.GetPreviousThoughtsAsync(sequence);
|
||||
|
||||
|
||||
@@ -1,21 +1,27 @@
|
||||
using System.ClientModel;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Text.Json;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Insight.Thought.Plugins;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using DysonNetwork.Shared.Registry;
|
||||
using Microsoft.SemanticKernel;
|
||||
using Microsoft.SemanticKernel.Connectors.Ollama;
|
||||
using Microsoft.SemanticKernel.Connectors.OpenAI;
|
||||
using OpenAI;
|
||||
using PostType = DysonNetwork.Shared.Proto.PostType;
|
||||
using Microsoft.SemanticKernel.Plugins.Web;
|
||||
using Microsoft.SemanticKernel.Plugins.Web.Bing;
|
||||
using Microsoft.SemanticKernel.Plugins.Web.Google;
|
||||
using NodaTime.Serialization.Protobuf;
|
||||
using NodaTime.Text;
|
||||
|
||||
namespace DysonNetwork.Insight.Thought;
|
||||
|
||||
public class ThoughtServiceModel
|
||||
{
|
||||
public string ServiceId { get; set; } = null!;
|
||||
public string? Provider { get; set; }
|
||||
public string? Model { get; set; }
|
||||
public double BillingMultiplier { get; set; }
|
||||
public int PerkLevel { get; set; }
|
||||
}
|
||||
|
||||
public class ThoughtProvider
|
||||
{
|
||||
private readonly PostService.PostServiceClient _postClient;
|
||||
@@ -23,10 +29,10 @@ public class ThoughtProvider
|
||||
private readonly IConfiguration _configuration;
|
||||
private readonly ILogger<ThoughtProvider> _logger;
|
||||
|
||||
public Kernel Kernel { get; }
|
||||
|
||||
private string? ModelProviderType { get; set; }
|
||||
private string? ModelDefault { get; set; }
|
||||
private readonly Dictionary<string, Kernel> _kernels = new();
|
||||
private readonly Dictionary<string, string> _serviceProviders = new();
|
||||
private readonly Dictionary<string, ThoughtServiceModel> _serviceModels = new();
|
||||
private readonly string _defaultServiceId;
|
||||
|
||||
[Experimental("SKEXP0050")]
|
||||
public ThoughtProvider(
|
||||
@@ -41,118 +47,83 @@ public class ThoughtProvider
|
||||
_accountClient = accountServiceClient;
|
||||
_configuration = configuration;
|
||||
|
||||
Kernel = InitializeThinkingProvider(configuration);
|
||||
InitializeHelperFunctions();
|
||||
var cfg = configuration.GetSection("Thinking");
|
||||
_defaultServiceId = cfg.GetValue<string>("DefaultService")!;
|
||||
var services = cfg.GetSection("Services").GetChildren();
|
||||
|
||||
foreach (var service in services)
|
||||
{
|
||||
var serviceId = service.Key;
|
||||
var serviceModel = new ThoughtServiceModel
|
||||
{
|
||||
ServiceId = serviceId,
|
||||
Provider = service.GetValue<string>("Provider"),
|
||||
Model = service.GetValue<string>("Model"),
|
||||
BillingMultiplier = service.GetValue<double>("BillingMultiplier", 1.0),
|
||||
PerkLevel = service.GetValue<int>("PerkLevel", 0)
|
||||
};
|
||||
_serviceModels[serviceId] = serviceModel;
|
||||
|
||||
var providerType = service.GetValue<string>("Provider")?.ToLower();
|
||||
if (providerType is null) continue;
|
||||
|
||||
var kernel = InitializeThinkingService(service);
|
||||
InitializeHelperFunctions(kernel);
|
||||
_kernels[serviceId] = kernel;
|
||||
_serviceProviders[serviceId] = providerType;
|
||||
}
|
||||
}
|
||||
|
||||
private Kernel InitializeThinkingProvider(IConfiguration configuration)
|
||||
private Kernel InitializeThinkingService(IConfigurationSection serviceConfig)
|
||||
{
|
||||
var cfg = configuration.GetSection("Thinking");
|
||||
ModelProviderType = cfg.GetValue<string>("Provider")?.ToLower();
|
||||
ModelDefault = cfg.GetValue<string>("Model");
|
||||
var endpoint = cfg.GetValue<string>("Endpoint");
|
||||
var apiKey = cfg.GetValue<string>("ApiKey");
|
||||
var providerType = serviceConfig.GetValue<string>("Provider")?.ToLower();
|
||||
var model = serviceConfig.GetValue<string>("Model");
|
||||
var endpoint = serviceConfig.GetValue<string>("Endpoint");
|
||||
var apiKey = serviceConfig.GetValue<string>("ApiKey");
|
||||
|
||||
var builder = Kernel.CreateBuilder();
|
||||
|
||||
switch (ModelProviderType)
|
||||
switch (providerType)
|
||||
{
|
||||
case "ollama":
|
||||
builder.AddOllamaChatCompletion(ModelDefault!, new Uri(endpoint ?? "http://localhost:11434/api"));
|
||||
builder.AddOllamaChatCompletion(
|
||||
model!,
|
||||
new Uri(endpoint ?? "http://localhost:11434/api")
|
||||
);
|
||||
break;
|
||||
case "deepseek":
|
||||
var client = new OpenAIClient(
|
||||
new ApiKeyCredential(apiKey!),
|
||||
new OpenAIClientOptions { Endpoint = new Uri(endpoint ?? "https://api.deepseek.com/v1") }
|
||||
);
|
||||
builder.AddOpenAIChatCompletion(ModelDefault!, client);
|
||||
builder.AddOpenAIChatCompletion(model!, client);
|
||||
break;
|
||||
default:
|
||||
throw new IndexOutOfRangeException("Unknown thinking provider: " + ModelProviderType);
|
||||
throw new IndexOutOfRangeException("Unknown thinking provider: " + providerType);
|
||||
}
|
||||
|
||||
// Add gRPC clients for Thought Plugins
|
||||
builder.Services.AddServiceDiscoveryCore();
|
||||
builder.Services.AddServiceDiscovery();
|
||||
builder.Services.AddAccountService();
|
||||
builder.Services.AddSphereService();
|
||||
|
||||
builder.Plugins.AddFromObject(new SnAccountKernelPlugin(_accountClient));
|
||||
builder.Plugins.AddFromObject(new SnPostKernelPlugin(_postClient));
|
||||
|
||||
return builder.Build();
|
||||
}
|
||||
|
||||
[Experimental("SKEXP0050")]
|
||||
private void InitializeHelperFunctions()
|
||||
private void InitializeHelperFunctions(Kernel kernel)
|
||||
{
|
||||
// Add Solar Network tools plugin
|
||||
Kernel.ImportPluginFromFunctions("solar_network", [
|
||||
KernelFunctionFactory.CreateFromMethod(async (string userId) =>
|
||||
{
|
||||
var request = new GetAccountRequest { Id = userId };
|
||||
var response = await _accountClient.GetAccountAsync(request);
|
||||
return JsonSerializer.Serialize(response, GrpcTypeHelper.SerializerOptions);
|
||||
}, "get_user", "Get a user profile from the Solar Network."),
|
||||
KernelFunctionFactory.CreateFromMethod(async (string postId) =>
|
||||
{
|
||||
var request = new GetPostRequest { Id = postId };
|
||||
var response = await _postClient.GetPostAsync(request);
|
||||
return JsonSerializer.Serialize(response, GrpcTypeHelper.SerializerOptions);
|
||||
}, "get_post", "Get a single post by ID from the Solar Network."),
|
||||
KernelFunctionFactory.CreateFromMethod(async (string query) =>
|
||||
{
|
||||
var request = new SearchPostsRequest { Query = query, PageSize = 10 };
|
||||
var response = await _postClient.SearchPostsAsync(request);
|
||||
return JsonSerializer.Serialize(response.Posts, GrpcTypeHelper.SerializerOptions);
|
||||
}, "search_posts",
|
||||
"Search posts by query from the Solar Network. The input query is will be used to search with title, description and body content"),
|
||||
KernelFunctionFactory.CreateFromMethod(async (
|
||||
string? orderBy = null,
|
||||
string? afterIso = null,
|
||||
string? beforeIso = null
|
||||
) =>
|
||||
{
|
||||
_logger.LogInformation("Begin building request to list post from sphere...");
|
||||
|
||||
var request = new ListPostsRequest
|
||||
{
|
||||
PageSize = 20,
|
||||
OrderBy = orderBy,
|
||||
};
|
||||
if (!string.IsNullOrEmpty(afterIso))
|
||||
try
|
||||
{
|
||||
request.After = InstantPattern.General.Parse(afterIso).Value.ToTimestamp();
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
_logger.LogWarning("Invalid afterIso format: {AfterIso}", afterIso);
|
||||
}
|
||||
if (!string.IsNullOrEmpty(beforeIso))
|
||||
try
|
||||
{
|
||||
request.Before = InstantPattern.General.Parse(beforeIso).Value.ToTimestamp();
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
_logger.LogWarning("Invalid beforeIso format: {BeforeIso}", beforeIso);
|
||||
}
|
||||
|
||||
_logger.LogInformation("Request built, {Request}", request);
|
||||
|
||||
var response = await _postClient.ListPostsAsync(request);
|
||||
|
||||
var data = response.Posts.Select(SnPost.FromProtoValue);
|
||||
_logger.LogInformation("Sphere service returned posts: {Posts}", data);
|
||||
return JsonSerializer.Serialize(data, GrpcTypeHelper.SerializerOptions);
|
||||
}, "list_posts",
|
||||
"Get posts from the Solar Network.\n" +
|
||||
"Parameters:\n" +
|
||||
"orderBy (optional, string: order by published date, accept asc or desc)\n" +
|
||||
"afterIso (optional, string: ISO date for posts after this date)\n" +
|
||||
"beforeIso (optional, string: ISO date for posts before this date)"
|
||||
)
|
||||
]);
|
||||
|
||||
// Add web search plugins if configured
|
||||
var bingApiKey = _configuration.GetValue<string>("Thinking:BingApiKey");
|
||||
if (!string.IsNullOrEmpty(bingApiKey))
|
||||
{
|
||||
var bingConnector = new BingConnector(bingApiKey);
|
||||
var bing = new WebSearchEnginePlugin(bingConnector);
|
||||
Kernel.ImportPluginFromObject(bing, "bing");
|
||||
kernel.ImportPluginFromObject(bing, "bing");
|
||||
}
|
||||
|
||||
var googleApiKey = _configuration.GetValue<string>("Thinking:GoogleApiKey");
|
||||
@@ -163,36 +134,58 @@ public class ThoughtProvider
|
||||
apiKey: googleApiKey,
|
||||
searchEngineId: googleCx);
|
||||
var google = new WebSearchEnginePlugin(googleConnector);
|
||||
Kernel.ImportPluginFromObject(google, "google");
|
||||
kernel.ImportPluginFromObject(google, "google");
|
||||
}
|
||||
}
|
||||
|
||||
public PromptExecutionSettings CreatePromptExecutionSettings()
|
||||
public Kernel? GetKernel(string? serviceId = null)
|
||||
{
|
||||
switch (ModelProviderType)
|
||||
serviceId ??= _defaultServiceId;
|
||||
return _kernels.GetValueOrDefault(serviceId);
|
||||
}
|
||||
|
||||
public string GetServiceId(string? serviceId = null)
|
||||
{
|
||||
case "ollama":
|
||||
return new OllamaPromptExecutionSettings
|
||||
return serviceId ?? _defaultServiceId;
|
||||
}
|
||||
|
||||
public IEnumerable<string> GetAvailableServices()
|
||||
{
|
||||
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(
|
||||
options: new FunctionChoiceBehaviorOptions
|
||||
return _kernels.Keys;
|
||||
}
|
||||
|
||||
public IEnumerable<ThoughtServiceModel> GetAvailableServicesInfo()
|
||||
{
|
||||
AllowParallelCalls = true,
|
||||
AllowConcurrentInvocation = true
|
||||
})
|
||||
return _serviceModels.Values;
|
||||
}
|
||||
|
||||
public ThoughtServiceModel? GetServiceInfo(string? serviceId)
|
||||
{
|
||||
serviceId ??= _defaultServiceId;
|
||||
return _serviceModels.GetValueOrDefault(serviceId);
|
||||
}
|
||||
|
||||
public string GetDefaultServiceId()
|
||||
{
|
||||
return _defaultServiceId;
|
||||
}
|
||||
|
||||
public PromptExecutionSettings CreatePromptExecutionSettings(string? serviceId = null)
|
||||
{
|
||||
serviceId ??= _defaultServiceId;
|
||||
var providerType = _serviceProviders.GetValueOrDefault(serviceId);
|
||||
|
||||
return providerType switch
|
||||
{
|
||||
"ollama" => new OllamaPromptExecutionSettings
|
||||
{
|
||||
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false)
|
||||
},
|
||||
"deepseek" => new OpenAIPromptExecutionSettings
|
||||
{
|
||||
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false), ModelId = serviceId
|
||||
},
|
||||
_ => throw new InvalidOperationException("Unknown provider for service: " + serviceId)
|
||||
};
|
||||
case "deepseek":
|
||||
return new OpenAIPromptExecutionSettings
|
||||
{
|
||||
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(
|
||||
options: new FunctionChoiceBehaviorOptions
|
||||
{
|
||||
AllowParallelCalls = true,
|
||||
AllowConcurrentInvocation = true
|
||||
})
|
||||
};
|
||||
default:
|
||||
throw new InvalidOperationException("Unknown provider: " + ModelProviderType);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,18 +1,30 @@
|
||||
using DysonNetwork.Shared.Cache;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using PaymentService = DysonNetwork.Shared.Proto.PaymentService;
|
||||
using TransactionType = DysonNetwork.Shared.Proto.TransactionType;
|
||||
using WalletService = DysonNetwork.Shared.Proto.WalletService;
|
||||
|
||||
namespace DysonNetwork.Insight.Thought;
|
||||
|
||||
public class ThoughtService(AppDatabase db, ICacheService cache)
|
||||
public class ThoughtService(
|
||||
AppDatabase db,
|
||||
ICacheService cache,
|
||||
PaymentService.PaymentServiceClient paymentService
|
||||
)
|
||||
{
|
||||
public async Task<SnThinkingSequence?> GetOrCreateSequenceAsync(Guid accountId, Guid? sequenceId,
|
||||
string? topic = null)
|
||||
public async Task<SnThinkingSequence?> GetOrCreateSequenceAsync(
|
||||
Guid accountId,
|
||||
Guid? sequenceId,
|
||||
string? topic = null
|
||||
)
|
||||
{
|
||||
if (sequenceId.HasValue)
|
||||
{
|
||||
var seq = await db.ThinkingSequences.FindAsync(sequenceId.Value);
|
||||
if (seq == null || seq.AccountId != accountId) return null;
|
||||
if (seq == null || seq.AccountId != accountId)
|
||||
return null;
|
||||
return seq;
|
||||
}
|
||||
else
|
||||
@@ -24,21 +36,45 @@ public class ThoughtService(AppDatabase db, ICacheService cache)
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<SnThinkingSequence?> GetSequenceAsync(Guid sequenceId)
|
||||
{
|
||||
return await db.ThinkingSequences.FindAsync(sequenceId);
|
||||
}
|
||||
|
||||
public async Task UpdateSequenceAsync(SnThinkingSequence sequence)
|
||||
{
|
||||
db.ThinkingSequences.Update(sequence);
|
||||
await db.SaveChangesAsync();
|
||||
}
|
||||
|
||||
public async Task<SnThinkingThought> SaveThoughtAsync(
|
||||
SnThinkingSequence sequence,
|
||||
string content,
|
||||
List<SnThinkingMessagePart> parts,
|
||||
ThinkingThoughtRole role,
|
||||
List<SnThinkingChunk>? chunks = null
|
||||
string? model = null
|
||||
)
|
||||
{
|
||||
// Approximate token count (1 token ≈ 4 characters for GPT-like models)
|
||||
var totalChars = parts.Sum(part =>
|
||||
((part.Type == ThinkingMessagePartType.Text ? part.Text?.Length : 0) ?? 0) +
|
||||
((part.Type == ThinkingMessagePartType.FunctionCall ? part.FunctionCall?.Arguments.Length : 0) ?? 0)
|
||||
);
|
||||
var tokenCount = totalChars / 4;
|
||||
|
||||
var thought = new SnThinkingThought
|
||||
{
|
||||
SequenceId = sequence.Id,
|
||||
Content = content,
|
||||
Parts = parts,
|
||||
Role = role,
|
||||
Chunks = chunks ?? []
|
||||
TokenCount = tokenCount,
|
||||
ModelName = model,
|
||||
};
|
||||
db.ThinkingThoughts.Add(thought);
|
||||
|
||||
// Update sequence total tokens only for assistant responses
|
||||
if (role == ThinkingThoughtRole.Assistant)
|
||||
sequence.TotalToken += tokenCount;
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
|
||||
// Invalidate cache for this sequence's thoughts
|
||||
@@ -46,29 +82,38 @@ public class ThoughtService(AppDatabase db, ICacheService cache)
|
||||
|
||||
return thought;
|
||||
}
|
||||
|
||||
public async Task<List<SnThinkingThought>> GetPreviousThoughtsAsync(SnThinkingSequence sequence)
|
||||
{
|
||||
var cacheKey = $"thoughts:{sequence.Id}";
|
||||
var (found, cachedThoughts) = await cache.GetAsyncWithStatus<List<SnThinkingThought>>(cacheKey);
|
||||
var (found, cachedThoughts) = await cache.GetAsyncWithStatus<List<SnThinkingThought>>(
|
||||
cacheKey
|
||||
);
|
||||
if (found && cachedThoughts != null)
|
||||
{
|
||||
return cachedThoughts;
|
||||
}
|
||||
|
||||
var thoughts = await db.ThinkingThoughts
|
||||
.Where(t => t.SequenceId == sequence.Id)
|
||||
var thoughts = await db
|
||||
.ThinkingThoughts.Where(t => t.SequenceId == sequence.Id)
|
||||
.OrderByDescending(t => t.CreatedAt)
|
||||
.ToListAsync();
|
||||
|
||||
// Cache for 10 minutes
|
||||
await cache.SetWithGroupsAsync(cacheKey, thoughts, [$"sequence:{sequence.Id}"], TimeSpan.FromMinutes(10));
|
||||
await cache.SetWithGroupsAsync(
|
||||
cacheKey,
|
||||
thoughts,
|
||||
[$"sequence:{sequence.Id}"],
|
||||
TimeSpan.FromMinutes(10)
|
||||
);
|
||||
|
||||
return thoughts;
|
||||
}
|
||||
|
||||
public async Task<(int total, List<SnThinkingSequence> sequences)> ListSequencesAsync(Guid accountId, int offset,
|
||||
int take)
|
||||
public async Task<(int total, List<SnThinkingSequence> sequences)> ListSequencesAsync(
|
||||
Guid accountId,
|
||||
int offset,
|
||||
int take
|
||||
)
|
||||
{
|
||||
var query = db.ThinkingSequences.Where(s => s.AccountId == accountId);
|
||||
var totalCount = await query.CountAsync();
|
||||
@@ -80,4 +125,145 @@ public class ThoughtService(AppDatabase db, ICacheService cache)
|
||||
|
||||
return (totalCount, sequences);
|
||||
}
|
||||
|
||||
public async Task SettleThoughtBills(ILogger logger)
|
||||
{
|
||||
var sequences = await db
|
||||
.ThinkingSequences.Where(s => s.PaidToken < s.TotalToken)
|
||||
.ToListAsync();
|
||||
|
||||
if (sequences.Count == 0)
|
||||
{
|
||||
logger.LogInformation("No unpaid sequences found.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Group by account
|
||||
var groupedByAccount = sequences.GroupBy(s => s.AccountId);
|
||||
|
||||
foreach (var accountGroup in groupedByAccount)
|
||||
{
|
||||
var accountId = accountGroup.Key;
|
||||
|
||||
if (await db.UnpaidAccounts.AnyAsync(u => u.AccountId == accountId))
|
||||
{
|
||||
logger.LogWarning("Skipping billing for marked account {accountId}", accountId);
|
||||
continue;
|
||||
}
|
||||
|
||||
var totalUnpaidTokens = accountGroup.Sum(s => s.TotalToken - s.PaidToken);
|
||||
var cost = (long)Math.Ceiling(totalUnpaidTokens / 10.0);
|
||||
|
||||
if (cost == 0)
|
||||
continue;
|
||||
|
||||
try
|
||||
{
|
||||
var date = DateTime.Now.ToString("yyyy-MM-dd");
|
||||
await paymentService.CreateTransactionWithAccountAsync(
|
||||
new CreateTransactionWithAccountRequest
|
||||
{
|
||||
PayerAccountId = accountId.ToString(),
|
||||
Currency = WalletCurrency.SourcePoint,
|
||||
Amount = cost.ToString(),
|
||||
Remarks = $"Wage for SN-chan on {date}",
|
||||
Type = TransactionType.System,
|
||||
}
|
||||
);
|
||||
|
||||
// Mark all sequences for this account as paid
|
||||
foreach (var sequence in accountGroup)
|
||||
sequence.PaidToken = sequence.TotalToken;
|
||||
|
||||
logger.LogInformation(
|
||||
"Billed {cost} points for account {accountId}",
|
||||
cost,
|
||||
accountId
|
||||
);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Error billing for account {accountId}", accountId);
|
||||
if (!await db.UnpaidAccounts.AnyAsync(u => u.AccountId == accountId))
|
||||
{
|
||||
db.UnpaidAccounts.Add(new SnUnpaidAccount { AccountId = accountId, MarkedAt = DateTime.UtcNow });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
}
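In other words, the settlement above charges one source point per ten unpaid tokens, rounded up (the configured BillingMultiplier is not applied in this method, at least in the code shown here). A tiny worked example:

using System;

long unpaidTokens = 1234;                            // sum of TotalToken - PaidToken for one account
long cost = (long)Math.Ceiling(unpaidTokens / 10.0); // 124 source points
Console.WriteLine(cost);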
|
||||
|
||||
public async Task<(bool success, long cost)> RetryBillingForAccountAsync(Guid accountId, ILogger logger)
|
||||
{
|
||||
var isMarked = await db.UnpaidAccounts.FirstOrDefaultAsync(u => u.AccountId == accountId);
|
||||
if (isMarked == null)
|
||||
{
|
||||
logger.LogInformation("Account {accountId} is not marked for unpaid bills.", accountId);
|
||||
return (true, 0);
|
||||
}
|
||||
|
||||
var sequences = await db
|
||||
.ThinkingSequences.Where(s => s.AccountId == accountId && s.PaidToken < s.TotalToken)
|
||||
.ToListAsync();
|
||||
|
||||
if (!sequences.Any())
|
||||
{
|
||||
logger.LogInformation("No unpaid sequences found for account {accountId}. Unmarking.", accountId);
|
||||
db.UnpaidAccounts.Remove(isMarked);
|
||||
await db.SaveChangesAsync();
|
||||
return (true, 0);
|
||||
}
|
||||
|
||||
var totalUnpaidTokens = sequences.Sum(s => s.TotalToken - s.PaidToken);
|
||||
var cost = (long)Math.Ceiling(totalUnpaidTokens / 10.0);
|
||||
|
||||
if (cost == 0)
|
||||
{
|
||||
logger.LogInformation("Unpaid tokens for {accountId} resulted in zero cost. Marking as paid and unmarking.", accountId);
|
||||
foreach (var sequence in sequences)
|
||||
{
|
||||
sequence.PaidToken = sequence.TotalToken;
|
||||
}
|
||||
db.UnpaidAccounts.Remove(isMarked);
|
||||
await db.SaveChangesAsync();
|
||||
return (true, 0);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var date = DateTime.Now.ToString("yyyy-MM-dd");
|
||||
await paymentService.CreateTransactionWithAccountAsync(
|
||||
new CreateTransactionWithAccountRequest
|
||||
{
|
||||
PayerAccountId = accountId.ToString(),
|
||||
Currency = WalletCurrency.SourcePoint,
|
||||
Amount = cost.ToString(),
|
||||
Remarks = $"Wage for SN-chan on {date} (Retry)",
|
||||
Type = TransactionType.System,
|
||||
}
|
||||
);
|
||||
|
||||
foreach (var sequence in sequences)
|
||||
{
|
||||
sequence.PaidToken = sequence.TotalToken;
|
||||
}
|
||||
|
||||
db.UnpaidAccounts.Remove(isMarked);
|
||||
|
||||
logger.LogInformation(
|
||||
"Successfully billed {cost} points for account {accountId} on retry.",
|
||||
cost,
|
||||
accountId
|
||||
);
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
return (true, cost);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Error retrying billing for account {accountId}", accountId);
|
||||
return (false, cost);
|
||||
}
|
||||
}
|
||||
}
|
||||
DysonNetwork.Insight/Thought/TokenBillingJob.cs (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
using Quartz;
|
||||
|
||||
namespace DysonNetwork.Insight.Thought;
|
||||
|
||||
public class TokenBillingJob(ThoughtService thoughtService, ILogger<TokenBillingJob> logger) : IJob
|
||||
{
|
||||
public async Task Execute(IJobExecutionContext context)
|
||||
{
|
||||
await thoughtService.SettleThoughtBills(logger);
|
||||
}
|
||||
}
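The job's registration is not part of this diff; for context, a typical Quartz setup in Program.cs might look roughly like the following (the builder variable and the hourly cron schedule are assumptions):

using Quartz;

builder.Services.AddQuartz(q =>
{
    var jobKey = new JobKey(nameof(TokenBillingJob));
    q.AddJob<TokenBillingJob>(opts => opts.WithIdentity(jobKey));
    q.AddTrigger(opts => opts
        .ForJob(jobKey)
        .WithIdentity($"{nameof(TokenBillingJob)}-trigger")
        .WithCronSchedule("0 0 * * * ?")); // hourly, at the top of the hour
});
builder.Services.AddQuartzHostedService();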
|
||||
@@ -19,9 +19,26 @@
|
||||
"Etcd": {
|
||||
"Insecure": true
|
||||
},
|
||||
"Cache": {
|
||||
"Serializer": "MessagePack"
|
||||
},
|
||||
"Thinking": {
|
||||
"DefaultService": "deepseek-chat",
|
||||
"Services": {
|
||||
"deepseek-chat": {
|
||||
"Provider": "deepseek",
|
||||
"Model": "deepseek-chat",
|
||||
"ApiKey": "sk-cd709f9f1b96432e99d2d992392b4220"
|
||||
"ApiKey": "sk-",
|
||||
"BillingMultiplier": 1.0,
|
||||
"PerkLevel": 0
|
||||
},
|
||||
"deepseek-reasoner": {
|
||||
"Provider": "deepseek",
|
||||
"Model": "deepseek-reasoner",
|
||||
"ApiKey": "sk-",
|
||||
"BillingMultiplier": 1.5,
|
||||
"PerkLevel": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
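To relate this configuration back to ThoughtProvider earlier in the diff: each child of Thinking:Services becomes a ThoughtServiceModel, with BillingMultiplier defaulting to 1.0 and PerkLevel to 0 when omitted, and Thinking:DefaultService names the fallback service. A small illustrative usage (the DI resolution line and app variable are assumptions):

using System;
using DysonNetwork.Insight.Thought;
using Microsoft.Extensions.DependencyInjection;

var provider = app.Services.GetRequiredService<ThoughtProvider>(); // registration not shown in this diff
var reasoner = provider.GetServiceInfo("deepseek-reasoner");       // BillingMultiplier 1.5, PerkLevel 1
var fallback = provider.GetServiceInfo(null);                      // resolves to "deepseek-chat"
Console.WriteLine($"{reasoner?.ServiceId} vs default {fallback?.ServiceId}");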
|
||||
DysonNetwork.Messager/.gitignore (new vendored file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
Keys
|
||||
Uploads
|
||||
DataProtection-Keys
|
||||
|
||||
.DS_Store
|
||||
DysonNetwork.Messager/AppDatabase.cs (new file, 139 lines)
@@ -0,0 +1,139 @@
|
||||
using System.Linq.Expressions;
|
||||
using DysonNetwork.Shared.Data;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Design;
|
||||
using Microsoft.EntityFrameworkCore.Query;
|
||||
using NodaTime;
|
||||
using Quartz;
|
||||
|
||||
namespace DysonNetwork.Messager;
|
||||
|
||||
public class AppDatabase(
|
||||
DbContextOptions<AppDatabase> options,
|
||||
IConfiguration configuration
|
||||
) : DbContext(options)
|
||||
{
|
||||
public DbSet<SnChatRoom> ChatRooms { get; set; } = null!;
|
||||
public DbSet<SnChatMember> ChatMembers { get; set; } = null!;
|
||||
public DbSet<SnChatMessage> ChatMessages { get; set; } = null!;
|
||||
public DbSet<SnRealtimeCall> ChatRealtimeCall { get; set; } = null!;
|
||||
public DbSet<SnChatReaction> ChatReactions { get; set; } = null!;
|
||||
|
||||
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
|
||||
{
|
||||
optionsBuilder.UseNpgsql(
|
||||
configuration.GetConnectionString("App"),
|
||||
opt => opt
|
||||
.ConfigureDataSource(optSource => optSource.EnableDynamicJson())
|
||||
.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery)
|
||||
.UseNodaTime()
|
||||
).UseSnakeCaseNamingConvention();
|
||||
|
||||
base.OnConfiguring(optionsBuilder);
|
||||
}
|
||||
|
||||
protected override void OnModelCreating(ModelBuilder modelBuilder)
|
||||
{
|
||||
base.OnModelCreating(modelBuilder);
|
||||
|
||||
modelBuilder.Entity<SnChatMember>()
|
||||
.HasKey(pm => new { pm.Id });
|
||||
modelBuilder.Entity<SnChatMember>()
|
||||
.HasAlternateKey(pm => new { pm.ChatRoomId, pm.AccountId });
|
||||
modelBuilder.Entity<SnChatMember>()
|
||||
.HasOne(pm => pm.ChatRoom)
|
||||
.WithMany(p => p.Members)
|
||||
.HasForeignKey(pm => pm.ChatRoomId)
|
||||
.OnDelete(DeleteBehavior.Cascade);
|
||||
modelBuilder.Entity<SnChatMessage>()
|
||||
.HasOne(m => m.ForwardedMessage)
|
||||
.WithMany()
|
||||
.HasForeignKey(m => m.ForwardedMessageId)
|
||||
.OnDelete(DeleteBehavior.Restrict);
|
||||
modelBuilder.Entity<SnChatMessage>()
|
||||
.HasOne(m => m.RepliedMessage)
|
||||
.WithMany()
|
||||
.HasForeignKey(m => m.RepliedMessageId)
|
||||
.OnDelete(DeleteBehavior.Restrict);
|
||||
modelBuilder.Entity<SnRealtimeCall>()
|
||||
.HasOne(m => m.Room)
|
||||
.WithMany()
|
||||
.HasForeignKey(m => m.RoomId)
|
||||
.OnDelete(DeleteBehavior.Cascade);
|
||||
modelBuilder.Entity<SnRealtimeCall>()
|
||||
.HasOne(m => m.Sender)
|
||||
.WithMany()
|
||||
.HasForeignKey(m => m.SenderId)
|
||||
.OnDelete(DeleteBehavior.Cascade);
|
||||
|
||||
modelBuilder.ApplySoftDeleteFilters();
|
||||
}
|
||||
|
||||
public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
this.ApplyAuditableAndSoftDelete();
|
||||
return await base.SaveChangesAsync(cancellationToken);
|
||||
}
|
||||
}
|
||||
|
||||
public class AppDatabaseRecyclingJob(AppDatabase db, ILogger<AppDatabaseRecyclingJob> logger) : IJob
|
||||
{
|
||||
public async Task Execute(IJobExecutionContext context)
|
||||
{
|
||||
var now = SystemClock.Instance.GetCurrentInstant();
|
||||
|
||||
logger.LogInformation("Deleting soft-deleted records...");
|
||||
|
||||
var threshold = now - Duration.FromDays(7);
|
||||
|
||||
var entityTypes = db.Model.GetEntityTypes()
|
||||
.Where(t => typeof(ModelBase).IsAssignableFrom(t.ClrType) && t.ClrType != typeof(ModelBase))
|
||||
.Select(t => t.ClrType);
|
||||
|
||||
foreach (var entityType in entityTypes)
|
||||
{
|
||||
var set = (IQueryable)db.GetType().GetMethod(nameof(DbContext.Set), Type.EmptyTypes)!
|
||||
.MakeGenericMethod(entityType).Invoke(db, null)!;
|
||||
var parameter = Expression.Parameter(entityType, "e");
|
||||
var property = Expression.Property(parameter, nameof(ModelBase.DeletedAt));
|
||||
var condition = Expression.LessThan(property, Expression.Constant(threshold, typeof(Instant?)));
|
||||
var notNull = Expression.NotEqual(property, Expression.Constant(null, typeof(Instant?)));
|
||||
var finalCondition = Expression.AndAlso(notNull, condition);
|
||||
var lambda = Expression.Lambda(finalCondition, parameter);
|
||||
|
||||
var queryable = set.Provider.CreateQuery(
|
||||
Expression.Call(
|
||||
typeof(Queryable),
|
||||
"Where",
|
||||
[entityType],
|
||||
set.Expression,
|
||||
Expression.Quote(lambda)
|
||||
)
|
||||
);
|
||||
|
||||
var toListAsync = typeof(EntityFrameworkQueryableExtensions)
|
||||
.GetMethod(nameof(EntityFrameworkQueryableExtensions.ToListAsync))!
|
||||
.MakeGenericMethod(entityType);
|
||||
|
||||
var items = await (dynamic)toListAsync.Invoke(null, [queryable, CancellationToken.None])!;
|
||||
db.RemoveRange(items);
|
||||
}
|
||||
|
||||
await db.SaveChangesAsync();
|
||||
}
|
||||
}
|
||||
|
||||
public class AppDatabaseFactory : IDesignTimeDbContextFactory<AppDatabase>
|
||||
{
|
||||
public AppDatabase CreateDbContext(string[] args)
|
||||
{
|
||||
var configuration = new ConfigurationBuilder()
|
||||
.SetBasePath(Directory.GetCurrentDirectory())
|
||||
.AddJsonFile("appsettings.json")
|
||||
.Build();
|
||||
|
||||
var optionsBuilder = new DbContextOptionsBuilder<AppDatabase>();
|
||||
return new AppDatabase(optionsBuilder.Options, configuration);
|
||||
}
|
||||
}
|
||||
@@ -4,12 +4,16 @@ using DysonNetwork.Shared.Auth;
|
||||
using DysonNetwork.Shared.Data;
|
||||
using DysonNetwork.Shared.Models;
|
||||
using DysonNetwork.Shared.Proto;
|
||||
using DysonNetwork.Sphere.Autocompletion;
|
||||
using DysonNetwork.Messager.Poll;
|
||||
using DysonNetwork.Messager.Wallet;
|
||||
using DysonNetwork.Shared.Models.Embed;
|
||||
using Grpc.Core;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NodaTime;
|
||||
|
||||
namespace DysonNetwork.Sphere.Chat;
|
||||
namespace DysonNetwork.Messager.Chat;
|
||||
|
||||
[ApiController]
|
||||
[Route("/api/chat")]
|
||||
@@ -19,7 +23,8 @@ public partial class ChatController(
|
||||
ChatRoomService crs,
|
||||
FileService.FileServiceClient files,
|
||||
AccountService.AccountServiceClient accounts,
|
||||
AutocompletionService aus
|
||||
PaymentService.PaymentServiceClient paymentClient,
|
||||
PollService.PollServiceClient pollClient
|
||||
) : ControllerBase
|
||||
{
|
||||
public class MarkMessageReadRequest
|
||||
@@ -62,10 +67,26 @@ public partial class ChatController(
|
||||
return Ok(result);
|
||||
}
|
||||
|
||||
[HttpGet("unread")]
|
||||
[Authorize]
|
||||
public async Task<ActionResult<int>> GetTotalUnreadCount()
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
var unreadMessages = await cs.CountUnreadMessageForUser(accountId);
|
||||
|
||||
var totalUnreadCount = unreadMessages.Values.Sum();
|
||||
|
||||
return Ok(totalUnreadCount);
|
||||
}
|
||||
|
||||
public class SendMessageRequest
|
||||
{
|
||||
[MaxLength(4096)] public string? Content { get; set; }
|
||||
[MaxLength(36)] public string? Nonce { get; set; }
|
||||
public Guid? FundId { get; set; }
|
||||
public Guid? PollId { get; set; }
|
||||
public List<string>? AttachmentsId { get; set; }
|
||||
public Dictionary<string, object>? Meta { get; set; }
|
||||
public Guid? RepliedMessageId { get; set; }
|
||||
@@ -90,7 +111,7 @@ public partial class ChatController(
|
||||
.Where(m => m.AccountId == accountId && m.ChatRoomId == roomId && m.JoinedAt != null &&
|
||||
m.LeaveAt == null)
|
||||
.FirstOrDefaultAsync();
|
||||
if (member == null || member.Role < ChatMemberRole.Member)
|
||||
if (member == null)
|
||||
return StatusCode(403, "You are not a member of this chat room.");
|
||||
}
|
||||
|
||||
@@ -133,7 +154,7 @@ public partial class ChatController(
|
||||
.Where(m => m.AccountId == accountId && m.ChatRoomId == roomId && m.JoinedAt != null &&
|
||||
m.LeaveAt == null)
|
||||
.FirstOrDefaultAsync();
|
||||
if (member == null || member.Role < ChatMemberRole.Member)
|
||||
if (member == null)
|
||||
return StatusCode(403, "You are not a member of this chat room.");
|
||||
}
|
||||
|
||||
@@ -220,19 +241,67 @@ public partial class ChatController(
|
||||
|
||||
[HttpPost("{roomId:guid}/messages")]
|
||||
[Authorize]
|
||||
[RequiredPermission("global", "chat.messages.create")]
|
||||
[AskPermission("chat.messages.create")]
|
||||
public async Task<ActionResult> SendMessage([FromBody] SendMessageRequest request, Guid roomId)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
request.Content = TextSanitizer.Sanitize(request.Content);
|
||||
if (string.IsNullOrWhiteSpace(request.Content) &&
|
||||
(request.AttachmentsId == null || request.AttachmentsId.Count == 0))
|
||||
(request.AttachmentsId == null || request.AttachmentsId.Count == 0) &&
|
||||
!request.FundId.HasValue &&
|
||||
!request.PollId.HasValue)
|
||||
return BadRequest("You cannot send an empty message.");
|
||||
|
||||
var member = await crs.GetRoomMember(Guid.Parse(currentUser.Id), roomId);
|
||||
if (member == null || member.Role < ChatMemberRole.Member)
|
||||
return StatusCode(403, "You need to be a normal member to send messages here.");
|
||||
var now = SystemClock.Instance.GetCurrentInstant();
|
||||
var member = await crs.GetRoomMember(accountId, roomId);
|
||||
if (member == null)
|
||||
return StatusCode(403, "You need to be a member to send messages here.");
|
||||
if (member.TimeoutUntil.HasValue && member.TimeoutUntil.Value > now)
|
||||
return StatusCode(403, "You has been timed out in this chat.");
|
||||
|
||||
// Validate fund if provided
|
||||
if (request.FundId.HasValue)
|
||||
{
|
||||
try
|
||||
{
|
||||
var fundResponse = await paymentClient.GetWalletFundAsync(new GetWalletFundRequest
|
||||
{
|
||||
FundId = request.FundId.Value.ToString()
|
||||
});
|
||||
|
||||
// Check if the fund was created by the current user
|
||||
if (fundResponse.CreatorAccountId != member.AccountId.ToString())
|
||||
return BadRequest("You can only share funds that you created.");
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.NotFound)
|
||||
{
|
||||
return BadRequest("The specified fund does not exist.");
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.InvalidArgument)
|
||||
{
|
||||
return BadRequest("Invalid fund ID.");
|
||||
}
|
||||
}
|
||||
|
||||
// Validate poll if provided
|
||||
if (request.PollId.HasValue)
|
||||
{
|
||||
try
|
||||
{
|
||||
var pollResponse = await pollClient.GetPollAsync(new GetPollRequest { Id = request.PollId.Value.ToString() });
|
||||
// Poll validation is handled by gRPC call
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.NotFound)
|
||||
{
|
||||
return BadRequest("The specified poll does not exist.");
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.InvalidArgument)
|
||||
{
|
||||
return BadRequest("Invalid poll ID.");
|
||||
}
|
||||
}
|
||||
|
||||
var message = new SnChatMessage
|
||||
{
|
||||
@@ -242,6 +311,37 @@ public partial class ChatController(
|
||||
Nonce = request.Nonce ?? Guid.NewGuid().ToString(),
|
||||
Meta = request.Meta ?? new Dictionary<string, object>(),
|
||||
};
|
||||
|
||||
// Add embed for fund if provided
|
||||
if (request.FundId.HasValue)
|
||||
{
|
||||
var fundEmbed = new FundEmbed { Id = request.FundId.Value };
|
||||
message.Meta ??= new Dictionary<string, object>();
|
||||
if (
|
||||
!message.Meta.TryGetValue("embeds", out var existingEmbeds)
|
||||
|| existingEmbeds is not List<EmbeddableBase>
|
||||
)
|
||||
message.Meta["embeds"] = new List<Dictionary<string, object>>();
|
||||
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
|
||||
embeds.Add(EmbeddableBase.ToDictionary(fundEmbed));
|
||||
message.Meta["embeds"] = embeds;
|
||||
}
|
||||
|
||||
// Add embed for poll if provided
|
||||
if (request.PollId.HasValue)
|
||||
{
|
||||
var pollResponse = await pollClient.GetPollAsync(new GetPollRequest { Id = request.PollId.Value.ToString() });
|
||||
var pollEmbed = new PollEmbed { Id = Guid.Parse(pollResponse.Id) };
|
||||
message.Meta ??= new Dictionary<string, object>();
|
||||
if (
|
||||
!message.Meta.TryGetValue("embeds", out var existingEmbeds)
|
||||
|| existingEmbeds is not List<EmbeddableBase>
|
||||
)
|
||||
message.Meta["embeds"] = new List<Dictionary<string, object>>();
|
||||
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
|
||||
embeds.Add(EmbeddableBase.ToDictionary(pollEmbed));
|
||||
message.Meta["embeds"] = embeds;
|
||||
}
|
||||
if (request.Content is not null)
|
||||
message.Content = request.Content;
|
||||
if (request.AttachmentsId is not null)
|
||||
@@ -290,6 +390,7 @@ public partial class ChatController(
|
||||
public async Task<ActionResult> UpdateMessage([FromBody] SendMessageRequest request, Guid roomId, Guid messageId)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser) return Unauthorized();
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
|
||||
request.Content = TextSanitizer.Sanitize(request.Content);
|
||||
|
||||
@@ -300,36 +401,117 @@ public partial class ChatController(
|
||||
|
||||
if (message == null) return NotFound();
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
var now = SystemClock.Instance.GetCurrentInstant();
|
||||
if (message.Sender.AccountId != accountId)
|
||||
return StatusCode(403, "You can only edit your own messages.");
|
||||
if (message.Sender.TimeoutUntil.HasValue && message.Sender.TimeoutUntil.Value > now)
|
||||
return StatusCode(403, "You has been timed out in this chat.");
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.Content) &&
|
||||
(request.AttachmentsId == null || request.AttachmentsId.Count == 0))
|
||||
(request.AttachmentsId == null || request.AttachmentsId.Count == 0) &&
|
||||
!request.FundId.HasValue &&
|
||||
!request.PollId.HasValue)
|
||||
return BadRequest("You cannot send an empty message.");
|
||||
|
||||
// Validate reply and forward message IDs exist
|
||||
if (request.RepliedMessageId.HasValue)
|
||||
{
|
||||
var repliedMessage = await db.ChatMessages
|
||||
.FirstOrDefaultAsync(m => m.Id == request.RepliedMessageId.Value && m.ChatRoomId == roomId);
|
||||
if (repliedMessage == null)
|
||||
return BadRequest("The message you're replying to does not exist.");
|
||||
}
|
||||
|
||||
if (request.ForwardedMessageId.HasValue)
|
||||
{
|
||||
var forwardedMessage = await db.ChatMessages
|
||||
.FirstOrDefaultAsync(m => m.Id == request.ForwardedMessageId.Value);
|
||||
if (forwardedMessage == null)
|
||||
return BadRequest("The message you're forwarding does not exist.");
|
||||
}
|
||||
|
||||
// Update mentions based on new content and references
|
||||
var updatedMentions = await ExtractMentionedUsersAsync(request.Content, request.RepliedMessageId,
|
||||
request.ForwardedMessageId, roomId, accountId);
|
||||
message.MembersMentioned = updatedMentions;
|
||||
|
||||
// Handle fund embeds for update
|
||||
if (request.FundId.HasValue)
|
||||
{
|
||||
try
|
||||
{
|
||||
var fundResponse = await paymentClient.GetWalletFundAsync(new GetWalletFundRequest
|
||||
{
|
||||
FundId = request.FundId.Value.ToString()
|
||||
});
|
||||
|
||||
// Check if the fund was created by the current user
|
||||
if (fundResponse.CreatorAccountId != accountId.ToString())
|
||||
return BadRequest("You can only share funds that you created.");
|
||||
|
||||
var fundEmbed = new FundEmbed { Id = request.FundId.Value };
|
||||
message.Meta ??= new Dictionary<string, object>();
|
||||
if (
|
||||
!message.Meta.TryGetValue("embeds", out var existingEmbeds)
|
||||
|| existingEmbeds is not List<EmbeddableBase>
|
||||
)
|
||||
message.Meta["embeds"] = new List<Dictionary<string, object>>();
|
||||
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
|
||||
// Remove all old fund embeds
|
||||
embeds.RemoveAll(e =>
|
||||
e.TryGetValue("type", out var type) && type.ToString() == "fund"
|
||||
);
|
||||
embeds.Add(EmbeddableBase.ToDictionary(fundEmbed));
|
||||
message.Meta["embeds"] = embeds;
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.NotFound)
|
||||
{
|
||||
return BadRequest("The specified fund does not exist.");
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.InvalidArgument)
|
||||
{
|
||||
return BadRequest("Invalid fund ID.");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
message.Meta ??= new Dictionary<string, object>();
|
||||
if (
|
||||
!message.Meta.TryGetValue("embeds", out var existingEmbeds)
|
||||
|| existingEmbeds is not List<EmbeddableBase>
|
||||
)
|
||||
message.Meta["embeds"] = new List<Dictionary<string, object>>();
|
||||
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
|
||||
// Remove all old fund embeds
|
||||
embeds.RemoveAll(e => e.TryGetValue("type", out var type) && type.ToString() == "fund");
|
||||
}
|
||||
|
||||
// Handle poll embeds for update
|
||||
if (request.PollId.HasValue)
|
||||
{
|
||||
try
|
||||
{
|
||||
var pollResponse = await pollClient.GetPollAsync(new GetPollRequest { Id = request.PollId.Value.ToString() });
|
||||
var pollEmbed = new PollEmbed { Id = Guid.Parse(pollResponse.Id) };
|
||||
message.Meta ??= new Dictionary<string, object>();
|
||||
if (
|
||||
!message.Meta.TryGetValue("embeds", out var existingEmbeds)
|
||||
|| existingEmbeds is not List<EmbeddableBase>
|
||||
)
|
||||
message.Meta["embeds"] = new List<Dictionary<string, object>>();
|
||||
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
|
||||
// Remove all old poll embeds
|
||||
embeds.RemoveAll(e =>
|
||||
e.TryGetValue("type", out var type) && type.ToString() == "poll"
|
||||
);
|
||||
embeds.Add(EmbeddableBase.ToDictionary(pollEmbed));
|
||||
message.Meta["embeds"] = embeds;
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.NotFound)
|
||||
{
|
||||
return BadRequest("The specified poll does not exist.");
|
||||
}
|
||||
catch (RpcException ex) when (ex.StatusCode == Grpc.Core.StatusCode.InvalidArgument)
|
||||
{
|
||||
return BadRequest("Invalid poll ID.");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
message.Meta ??= new Dictionary<string, object>();
|
||||
if (
|
||||
!message.Meta.TryGetValue("embeds", out var existingEmbeds)
|
||||
|| existingEmbeds is not List<EmbeddableBase>
|
||||
)
|
||||
message.Meta["embeds"] = new List<Dictionary<string, object>>();
|
||||
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
|
||||
// Remove all old poll embeds
|
||||
embeds.RemoveAll(e => e.TryGetValue("type", out var type) && type.ToString() == "poll");
|
||||
}
|
||||
|
||||
// Call service method to update the message
|
||||
await cs.UpdateMessageAsync(
|
||||
message,
|
||||
@@ -384,25 +566,10 @@ public partial class ChatController(
|
||||
if (!isMember)
|
||||
return StatusCode(403, "You are not a member of this chat room.");
|
||||
|
||||
var response = await cs.GetSyncDataAsync(roomId, request.LastSyncTimestamp);
|
||||
var response = await cs.GetSyncDataAsync(roomId, request.LastSyncTimestamp, 500);
|
||||
Response.Headers["X-Total"] = response.TotalCount.ToString();
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
[HttpPost("{roomId:guid}/autocomplete")]
|
||||
public async Task<ActionResult<List<DysonNetwork.Shared.Models.Autocompletion>>> ChatAutoComplete(
|
||||
[FromBody] AutocompletionRequest request, Guid roomId)
|
||||
{
|
||||
if (HttpContext.Items["CurrentUser"] is not Account currentUser)
|
||||
return Unauthorized();
|
||||
|
||||
var accountId = Guid.Parse(currentUser.Id);
|
||||
var isMember = await db.ChatMembers
|
||||
.AnyAsync(m =>
|
||||
m.AccountId == accountId && m.ChatRoomId == roomId && m.JoinedAt != null && m.LeaveAt == null);
|
||||
if (!isMember)
|
||||
return StatusCode(403, "You are not a member of this chat room.");
|
||||
|
||||
var result = await aus.GetAutocompletion(request.Content, chatId: roomId, limit: 10);
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.