@@ -102,11 +105,11 @@ import {
NSelect,
NTag,
NCollapseTransition,
- NFormItem,
type UploadCustomRequestOptions,
type UploadSettledFileInfo,
type SelectOption,
type SelectRenderTag,
+ type UploadFileInfo,
} from 'naive-ui'
import { computed, h, onMounted, ref } from 'vue'
import { CloudUploadRound } from '@vicons/material'
@@ -283,4 +286,18 @@ function createThumbnailUrl(
if (!fileInfo) return undefined
return fileInfo.url ?? undefined
}
+
+function customDownload(file: UploadFileInfo) {
+  const { url } = file
+ if (!url)
+ return
+ window.open(url.replace('/api', ''), '_blank')
+}
+
+function customPreview(file: UploadFileInfo, detail: { event: MouseEvent }) {
+ detail.event.preventDefault()
+  const { url } = file
+ if (!url) return
+ window.open(url.replace('/api', ''), '_blank')
+}
diff --git a/DysonNetwork.Drive/Client/src/views/secure.ts b/DysonNetwork.Drive/Client/src/views/secure.ts
index 5a4109c..1d8a352 100644
--- a/DysonNetwork.Drive/Client/src/views/secure.ts
+++ b/DysonNetwork.Drive/Client/src/views/secure.ts
@@ -1,92 +1,94 @@
export async function downloadAndDecryptFile(
url: string,
password: string,
- onProgress?: (progress: number) => void
+ fileName: string,
+ onProgress?: (progress: number) => void,
): Promise<void> {
- const response = await fetch(url);
- if (!response.ok) throw new Error(`Failed to fetch: ${response.status}`);
+ const response = await fetch(url)
+ if (!response.ok) throw new Error(`Failed to fetch: ${response.status}`)
- const contentLength = +(response.headers.get('Content-Length') || 0);
- const reader = response.body!.getReader();
- const chunks: Uint8Array[] = [];
- let received = 0;
+ const contentLength = +(response.headers.get('Content-Length') || 0)
+ const reader = response.body!.getReader()
+ const chunks: Uint8Array[] = []
+ let received = 0
while (true) {
- const { done, value } = await reader.read();
- if (done) break;
+ const { done, value } = await reader.read()
+ if (done) break
if (value) {
- chunks.push(value);
- received += value.length;
+ chunks.push(value)
+ received += value.length
if (contentLength && onProgress) {
- onProgress(received / contentLength);
+ onProgress(received / contentLength)
}
}
}
- const fullBuffer = new Uint8Array(received);
- let offset = 0;
+ const fullBuffer = new Uint8Array(received)
+ let offset = 0
for (const chunk of chunks) {
- fullBuffer.set(chunk, offset);
- offset += chunk.length;
+ fullBuffer.set(chunk, offset)
+ offset += chunk.length
}
- const decryptedBytes = await decryptFile(fullBuffer, password);
+ const decryptedBytes = await decryptFile(fullBuffer, password)
// Create a blob and trigger a download
- const blob = new Blob([decryptedBytes]);
- const downloadUrl = URL.createObjectURL(blob);
- const a = document.createElement('a');
- a.href = downloadUrl;
- a.download = 'decrypted_file'; // You may allow customization
- document.body.appendChild(a);
- a.click();
- a.remove();
- URL.revokeObjectURL(downloadUrl);
+ const blob = new Blob([decryptedBytes])
+ const downloadUrl = URL.createObjectURL(blob)
+ const a = document.createElement('a')
+ a.href = downloadUrl
+ a.download = fileName
+ document.body.appendChild(a)
+ a.click()
+ a.remove()
+ URL.revokeObjectURL(downloadUrl)
}
-export async function decryptFile(
- fileBuffer: Uint8Array,
- password: string
-): Promise<Uint8Array> {
- const salt = fileBuffer.slice(0, 16);
- const nonce = fileBuffer.slice(16, 28);
- const tag = fileBuffer.slice(28, 44);
- const ciphertext = fileBuffer.slice(44);
+export async function decryptFile(fileBuffer: Uint8Array, password: string): Promise<Uint8Array> {
+ const salt = fileBuffer.slice(0, 16)
+ const nonce = fileBuffer.slice(16, 28)
+ const tag = fileBuffer.slice(28, 44)
+ const ciphertext = fileBuffer.slice(44)
- const enc = new TextEncoder();
+ const enc = new TextEncoder()
const keyMaterial = await crypto.subtle.importKey(
- 'raw', enc.encode(password), { name: 'PBKDF2' }, false, ['deriveKey']
- );
+ 'raw',
+ enc.encode(password),
+ { name: 'PBKDF2' },
+ false,
+ ['deriveKey'],
+ )
const key = await crypto.subtle.deriveKey(
{ name: 'PBKDF2', salt, iterations: 100000, hash: 'SHA-256' },
keyMaterial,
{ name: 'AES-GCM', length: 256 },
false,
- ['decrypt']
- );
+ ['decrypt'],
+ )
- const fullCiphertext = new Uint8Array(ciphertext.length + tag.length);
- fullCiphertext.set(ciphertext);
- fullCiphertext.set(tag, ciphertext.length);
+ const fullCiphertext = new Uint8Array(ciphertext.length + tag.length)
+ fullCiphertext.set(ciphertext)
+ fullCiphertext.set(tag, ciphertext.length)
- let decrypted: ArrayBuffer;
+ let decrypted: ArrayBuffer
try {
decrypted = await crypto.subtle.decrypt(
{ name: 'AES-GCM', iv: nonce, tagLength: 128 },
key,
- fullCiphertext
- );
+ fullCiphertext,
+ )
} catch {
- throw new Error("Incorrect password or corrupted file.");
+ throw new Error('Incorrect password or corrupted file.')
}
- const magic = new TextEncoder().encode("DYSON1");
- const decryptedBytes = new Uint8Array(decrypted);
+ const magic = new TextEncoder().encode('DYSON1')
+ const decryptedBytes = new Uint8Array(decrypted)
for (let i = 0; i < magic.length; i++) {
if (decryptedBytes[i] !== magic[i]) {
- throw new Error("Incorrect password or corrupted file.");
+ throw new Error('Incorrect password or corrupted file.')
}
}
- return decryptedBytes.slice(magic.length);
+ return decryptedBytes.slice(magic.length)
}
diff --git a/DysonNetwork.Drive/Storage/FileService.cs b/DysonNetwork.Drive/Storage/FileService.cs
index 2c28282..a0b01bf 100644
--- a/DysonNetwork.Drive/Storage/FileService.cs
+++ b/DysonNetwork.Drive/Storage/FileService.cs
@@ -126,7 +126,7 @@ public class FileService(
contentType = "application/octet-stream";
}
- var hash = await HashFileAsync(stream, fileSize: fileSize);
+ var hash = await HashFileAsync(ogFilePath);
var file = new CloudFile
{
@@ -136,7 +136,7 @@ public class FileService(
Size = fileSize,
Hash = hash,
AccountId = Guid.Parse(account.Id),
- IsEncrypted = !string.IsNullOrWhiteSpace(encryptPassword)
+ IsEncrypted = !string.IsNullOrWhiteSpace(encryptPassword) && pool.AllowEncryption
};
var existingFile = await db.Files.AsNoTracking().FirstOrDefaultAsync(f => f.Hash == hash);
@@ -274,7 +274,7 @@ public class FileService(
}
/// <summary>
- /// Handles file optimization (image compression, video thumbnailing) and uploads to remote storage in the background.
+ /// Handles file optimization (image compression, video thumbnail) and uploads to remote storage in the background.
/// </summary>
private async Task ProcessAndUploadInBackgroundAsync(
string fileId,
@@ -350,15 +350,23 @@ public class FileService(
var snapshotTime = mediaInfo.Duration > TimeSpan.FromSeconds(5)
? TimeSpan.FromSeconds(5)
: TimeSpan.FromSeconds(1);
+
await FFMpeg.SnapshotAsync(originalFilePath, thumbnailPath, captureTime: snapshotTime);
- uploads.Add((thumbnailPath, ".thumbnail.webp", "image/webp", true));
- hasThumbnail = true;
+
+ if (File.Exists(thumbnailPath))
+ {
+ uploads.Add((thumbnailPath, ".thumbnail.webp", "image/webp", true));
+ hasThumbnail = true;
+ }
+ else
+ {
+ logger.LogWarning("FFMpeg did not produce thumbnail for video {FileId}", fileId);
+ }
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to generate thumbnail for video {FileId}", fileId);
}
-
break;
default:
@@ -405,11 +413,12 @@ public class FileService(
}
}
-    private static async Task<string> HashFileAsync(Stream stream, int chunkSize = 1024 * 1024, long? fileSize = null)
+    private static async Task<string> HashFileAsync(string filePath, int chunkSize = 1024 * 1024)
{
- fileSize ??= stream.Length;
+ using var stream = File.OpenRead(filePath);
+ var fileSize = stream.Length;
if (fileSize > chunkSize * 1024 * 5)
- return await HashFastApproximateAsync(stream, chunkSize);
+ return await HashFastApproximateAsync(filePath, chunkSize);
using var md5 = MD5.Create();
var hashBytes = await md5.ComputeHashAsync(stream);
@@ -417,8 +426,10 @@ public class FileService(
return Convert.ToHexString(hashBytes).ToLowerInvariant();
}
-    private static async Task<string> HashFastApproximateAsync(Stream stream, int chunkSize = 1024 * 1024)
+    private static async Task<string> HashFastApproximateAsync(string filePath, int chunkSize = 1024 * 1024)
{
+ await using var stream = File.OpenRead(filePath);
+
// Scale the chunk size to kB level
chunkSize *= 1024;
diff --git a/DysonNetwork.Drive/package.json b/DysonNetwork.Drive/package.json
deleted file mode 100644
index 318dde6..0000000
--- a/DysonNetwork.Drive/package.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "dependencies": {
- "highlight.js": "^11.11.1"
- }
-}
\ No newline at end of file