✨ Improvement of file processing and video snapshot
DysonNetwork.Drive/Migrations/20250725051034_UpdateCloudFileThumbnail.Designer.cs (generated, new file, +194 lines)
@@ -0,0 +1,194 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using DysonNetwork.Drive;
using DysonNetwork.Shared.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;

#nullable disable

namespace DysonNetwork.Drive.Migrations
{
    [DbContext(typeof(AppDatabase))]
    [Migration("20250725051034_UpdateCloudFileThumbnail")]
    partial class UpdateCloudFileThumbnail
    {
        /// <inheritdoc />
        protected override void BuildTargetModel(ModelBuilder modelBuilder)
        {
#pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "9.0.7")
                .HasAnnotation("Relational:MaxIdentifierLength", 63);

            NpgsqlModelBuilderExtensions.HasPostgresExtension(modelBuilder, "postgis");
            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFile", b =>
            {
                b.Property<string>("Id")
                    .HasMaxLength(32)
                    .HasColumnType("character varying(32)")
                    .HasColumnName("id");

                b.Property<Guid>("AccountId")
                    .HasColumnType("uuid")
                    .HasColumnName("account_id");

                b.Property<Instant>("CreatedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("created_at");

                b.Property<Instant?>("DeletedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("deleted_at");

                b.Property<string>("Description")
                    .HasMaxLength(4096)
                    .HasColumnType("character varying(4096)")
                    .HasColumnName("description");

                b.Property<Dictionary<string, object>>("FileMeta")
                    .IsRequired()
                    .HasColumnType("jsonb")
                    .HasColumnName("file_meta");

                b.Property<bool>("HasCompression")
                    .HasColumnType("boolean")
                    .HasColumnName("has_compression");

                b.Property<bool>("HasThumbnail")
                    .HasColumnType("boolean")
                    .HasColumnName("has_thumbnail");

                b.Property<string>("Hash")
                    .HasMaxLength(256)
                    .HasColumnType("character varying(256)")
                    .HasColumnName("hash");

                b.Property<bool>("IsMarkedRecycle")
                    .HasColumnType("boolean")
                    .HasColumnName("is_marked_recycle");

                b.Property<string>("MimeType")
                    .HasMaxLength(256)
                    .HasColumnType("character varying(256)")
                    .HasColumnName("mime_type");

                b.Property<string>("Name")
                    .IsRequired()
                    .HasMaxLength(1024)
                    .HasColumnType("character varying(1024)")
                    .HasColumnName("name");

                b.Property<List<ContentSensitiveMark>>("SensitiveMarks")
                    .HasColumnType("jsonb")
                    .HasColumnName("sensitive_marks");

                b.Property<long>("Size")
                    .HasColumnType("bigint")
                    .HasColumnName("size");

                b.Property<string>("StorageId")
                    .HasMaxLength(32)
                    .HasColumnType("character varying(32)")
                    .HasColumnName("storage_id");

                b.Property<string>("StorageUrl")
                    .HasMaxLength(4096)
                    .HasColumnType("character varying(4096)")
                    .HasColumnName("storage_url");

                b.Property<Instant>("UpdatedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("updated_at");

                b.Property<Instant?>("UploadedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("uploaded_at");

                b.Property<string>("UploadedTo")
                    .HasMaxLength(128)
                    .HasColumnType("character varying(128)")
                    .HasColumnName("uploaded_to");

                b.Property<Dictionary<string, object>>("UserMeta")
                    .HasColumnType("jsonb")
                    .HasColumnName("user_meta");

                b.HasKey("Id")
                    .HasName("pk_files");

                b.ToTable("files", (string)null);
            });

            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
            {
                b.Property<Guid>("Id")
                    .ValueGeneratedOnAdd()
                    .HasColumnType("uuid")
                    .HasColumnName("id");

                b.Property<Instant>("CreatedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("created_at");

                b.Property<Instant?>("DeletedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("deleted_at");

                b.Property<Instant?>("ExpiredAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("expired_at");

                b.Property<string>("FileId")
                    .IsRequired()
                    .HasMaxLength(32)
                    .HasColumnType("character varying(32)")
                    .HasColumnName("file_id");

                b.Property<string>("ResourceId")
                    .IsRequired()
                    .HasMaxLength(1024)
                    .HasColumnType("character varying(1024)")
                    .HasColumnName("resource_id");

                b.Property<Instant>("UpdatedAt")
                    .HasColumnType("timestamp with time zone")
                    .HasColumnName("updated_at");

                b.Property<string>("Usage")
                    .IsRequired()
                    .HasMaxLength(1024)
                    .HasColumnType("character varying(1024)")
                    .HasColumnName("usage");

                b.HasKey("Id")
                    .HasName("pk_file_references");

                b.HasIndex("FileId")
                    .HasDatabaseName("ix_file_references_file_id");

                b.ToTable("file_references", (string)null);
            });

            modelBuilder.Entity("DysonNetwork.Drive.Storage.CloudFileReference", b =>
            {
                b.HasOne("DysonNetwork.Drive.Storage.CloudFile", "File")
                    .WithMany()
                    .HasForeignKey("FileId")
                    .OnDelete(DeleteBehavior.Cascade)
                    .IsRequired()
                    .HasConstraintName("fk_file_references_files_file_id");

                b.Navigation("File");
            });
#pragma warning restore 612, 618
        }
    }
}
@@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace DysonNetwork.Drive.Migrations
{
    /// <inheritdoc />
    public partial class UpdateCloudFileThumbnail : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.AddColumn<bool>(
                name: "has_thumbnail",
                table: "files",
                type: "boolean",
                nullable: false,
                defaultValue: false);
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropColumn(
                name: "has_thumbnail",
                table: "files");
        }
    }
}
@@ -59,6 +59,10 @@ namespace DysonNetwork.Drive.Migrations
                        .HasColumnType("boolean")
                        .HasColumnName("has_compression");

+                   b.Property<bool>("HasThumbnail")
+                       .HasColumnType("boolean")
+                       .HasColumnName("has_thumbnail");
+
                    b.Property<string>("Hash")
                        .HasMaxLength(256)
                        .HasColumnType("character varying(256)")
@@ -56,6 +56,7 @@ public class CloudFile : ModelBase, ICloudFile, IIdentifiedResource
    public Instant? UploadedAt { get; set; }
    [MaxLength(128)] public string? UploadedTo { get; set; }
    public bool HasCompression { get; set; } = false;
+   public bool HasThumbnail { get; set; } = false;

    /// <summary>
    /// The field is set to true if the recycling job plans to delete the file.
@@ -10,6 +10,8 @@ using Minio.DataModel.Args;
using NetVips;
using NodaTime;
using tusdotnet.Stores;
+using System.Linq.Expressions;
+using Microsoft.EntityFrameworkCore.Query;

namespace DysonNetwork.Drive.Storage;

@@ -92,13 +94,11 @@ public class FileService(
            .ToList();
    }

-   private static readonly string TempFilePrefix = "dyn-cloudfile";
+   private const string TempFilePrefix = "dyn-cloudfile";

    private static readonly string[] AnimatedImageTypes =
        ["image/gif", "image/apng", "image/webp", "image/avif"];

-   // The analysis file method no longer will remove the GPS EXIF data
-   // It should be handled on the client side, and for some specific cases it should be keep
    public async Task<CloudFile> ProcessNewFileAsync(
        Account account,
        string fileId,
@@ -107,8 +107,6 @@ public class FileService(
        string? contentType
    )
    {
-       var result = new List<(string filePath, string suffix)>();
-
        var ogFilePath = Path.GetFullPath(Path.Join(configuration.GetValue<string>("Tus:StorePath"), fileId));
        var fileSize = stream.Length;
        var hash = await HashFileAsync(stream, fileSize: fileSize);
@@ -124,82 +122,95 @@ public class FileService(
            AccountId = Guid.Parse(account.Id)
        };

-       var existingFile = await db.Files.FirstOrDefaultAsync(f => f.Hash == hash);
-       file.StorageId = existingFile is not null ? existingFile.StorageId : file.Id;
+       var existingFile = await db.Files.AsNoTracking().FirstOrDefaultAsync(f => f.Hash == hash);
+       file.StorageId = existingFile?.StorageId ?? file.Id;

        if (existingFile is not null)
        {
            file.FileMeta = existingFile.FileMeta;
            file.HasCompression = existingFile.HasCompression;
            file.SensitiveMarks = existingFile.SensitiveMarks;
+           file.MimeType = existingFile.MimeType;
+           file.UploadedAt = existingFile.UploadedAt;
+           file.UploadedTo = existingFile.UploadedTo;

            db.Files.Add(file);
            await db.SaveChangesAsync();
+           // Since the file content is a duplicate, we can delete the new upload and we are done.
+           await stream.DisposeAsync();
+           await store.DeleteFileAsync(file.Id, CancellationToken.None);
            return file;
        }

-       switch (contentType.Split('/')[0])
+       // Extract metadata on the current thread for a faster initial response
+       await ExtractMetadataAsync(file, ogFilePath, stream);
+
+       db.Files.Add(file);
+       await db.SaveChangesAsync();
+
+       // Offload optimization (image conversion, thumbnailing) and uploading to a background task
+       _ = Task.Run(() =>
+           ProcessAndUploadInBackgroundAsync(file.Id, file.StorageId, contentType, ogFilePath, stream));
+
+       return file;
+   }
+
+   /// <summary>
+   /// Extracts metadata from the file based on its content type.
+   /// This runs synchronously to ensure the initial database record has basic metadata.
+   /// </summary>
+   private async Task ExtractMetadataAsync(CloudFile file, string filePath, Stream stream)
+   {
+       switch (file.MimeType.Split('/')[0])
        {
            case "image":
-               var blurhash =
-                   BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(
-                       xComponent: 3,
-                       yComponent: 3,
-                       filename: ogFilePath
-                   );
-
-               // Rewind stream
-               stream.Position = 0;
-
-               // Use NetVips for the rest
-               using (var vipsImage = NetVips.Image.NewFromStream(stream))
+               try
                {
+                   var blurhash = BlurHashSharp.SkiaSharp.BlurHashEncoder.Encode(3, 3, filePath);
+                   stream.Position = 0;
+
+                   using var vipsImage = Image.NewFromStream(stream);
                    var width = vipsImage.Width;
                    var height = vipsImage.Height;
-                   var format = vipsImage.Get("vips-loader") ?? "unknown";
-
-                   // Try to get orientation from exif data
-                   var orientation = 1;
+                   var orientation = vipsImage.Get("orientation") as int? ?? 1;

                    var meta = new Dictionary<string, object?>
                    {
                        ["blur"] = blurhash,
-                       ["format"] = format,
+                       ["format"] = vipsImage.Get("vips-loader") ?? "unknown",
                        ["width"] = width,
                        ["height"] = height,
                        ["orientation"] = orientation,
                    };
-                   Dictionary<string, object> exif = [];
+                   var exif = new Dictionary<string, object>();

                    foreach (var field in vipsImage.GetFields())
                    {
+                       if (IsIgnoredField(field)) continue;
                        var value = vipsImage.Get(field);
-                       // Skip GPS-related EXIF fields to remove location data
-                       if (IsIgnoredField(field))
-                           continue;
-
-                       if (field.StartsWith("exif-")) exif[field.Replace("exif-", "")] = value;
-                       else meta[field] = value;
-
-                       if (field == "orientation") orientation = (int)value;
+                       if (field.StartsWith("exif-"))
+                           exif[field.Replace("exif-", "")] = value;
+                       else
+                           meta[field] = value;
                    }

-                   if (orientation is 6 or 8)
-                       (width, height) = (height, width);
-
-                   var aspectRatio = height != 0 ? (double)width / height : 0;
-
+                   if (orientation is 6 or 8) (width, height) = (height, width);
                    meta["exif"] = exif;
-                   meta["ratio"] = aspectRatio;
+                   meta["ratio"] = height != 0 ? (double)width / height : 0;
                    file.FileMeta = meta;
                }
+               catch (Exception ex)
+               {
+                   logger.LogError(ex, "Failed to analyze image file {FileId}", file.Id);
+               }

                break;

            case "video":
            case "audio":
                try
                {
-                   var mediaInfo = await FFProbe.AnalyseAsync(ogFilePath);
+                   var mediaInfo = await FFProbe.AnalyseAsync(filePath);
                    file.FileMeta = new Dictionary<string, object?>
                    {
                        ["duration"] = mediaInfo.Duration.TotalSeconds,
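Note on the hunk above: ProcessNewFileAsync now only hashes the upload, extracts basic metadata, and saves the row before returning; the expensive conversion and upload work is fired off with Task.Run into ProcessAndUploadInBackgroundAsync. A minimal standalone sketch of that fire-and-forget pattern with a fresh DI scope follows; it is an illustration only, and names such as BackgroundKickoff and IHeavyWorker are hypothetical, not types from this commit.

```csharp
// Sketch only (not from the commit): fire-and-forget background work that
// resolves scoped services from a new IServiceScopeFactory scope, mirroring
// the pattern ProcessNewFileAsync uses in the diff above.
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;

public interface IHeavyWorker
{
    Task ProcessAsync(Guid jobId, string payload);
}

public class BackgroundKickoff(IServiceScopeFactory scopeFactory, ILogger<BackgroundKickoff> logger)
{
    public Guid Start(string payload)
    {
        var jobId = Guid.NewGuid();

        // The cheap part finishes before returning; the heavy part continues
        // on the thread pool without blocking the caller.
        _ = Task.Run(() => RunAsync(jobId, payload));
        return jobId;
    }

    private async Task RunAsync(Guid jobId, string payload)
    {
        // Request-scoped services (e.g. an EF Core DbContext) must not be
        // captured by the task; a fresh scope is created here instead.
        using var scope = scopeFactory.CreateScope();
        var worker = scope.ServiceProvider.GetRequiredService<IHeavyWorker>();
        try
        {
            await worker.ProcessAsync(jobId, payload);
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Background job {JobId} failed", jobId);
        }
    }
}
```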
@@ -207,116 +218,152 @@ public class FileService(
                        ["format_long_name"] = mediaInfo.Format.FormatLongName,
                        ["start_time"] = mediaInfo.Format.StartTime.ToString(),
                        ["bit_rate"] = mediaInfo.Format.BitRate.ToString(CultureInfo.InvariantCulture),
-                       ["tags"] = mediaInfo.Format.Tags ?? [],
+                       ["tags"] = mediaInfo.Format.Tags ?? new Dictionary<string, string>(),
                        ["chapters"] = mediaInfo.Chapters,
+                       // Add detailed stream information
+                       ["video_streams"] = mediaInfo.VideoStreams.Select(s => new
+                       {
+                           s.AvgFrameRate, s.BitRate, s.CodecName, s.Duration, s.Height, s.Width, s.Language,
+                           s.PixelFormat, s.Rotation
+                       }).ToList(),
+                       ["audio_streams"] = mediaInfo.AudioStreams.Select(s => new
+                       {
+                           s.BitRate, s.Channels, s.ChannelLayout, s.CodecName, s.Duration, s.Language,
+                           s.SampleRateHz
+                       })
+                           .ToList(),
                    };
                    if (mediaInfo.PrimaryVideoStream is not null)
-                       file.FileMeta["ratio"] =
-                           mediaInfo.PrimaryVideoStream.Width / mediaInfo.PrimaryVideoStream.Height;
+                       file.FileMeta["ratio"] = (double)mediaInfo.PrimaryVideoStream.Width /
+                                                mediaInfo.PrimaryVideoStream.Height;
                }
                catch (Exception ex)
                {
-                   logger.LogError("File analyzed failed, unable collect video / audio information: {Message}",
-                       ex.Message);
+                   logger.LogError(ex, "Failed to analyze media file {FileId}", file.Id);
                }

                break;
        }
+   }

-       db.Files.Add(file);
-       await db.SaveChangesAsync();
-
-       _ = Task.Run(async () =>
-       {
-           using var scope = scopeFactory.CreateScope();
-           var nfs = scope.ServiceProvider.GetRequiredService<FileService>();
-
-           try
-           {
-               logger.LogInformation("Processed file {fileId}, now trying optimizing if possible...", fileId);
-
-               if (contentType.Split('/')[0] == "image")
-               {
-                   // Skip compression for animated image types
-                   var animatedMimeTypes = AnimatedImageTypes;
-                   if (Enumerable.Contains(animatedMimeTypes, contentType))
-                   {
-                       logger.LogInformation(
-                           "File {fileId} is an animated image (MIME: {mime}), skipping WebP conversion.", fileId,
-                           contentType
-                       );
-                       var tempFilePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
-                       result.Add((tempFilePath, string.Empty));
-                       return;
-                   }
-
-                   file.MimeType = "image/webp";
-
-                   using var vipsImage = Image.NewFromFile(ogFilePath);
-                   var imagePath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}");
-                   vipsImage.Autorot().WriteToFile(imagePath + ".webp",
-                       new VOption { { "lossless", true }, { "strip", true } });
-                   result.Add((imagePath + ".webp", string.Empty));
-
-                   if (vipsImage.Width * vipsImage.Height >= 1024 * 1024)
-                   {
-                       var scale = 1024.0 / Math.Max(vipsImage.Width, vipsImage.Height);
-                       var imageCompressedPath =
-                           Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{file.Id}-compressed");
-
-                       // Create and save image within the same synchronous block to avoid disposal issues
-                       using var compressedImage = vipsImage.Resize(scale);
-                       compressedImage.Autorot().WriteToFile(imageCompressedPath + ".webp",
-                           new VOption { { "Q", 80 }, { "strip", true } });
-                       result.Add((imageCompressedPath + ".webp", ".compressed"));
-                       file.HasCompression = true;
-                   }
-               }
-               else
-               {
-                   // No extra process for video, add it to the upload queue.
-                   result.Add((ogFilePath, string.Empty));
-               }
-
-               logger.LogInformation("Optimized file {fileId}, now uploading...", fileId);
-
-               if (result.Count > 0)
-               {
-                   List<Task<CloudFile>> tasks = [];
-                   tasks.AddRange(result.Select(item =>
-                       nfs.UploadFileToRemoteAsync(file, item.filePath, null, item.suffix, true))
-                   );
-
-                   await Task.WhenAll(tasks);
-                   file = await tasks.First();
-               }
-               else
-               {
-                   file = await nfs.UploadFileToRemoteAsync(file, stream, null);
-               }
-
-               logger.LogInformation("Uploaded file {fileId} done!", fileId);
-
-               var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
-               await scopedDb.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(setter => setter
-                   .SetProperty(f => f.UploadedAt, file.UploadedAt)
-                   .SetProperty(f => f.UploadedTo, file.UploadedTo)
-                   .SetProperty(f => f.MimeType, file.MimeType)
-                   .SetProperty(f => f.HasCompression, file.HasCompression)
-               );
-           }
-           catch (Exception err)
-           {
-               logger.LogError(err, "Failed to process {fileId}", fileId);
-           }
-
-           await stream.DisposeAsync();
-           await store.DeleteFileAsync(file.Id, CancellationToken.None);
-           await nfs._PurgeCacheAsync(file.Id);
-       });
-
-       return file;
+   /// <summary>
+   /// Handles file optimization (image compression, video thumbnailing) and uploads to remote storage in the background.
+   /// </summary>
+   private async Task ProcessAndUploadInBackgroundAsync(string fileId, string storageId, string contentType,
+       string originalFilePath, Stream stream)
+   {
+       await using var bgStream = stream; // Ensure stream is disposed at the end of this task
+       using var scope = scopeFactory.CreateScope();
+       var nfs = scope.ServiceProvider.GetRequiredService<FileService>();
+       var scopedDb = scope.ServiceProvider.GetRequiredService<AppDatabase>();
+
+       var uploads = new List<(string FilePath, string Suffix, string ContentType, bool SelfDestruct)>();
+       var newMimeType = contentType;
+       var hasCompression = false;
+       var hasThumbnail = false;
+
+       try
+       {
+           logger.LogInformation("Processing file {FileId} in background...", fileId);
+
+           switch (contentType.Split('/')[0])
+           {
+               case "image" when !AnimatedImageTypes.Contains(contentType):
+                   newMimeType = "image/webp";
+                   using (var vipsImage = Image.NewFromFile(originalFilePath, access: Enums.Access.Sequential))
+                   {
+                       var imageToWrite = vipsImage;
+
+                       if (vipsImage.Interpretation is Enums.Interpretation.Scrgb or Enums.Interpretation.Xyz)
+                       {
+                           imageToWrite = vipsImage.Colourspace(Enums.Interpretation.Srgb);
+                       }
+
+                       var webpPath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}.webp");
+                       imageToWrite.Autorot().WriteToFile(webpPath,
+                           new VOption { { "lossless", true }, { "strip", true } });
+                       uploads.Add((webpPath, string.Empty, newMimeType, true));
+
+                       if (imageToWrite.Width * imageToWrite.Height >= 1024 * 1024)
+                       {
+                           var scale = 1024.0 / Math.Max(imageToWrite.Width, imageToWrite.Height);
+                           var compressedPath =
+                               Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}-compressed.webp");
+                           using var compressedImage = imageToWrite.Resize(scale);
+                           compressedImage.Autorot().WriteToFile(compressedPath,
+                               new VOption { { "Q", 80 }, { "strip", true } });
+                           uploads.Add((compressedPath, ".compressed", newMimeType, true));
+                           hasCompression = true;
+                       }
+
+                       if (!ReferenceEquals(imageToWrite, vipsImage))
+                       {
+                           imageToWrite.Dispose(); // Clean up manually created colourspace-converted image
+                       }
+                   }
+
+                   break;
+
+               case "video":
+                   uploads.Add((originalFilePath, string.Empty, contentType, false));
+                   var thumbnailPath = Path.Join(Path.GetTempPath(), $"{TempFilePrefix}#{fileId}.thumbnail.webp");
+                   try
+                   {
+                       var mediaInfo = await FFProbe.AnalyseAsync(originalFilePath);
+                       var snapshotTime = mediaInfo.Duration > TimeSpan.FromSeconds(5)
+                           ? TimeSpan.FromSeconds(5)
+                           : TimeSpan.FromSeconds(1);
+                       await FFMpeg.SnapshotAsync(originalFilePath, thumbnailPath, captureTime: snapshotTime);
+                       uploads.Add((thumbnailPath, ".thumbnail.webp", "image/webp", true));
+                       hasThumbnail = true;
+                   }
+                   catch (Exception ex)
+                   {
+                       logger.LogError(ex, "Failed to generate thumbnail for video {FileId}", fileId);
+                   }
+
+                   break;
+
+               default:
+                   uploads.Add((originalFilePath, string.Empty, contentType, false));
+                   break;
+           }
+
+           logger.LogInformation("Optimized file {FileId}, now uploading...", fileId);
+
+           if (uploads.Count > 0)
+           {
+               var uploadedTo = configuration.GetValue<string>("Storage:PreferredRemote")!;
+               var uploadTasks = uploads.Select(item =>
+                   nfs.UploadFileToRemoteAsync(storageId, uploadedTo, item.FilePath, item.Suffix, item.ContentType,
+                       item.SelfDestruct)
+               ).ToList();
+
+               await Task.WhenAll(uploadTasks);
+
+               logger.LogInformation("Uploaded file {FileId} done!", fileId);
+
+               var fileToUpdate = await scopedDb.Files.FirstAsync(f => f.Id == fileId);
+               if (hasThumbnail) fileToUpdate.HasThumbnail = true;
+
+               await scopedDb.Files.Where(f => f.Id == fileId).ExecuteUpdateAsync(setter => setter
+                   .SetProperty(f => f.UploadedAt, Instant.FromDateTimeUtc(DateTime.UtcNow))
+                   .SetProperty(f => f.UploadedTo, uploadedTo)
+                   .SetProperty(f => f.MimeType, newMimeType)
+                   .SetProperty(f => f.HasCompression, hasCompression)
+                   .SetProperty(f => f.HasThumbnail, hasThumbnail)
+               );
+           }
+       }
+       catch (Exception err)
+       {
+           logger.LogError(err, "Failed to process and upload {FileId}", fileId);
+       }
+       finally
+       {
+           await store.DeleteFileAsync(fileId, CancellationToken.None);
+           await nfs._PurgeCacheAsync(fileId);
+       }
    }

    private static async Task<string> HashFileAsync(Stream stream, int chunkSize = 1024 * 1024, long? fileSize = null)
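The video branch above is the commit's "video snapshot" feature: FFProbe.AnalyseAsync reads the duration, and FFMpeg.SnapshotAsync grabs a frame at 5 s (or 1 s for clips shorter than 5 s) into a WebP thumbnail that is uploaded with the ".thumbnail.webp" suffix and recorded via HasThumbnail. A trimmed, standalone sketch of the same FFMpegCore call sequence, with placeholder paths, assuming ffmpeg/ffprobe are available on the machine:

```csharp
// Sketch only: single-frame video thumbnail via FFMpegCore, choosing the
// capture time the same way the diff above does.
using System;
using FFMpegCore;

var inputPath = "/tmp/input-video.mp4";                 // placeholder input
var thumbnailPath = "/tmp/input-video.thumbnail.webp";  // placeholder output

var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
var captureTime = mediaInfo.Duration > TimeSpan.FromSeconds(5)
    ? TimeSpan.FromSeconds(5)    // long enough: skip past any fade-in
    : TimeSpan.FromSeconds(1);   // very short clip: grab an early frame

await FFMpeg.SnapshotAsync(inputPath, thumbnailPath, captureTime: captureTime);
Console.WriteLine($"Thumbnail written to {thumbnailPath} (duration {mediaInfo.Duration})");
```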
@@ -327,6 +374,7 @@ public class FileService(

        using var md5 = MD5.Create();
        var hashBytes = await md5.ComputeHashAsync(stream);
+       stream.Position = 0; // Reset stream position after reading
        return Convert.ToHexString(hashBytes).ToLowerInvariant();
    }

@@ -349,45 +397,38 @@ public class FileService(
        }

        var hash = md5.ComputeHash(buffer, 0, bytesRead);
+       stream.Position = 0; // Reset stream position
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

-   public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, string filePath, string? targetRemote,
-       string? suffix = null, bool selfDestruct = false)
+   public async Task UploadFileToRemoteAsync(string storageId, string targetRemote, string filePath,
+       string? suffix = null, string? contentType = null, bool selfDestruct = false)
    {
-       var fileStream = File.OpenRead(filePath);
-       var result = await UploadFileToRemoteAsync(file, fileStream, targetRemote, suffix);
+       await using var fileStream = File.OpenRead(filePath);
+       await UploadFileToRemoteAsync(storageId, targetRemote, fileStream, suffix, contentType);
        if (selfDestruct) File.Delete(filePath);
-       return result;
    }

-   public async Task<CloudFile> UploadFileToRemoteAsync(CloudFile file, Stream stream, string? targetRemote,
-       string? suffix = null)
+   public async Task UploadFileToRemoteAsync(string storageId, string targetRemote, Stream stream,
+       string? suffix = null, string? contentType = null)
    {
-       if (file.UploadedAt.HasValue) return file;
-
-       file.UploadedTo = targetRemote ?? configuration.GetValue<string>("Storage:PreferredRemote")!;
-
-       var dest = GetRemoteStorageConfig(file.UploadedTo);
+       var dest = GetRemoteStorageConfig(targetRemote);
        var client = CreateMinioClient(dest);
        if (client is null)
            throw new InvalidOperationException(
-               $"Failed to configure client for remote destination '{file.UploadedTo}'"
+               $"Failed to configure client for remote destination '{targetRemote}'"
            );

        var bucket = dest.Bucket;
-       var contentType = file.MimeType ?? "application/octet-stream";
+       contentType ??= "application/octet-stream";

        await client.PutObjectAsync(new PutObjectArgs()
            .WithBucket(bucket)
-           .WithObject(string.IsNullOrWhiteSpace(suffix) ? file.Id : file.Id + suffix)
-           .WithStreamData(stream) // Fix this disposed
+           .WithObject(string.IsNullOrWhiteSpace(suffix) ? storageId : storageId + suffix)
+           .WithStreamData(stream)
            .WithObjectSize(stream.Length)
            .WithContentType(contentType)
        );
-
-       file.UploadedAt = Instant.FromDateTimeUtc(DateTime.UtcNow);
-       return file;
    }

    public async Task<CloudFile> UpdateFileAsync(CloudFile file, FieldMask updateMask)
@@ -398,58 +439,38 @@ public class FileService(
            throw new InvalidOperationException($"File with ID {file.Id} not found.");
        }

+       var updatable = new UpdatableCloudFile(existingFile);
+
        foreach (var path in updateMask.Paths)
        {
            switch (path)
            {
                case "name":
-                   existingFile.Name = file.Name;
+                   updatable.Name = file.Name;
                    break;
                case "description":
-                   existingFile.Description = file.Description;
+                   updatable.Description = file.Description;
                    break;
                case "file_meta":
-                   existingFile.FileMeta = file.FileMeta;
+                   updatable.FileMeta = file.FileMeta;
                    break;
                case "user_meta":
-                   existingFile.UserMeta = file.UserMeta;
-                   break;
-               case "mime_type":
-                   existingFile.MimeType = file.MimeType;
-                   break;
-               case "hash":
-                   existingFile.Hash = file.Hash;
-                   break;
-               case "size":
-                   existingFile.Size = file.Size;
-                   break;
-               case "uploaded_at":
-                   existingFile.UploadedAt = file.UploadedAt;
-                   break;
-               case "uploaded_to":
-                   existingFile.UploadedTo = file.UploadedTo;
-                   break;
-               case "has_compression":
-                   existingFile.HasCompression = file.HasCompression;
+                   updatable.UserMeta = file.UserMeta;
                    break;
                case "is_marked_recycle":
-                   existingFile.IsMarkedRecycle = file.IsMarkedRecycle;
-                   break;
-               case "storage_id":
-                   existingFile.StorageId = file.StorageId;
-                   break;
-               case "storage_url":
-                   existingFile.StorageUrl = file.StorageUrl;
+                   updatable.IsMarkedRecycle = file.IsMarkedRecycle;
                    break;
                default:
-                   logger.LogWarning("Attempted to update unknown field: {Field}", path);
+                   logger.LogWarning("Attempted to update unmodifiable field: {Field}", path);
                    break;
            }
        }

-       await db.SaveChangesAsync();
+       await db.Files.Where(f => f.Id == file.Id).ExecuteUpdateAsync(updatable.ToSetPropertyCalls());
+
        await _PurgeCacheAsync(file.Id);
-       return existingFile;
+       // Re-fetch the file to return the updated state
+       return await db.Files.AsNoTracking().FirstAsync(f => f.Id == file.Id);
    }

    public async Task DeleteFileAsync(CloudFile file)
@@ -618,46 +639,46 @@ public class FileService(
    }

    /// <summary>
-   /// Checks if an EXIF field contains GPS location data
+   /// Checks if an EXIF field should be ignored (e.g., GPS data).
    /// </summary>
-   /// <param name="fieldName">The EXIF field name</param>
-   /// <returns>True if the field contains GPS data, false otherwise</returns>
-   private static bool IsGpsExifField(string fieldName)
+   private static bool IsIgnoredField(string fieldName)
    {
        // Common GPS EXIF field names
        var gpsFields = new[]
        {
-           "gps-latitude",
-           "gps-longitude",
-           "gps-altitude",
-           "gps-latitude-ref",
-           "gps-longitude-ref",
-           "gps-altitude-ref",
-           "gps-timestamp",
-           "gps-datestamp",
-           "gps-speed",
-           "gps-speed-ref",
-           "gps-track",
-           "gps-track-ref",
-           "gps-img-direction",
-           "gps-img-direction-ref",
-           "gps-dest-latitude",
-           "gps-dest-longitude",
-           "gps-dest-latitude-ref",
-           "gps-dest-longitude-ref",
-           "gps-processing-method",
+           "gps-latitude", "gps-longitude", "gps-altitude", "gps-latitude-ref", "gps-longitude-ref",
+           "gps-altitude-ref", "gps-timestamp", "gps-datestamp", "gps-speed", "gps-speed-ref", "gps-track",
+           "gps-track-ref", "gps-img-direction", "gps-img-direction-ref", "gps-dest-latitude",
+           "gps-dest-longitude", "gps-dest-latitude-ref", "gps-dest-longitude-ref", "gps-processing-method",
            "gps-area-information"
        };

-       return gpsFields.Any(gpsField =>
-           fieldName.Equals(gpsField, StringComparison.OrdinalIgnoreCase) ||
-           fieldName.StartsWith("gps", StringComparison.OrdinalIgnoreCase));
-   }
-
-   private static bool IsIgnoredField(string fieldName)
-   {
-       if (IsGpsExifField(fieldName)) return true;
+       if (fieldName.StartsWith("exif-GPS")) return true;
+       if (fieldName.StartsWith("ifd3-GPS")) return true;
        if (fieldName.EndsWith("-data")) return true;
-       return false;
+       return gpsFields.Any(gpsField => fieldName.StartsWith(gpsField, StringComparison.OrdinalIgnoreCase));
+   }
+}
+
+/// <summary>
+/// A helper class to build an ExecuteUpdateAsync call for CloudFile.
+/// </summary>
+file class UpdatableCloudFile(CloudFile file)
+{
+   public string Name { get; set; } = file.Name;
+   public string? Description { get; set; } = file.Description;
+   public Dictionary<string, object?>? FileMeta { get; set; } = file.FileMeta;
+   public Dictionary<string, object?>? UserMeta { get; set; } = file.UserMeta;
+   public bool IsMarkedRecycle { get; set; } = file.IsMarkedRecycle;
+
+   public Expression<Func<SetPropertyCalls<CloudFile>, SetPropertyCalls<CloudFile>>> ToSetPropertyCalls()
+   {
+       var userMeta = UserMeta ?? new Dictionary<string, object?>();
+       return setter => setter
+           .SetProperty(f => f.Name, Name)
+           .SetProperty(f => f.Description, Description)
+           .SetProperty(f => f.FileMeta, FileMeta)
+           .SetProperty(f => f.UserMeta, userMeta!)
+           .SetProperty(f => f.IsMarkedRecycle, IsMarkedRecycle);
    }
}
@@ -74,7 +74,7 @@ public class ChatMember : ModelBase
    public Guid ChatRoomId { get; set; }
    public ChatRoom ChatRoom { get; set; } = null!;
    public Guid AccountId { get; set; }
-   [NotMapped] public Account Account { get; set; } = null!;
+   [NotMapped] public Account? Account { get; set; }

    [MaxLength(1024)] public string? Nick { get; set; }

@@ -260,7 +260,8 @@ public partial class ChatService(
            }
            else if (member.Notify == ChatMemberNotify.Mentions) continue;

-           accountsToNotify.Add(member.Account.ToProtoValue());
+           if (member.Account is not null)
+               accountsToNotify.Add(member.Account.ToProtoValue());
        }

        logger.LogInformation($"Trying to deliver message to {accountsToNotify.Count} accounts...");
@@ -38,6 +38,7 @@
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AEntityFrameworkServiceCollectionExtensions_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2024_002E3_003Fresharper_002Dhost_003FSourcesCache_003F4a28847852ee9ba45fd3107526c0a749a733bd4f4ebf33aa3c9a59737a3f758_003FEntityFrameworkServiceCollectionExtensions_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AEnumerable_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2024_002E3_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F832399abc13b45b6bdbabfa022e4a28487e00_003F7f_003F7aece4dd_003FEnumerable_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AEnumerable_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003Fadcd336f9cde4e71998a851d7eb945bb87e00_003F0c_003F96dc130e_003FEnumerable_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
+<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AEnums_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F5aa524c330cf4033930e4a8661c62bc331a00_003F9e_003F4e134017_003FEnums_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AEvents_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2024_002E3_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F8bb08a178b5b43c5bac20a5a54159a5b2a800_003F20_003F86914b63_003FEvents_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AExceptionDispatchInfo_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F109293935a4844d5aa1610150b96edcde55000_003Fb7_003F8b7e5594_003FExceptionDispatchInfo_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=7020124F_002D9FFC_002D4AC3_002D8F3D_002DAAB8E0240759_002Ff_003AExceptionDispatchInfo_002Ecs_002Fl_003A_002E_002E_003F_002E_002E_003F_002E_002E_003FLibrary_003FApplication_0020Support_003FJetBrains_003FRider2025_002E1_003Fresharper_002Dhost_003FDecompilerCache_003Fdecompiler_003F3bef61b8a21d4c8e96872ecdd7782fa0e55000_003F49_003F21ccb952_003FExceptionDispatchInfo_002Ecs/@EntryIndexedValue">ForceIncluded</s:String>