♻️ Move the web reader to insight completely

2026-01-02 01:23:45 +08:00
parent ede49333f8
commit 07b8c99682
65 changed files with 806 additions and 864 deletions

View File

@@ -8,6 +8,14 @@
<ItemGroup>
<PackageReference Include="AngleSharp" Version="1.4.0" />
<PackageReference Include="Google.Protobuf" Version="3.33.2" />
<PackageReference Include="Grpc.AspNetCore.Server.ClientFactory" Version="2.76.0" />
<PackageReference Include="Grpc.AspNetCore.Server.Reflection" Version="2.76.0" />
<PackageReference Include="Grpc.Net.Client" Version="2.76.0" />
<PackageReference Include="Grpc.Tools" Version="2.76.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.1">
<PrivateAssets>all</PrivateAssets>
@@ -30,4 +38,8 @@
<Folder Include="Controllers\" />
</ItemGroup>
<ItemGroup>
<Protobuf Remove="..\DysonNetwork.Shared\Proto\**" />
</ItemGroup>
</Project>

View File

@@ -11,6 +11,9 @@ builder.AddServiceDefaults();
builder.ConfigureAppKestrel(builder.Configuration);
builder.Services.AddGrpc();
builder.Services.AddGrpcReflection();
builder.Services.AddControllers();
builder.Services.AddAppServices();
builder.Services.AddAppAuthentication();

View File

@@ -1,9 +1,33 @@
using DysonNetwork.Shared.Models.Embed;
using DysonNetwork.Shared.Proto;
using EmbedLinkEmbed = DysonNetwork.Shared.Models.Embed.LinkEmbed;
namespace DysonNetwork.Insight.Reader;
public class ScrapedArticle
{
- public LinkEmbed LinkEmbed { get; set; } = null!;
+ public EmbedLinkEmbed LinkEmbed { get; set; } = null!;
public string? Content { get; set; }
public Shared.Proto.ScrapedArticle ToProtoValue()
{
var proto = new Shared.Proto.ScrapedArticle
{
LinkEmbed = LinkEmbed.ToProtoValue()
};
if (!string.IsNullOrEmpty(Content))
proto.Content = Content;
return proto;
}
public static ScrapedArticle FromProtoValue(Shared.Proto.ScrapedArticle proto)
{
return new ScrapedArticle
{
LinkEmbed = EmbedLinkEmbed.FromProtoValue(proto.LinkEmbed),
Content = proto.Content == "" ? null : proto.Content
};
}
}

View File

@@ -0,0 +1,90 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Grpc.Core;
using Microsoft.EntityFrameworkCore;
namespace DysonNetwork.Insight.Reader;
public class WebArticleGrpcService(AppDatabase db) : WebArticleService.WebArticleServiceBase
{
public override async Task<GetWebArticleResponse> GetWebArticle(
GetWebArticleRequest request,
ServerCallContext context
)
{
if (!Guid.TryParse(request.Id, out var id))
throw new RpcException(new Status(StatusCode.InvalidArgument, "invalid id"));
var article = await db.WebArticles
.Include(a => a.Feed)
.FirstOrDefaultAsync(a => a.Id == id);
return article == null
? throw new RpcException(new Status(StatusCode.NotFound, "article not found"))
: new GetWebArticleResponse { Article = article.ToProtoValue() };
}
public override async Task<GetWebArticleBatchResponse> GetWebArticleBatch(
GetWebArticleBatchRequest request,
ServerCallContext context
)
{
var ids = request.Ids
.Where(s => !string.IsNullOrWhiteSpace(s) && Guid.TryParse(s, out _))
.Select(Guid.Parse)
.ToList();
if (ids.Count == 0)
return new GetWebArticleBatchResponse();
var articles = await db.WebArticles
.Include(a => a.Feed)
.Where(a => ids.Contains(a.Id))
.ToListAsync();
var response = new GetWebArticleBatchResponse();
response.Articles.AddRange(articles.Select(a => a.ToProtoValue()));
return response;
}
public override async Task<ListWebArticlesResponse> ListWebArticles(
ListWebArticlesRequest request,
ServerCallContext context
)
{
if (!Guid.TryParse(request.FeedId, out var feedId))
throw new RpcException(new Status(StatusCode.InvalidArgument, "invalid feed_id"));
var query = db.WebArticles
.Include(a => a.Feed)
.Where(a => a.FeedId == feedId);
var articles = await query.ToListAsync();
var response = new ListWebArticlesResponse
{
TotalSize = articles.Count
};
response.Articles.AddRange(articles.Select(a => a.ToProtoValue()));
return response;
}
public override async Task<GetRecentArticlesResponse> GetRecentArticles(
GetRecentArticlesRequest request,
ServerCallContext context
)
{
var limit = request.Limit > 0 ? request.Limit : 20;
var articles = await db.WebArticles
.Include(a => a.Feed)
.OrderByDescending(a => a.PublishedAt ?? DateTime.MinValue)
.ThenByDescending(a => a.CreatedAt)
.Take(limit)
.ToListAsync();
var response = new GetRecentArticlesResponse();
response.Articles.AddRange(articles.Select(a => a.ToProtoValue()));
return response;
}
}

View File

@@ -2,6 +2,7 @@ using System.ComponentModel.DataAnnotations;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Registry;
using WebFeedConfig = DysonNetwork.Shared.Models.WebFeedConfig;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

View File

@@ -0,0 +1,55 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using Grpc.Core;
using Microsoft.EntityFrameworkCore;
namespace DysonNetwork.Insight.Reader;
public class WebFeedGrpcService(WebFeedService service, AppDatabase db)
: Shared.Proto.WebFeedService.WebFeedServiceBase
{
public override async Task<GetWebFeedResponse> GetWebFeed(
GetWebFeedRequest request,
ServerCallContext context
)
{
SnWebFeed? feed = null;
switch (request.IdentifierCase)
{
case GetWebFeedRequest.IdentifierOneofCase.Id:
if (!string.IsNullOrWhiteSpace(request.Id) && Guid.TryParse(request.Id, out var id))
feed = await service.GetFeedAsync(id);
break;
case GetWebFeedRequest.IdentifierOneofCase.Url:
feed = await db.WebFeeds.FirstOrDefaultAsync(f => f.Url == request.Url);
break;
case GetWebFeedRequest.IdentifierOneofCase.None:
break;
default:
throw new ArgumentOutOfRangeException();
}
return feed == null
? throw new RpcException(new Status(StatusCode.NotFound, "feed not found"))
: new GetWebFeedResponse { Feed = feed.ToProtoValue() };
}
public override async Task<ListWebFeedsResponse> ListWebFeeds(
ListWebFeedsRequest request,
ServerCallContext context
)
{
if (!Guid.TryParse(request.PublisherId, out var publisherId))
throw new RpcException(new Status(StatusCode.InvalidArgument, "invalid publisher_id"));
var feeds = await service.GetFeedsByPublisherAsync(publisherId);
var response = new ListWebFeedsResponse
{
TotalSize = feeds.Count
};
response.Feeds.AddRange(feeds.Select(f => f.ToProtoValue()));
return response;
}
}

View File

@@ -0,0 +1,49 @@
using DysonNetwork.Shared.Proto;
using Grpc.Core;
namespace DysonNetwork.Insight.Reader;
public class WebReaderGrpcService(WebReaderService service) : Shared.Proto.WebReaderService.WebReaderServiceBase
{
public override async Task<ScrapeArticleResponse> ScrapeArticle(
ScrapeArticleRequest request,
ServerCallContext context
)
{
if (string.IsNullOrWhiteSpace(request.Url))
throw new RpcException(new Status(StatusCode.InvalidArgument, "url is required"));
var scrapedArticle = await service.ScrapeArticleAsync(request.Url, context.CancellationToken);
return new ScrapeArticleResponse { Article = scrapedArticle.ToProtoValue() };
}
public override async Task<GetLinkPreviewResponse> GetLinkPreview(
GetLinkPreviewRequest request,
ServerCallContext context
)
{
if (string.IsNullOrWhiteSpace(request.Url))
throw new RpcException(new Status(StatusCode.InvalidArgument, "url is required"));
var linkEmbed = await service.GetLinkPreviewAsync(
request.Url,
context.CancellationToken,
bypassCache: request.BypassCache
);
return new GetLinkPreviewResponse { Preview = linkEmbed.ToProtoValue() };
}
public override async Task<InvalidateLinkPreviewCacheResponse> InvalidateLinkPreviewCache(
InvalidateLinkPreviewCacheRequest request,
ServerCallContext context
)
{
if (string.IsNullOrWhiteSpace(request.Url))
throw new RpcException(new Status(StatusCode.InvalidArgument, "url is required"));
await service.InvalidateCacheForUrlAsync(request.Url);
return new InvalidateLinkPreviewCacheResponse { Success = true };
}
}
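With the in-process WebReaderService removed from Messager later in this commit, link previews are now fetched over gRPC through handlers like the one above. A minimal client-side sketch of that call, assuming the generated Shared.Proto client; the channel address is illustrative, and real callers go through the shared-channel helpers registered by AddInsightService further down:

using Grpc.Net.Client;
using DysonNetwork.Shared.Proto;

// Illustrative address; the actual wiring resolves "https://_grpc.insight" via service discovery.
using var channel = GrpcChannel.ForAddress("https://insight.internal.example");
var client = new WebReaderService.WebReaderServiceClient(channel);

var reply = await client.GetLinkPreviewAsync(new GetLinkPreviewRequest
{
    Url = "https://example.com/article",
    BypassCache = false
});

Console.WriteLine(reply.Preview.Title);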

View File

@@ -1,3 +1,4 @@
using DysonNetwork.Insight.Reader;
using DysonNetwork.Shared.Http;
namespace DysonNetwork.Insight.Startup;
@@ -17,6 +18,11 @@ public static class ApplicationConfiguration
app.MapControllers();
app.MapGrpcService<WebReaderGrpcService>();
app.MapGrpcService<WebArticleGrpcService>();
app.MapGrpcService<WebFeedGrpcService>();
app.MapGrpcReflectionService();
return app;
}
}

View File

@@ -70,12 +70,6 @@ public class AppDatabase(
modelBuilder.ApplySoftDeleteFilters();
}
private static void SetSoftDeleteFilter<TEntity>(ModelBuilder modelBuilder)
where TEntity : ModelBase
{
modelBuilder.Entity<TEntity>().HasQueryFilter(e => e.DeletedAt == null);
}
public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
this.ApplyAuditableAndSoftDelete();

View File

@@ -6,7 +6,7 @@ using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Messager.Poll;
using DysonNetwork.Messager.Wallet;
using DysonNetwork.Messager.WebReader;
using DysonNetwork.Shared.Models.Embed;
using Grpc.Core;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

View File

@@ -2,8 +2,9 @@ using System.Text.RegularExpressions;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Messager.Chat.Realtime;
using DysonNetwork.Shared.Models.Embed;
using DysonNetwork.Shared.Registry;
using Microsoft.EntityFrameworkCore;
using DysonNetwork.Messager.WebReader;
using NodaTime;
using WebSocketPacket = DysonNetwork.Shared.Proto.WebSocketPacket;
@@ -16,7 +17,8 @@ public partial class ChatService(
FileReferenceService.FileReferenceServiceClient fileRefs,
IServiceScopeFactory scopeFactory,
IRealtimeService realtime,
- ILogger<ChatService> logger
+ ILogger<ChatService> logger,
+ RemoteWebReaderService webReader
)
{
private const string ChatFileUsageIdentifier = "chat";
@@ -36,10 +38,9 @@ public partial class ChatService(
// Create a new scope for database operations
using var scope = scopeFactory.CreateScope();
var dbContext = scope.ServiceProvider.GetRequiredService<AppDatabase>();
- var webReader = scope.ServiceProvider.GetRequiredService<WebReaderService>();
// Preview the links in the message
- var updatedMessage = await CreateLinkPreviewAsync(message, webReader);
+ var updatedMessage = await CreateLinkPreviewAsync(message);
// If embeds were added, update the message in the database
if (updatedMessage.Meta != null &&
@@ -111,7 +112,7 @@ public partial class ChatService(
/// <param name="message">The message to process</param>
/// <param name="webReader">The web reader service</param>
/// <returns>The message with link previews added to its meta data</returns>
- public async Task<SnChatMessage> CreateLinkPreviewAsync(SnChatMessage message, WebReaderService? webReader = null)
+ public async Task<SnChatMessage> CreateLinkPreviewAsync(SnChatMessage message)
{
if (string.IsNullOrEmpty(message.Content))
return message;
@@ -133,7 +134,6 @@ public partial class ChatService(
}
var embeds = (List<Dictionary<string, object>>)message.Meta["embeds"];
- webReader ??= scopeFactory.CreateScope().ServiceProvider.GetRequiredService<WebReaderService>();
// Process up to 3 links to avoid excessive processing
var processedLinks = 0;
@@ -153,7 +153,7 @@ public partial class ChatService(
continue;
// Preview the link
- var linkEmbed = await webReader.GetLinkPreviewAsync(url);
+ var linkEmbed = await webReader.GetLinkPreview(url);
embeds.Add(EmbeddableBase.ToDictionary(linkEmbed));
processedLinks++;
}

View File

@@ -18,6 +18,7 @@ builder.Services.AddAccountService();
builder.Services.AddRingService();
builder.Services.AddDriveService();
builder.Services.AddSphereService();
builder.Services.AddInsightService();
builder.Services.AddAppBusinessServices(builder.Configuration);
builder.Services.AddAppScheduledJobs();

View File

@@ -1,41 +0,0 @@
using System.Text.Json;
using DysonNetwork.Shared.Proto;
namespace DysonNetwork.Messager.WebReader;
/// <summary>
/// An embeddable can be placed in the embeds field of a post's or message's meta
/// to render richer content.
///
/// A simple example of using link preview embed:
/// <code>
/// {
/// // ... post content
/// "meta": {
/// "embeds": [
/// {
/// "type": "link",
/// "title: "...",
/// /// ...
/// }
/// ]
/// }
/// }
/// </code>
/// </summary>
public abstract class EmbeddableBase
{
public abstract string Type { get; }
public static Dictionary<string, object> ToDictionary(dynamic input)
{
var jsonRaw = JsonSerializer.Serialize(
input,
GrpcTypeHelper.SerializerOptionsWithoutIgnore
);
return JsonSerializer.Deserialize<Dictionary<string, object>>(
jsonRaw,
GrpcTypeHelper.SerializerOptionsWithoutIgnore
);
}
}

View File

@@ -1,55 +0,0 @@
namespace DysonNetwork.Messager.WebReader;
/// <summary>
/// The link embed is one of the embeddable implementations.
/// It can be placed in the embeds field of a post's or message's meta.
/// </summary>
public class LinkEmbed : EmbeddableBase
{
public override string Type => "link";
/// <summary>
/// The original URL that was processed
/// </summary>
public required string Url { get; set; }
/// <summary>
/// Title of the linked content (from OpenGraph og:title, meta title, or page title)
/// </summary>
public string? Title { get; set; }
/// <summary>
/// Description of the linked content (from OpenGraph og:description or meta description)
/// </summary>
public string? Description { get; set; }
/// <summary>
/// URL to the thumbnail image (from OpenGraph og:image or other meta tags)
/// </summary>
public string? ImageUrl { get; set; }
/// <summary>
/// The favicon URL of the site
/// </summary>
public string? FaviconUrl { get; set; }
/// <summary>
/// The site name (from OpenGraph og:site_name)
/// </summary>
public string? SiteName { get; set; }
/// <summary>
/// Type of the content (from OpenGraph og:type)
/// </summary>
public string? ContentType { get; set; }
/// <summary>
/// Author of the content if available
/// </summary>
public string? Author { get; set; }
/// <summary>
/// Published date of the content if available
/// </summary>
public DateTime? PublishedDate { get; set; }
}

View File

@@ -1,7 +0,0 @@
namespace DysonNetwork.Messager.WebReader;
public class ScrapedArticle
{
public LinkEmbed LinkEmbed { get; set; } = null!;
public string? Content { get; set; }
}

View File

@@ -1,110 +0,0 @@
using DysonNetwork.Shared.Auth;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
namespace DysonNetwork.Messager.WebReader;
/// <summary>
/// Controller for web scraping and link preview services
/// </summary>
[ApiController]
[Route("/api/scrap")]
[EnableRateLimiting("fixed")]
public class WebReaderController(WebReaderService reader, ILogger<WebReaderController> logger)
: ControllerBase
{
/// <summary>
/// Retrieves a preview for the provided URL
/// </summary>
/// <param name="url">URL-encoded link to generate preview for</param>
/// <returns>Link preview data including title, description, and image</returns>
[HttpGet("link")]
public async Task<ActionResult<LinkEmbed>> ScrapLink([FromQuery] string url)
{
if (string.IsNullOrEmpty(url))
{
return BadRequest(new { error = "URL parameter is required" });
}
try
{
// Ensure URL is properly decoded
var decodedUrl = UrlDecoder.Decode(url);
// Validate URL format
if (!Uri.TryCreate(decodedUrl, UriKind.Absolute, out _))
{
return BadRequest(new { error = "Invalid URL format" });
}
var linkEmbed = await reader.GetLinkPreviewAsync(decodedUrl);
return Ok(linkEmbed);
}
catch (WebReaderException ex)
{
logger.LogWarning(ex, "Error scraping link: {Url}", url);
return BadRequest(new { error = ex.Message });
}
catch (Exception ex)
{
logger.LogError(ex, "Unexpected error scraping link: {Url}", url);
return StatusCode(StatusCodes.Status500InternalServerError,
new { error = "An unexpected error occurred while processing the link" });
}
}
/// <summary>
/// Force invalidates the cache for a specific URL
/// </summary>
[HttpDelete("link/cache")]
[Authorize]
[AskPermission("cache.scrap")]
public async Task<IActionResult> InvalidateCache([FromQuery] string url)
{
if (string.IsNullOrEmpty(url))
{
return BadRequest(new { error = "URL parameter is required" });
}
await reader.InvalidateCacheForUrlAsync(url);
return Ok(new { message = "Cache invalidated for URL" });
}
/// <summary>
/// Force invalidates all cached link previews
/// </summary>
[HttpDelete("cache/all")]
[Authorize]
[AskPermission("cache.scrap")]
public async Task<IActionResult> InvalidateAllCache()
{
await reader.InvalidateAllCachedPreviewsAsync();
return Ok(new { message = "All link preview caches invalidated" });
}
}
/// <summary>
/// Helper class for URL decoding
/// </summary>
public static class UrlDecoder
{
public static string Decode(string url)
{
// First check if URL is already decoded
if (!url.Contains('%') && !url.Contains('+'))
{
return url;
}
try
{
return System.Net.WebUtility.UrlDecode(url);
}
catch
{
// If decoding fails, return the original string
return url;
}
}
}

View File

@@ -1,15 +0,0 @@
namespace DysonNetwork.Messager.WebReader;
/// <summary>
/// Exception thrown when an error occurs during web reading operations
/// </summary>
public class WebReaderException : Exception
{
public WebReaderException(string message) : base(message)
{
}
public WebReaderException(string message, Exception innerException) : base(message, innerException)
{
}
}

View File

@@ -1,367 +0,0 @@
using System.Globalization;
using AngleSharp;
using AngleSharp.Dom;
using DysonNetwork.Shared.Cache;
using HtmlAgilityPack;
namespace DysonNetwork.Messager.WebReader;
/// <summary>
/// This service provides scraping capabilities to the Solar Network,
/// such as news feeds, external articles, and link previews.
/// </summary>
public class WebReaderService(
IHttpClientFactory httpClientFactory,
ILogger<WebReaderService> logger,
ICacheService cache
)
{
private const string LinkPreviewCachePrefix = "scrap:preview:";
private const string LinkPreviewCacheGroup = "scrap:preview";
public async Task<ScrapedArticle> ScrapeArticleAsync(string url, CancellationToken cancellationToken = default)
{
var linkEmbed = await GetLinkPreviewAsync(url, cancellationToken);
var content = await GetArticleContentAsync(url, cancellationToken);
return new ScrapedArticle
{
LinkEmbed = linkEmbed,
Content = content
};
}
private async Task<string?> GetArticleContentAsync(string url, CancellationToken cancellationToken)
{
var httpClient = httpClientFactory.CreateClient("WebReader");
var response = await httpClient.GetAsync(url, cancellationToken);
if (!response.IsSuccessStatusCode)
{
logger.LogWarning("Failed to scrap article content for URL: {Url}", url);
return null;
}
var html = await response.Content.ReadAsStringAsync(cancellationToken);
var doc = new HtmlDocument();
doc.LoadHtml(html);
var articleNode = doc.DocumentNode.SelectSingleNode("//article");
return articleNode?.InnerHtml;
}
/// <summary>
/// Generate a link preview embed from a URL
/// </summary>
/// <param name="url">The URL to generate the preview for</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <param name="bypassCache">If true, bypass cache and fetch fresh data</param>
/// <param name="cacheExpiry">Custom cache expiration time</param>
/// <returns>A LinkEmbed object containing the preview data</returns>
public async Task<LinkEmbed> GetLinkPreviewAsync(
string url,
CancellationToken cancellationToken = default,
TimeSpan? cacheExpiry = null,
bool bypassCache = false
)
{
// Ensure URL is valid
if (!Uri.TryCreate(url, UriKind.Absolute, out var uri))
{
throw new ArgumentException(@"Invalid URL format", nameof(url));
}
// Try to get from cache if not bypassing
if (!bypassCache)
{
var cachedPreview = await GetCachedLinkPreview(url);
if (cachedPreview is not null)
return cachedPreview;
}
// Cache miss or bypass, fetch fresh data
logger.LogDebug("Fetching fresh link preview for URL: {Url}", url);
var httpClient = httpClientFactory.CreateClient("WebReader");
httpClient.MaxResponseContentBufferSize =
10 * 1024 * 1024; // 10 MB cap to avoid downloading large, directly accessible files
httpClient.Timeout = TimeSpan.FromSeconds(3);
// Set the User-Agent to Facebook's crawler so sites return their OpenGraph markup.
httpClient.DefaultRequestHeaders.Add("User-Agent", "facebookexternalhit/1.1");
try
{
var response = await httpClient.GetAsync(url, cancellationToken);
response.EnsureSuccessStatusCode();
var contentType = response.Content.Headers.ContentType?.MediaType;
if (contentType == null || !contentType.StartsWith("text/html"))
{
logger.LogWarning("URL is not an HTML page: {Url}, ContentType: {ContentType}", url, contentType);
var nonHtmlEmbed = new LinkEmbed
{
Url = url,
Title = uri.Host,
ContentType = contentType
};
// Cache non-HTML responses too
await CacheLinkPreview(nonHtmlEmbed, url, cacheExpiry);
return nonHtmlEmbed;
}
var html = await response.Content.ReadAsStringAsync(cancellationToken);
var linkEmbed = await ExtractLinkData(url, html, uri);
// Cache the result
await CacheLinkPreview(linkEmbed, url, cacheExpiry);
return linkEmbed;
}
catch (HttpRequestException ex)
{
logger.LogError(ex, "Failed to fetch URL: {Url}", url);
throw new WebReaderException($"Failed to fetch URL: {url}", ex);
}
}
private async Task<LinkEmbed> ExtractLinkData(string url, string html, Uri uri)
{
var embed = new LinkEmbed
{
Url = url
};
// Configure AngleSharp context
var config = Configuration.Default;
var context = BrowsingContext.New(config);
var document = await context.OpenAsync(req => req.Content(html));
// Extract OpenGraph tags
var ogTitle = GetMetaTagContent(document, "og:title");
var ogDescription = GetMetaTagContent(document, "og:description");
var ogImage = GetMetaTagContent(document, "og:image");
var ogSiteName = GetMetaTagContent(document, "og:site_name");
var ogType = GetMetaTagContent(document, "og:type");
// Extract Twitter card tags as fallback
var twitterTitle = GetMetaTagContent(document, "twitter:title");
var twitterDescription = GetMetaTagContent(document, "twitter:description");
var twitterImage = GetMetaTagContent(document, "twitter:image");
// Extract standard meta tags as final fallback
var metaTitle = GetMetaTagContent(document, "title") ??
GetMetaContent(document, "title");
var metaDescription = GetMetaTagContent(document, "description");
// Extract page title
var pageTitle = document.Title?.Trim();
// Extract publish date
var publishedTime = GetMetaTagContent(document, "article:published_time") ??
GetMetaTagContent(document, "datePublished") ??
GetMetaTagContent(document, "pubdate");
// Extract author
var author = GetMetaTagContent(document, "author") ??
GetMetaTagContent(document, "article:author");
// Extract favicon
var faviconUrl = GetFaviconUrl(document, uri);
// Populate the embed with the data, prioritizing OpenGraph
embed.Title = ogTitle ?? twitterTitle ?? metaTitle ?? pageTitle ?? uri.Host;
embed.Description = ogDescription ?? twitterDescription ?? metaDescription;
embed.ImageUrl = ResolveRelativeUrl(ogImage ?? twitterImage, uri);
embed.SiteName = ogSiteName ?? uri.Host;
embed.ContentType = ogType;
embed.FaviconUrl = faviconUrl;
embed.Author = author;
// Parse and set published date
if (!string.IsNullOrEmpty(publishedTime) &&
DateTime.TryParse(publishedTime, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal,
out DateTime parsedDate))
{
embed.PublishedDate = parsedDate;
}
return embed;
}
private static string? GetMetaTagContent(IDocument doc, string property)
{
// Check for OpenGraph/Twitter style meta tags
var node = doc.QuerySelector($"meta[property='{property}'][content]")
?? doc.QuerySelector($"meta[name='{property}'][content]");
return node?.GetAttribute("content")?.Trim();
}
private static string? GetMetaContent(IDocument doc, string name)
{
var node = doc.QuerySelector($"meta[name='{name}'][content]");
return node?.GetAttribute("content")?.Trim();
}
private static string? GetFaviconUrl(IDocument doc, Uri baseUri)
{
// Look for apple-touch-icon first as it's typically higher quality
var appleIconNode = doc.QuerySelector("link[rel='apple-touch-icon'][href]");
if (appleIconNode != null)
{
return ResolveRelativeUrl(appleIconNode.GetAttribute("href"), baseUri);
}
// Then check for standard favicon
var faviconNode = doc.QuerySelector("link[rel='icon'][href]") ??
doc.QuerySelector("link[rel='shortcut icon'][href]");
return faviconNode != null
? ResolveRelativeUrl(faviconNode.GetAttribute("href"), baseUri)
: new Uri(baseUri, "/favicon.ico").ToString();
}
private static string? ResolveRelativeUrl(string? url, Uri baseUri)
{
if (string.IsNullOrEmpty(url))
{
return null;
}
if (Uri.TryCreate(url, UriKind.Absolute, out _))
{
return url; // Already absolute
}
return Uri.TryCreate(baseUri, url, out var absoluteUri) ? absoluteUri.ToString() : null;
}
/// <summary>
/// Generate a hash-based cache key for a URL
/// </summary>
private string GenerateUrlCacheKey(string url)
{
// Normalize the URL first
var normalizedUrl = NormalizeUrl(url);
// Create SHA256 hash of the normalized URL
using var sha256 = System.Security.Cryptography.SHA256.Create();
var urlBytes = System.Text.Encoding.UTF8.GetBytes(normalizedUrl);
var hashBytes = sha256.ComputeHash(urlBytes);
// Convert to hex string
var hashString = BitConverter.ToString(hashBytes).Replace("-", "").ToLowerInvariant();
// Return prefixed key
return $"{LinkPreviewCachePrefix}{hashString}";
}
/// <summary>
/// Normalize URL by trimming trailing slashes but preserving query parameters
/// </summary>
private string NormalizeUrl(string url)
{
if (string.IsNullOrEmpty(url))
return string.Empty;
// First ensure we have a valid URI
if (!Uri.TryCreate(url, UriKind.Absolute, out var uri))
return url.TrimEnd('/');
// Rebuild the URL without trailing slashes but with query parameters
var scheme = uri.Scheme;
var host = uri.Host;
var port = uri.IsDefaultPort ? string.Empty : $":{uri.Port}";
var path = uri.AbsolutePath.TrimEnd('/');
var query = uri.Query;
return $"{scheme}://{host}{port}{path}{query}".ToLowerInvariant();
}
/// <summary>
/// Cache a link preview
/// </summary>
private async Task CacheLinkPreview(LinkEmbed? linkEmbed, string url, TimeSpan? expiry = null)
{
if (linkEmbed == null || string.IsNullOrEmpty(url))
return;
try
{
var cacheKey = GenerateUrlCacheKey(url);
var expiryTime = expiry ?? TimeSpan.FromHours(24);
await cache.SetWithGroupsAsync(
cacheKey,
linkEmbed,
[LinkPreviewCacheGroup],
expiryTime);
logger.LogDebug("Cached link preview for URL: {Url} with key: {CacheKey}", url, cacheKey);
}
catch (Exception ex)
{
// Log but don't throw - caching failures shouldn't break the main functionality
logger.LogWarning(ex, "Failed to cache link preview for URL: {Url}", url);
}
}
/// <summary>
/// Try to get a cached link preview
/// </summary>
private async Task<LinkEmbed?> GetCachedLinkPreview(string url)
{
if (string.IsNullOrEmpty(url))
return null;
try
{
var cacheKey = GenerateUrlCacheKey(url);
var cachedPreview = await cache.GetAsync<LinkEmbed>(cacheKey);
if (cachedPreview is not null)
logger.LogDebug("Retrieved cached link preview for URL: {Url}", url);
return cachedPreview;
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to retrieve cached link preview for URL: {Url}", url);
return null;
}
}
/// <summary>
/// Invalidate cache for a specific URL
/// </summary>
public async Task InvalidateCacheForUrlAsync(string url)
{
if (string.IsNullOrEmpty(url))
return;
try
{
var cacheKey = GenerateUrlCacheKey(url);
await cache.RemoveAsync(cacheKey);
logger.LogDebug("Invalidated cache for URL: {Url} with key: {CacheKey}", url, cacheKey);
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to invalidate cache for URL: {Url}", url);
}
}
/// <summary>
/// Invalidate all cached link previews
/// </summary>
public async Task InvalidateAllCachedPreviewsAsync()
{
try
{
await cache.RemoveGroupAsync(LinkPreviewCacheGroup);
logger.LogInformation("Invalidated all cached link previews");
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to invalidate all cached link previews");
}
}
}
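For reference while reading the cache logic in the file above: the key scheme normalizes the URL (lower-case, strip the trailing slash, keep the query), hashes it with SHA-256, and prefixes it with scrap:preview:. A standalone sketch of the same idea, assuming the scheme is kept as this logic moves into Insight; everything here is illustrative rather than the actual implementation:

using System.Security.Cryptography;
using System.Text;

static string CacheKeyFor(string url)
{
    // Trailing-slash and case differences should collapse to the same key.
    var uri = new Uri(url, UriKind.Absolute);
    var normalized =
        $"{uri.Scheme}://{uri.Host}{(uri.IsDefaultPort ? "" : $":{uri.Port}")}{uri.AbsolutePath.TrimEnd('/')}{uri.Query}"
            .ToLowerInvariant();
    var hash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(normalized))).ToLowerInvariant();
    return $"scrap:preview:{hash}";
}

// Both variants map to the same cache entry.
Console.WriteLine(CacheKeyFor("https://Example.com/Article/"));
Console.WriteLine(CacheKeyFor("https://example.com/Article"));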

View File

@@ -1,3 +1,6 @@
using DysonNetwork.Shared.Proto;
using Google.Protobuf.WellKnownTypes;
namespace DysonNetwork.Shared.Models.Embed;
/// <summary>
@@ -52,4 +55,54 @@ public class LinkEmbed : EmbeddableBase
/// Published date of the content if available
/// </summary>
public DateTime? PublishedDate { get; set; }
public Proto.LinkEmbed ToProtoValue()
{
var proto = new Proto.LinkEmbed
{
Url = Url
};
if (!string.IsNullOrEmpty(Title))
proto.Title = Title;
if (!string.IsNullOrEmpty(Description))
proto.Description = Description;
if (!string.IsNullOrEmpty(ImageUrl))
proto.ImageUrl = ImageUrl;
if (!string.IsNullOrEmpty(FaviconUrl))
proto.FaviconUrl = FaviconUrl;
if (!string.IsNullOrEmpty(SiteName))
proto.SiteName = SiteName;
if (!string.IsNullOrEmpty(ContentType))
proto.ContentType = ContentType;
if (!string.IsNullOrEmpty(Author))
proto.Author = Author;
if (PublishedDate.HasValue)
proto.PublishedDate = Timestamp.FromDateTime(PublishedDate.Value.ToUniversalTime());
return proto;
}
public static LinkEmbed FromProtoValue(Proto.LinkEmbed proto)
{
return new LinkEmbed
{
Url = proto.Url,
Title = proto.Title == "" ? null : proto.Title,
Description = proto.Description == "" ? null : proto.Description,
ImageUrl = proto.ImageUrl == "" ? null : proto.ImageUrl,
FaviconUrl = proto.FaviconUrl == "" ? null : proto.FaviconUrl,
SiteName = proto.SiteName == "" ? null : proto.SiteName,
ContentType = proto.ContentType == "" ? null : proto.ContentType,
Author = proto.Author == "" ? null : proto.Author,
PublishedDate = proto.PublishedDate != null ? proto.PublishedDate.ToDateTime() : null
};
}
}
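A small round-trip sketch of the mapping above, mainly to make the null and date handling concrete: unset proto3 strings come back as empty strings and are mapped to null, and Timestamp.FromDateTime requires a UTC value, hence the ToUniversalTime call. Everything outside the shared model type is illustrative:

using ModelsLinkEmbed = DysonNetwork.Shared.Models.Embed.LinkEmbed;

var embed = new ModelsLinkEmbed
{
    Url = "https://example.com/story",
    Title = "Example story",
    PublishedDate = new DateTime(2026, 1, 1, 12, 0, 0, DateTimeKind.Utc)
};

var proto = embed.ToProtoValue();                 // Description etc. stay unset
var back = ModelsLinkEmbed.FromProtoValue(proto);

// back.Description == null, back.PublishedDate == embed.PublishedDate (UTC)
Console.WriteLine($"{back.Title} / {back.PublishedDate:o}");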

View File

@@ -2,7 +2,10 @@ using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text.Json.Serialization;
using DysonNetwork.Shared.Models.Embed;
using DysonNetwork.Shared.Proto;
using Google.Protobuf.WellKnownTypes;
using NodaTime;
using EmbedLinkEmbed = DysonNetwork.Shared.Models.Embed.LinkEmbed;
namespace DysonNetwork.Shared.Models;
@@ -13,9 +16,9 @@ public class SnWebArticle : ModelBase
[MaxLength(4096)] public string Title { get; set; } = null!;
[MaxLength(8192)] public string Url { get; set; } = null!;
[MaxLength(4096)] public string? Author { get; set; }
[Column(TypeName = "jsonb")] public Dictionary<string, object>? Meta { get; set; }
[Column(TypeName = "jsonb")] public LinkEmbed? Preview { get; set; }
[Column(TypeName = "jsonb")] public EmbedLinkEmbed? Preview { get; set; }
// ReSharper disable once EntityFramework.ModelValidation.UnlimitedStringLength
public string? Content { get; set; }
@@ -24,11 +27,79 @@ public class SnWebArticle : ModelBase
public Guid FeedId { get; set; }
public SnWebFeed Feed { get; set; } = null!;
public WebArticle ToProtoValue()
{
var proto = new WebArticle
{
Id = Id.ToString(),
Title = Title,
Url = Url,
FeedId = FeedId.ToString(),
CreatedAt = Timestamp.FromDateTimeOffset(CreatedAt.ToDateTimeOffset()),
UpdatedAt = Timestamp.FromDateTimeOffset(UpdatedAt.ToDateTimeOffset())
};
if (!string.IsNullOrEmpty(Author))
proto.Author = Author;
if (Meta != null)
proto.Meta = GrpcTypeHelper.ConvertObjectToByteString(Meta);
if (Preview != null)
proto.Preview = Preview.ToProtoValue();
if (!string.IsNullOrEmpty(Content))
proto.Content = Content;
if (PublishedAt.HasValue)
proto.PublishedAt = Timestamp.FromDateTime(PublishedAt.Value.ToUniversalTime());
if (DeletedAt.HasValue)
proto.DeletedAt = Timestamp.FromDateTimeOffset(DeletedAt.Value.ToDateTimeOffset());
return proto;
}
public static SnWebArticle FromProtoValue(WebArticle proto)
{
return new SnWebArticle
{
Id = Guid.Parse(proto.Id),
Title = proto.Title,
Url = proto.Url,
FeedId = Guid.Parse(proto.FeedId),
Author = proto.Author == "" ? null : proto.Author,
Meta = proto.Meta != null ? GrpcTypeHelper.ConvertByteStringToObject<Dictionary<string, object>>(proto.Meta) : null,
Preview = proto.Preview != null ? EmbedLinkEmbed.FromProtoValue(proto.Preview) : null,
Content = proto.Content == "" ? null : proto.Content,
PublishedAt = proto.PublishedAt != null ? proto.PublishedAt.ToDateTime() : null,
CreatedAt = Instant.FromDateTimeOffset(proto.CreatedAt.ToDateTimeOffset()),
UpdatedAt = Instant.FromDateTimeOffset(proto.UpdatedAt.ToDateTimeOffset()),
DeletedAt = proto.DeletedAt != null ? Instant.FromDateTimeOffset(proto.DeletedAt.ToDateTimeOffset()) : null
};
}
}
public class WebFeedConfig
{
public bool ScrapPage { get; set; }
public Proto.WebFeedConfig ToProtoValue()
{
return new Proto.WebFeedConfig
{
ScrapPage = ScrapPage
};
}
public static WebFeedConfig FromProtoValue(Proto.WebFeedConfig proto)
{
return new WebFeedConfig
{
ScrapPage = proto.ScrapPage
};
}
}
public class SnWebFeed : ModelBase
@@ -37,25 +108,105 @@ public class SnWebFeed : ModelBase
[MaxLength(8192)] public string Url { get; set; } = null!;
[MaxLength(4096)] public string Title { get; set; } = null!;
[MaxLength(8192)] public string? Description { get; set; }
public Instant? VerifiedAt { get; set; }
[JsonIgnore] [MaxLength(8192)] public string? VerificationKey { get; set; }
[Column(TypeName = "jsonb")] public LinkEmbed? Preview { get; set; }
[Column(TypeName = "jsonb")] public EmbedLinkEmbed? Preview { get; set; }
[Column(TypeName = "jsonb")] public WebFeedConfig Config { get; set; } = new();
public Guid PublisherId { get; set; }
public SnPublisher Publisher { get; set; } = null!;
[JsonIgnore] public List<SnWebArticle> Articles { get; set; } = new();
public WebFeed ToProtoValue()
{
var proto = new WebFeed
{
Id = Id.ToString(),
Url = Url,
Title = Title,
Config = Config.ToProtoValue(),
PublisherId = PublisherId.ToString(),
CreatedAt = Timestamp.FromDateTimeOffset(CreatedAt.ToDateTimeOffset()),
UpdatedAt = Timestamp.FromDateTimeOffset(UpdatedAt.ToDateTimeOffset())
};
if (!string.IsNullOrEmpty(Description))
proto.Description = Description;
if (VerifiedAt.HasValue)
proto.VerifiedAt = Timestamp.FromDateTimeOffset(VerifiedAt.Value.ToDateTimeOffset());
if (Preview != null)
proto.Preview = Preview.ToProtoValue();
if (Publisher != null)
proto.Publisher = Publisher.ToProtoValue();
if (DeletedAt.HasValue)
proto.DeletedAt = Timestamp.FromDateTimeOffset(DeletedAt.Value.ToDateTimeOffset());
return proto;
}
public static SnWebFeed FromProtoValue(WebFeed proto)
{
return new SnWebFeed
{
Id = Guid.Parse(proto.Id),
Url = proto.Url,
Title = proto.Title,
Description = proto.Description == "" ? null : proto.Description,
VerifiedAt = proto.VerifiedAt != null ? Instant.FromDateTimeOffset(proto.VerifiedAt.ToDateTimeOffset()) : null,
Preview = proto.Preview != null ? EmbedLinkEmbed.FromProtoValue(proto.Preview) : null,
Config = WebFeedConfig.FromProtoValue(proto.Config),
PublisherId = Guid.Parse(proto.PublisherId),
Publisher = proto.Publisher != null ? SnPublisher.FromProtoValue(proto.Publisher) : null,
CreatedAt = Instant.FromDateTimeOffset(proto.CreatedAt.ToDateTimeOffset()),
UpdatedAt = Instant.FromDateTimeOffset(proto.UpdatedAt.ToDateTimeOffset()),
DeletedAt = proto.DeletedAt != null ? Instant.FromDateTimeOffset(proto.DeletedAt.ToDateTimeOffset()) : null
};
}
}
public class SnWebFeedSubscription : ModelBase
{
public Guid Id { get; set; } = Guid.NewGuid();
public Guid FeedId { get; set; }
public SnWebFeed Feed { get; set; } = null!;
public Guid AccountId { get; set; }
[NotMapped] public SnAccount Account { get; set; } = null!;
public WebFeedSubscription ToProtoValue()
{
var proto = new WebFeedSubscription
{
Id = Id.ToString(),
FeedId = FeedId.ToString(),
AccountId = AccountId.ToString(),
CreatedAt = Timestamp.FromDateTimeOffset(CreatedAt.ToDateTimeOffset()),
UpdatedAt = Timestamp.FromDateTimeOffset(UpdatedAt.ToDateTimeOffset())
};
if (Feed != null)
proto.Feed = Feed.ToProtoValue();
return proto;
}
public static SnWebFeedSubscription FromProtoValue(WebFeedSubscription proto)
{
return new SnWebFeedSubscription
{
Id = Guid.Parse(proto.Id),
FeedId = Guid.Parse(proto.FeedId),
Feed = proto.Feed != null ? SnWebFeed.FromProtoValue(proto.Feed) : null,
AccountId = Guid.Parse(proto.AccountId),
CreatedAt = Instant.FromDateTimeOffset(proto.CreatedAt.ToDateTimeOffset()),
UpdatedAt = Instant.FromDateTimeOffset(proto.UpdatedAt.ToDateTimeOffset())
};
}
}

View File

@@ -0,0 +1,19 @@
syntax = "proto3";
package proto;
option csharp_namespace = "DysonNetwork.Shared.Proto";
import "google/protobuf/timestamp.proto";
message LinkEmbed {
string url = 1;
optional string title = 2;
optional string description = 3;
optional string image_url = 4;
optional string favicon_url = 5;
optional string site_name = 6;
optional string content_type = 7;
optional string author = 8;
optional google.protobuf.Timestamp published_date = 9;
}

View File

@@ -0,0 +1,160 @@
syntax = "proto3";
package proto;
option csharp_namespace = "DysonNetwork.Shared.Proto";
import "google/protobuf/timestamp.proto";
import "embed.proto";
import "publisher.proto";
message WebFeedConfig {
bool scrap_page = 1;
}
message WebFeed {
string id = 1;
string url = 2;
string title = 3;
optional string description = 4;
optional google.protobuf.Timestamp verified_at = 5;
optional LinkEmbed preview = 6;
WebFeedConfig config = 7;
string publisher_id = 8;
optional Publisher publisher = 9;
google.protobuf.Timestamp created_at = 10;
google.protobuf.Timestamp updated_at = 11;
optional google.protobuf.Timestamp deleted_at = 12;
}
message WebArticle {
string id = 1;
string title = 2;
string url = 3;
optional string author = 4;
optional bytes meta = 5;
optional LinkEmbed preview = 6;
optional string content = 7;
optional google.protobuf.Timestamp published_at = 8;
string feed_id = 9;
optional WebFeed feed = 10;
google.protobuf.Timestamp created_at = 11;
google.protobuf.Timestamp updated_at = 12;
optional google.protobuf.Timestamp deleted_at = 13;
}
message WebFeedSubscription {
string id = 1;
string feed_id = 2;
optional WebFeed feed = 3;
string account_id = 4;
google.protobuf.Timestamp created_at = 5;
google.protobuf.Timestamp updated_at = 6;
}
message ScrapedArticle {
LinkEmbed link_embed = 1;
optional string content = 2;
}
message GetWebArticleRequest {
string id = 1;
}
message GetWebArticleResponse {
WebArticle article = 1;
}
message GetWebArticleBatchRequest {
repeated string ids = 1;
}
message GetWebArticleBatchResponse {
repeated WebArticle articles = 1;
}
message ListWebArticlesRequest {
string feed_id = 1;
int32 page_size = 2;
string page_token = 3;
}
message ListWebArticlesResponse {
repeated WebArticle articles = 1;
string next_page_token = 2;
int32 total_size = 3;
}
message GetRecentArticlesRequest {
int32 limit = 1;
}
message GetRecentArticlesResponse {
repeated WebArticle articles = 1;
}
message GetWebFeedRequest {
oneof identifier {
string id = 1;
string url = 2;
}
}
message GetWebFeedResponse {
WebFeed feed = 1;
}
message ListWebFeedsRequest {
string publisher_id = 1;
int32 page_size = 2;
string page_token = 3;
}
message ListWebFeedsResponse {
repeated WebFeed feeds = 1;
string next_page_token = 2;
int32 total_size = 3;
}
message ScrapeArticleRequest {
string url = 1;
}
message ScrapeArticleResponse {
ScrapedArticle article = 1;
}
message GetLinkPreviewRequest {
string url = 1;
bool bypass_cache = 2;
}
message GetLinkPreviewResponse {
LinkEmbed preview = 1;
}
message InvalidateLinkPreviewCacheRequest {
string url = 1;
}
message InvalidateLinkPreviewCacheResponse {
bool success = 1;
}
service WebArticleService {
rpc GetWebArticle(GetWebArticleRequest) returns (GetWebArticleResponse);
rpc GetWebArticleBatch(GetWebArticleBatchRequest) returns (GetWebArticleBatchResponse);
rpc ListWebArticles(ListWebArticlesRequest) returns (ListWebArticlesResponse);
rpc GetRecentArticles(GetRecentArticlesRequest) returns (GetRecentArticlesResponse);
}
service WebFeedService {
rpc GetWebFeed(GetWebFeedRequest) returns (GetWebFeedResponse);
rpc ListWebFeeds(ListWebFeedsRequest) returns (ListWebFeedsResponse);
}
service WebReaderService {
rpc ScrapeArticle(ScrapeArticleRequest) returns (ScrapeArticleResponse);
rpc GetLinkPreview(GetLinkPreviewRequest) returns (GetLinkPreviewResponse);
rpc InvalidateLinkPreviewCache(InvalidateLinkPreviewCacheRequest) returns (InvalidateLinkPreviewCacheResponse);
}
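Because GetWebFeedRequest uses a oneof identifier, a C# caller sets exactly one of id or url; assigning the other member afterwards would clear the first. A brief sketch of resolving a feed either way, assuming a configured WebFeedServiceClient; the helper class itself is hypothetical:

using DysonNetwork.Shared.Proto;

public static class WebFeedLookup
{
    // Resolves a feed by GUID or by URL, using whichever oneof member applies.
    public static async Task<WebFeed> LookupAsync(
        WebFeedService.WebFeedServiceClient client, string urlOrId)
    {
        var request = Guid.TryParse(urlOrId, out _)
            ? new GetWebFeedRequest { Id = urlOrId }   // IdentifierOneofCase.Id
            : new GetWebFeedRequest { Url = urlOrId }; // IdentifierOneofCase.Url

        var response = await client.GetWebFeedAsync(request);
        return response.Feed; // the server answers NotFound when nothing matches
    }
}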

View File

@@ -0,0 +1,36 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
namespace DysonNetwork.Shared.Registry;
public class RemoteWebArticleService(WebArticleService.WebArticleServiceClient webArticles)
{
public async Task<SnWebArticle> GetWebArticle(Guid id)
{
var request = new GetWebArticleRequest { Id = id.ToString() };
var response = await webArticles.GetWebArticleAsync(request);
return response.Article != null ? SnWebArticle.FromProtoValue(response.Article) : null!;
}
public async Task<List<SnWebArticle>> GetWebArticleBatch(List<Guid> ids)
{
var request = new GetWebArticleBatchRequest();
request.Ids.AddRange(ids.Select(id => id.ToString()));
var response = await webArticles.GetWebArticleBatchAsync(request);
return response.Articles.Select(SnWebArticle.FromProtoValue).ToList();
}
public async Task<List<SnWebArticle>> ListWebArticles(Guid feedId)
{
var request = new ListWebArticlesRequest { FeedId = feedId.ToString() };
var response = await webArticles.ListWebArticlesAsync(request);
return response.Articles.Select(SnWebArticle.FromProtoValue).ToList();
}
public async Task<List<SnWebArticle>> GetRecentArticles(int limit = 20)
{
var request = new GetRecentArticlesRequest { Limit = limit };
var response = await webArticles.GetRecentArticlesAsync(request);
return response.Articles.Select(SnWebArticle.FromProtoValue).ToList();
}
}
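Other services are expected to consume articles through this wrapper rather than the raw generated client. A short usage sketch; the DigestBuilder class is hypothetical, and the wrapper is assumed to be registered by AddInsightService further down:

using DysonNetwork.Shared.Registry;

public class DigestBuilder(RemoteWebArticleService articles)
{
    public async Task<string> BuildAsync()
    {
        // Pulls the latest articles across all feeds via the Insight gRPC service.
        var recent = await articles.GetRecentArticles(limit: 10);
        return string.Join("\n", recent.Select(a => $"{a.Title} - {a.Url}"));
    }
}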

View File

@@ -0,0 +1,28 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;
namespace DysonNetwork.Shared.Registry;
public class RemoteWebFeedService(WebFeedService.WebFeedServiceClient webFeeds)
{
public async Task<SnWebFeed> GetWebFeed(Guid id)
{
var request = new GetWebFeedRequest { Id = id.ToString() };
var response = await webFeeds.GetWebFeedAsync(request);
return response.Feed != null ? SnWebFeed.FromProtoValue(response.Feed) : null!;
}
public async Task<SnWebFeed> GetWebFeedByUrl(string url)
{
var request = new GetWebFeedRequest { Url = url };
var response = await webFeeds.GetWebFeedAsync(request);
return response.Feed != null ? SnWebFeed.FromProtoValue(response.Feed) : null!;
}
public async Task<List<SnWebFeed>> ListWebFeeds(Guid publisherId)
{
var request = new ListWebFeedsRequest { PublisherId = publisherId.ToString() };
var response = await webFeeds.ListWebFeedsAsync(request);
return response.Feeds.Select(SnWebFeed.FromProtoValue).ToList();
}
}

View File

@@ -0,0 +1,34 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Models.Embed;
using DysonNetwork.Shared.Proto;
using ProtoLinkEmbed = DysonNetwork.Shared.Proto.LinkEmbed;
using ModelsLinkEmbed = DysonNetwork.Shared.Models.Embed.LinkEmbed;
namespace DysonNetwork.Shared.Registry;
public class RemoteWebReaderService(WebReaderService.WebReaderServiceClient webReader)
{
public async Task<(ModelsLinkEmbed LinkEmbed, string? Content)> ScrapeArticle(string url)
{
var request = new ScrapeArticleRequest { Url = url };
var response = await webReader.ScrapeArticleAsync(request);
return (
LinkEmbed: response.Article?.LinkEmbed != null ? ModelsLinkEmbed.FromProtoValue(response.Article.LinkEmbed) : null!,
Content: response.Article?.Content == "" ? null : response.Article?.Content
);
}
public async Task<ModelsLinkEmbed> GetLinkPreview(string url, bool bypassCache = false)
{
var request = new GetLinkPreviewRequest { Url = url, BypassCache = bypassCache };
var response = await webReader.GetLinkPreviewAsync(request);
return response.Preview != null ? ModelsLinkEmbed.FromProtoValue(response.Preview) : null!;
}
public async Task<bool> InvalidateLinkPreviewCache(string url)
{
var request = new InvalidateLinkPreviewCacheRequest { Url = url };
var response = await webReader.InvalidateLinkPreviewCacheAsync(request);
return response.Success;
}
}
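This is the wrapper the ChatService change earlier in the commit switches to (webReader.GetLinkPreview(url) in place of the old in-process GetLinkPreviewAsync). A condensed sketch of that call pattern; the enricher class is hypothetical, not the actual ChatService:

using DysonNetwork.Shared.Registry;
using ModelsLinkEmbed = DysonNetwork.Shared.Models.Embed.LinkEmbed;

public class LinkPreviewEnricher(RemoteWebReaderService webReader)
{
    public async Task<ModelsLinkEmbed?> TryPreviewAsync(string url)
    {
        try
        {
            // Round-trips through Insight's WebReaderGrpcService instead of scraping locally.
            return await webReader.GetLinkPreview(url);
        }
        catch (Grpc.Core.RpcException)
        {
            // Treat an unreachable reader or a bad URL as "no preview" rather than failing the message.
            return null;
        }
    }
}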

View File

@@ -21,7 +21,6 @@ public static class ServiceInjectionHelper
services.AddGrpcClientWithSharedChannel<AuthService.AuthServiceClient>(
"https://_grpc.pass",
"AuthService");
services.AddGrpcClientWithSharedChannel<PermissionService.PermissionServiceClient>(
"https://_grpc.pass",
"PermissionService");
@@ -39,19 +38,15 @@ public static class ServiceInjectionHelper
services.AddGrpcClientWithSharedChannel<BotAccountReceiverService.BotAccountReceiverServiceClient>(
"https://_grpc.pass",
"BotAccountReceiverService");
services.AddGrpcClientWithSharedChannel<ActionLogService.ActionLogServiceClient>(
"https://_grpc.pass",
"ActionLogService");
services.AddGrpcClientWithSharedChannel<PaymentService.PaymentServiceClient>(
"https://_grpc.pass",
"PaymentService");
services.AddGrpcClientWithSharedChannel<WalletService.WalletServiceClient>(
"https://_grpc.pass",
"WalletService");
services.AddGrpcClientWithSharedChannel<RealmService.RealmServiceClient>(
"https://_grpc.pass",
"RealmService");
@@ -107,5 +102,24 @@ public static class ServiceInjectionHelper
return services;
}
public static IServiceCollection AddInsightService(this IServiceCollection services)
{
services.AddGrpcClientWithSharedChannel<WebFeedService.WebFeedServiceClient>(
"https://_grpc.insight",
"WebFeedServiceClient");
services.AddGrpcClientWithSharedChannel<WebArticleService.WebArticleServiceClient>(
"https://_grpc.insight",
"WebArticleService");
services.AddGrpcClientWithSharedChannel<WebReaderService.WebReaderServiceClient>(
"https://_grpc.insight",
"WebReaderServiceClient");
services.AddSingleton<RemoteWebFeedService>();
services.AddSingleton<RemoteWebReaderService>();
services.AddSingleton<RemoteWebArticleService>();
return services;
}
}
}

View File

@@ -149,12 +149,6 @@ public class AppDatabase(
modelBuilder.ApplySoftDeleteFilters();
}
private static void SetSoftDeleteFilter<TEntity>(ModelBuilder modelBuilder)
where TEntity : ModelBase
{
modelBuilder.Entity<TEntity>().HasQueryFilter(e => e.DeletedAt == null);
}
public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
this.ApplyAuditableAndSoftDelete();

View File

@@ -98,6 +98,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\DysonNetwork.Insight\DysonNetwork.Insight.csproj" />
<ProjectReference Include="..\DysonNetwork.Shared\DysonNetwork.Shared.csproj" />
</ItemGroup>

View File

@@ -3,7 +3,6 @@ using System;
using System.Collections.Generic;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1304,9 +1303,9 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
// b.Property<LinkEmbed>("Preview")
// .HasColumnType("jsonb")
// .HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1366,9 +1365,9 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
// b.Property<LinkEmbed>("Preview")
// .HasColumnType("jsonb")
// .HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -1,5 +1,5 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
using NpgsqlTypes;
@@ -439,7 +439,6 @@ namespace DysonNetwork.Sphere.Migrations
url = table.Column<string>(type: "character varying(8192)", maxLength: 8192, nullable: false),
title = table.Column<string>(type: "character varying(4096)", maxLength: 4096, nullable: false),
description = table.Column<string>(type: "character varying(8192)", maxLength: 8192, nullable: true),
preview = table.Column<LinkEmbed>(type: "jsonb", nullable: true),
config = table.Column<WebFeedConfig>(type: "jsonb", nullable: false),
publisher_id = table.Column<Guid>(type: "uuid", nullable: false),
created_at = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
@@ -692,7 +691,6 @@ namespace DysonNetwork.Sphere.Migrations
url = table.Column<string>(type: "character varying(8192)", maxLength: 8192, nullable: false),
author = table.Column<string>(type: "character varying(4096)", maxLength: 4096, nullable: true),
meta = table.Column<Dictionary<string, object>>(type: "jsonb", nullable: true),
preview = table.Column<LinkEmbed>(type: "jsonb", nullable: true),
content = table.Column<string>(type: "text", nullable: true),
published_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: true),
feed_id = table.Column<Guid>(type: "uuid", nullable: false),

View File

@@ -3,7 +3,6 @@ using System;
using System.Collections.Generic;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1304,9 +1303,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1366,9 +1362,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,6 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1451,9 +1450,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1513,9 +1509,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,6 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1487,9 +1486,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1549,9 +1545,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1305,9 +1305,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1367,9 +1364,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1345,9 +1345,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1407,9 +1404,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1352,9 +1352,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1414,9 +1411,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1352,9 +1352,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1414,9 +1411,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1360,9 +1360,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1422,9 +1419,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1360,9 +1360,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1422,9 +1419,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1364,9 +1364,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1426,9 +1423,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1407,9 +1407,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1469,9 +1466,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1460,9 +1460,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1522,9 +1519,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1459,9 +1459,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1521,9 +1518,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1463,9 +1463,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1525,9 +1522,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1467,9 +1467,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1529,9 +1526,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1468,9 +1468,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1530,9 +1527,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1337,9 +1337,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1399,9 +1396,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1317,9 +1317,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1379,9 +1376,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1303,9 +1303,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1365,9 +1362,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1405,9 +1405,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1467,9 +1464,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1303,9 +1303,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1365,9 +1362,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1307,9 +1307,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1369,9 +1366,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1306,9 +1306,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1368,9 +1365,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1306,9 +1306,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1368,9 +1365,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
@@ -1761,9 +1761,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1823,9 +1820,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
using System.Text.Json;
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
@@ -1758,9 +1758,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("jsonb")
.HasColumnName("meta");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<DateTime?>("PublishedAt")
.HasColumnType("timestamp with time zone")
@@ -1820,9 +1817,6 @@ namespace DysonNetwork.Sphere.Migrations
.HasColumnType("character varying(8192)")
.HasColumnName("description");
b.Property<LinkEmbed>("Preview")
.HasColumnType("jsonb")
.HasColumnName("preview");
b.Property<Guid>("PublisherId")
.HasColumnType("uuid")

View File

@@ -1,5 +1,6 @@
using DysonNetwork.Shared.Models;
using DysonNetwork.Sphere.WebReader;
using DysonNetwork.Shared.Models.Embed;
namespace DysonNetwork.Sphere.Poll;

View File

@@ -3,11 +3,12 @@ using System.Globalization;
using DysonNetwork.Shared.Auth;
using DysonNetwork.Shared.Data;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Models.Embed;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Registry;
using DysonNetwork.Sphere.Poll;
using DysonNetwork.Sphere.Wallet;
using DysonNetwork.Sphere.WebReader;
using Grpc.Core;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

View File

@@ -3,7 +3,7 @@ using DysonNetwork.Shared;
using DysonNetwork.Shared.Cache;
using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Registry;
using DysonNetwork.Sphere.WebReader;
using DysonNetwork.Sphere.Localization;
using DysonNetwork.Sphere.Publisher;
using DysonNetwork.Sphere.ActivityPub;
@@ -12,6 +12,7 @@ using Microsoft.Extensions.Localization;
using NodaTime;
using Markdig;
using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Models.Embed;
namespace DysonNetwork.Sphere.Post;
@@ -25,7 +26,7 @@ public partial class PostService(
FileService.FileServiceClient files,
FileReferenceService.FileReferenceServiceClient fileRefs,
Publisher.PublisherService ps,
WebReaderService reader,
RemoteWebReaderService reader,
AccountService.AccountServiceClient accounts,
ActivityPubObjectFactory objFactory
)
@@ -365,7 +366,7 @@ public partial class PostService(
continue;
// Preview the link
var linkEmbed = await reader.GetLinkPreviewAsync(url);
var linkEmbed = await reader.GetLinkPreview(url);
embeds.Add(EmbeddableBase.ToDictionary(linkEmbed));
processedLinks++;
}
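With the scraper moved to Insight, PostService no longer calls a local WebReaderService; link previews come back over gRPC through the injected RemoteWebReaderService. A minimal sketch of what that wrapper could look like, assuming a WebReaderService stub with a GetLinkPreview RPC exists in DysonNetwork.Shared.Proto; the RPC and message names are assumptions, not the actual contract.

using DysonNetwork.Shared.Models.Embed;
using DysonNetwork.Shared.Proto;

// Hypothetical wrapper behind the RemoteWebReaderService dependency above.
public class RemoteWebReaderService(WebReaderService.WebReaderServiceClient client)
{
    public async Task<LinkEmbed> GetLinkPreview(string url)
    {
        // Insight scrapes the page and returns the embed; Sphere only consumes it.
        var response = await client.GetLinkPreviewAsync(
            new GetLinkPreviewRequest { Url = url });
        return LinkEmbed.FromProtoValue(response.LinkEmbed);
    }
}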

View File

@@ -20,6 +20,7 @@ builder.Services.AddDysonAuth();
builder.Services.AddAccountService();
builder.Services.AddRingService();
builder.Services.AddDriveService();
builder.Services.AddInsightService();
builder.Services.AddAppFlushHandlers();
builder.Services.AddAppBusinessServices(builder.Configuration);
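AddInsightService() is the registry extension that points Sphere at the new Insight endpoints. A minimal sketch of what such an extension might register, using the standard Grpc.Net.ClientFactory AddGrpcClient registration; the client list, wrapper types, and "https+http://insight" service-discovery address are assumptions about the real implementation in DysonNetwork.Shared.

using DysonNetwork.Shared.Proto;
using Microsoft.Extensions.DependencyInjection;

// Hypothetical sketch; the actual extension may register different clients.
public static class InsightServiceCollectionExtensions
{
    public static IServiceCollection AddInsightService(this IServiceCollection services)
    {
        // Aspire-style service discovery address (assumed).
        services.AddGrpcClient<WebArticleService.WebArticleServiceClient>(o =>
            o.Address = new Uri("https+http://insight"));

        // Thin wrappers consumed by Sphere (e.g. TimelineService, PostService).
        services.AddScoped<RemoteWebArticleService>();
        services.AddScoped<RemoteWebReaderService>();
        return services;
    }
}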

View File

@@ -1,7 +1,8 @@
using DysonNetwork.Insight.Reader;
using DysonNetwork.Sphere.ActivityPub;
using DysonNetwork.Sphere.Post;
using DysonNetwork.Sphere.Publisher;
using DysonNetwork.Sphere.WebReader;
using Quartz;
namespace DysonNetwork.Sphere.Startup;

View File

@@ -12,7 +12,7 @@ using DysonNetwork.Sphere.Post;
using DysonNetwork.Sphere.Publisher;
using DysonNetwork.Sphere.Timeline;
using DysonNetwork.Sphere.Translation;
using DysonNetwork.Sphere.WebReader;
using NodaTime;
using NodaTime.Serialization.SystemTextJson;
@@ -93,8 +93,6 @@ public static class ServiceCollectionExtensions
services.AddScoped<PublisherSubscriptionService>();
services.AddScoped<TimelineService>();
services.AddScoped<PostService>();
services.AddScoped<WebReaderService>();
services.AddScoped<WebFeedService>();
services.AddScoped<DiscoveryService>();
services.AddScoped<PollService>();
services.AddScoped<AutocompletionService>();

View File

@@ -3,7 +3,6 @@ using DysonNetwork.Shared.Proto;
using DysonNetwork.Shared.Registry;
using DysonNetwork.Sphere.Discovery;
using DysonNetwork.Sphere.Post;
using DysonNetwork.Sphere.WebReader;
using Microsoft.EntityFrameworkCore;
using NodaTime;
@@ -15,7 +14,8 @@ public class TimelineService(
Post.PostService ps,
RemoteRealmService rs,
DiscoveryService ds,
AccountService.AccountServiceClient accounts
AccountService.AccountServiceClient accounts,
RemoteWebArticleService webArticles
)
{
private static double CalculateHotRank(SnPost post, Instant now)
@@ -235,40 +235,10 @@ public class TimelineService(
).ToActivity();
}
private async Task<SnTimelineEvent?> GetArticleDiscoveryActivity(
int count = 5,
int feedSampleSize = 10
)
private async Task<SnTimelineEvent?> GetArticleDiscoveryActivity(int count = 5)
{
var now = SystemClock.Instance.GetCurrentInstant();
var today = now.InZone(DateTimeZone.Utc).Date;
var todayBegin = today.AtStartOfDayInZone(DateTimeZone.Utc).ToInstant();
var todayEnd = today.PlusDays(1).AtStartOfDayInZone(DateTimeZone.Utc).ToInstant();
var recentFeedIds = await db
.WebArticles.Where(a => a.CreatedAt >= todayBegin && a.CreatedAt < todayEnd)
.GroupBy(a => a.FeedId)
.OrderByDescending(g => g.Max(a => a.PublishedAt))
.Take(feedSampleSize)
.Select(g => g.Key)
.ToListAsync();
var recentArticles = new List<WebArticle>();
var random = new Random();
foreach (var feedId in recentFeedIds.OrderBy(_ => random.Next()))
{
var article = await db
.WebArticles.Include(a => a.Feed)
.Where(a => a.FeedId == feedId)
.OrderBy(_ => EF.Functions.Random())
.FirstOrDefaultAsync();
if (article == null)
continue;
recentArticles.Add(article);
if (recentArticles.Count >= count)
break;
}
var recentArticles = await webArticles.GetRecentArticles(count);
return recentArticles.Count > 0
? new TimelineDiscoveryEvent(
@@ -379,4 +349,4 @@ public class TimelineService(
var postCount = posts.Count;
return score + postCount;
}
}
}
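The per-feed sampling loop now lives behind the Insight boundary, and TimelineService simply asks the remote service for today's picks. A minimal sketch of the wrapper that call implies, assuming Insight's WebArticleService exposes a GetRecentArticles RPC and that articles map back to the shared model via FromProtoValue; the RPC, request shape, and model type name are assumptions.

using DysonNetwork.Shared.Models;
using DysonNetwork.Shared.Proto;

// Hypothetical client-side wrapper; the sampling logic itself moves to Insight.
public class RemoteWebArticleService(WebArticleService.WebArticleServiceClient client)
{
    public async Task<List<SnWebArticle>> GetRecentArticles(int count = 5)
    {
        var response = await client.GetRecentArticlesAsync(
            new GetRecentArticlesRequest { Count = count });
        return response.Articles.Select(SnWebArticle.FromProtoValue).ToList();
    }
}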

View File

@@ -1,4 +1,4 @@
using DysonNetwork.Sphere.WebReader;
using DysonNetwork.Shared.Models.Embed;
namespace DysonNetwork.Sphere.Wallet;

View File

@@ -30,6 +30,7 @@
<PackageReference Include="Quartz" Version="3.15.1" />
<PackageReference Include="Quartz.AspNetCore" Version="3.15.1" />
<PackageReference Include="SimpleMvcSitemap" Version="4.0.1" />
<PackageReference Include="System.ServiceModel.Syndication" Version="10.0.1" />
</ItemGroup>
<ItemGroup>
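System.ServiceModel.Syndication is the BCL package for RSS/Atom handling, which the web feed reader needs once it parses feeds itself. A minimal usage sketch with a placeholder feed URL:

using System.ServiceModel.Syndication;
using System.Xml;

// Load a remote RSS/Atom feed and enumerate its entries.
using var xml = XmlReader.Create("https://example.com/feed.xml");
var feed = SyndicationFeed.Load(xml);
foreach (var item in feed.Items)
{
    var title = item.Title?.Text;
    var link = item.Links.FirstOrDefault()?.Uri;
    var published = item.PublishDate; // DateTimeOffset
}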