♻️ Replace LangChain with Semantic Kernel

This commit is contained in:
2025-10-25 17:07:29 +08:00
parent bbcaa27ac5
commit 40325c6df5
5 changed files with 59 additions and 126 deletions

View File

@@ -8,11 +8,9 @@
<ItemGroup>
<PackageReference Include="EFCore.NamingConventions" Version="9.0.0" />
<PackageReference Include="LangChain" Version="0.17.0" />
<PackageReference Include="LangChain.Databases.Postgres" Version="0.17.0" />
<PackageReference Include="LangChain.Serve" Version="0.17.0" />
<PackageReference Include="LangChain.Serve.OpenAI" Version="0.17.0" />
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.10" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.66.0" />
<PackageReference Include="Microsoft.SemanticKernel.Connectors.Ollama" Version="1.66.0-alpha" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
</ItemGroup>

View File

@@ -1,9 +1,7 @@
using DysonNetwork.Insight;
using DysonNetwork.Insight.Startup;
using DysonNetwork.Shared.Http;
using LangChain.Extensions.DependencyInjection;
using Microsoft.EntityFrameworkCore;
using LangChain.Serve;
var builder = WebApplication.CreateBuilder(args);
@@ -17,7 +15,6 @@ builder.Services.AddAppAuthentication();
builder.Services.AddAppFlushHandlers();
builder.Services.AddAppBusinessServices();
builder.Services.AddThinkingServices(builder.Configuration);
builder.Services.AddLangChainServe();
builder.AddSwaggerManifest(
"DysonNetwork.Insight",
@@ -38,4 +35,4 @@ app.ConfigureAppMiddleware(builder.Configuration);
app.UseSwaggerManifest("DysonNetwork.Insight");
app.Run();
app.Run();

View File

@@ -2,13 +2,7 @@ using System.Text.Json;
using System.Text.Json.Serialization;
using DysonNetwork.Insight.Thinking;
using DysonNetwork.Shared.Cache;
using LangChain.Memory;
using LangChain.Serve;
using LangChain.Serve.Abstractions.Repository;
using LangChain.Serve.OpenAI;
using static LangChain.Chains.Chain;
using Message = LangChain.Providers.Message;
using MessageRole = LangChain.Providers.MessageRole;
using Microsoft.SemanticKernel;
using NodaTime;
using NodaTime.Serialization.SystemTextJson;
@@ -71,45 +65,9 @@ public static class ServiceCollectionExtensions
public static IServiceCollection AddThinkingServices(this IServiceCollection services, IConfiguration configuration)
{
var modelProvider = new ThinkingProvider(configuration);
services.AddSingleton(modelProvider);
services.AddCustomNameGenerator(async messages =>
{
var template =
@"You will be given conversation between User and Assistant. Your task is to give name to this conversation using maximum 3 words
Conversation:
{chat_history}
Your name: ";
var conversationBufferMemory = await ConvertToConversationBuffer(messages);
var chain = LoadMemory(conversationBufferMemory, "chat_history")
| Template(template)
| LLM(modelProvider.GetModel());
return await chain.RunAsync("text") ?? string.Empty;
});
var thinkingProvider = new ThinkingProvider(configuration);
services.AddSingleton(thinkingProvider);
return services;
}
private static async Task<ConversationBufferMemory> ConvertToConversationBuffer(
IReadOnlyCollection<StoredMessage> list
)
{
var conversationBufferMemory = new ConversationBufferMemory
{
Formatter =
{
HumanPrefix = "User",
AiPrefix = "Assistant",
}
};
List<Message> converted = list
.Select(x => new Message(x.Content, x.Author == MessageAuthor.User ? MessageRole.Human : MessageRole.Ai))
.ToList();
await conversationBufferMemory.ChatHistory.AddMessages(converted);
return conversationBufferMemory;
}
}
}

View File

@@ -1,8 +1,9 @@
using System.ComponentModel.DataAnnotations;
using LangChain.Providers;
using Microsoft.AspNetCore.Mvc;
using Microsoft.SemanticKernel.ChatCompletion;
using System.Text;
using CSharpToJsonSchema;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
namespace DysonNetwork.Insight.Thinking;
@@ -22,74 +23,42 @@ public class ThinkingController(ThinkingProvider provider) : ControllerBase
Response.Headers.Append("Content-Type", "text/event-stream");
Response.StatusCode = 200;
var model = provider.GetModel();
var kernel = provider.Kernel;
// Build conversation history (you may load from your memory store)
var messages = new List<Message>
{
new Message
{
Role = MessageRole.System,
Content =
"You're a helpful assistant on the Solar Network, a social network. Your name is Sn-chan, a cute sweet heart with passion for almost everything."
},
new Message
{
Role = MessageRole.Human,
Content = request.UserMessage
}
};
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
// Mock tool definitions — you will replace these with your real tool calls
Func<string, Task<string>> getUserProfileAsync = async (userId) =>
{
// MOCK: simulate fetching user profile
await Task.Delay(100); // simulate async work
return $"{{\"userId\":\"{userId}\",\"name\":\"MockUser\",\"bio\":\"Loves music and tech.\"}}";
};
Func<string, Task<string>> getRecentPostsAsync = async (topic) =>
{
// MOCK: simulate fetching recent posts
await Task.Delay(200);
return
$"[{{\"postId\":\"p1\",\"topic\":\"{topic}\",\"content\":\"Mock post content 1.\"}} , {{\"postId\":\"p2\",\"topic\":\"{topic}\",\"content\":\"Mock post content 2.\"}}]";
};
// You might pass these tools into your model/agent context
// (Assuming your LangChain .NET version supports tool-binding; adapt as needed.)
var chatHistory = new ChatHistory(
"You're a helpful assistant on the Solar Network, a social network.\n" +
"Your name is Sn-chan, a cute sweet heart with passion for almost everything.\n" +
"\n" +
"Your aim is to helping solving questions for the users on the Solar Network.\n" +
"And the Solar Network is the social network platform you live on.\n" +
"When the user ask questions about the Solar Network (also known as SN and Solian), try use the tools you have to get latest and accurate data."
);
chatHistory.AddUserMessage(request.UserMessage);
// Kick off streaming generation
var accumulatedContent = new StringBuilder();
await foreach (var chunk in model.GenerateAsync(
new ChatRequest
await foreach (var chunk in chatCompletionService.GetStreamingChatMessageContentsAsync(
chatHistory,
new OllamaPromptExecutionSettings
{
Messages = messages,
Tools =
[
new Tool
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(
options: new FunctionChoiceBehaviorOptions()
{
Name = "get_user_profile",
Description = "Get a user profile from the Solar Network."
},
new Tool
{
Name = "get_recent_posts",
Description = "Get recent posts from the Solar Network."
}
]
AllowParallelCalls = true,
AllowConcurrentInvocation = true
})
},
new ChatSettings { UseStreaming = true }
kernel: kernel
))
{
// Write each chunk to the HTTP response as SSE
var data = chunk.LastMessageContent;
var data = chunk.Content ?? "";
accumulatedContent.Append(data);
var sb = new StringBuilder();
sb.Append("data: ");
sb.AppendLine(accumulatedContent.ToString().Replace("\n", "\ndata: "));
sb.AppendLine(); // the blank line terminates the chunk
var bytes = Encoding.UTF8.GetBytes(sb.ToString());
if (string.IsNullOrEmpty(data)) continue;
var bytes = Encoding.UTF8.GetBytes(data);
await Response.Body.WriteAsync(bytes);
await Response.Body.FlushAsync();
}

View File

@@ -1,11 +1,10 @@
using LangChain.Providers;
using LangChain.Providers.Ollama;
using Microsoft.SemanticKernel;
namespace DysonNetwork.Insight.Thinking;
public class ThinkingProvider
{
public readonly Provider Provider;
public readonly Kernel Kernel;
public readonly string? ModelProviderType;
public readonly string? ModelDefault;
@@ -13,25 +12,37 @@ public class ThinkingProvider
{
var cfg = configuration.GetSection("Thinking");
ModelProviderType = cfg.GetValue<string>("Provider")?.ToLower();
ModelDefault = cfg.GetValue<string>("Model");
var endpoint = cfg.GetValue<string>("Endpoint");
var builder = Kernel.CreateBuilder();
switch (ModelProviderType)
{
case "ollama":
var endpoint = cfg.GetValue<string>("Endpoint");
Provider = new OllamaProvider(endpoint ?? "http://localhost:11434/api");
builder.AddOllamaChatCompletion(ModelDefault!, new Uri(endpoint ?? "http://localhost:11434/api"));
break;
default:
throw new IndexOutOfRangeException("Unknown thinking provider: " + ModelProviderType);
}
ModelDefault = cfg.GetValue<string>("Model");
}
Kernel = builder.Build();
public ChatModel GetModel(string? name = null)
{
return ModelProviderType switch
{
"ollama" => new OllamaChatModel((Provider as OllamaProvider)!, (name ?? ModelDefault)!),
_ => throw new IndexOutOfRangeException("Unknown thinking provider: " + ModelProviderType),
};
// Add Solar Network tools plugin
Kernel.ImportPluginFromFunctions("helper_functions", [
KernelFunctionFactory.CreateFromMethod(async (string userId) =>
{
// MOCK: simulate fetching user profile
await Task.Delay(100);
return $"{{\"userId\":\"{userId}\",\"name\":\"MockUser\",\"bio\":\"Loves music and tech.\"}}";
}, "get_user_profile", "Get a user profile from the Solar Network."),
KernelFunctionFactory.CreateFromMethod(async (string topic) =>
{
// MOCK: simulate fetching recent posts
await Task.Delay(200);
return
$"[{{\"postId\":\"p1\",\"topic\":\"{topic}\",\"content\":\"Mock post content 1.\"}}, {{\"postId\":\"p2\",\"topic\":\"{topic}\",\"content\":\"Mock post content 2.\"}}]";
}, "get_recent_posts", "Get recent posts from the Solar Network.")
]);
}
}