♻️ Replace LangChain with Semantic Kernel

This commit is contained in:
2025-10-25 17:07:29 +08:00
parent bbcaa27ac5
commit 40325c6df5
5 changed files with 59 additions and 126 deletions

View File

@@ -1,8 +1,9 @@
using System.ComponentModel.DataAnnotations;
using LangChain.Providers;
using Microsoft.AspNetCore.Mvc;
using Microsoft.SemanticKernel.ChatCompletion;
using System.Text;
using CSharpToJsonSchema;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
namespace DysonNetwork.Insight.Thinking;
@@ -22,74 +23,42 @@ public class ThinkingController(ThinkingProvider provider) : ControllerBase
Response.Headers.Append("Content-Type", "text/event-stream");
Response.StatusCode = 200;
var model = provider.GetModel();
var kernel = provider.Kernel;
// Build conversation history (you may load from your memory store)
var messages = new List<Message>
{
new Message
{
Role = MessageRole.System,
Content =
"You're a helpful assistant on the Solar Network, a social network. Your name is Sn-chan, a cute sweet heart with passion for almost everything."
},
new Message
{
Role = MessageRole.Human,
Content = request.UserMessage
}
};
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
// Mock tool definitions — you will replace these with your real tool calls
Func<string, Task<string>> getUserProfileAsync = async (userId) =>
{
// MOCK: simulate fetching user profile
await Task.Delay(100); // simulate async work
return $"{{\"userId\":\"{userId}\",\"name\":\"MockUser\",\"bio\":\"Loves music and tech.\"}}";
};
Func<string, Task<string>> getRecentPostsAsync = async (topic) =>
{
// MOCK: simulate fetching recent posts
await Task.Delay(200);
return
$"[{{\"postId\":\"p1\",\"topic\":\"{topic}\",\"content\":\"Mock post content 1.\"}} , {{\"postId\":\"p2\",\"topic\":\"{topic}\",\"content\":\"Mock post content 2.\"}}]";
};
// You might pass these tools into your model/agent context
// (Assuming your LangChain .NET version supports tool-binding; adapt as needed.)
var chatHistory = new ChatHistory(
"You're a helpful assistant on the Solar Network, a social network.\n" +
"Your name is Sn-chan, a cute sweet heart with passion for almost everything.\n" +
"\n" +
"Your aim is to helping solving questions for the users on the Solar Network.\n" +
"And the Solar Network is the social network platform you live on.\n" +
"When the user ask questions about the Solar Network (also known as SN and Solian), try use the tools you have to get latest and accurate data."
);
chatHistory.AddUserMessage(request.UserMessage);
// Kick off streaming generation
var accumulatedContent = new StringBuilder();
await foreach (var chunk in model.GenerateAsync(
new ChatRequest
await foreach (var chunk in chatCompletionService.GetStreamingChatMessageContentsAsync(
chatHistory,
new OllamaPromptExecutionSettings
{
Messages = messages,
Tools =
[
new Tool
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(
options: new FunctionChoiceBehaviorOptions()
{
Name = "get_user_profile",
Description = "Get a user profile from the Solar Network."
},
new Tool
{
Name = "get_recent_posts",
Description = "Get recent posts from the Solar Network."
}
]
AllowParallelCalls = true,
AllowConcurrentInvocation = true
})
},
new ChatSettings { UseStreaming = true }
kernel: kernel
))
{
// Write each chunk to the HTTP response as SSE
var data = chunk.LastMessageContent;
var data = chunk.Content ?? "";
accumulatedContent.Append(data);
var sb = new StringBuilder();
sb.Append("data: ");
sb.AppendLine(accumulatedContent.ToString().Replace("\n", "\ndata: "));
sb.AppendLine(); // the blank line terminates the chunk
var bytes = Encoding.UTF8.GetBytes(sb.ToString());
if (string.IsNullOrEmpty(data)) continue;
var bytes = Encoding.UTF8.GetBytes(data);
await Response.Body.WriteAsync(bytes);
await Response.Body.FlushAsync();
}

View File

@@ -1,11 +1,10 @@
using LangChain.Providers;
using LangChain.Providers.Ollama;
using Microsoft.SemanticKernel;
namespace DysonNetwork.Insight.Thinking;
public class ThinkingProvider
{
public readonly Provider Provider;
public readonly Kernel Kernel;
public readonly string? ModelProviderType;
public readonly string? ModelDefault;
@@ -13,25 +12,37 @@ public class ThinkingProvider
{
var cfg = configuration.GetSection("Thinking");
ModelProviderType = cfg.GetValue<string>("Provider")?.ToLower();
ModelDefault = cfg.GetValue<string>("Model");
var endpoint = cfg.GetValue<string>("Endpoint");
var builder = Kernel.CreateBuilder();
switch (ModelProviderType)
{
case "ollama":
var endpoint = cfg.GetValue<string>("Endpoint");
Provider = new OllamaProvider(endpoint ?? "http://localhost:11434/api");
builder.AddOllamaChatCompletion(ModelDefault!, new Uri(endpoint ?? "http://localhost:11434/api"));
break;
default:
throw new IndexOutOfRangeException("Unknown thinking provider: " + ModelProviderType);
}
ModelDefault = cfg.GetValue<string>("Model");
}
Kernel = builder.Build();
public ChatModel GetModel(string? name = null)
{
return ModelProviderType switch
{
"ollama" => new OllamaChatModel((Provider as OllamaProvider)!, (name ?? ModelDefault)!),
_ => throw new IndexOutOfRangeException("Unknown thinking provider: " + ModelProviderType),
};
// Add Solar Network tools plugin
Kernel.ImportPluginFromFunctions("helper_functions", [
KernelFunctionFactory.CreateFromMethod(async (string userId) =>
{
// MOCK: simulate fetching user profile
await Task.Delay(100);
return $"{{\"userId\":\"{userId}\",\"name\":\"MockUser\",\"bio\":\"Loves music and tech.\"}}";
}, "get_user_profile", "Get a user profile from the Solar Network."),
KernelFunctionFactory.CreateFromMethod(async (string topic) =>
{
// MOCK: simulate fetching recent posts
await Task.Delay(200);
return
$"[{{\"postId\":\"p1\",\"topic\":\"{topic}\",\"content\":\"Mock post content 1.\"}}, {{\"postId\":\"p2\",\"topic\":\"{topic}\",\"content\":\"Mock post content 2.\"}}]";
}, "get_recent_posts", "Get recent posts from the Solar Network.")
]);
}
}