From 7cb8f99e7ebc21fca4130328c2fa1b0fb07f836a Mon Sep 17 00:00:00 2001
From: zyxucp <286513187@qq.com>
Date: Sat, 27 Apr 2024 23:03:46 +0800
Subject: [PATCH 1/2] fix: handle chat conversations
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
src/AntSK.Domain/AntSK.Domain.xml | 2 +-
.../Domain/Interface/IChatService.cs | 4 +-
.../Domain/Service/ChatService.cs | 73 +++++++++--------
.../Domain/Service/KernelService.cs | 3 +
src/AntSK/Pages/AppPage/AddApp.razor | 2 +-
.../ChatPage/Components/ChatView.razor.cs | 37 ++++++---
.../Pages/Setting/AIModel/AddModel.razor | 7 +-
src/AntSK/Services/OpenApi/OpenApiService.cs | 80 +++++++++----------
8 files changed, 115 insertions(+), 93 deletions(-)
diff --git a/src/AntSK.Domain/AntSK.Domain.xml b/src/AntSK.Domain/AntSK.Domain.xml
index 362ee1c5..97f872cf 100644
--- a/src/AntSK.Domain/AntSK.Domain.xml
+++ b/src/AntSK.Domain/AntSK.Domain.xml
@@ -199,7 +199,7 @@
-
+
Send message
diff --git a/src/AntSK.Domain/Domain/Interface/IChatService.cs b/src/AntSK.Domain/Domain/Interface/IChatService.cs
index f7be8895..1793a910 100644
--- a/src/AntSK.Domain/Domain/Interface/IChatService.cs
+++ b/src/AntSK.Domain/Domain/Interface/IChatService.cs
@@ -14,10 +14,10 @@ namespace AntSK.Domain.Domain.Interface
{
public interface IChatService
{
- IAsyncEnumerable<string> SendChatByAppAsync(Apps app, string questions, ChatHistory history);
+ IAsyncEnumerable<string> SendChatByAppAsync(Apps app, ChatHistory history);
IAsyncEnumerable<string> SendKmsByAppAsync(Apps app, string questions, ChatHistory history, string filePath, List<RelevantSource> relevantSources = null);
Task<string> SendImgByAppAsync(Apps app, string questions);
- Task<ChatHistory> GetChatHistory(List<Chats> MessageList);
+ Task<ChatHistory> GetChatHistory(List<Chats> MessageList, ChatHistory history);
}
}
\ No newline at end of file
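Hedged orientation sketch for the new interface shape: the user question no longer travels as a separate argument, so a caller appends it to the ChatHistory before calling the service. _chatService, app and questions are the names used elsewhere in this patch; everything else below is illustrative.

    using Microsoft.SemanticKernel.ChatCompletion;

    var history = new ChatHistory("You are a helpful assistant.");   // in AntSK the system prompt comes from app.Prompt
    history.AddUserMessage(questions);                                // formerly the separate questions parameter

    await foreach (var chunk in _chatService.SendChatByAppAsync(app, history))
    {
        Console.Write(chunk);                                         // each chunk is one string fragment of the reply
    }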
diff --git a/src/AntSK.Domain/Domain/Service/ChatService.cs b/src/AntSK.Domain/Domain/Service/ChatService.cs
index 7771eb1e..7506c6db 100644
--- a/src/AntSK.Domain/Domain/Service/ChatService.cs
+++ b/src/AntSK.Domain/Domain/Service/ChatService.cs
@@ -10,6 +10,7 @@
using Markdig;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using System.Diagnostics;
using System.Drawing;
@@ -35,45 +36,54 @@ IAIModels_Repositories _aIModels_Repositories
///
///
///
- public async IAsyncEnumerable<string> SendChatByAppAsync(Apps app, string questions, ChatHistory history)
+ public async IAsyncEnumerable<string> SendChatByAppAsync(Apps app, ChatHistory history)
{
-
- if (string.IsNullOrEmpty(app.Prompt) || !app.Prompt.Contains("{{$input}}"))
- {
- //if the template is empty, use a default prompt
- app.Prompt = app.Prompt.ConvertToString() + "{{$input}}";
- }
- KernelArguments args = new KernelArguments();
- if (history.Count > 10)
- {
- app.Prompt = @"${{ConversationSummaryPlugin.SummarizeConversation $history}}" + app.Prompt;
- args = new() {
- { "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) },
- { "input", questions }
- };
- }
- else
- {
- args = new()
- {
- { "input", $"{string.Join("\n", history.Select(x => x.Role + ": " + x.Content))}{Environment.NewLine} user:{questions}" }
- };
- }
-
var _kernel = _kernelService.GetKernelByApp(app);
+ var chat = _kernel.GetRequiredService<IChatCompletionService>();
var temperature = app.Temperature / 100;//stored as 0~100, so scale it down
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
+ List<string> completionList = new List<string>();
if (!string.IsNullOrEmpty(app.ApiFunctionList) || !string.IsNullOrEmpty(app.NativeFunctionList))//native function plugins still need to be added here
{
_kernelService.ImportFunctionsByApp(app, _kernel);
- settings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
+ settings.ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions;
+ while (true)
+ {
+ ChatMessageContent result = await chat.GetChatMessageContentAsync(history, settings, _kernel);
+ if (result.Content is not null)
+ {
+ string chunkCompletion = result.Content.ConvertToString();
+ completionList.Add(chunkCompletion);
+ foreach (var content in completionList)
+ {
+ yield return content.ConvertToString();
+ }
+ break;
+ }
+
+ history.Add(result);
+
+ IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
+ if (!functionCalls.Any())
+ {
+ break;
+ }
+
+ foreach (var functionCall in functionCalls)
+ {
+ FunctionResultContent resultContent = await functionCall.InvokeAsync(_kernel);
+
+ history.Add(resultContent.ToChatMessage());
+ }
+ }
}
- var func = _kernel.CreateFunctionFromPrompt(app.Prompt, settings);
- var chatResult = _kernel.InvokeStreamingAsync(function: func,
- arguments: args);
- await foreach (var content in chatResult)
+ else
{
- yield return content;
+ var chatResult = chat.GetStreamingChatMessageContentsAsync(history, settings, _kernel);
+ await foreach (var content in chatResult)
+ {
+ yield return content.ConvertToString();
+ }
}
}
@@ -318,9 +328,8 @@ public async Task SendImgByAppAsync(Apps app, string questions)
}
}
- public async Task<ChatHistory> GetChatHistory(List<Chats> MessageList)
+ public async Task<ChatHistory> GetChatHistory(List<Chats> MessageList, ChatHistory history)
{
- ChatHistory history = new ChatHistory();
if (MessageList.Count > 1)
{
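The new SendChatByAppAsync body switches from ToolCallBehavior.AutoInvokeKernelFunctions to a manual invocation cycle. A minimal, hedged sketch of that loop, assuming a Kernel that already has an IChatCompletionService and the app's plugins imported; only the Semantic Kernel APIs are real, the wrapper method is illustrative.

    using System.Linq;
    using System.Threading.Tasks;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.ChatCompletion;
    using Microsoft.SemanticKernel.Connectors.OpenAI;

    static async Task<string> RunToolLoopAsync(Kernel kernel, ChatHistory history)
    {
        var chat = kernel.GetRequiredService<IChatCompletionService>();
        // EnableKernelFunctions advertises the functions to the model but leaves invocation to us.
        OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };

        while (true)
        {
            ChatMessageContent result = await chat.GetChatMessageContentAsync(history, settings, kernel);
            if (result.Content is not null)
            {
                return result.Content;                        // plain text answer: we are done
            }

            history.Add(result);                              // keep the model's tool-call request in the history
            var functionCalls = FunctionCallContent.GetFunctionCalls(result);
            if (!functionCalls.Any())
            {
                return string.Empty;                          // neither text nor tool calls
            }

            foreach (var call in functionCalls)
            {
                FunctionResultContent functionResult = await call.InvokeAsync(kernel);
                history.Add(functionResult.ToChatMessage());  // feed the result back for the next round
            }
        }
    }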
diff --git a/src/AntSK.Domain/Domain/Service/KernelService.cs b/src/AntSK.Domain/Domain/Service/KernelService.cs
index 9130293f..c0a84e14 100644
--- a/src/AntSK.Domain/Domain/Service/KernelService.cs
+++ b/src/AntSK.Domain/Domain/Service/KernelService.cs
@@ -20,6 +20,8 @@
using DocumentFormat.OpenXml.Drawing;
using Microsoft.KernelMemory;
using OpenCvSharp.ML;
+using LLamaSharp.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.ChatCompletion;
namespace AntSK.Domain.Domain.Service
{
@@ -105,6 +107,7 @@ private void WithTextGenerationByAIType(IKernelBuilder builder,AIModels chatMode
var (weights, parameters) = LLamaConfig.GetLLamaConfig(chatModel.ModelName);
var ex = new StatelessExecutor(weights, parameters);
builder.Services.AddKeyedSingleton("local-llama", new LLamaSharpTextCompletion(ex));
+ builder.Services.AddKeyedSingleton("local-llama-chat", new LLamaSharpChatCompletion(ex));
break;
case Model.Enum.AIType.SparkDesk:
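The local-llama chat completion is registered under a string key next to the existing text-completion service. A hedged sketch of how such a keyed registration is built and resolved; the helper method is illustrative, and in AntSK this wiring lives inside GetKernelByApp.

    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.ChatCompletion;

    static Kernel BuildKernelWithKeyedChat(IChatCompletionService chatCompletion)
    {
        IKernelBuilder builder = Kernel.CreateBuilder();
        // Same registration shape as above; "local-llama-chat" is the key used in this patch.
        builder.Services.AddKeyedSingleton<IChatCompletionService>("local-llama-chat", chatCompletion);
        return builder.Build();
    }

    // Resolution: ask for the key explicitly, or omit it to take any registered chat service.
    // var chat = kernel.GetRequiredService<IChatCompletionService>("local-llama-chat");
    // var anyChat = kernel.GetRequiredService<IChatCompletionService>();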
diff --git a/src/AntSK/Pages/AppPage/AddApp.razor b/src/AntSK/Pages/AppPage/AddApp.razor
index 1195a8be..5b586c30 100644
--- a/src/AntSK/Pages/AppPage/AddApp.razor
+++ b/src/AntSK/Pages/AppPage/AddApp.razor
@@ -53,7 +53,7 @@
-
+
More deterministic
diff --git a/src/AntSK/Pages/ChatPage/Components/ChatView.razor.cs b/src/AntSK/Pages/ChatPage/Components/ChatView.razor.cs
index c2ca2fcd..8b69cddf 100644
--- a/src/AntSK/Pages/ChatPage/Components/ChatView.razor.cs
+++ b/src/AntSK/Pages/ChatPage/Components/ChatView.razor.cs
@@ -224,26 +224,40 @@ await Task.Run(() =>
///
protected async Task SendAsync(string questions, string? filePath)
{
- ChatHistory history = new ChatHistory();
+
//handle multi-turn conversation
Apps app = _apps_Repositories.GetFirst(p => p.Id == AppId);
- if (MessageList.Count > 0)
- {
- history = await _chatService.GetChatHistory(MessageList);
- }
+ ChatHistory history;
if (app.Type == AppType.chat.ToString() && (filePath == null || app.EmbeddingModelID.IsNull()))
{
- await SendChat(questions, history, app);
+ if (string.IsNullOrEmpty(app.Prompt))
+ {
+ app.Prompt = "你叫AntSK,是一个人工智能助手";
+ }
+ //聊天应用增加系统角色
+ history = new ChatHistory(app.Prompt.ConvertToString());
+
+ if (MessageList.Count > 0)
+ {
+ history = await _chatService.GetChatHistory(MessageList, history);
+ }
+ await SendChat(history, app);
}
else if (app.Type == AppType.kms.ToString() || filePath != null || app.EmbeddingModelID.IsNotNull())
{
+ history = new ChatHistory();
+
+ if (MessageList.Count > 0)
+ {
+ history = await _chatService.GetChatHistory(MessageList, history);
+ }
await SendKms(questions, history, app, filePath);
-
+
}
else if (app.Type == AppType.img.ToString())
{
- await SendImg(questions,app);
+ await SendImg(questions, app);
}
//cache the message history
@@ -253,7 +267,7 @@ protected async Task SendAsync(string questions, string? filePath)
if (OnRelevantSources.IsNotNull())
{
await OnRelevantSources.InvokeAsync(_relevantSources);
- }
+ }
}
@@ -318,14 +332,13 @@ private async Task SendKms(string questions, ChatHistory history, Apps app, stri
///
/// 发送普通对话
///
- ///
///
///
///
- private async Task SendChat(string questions, ChatHistory history, Apps app)
+ private async Task SendChat(ChatHistory history, Apps app)
{
Chats info = null;
- var chatResult = _chatService.SendChatByAppAsync(app, questions, history);
+ var chatResult = _chatService.SendChatByAppAsync(app, history);
await foreach (var content in chatResult)
{
if (info == null)
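The reworked SendAsync seeds the ChatHistory with the app prompt as a system message before replaying earlier turns and appending the new question. A hedged sketch of that flow; the UiMessage record is illustrative and not AntSK's actual message type.

    using System.Collections.Generic;
    using Microsoft.SemanticKernel.ChatCompletion;

    public sealed record UiMessage(bool IsSend, string Context);      // illustrative stand-in for the cached messages

    public static ChatHistory BuildHistory(string appPrompt, IEnumerable<UiMessage> messages, string question)
    {
        var history = new ChatHistory(appPrompt);                     // system role comes from app.Prompt
        foreach (var m in messages)
        {
            if (m.IsSend) history.AddUserMessage(m.Context);          // earlier user turn
            else history.AddAssistantMessage(m.Context);              // earlier assistant turn
        }
        history.AddUserMessage(question);                             // the new turn
        return history;
    }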
diff --git a/src/AntSK/Pages/Setting/AIModel/AddModel.razor b/src/AntSK/Pages/Setting/AIModel/AddModel.razor
index 7b0c1ecd..f08e3f70 100644
--- a/src/AntSK/Pages/Setting/AIModel/AddModel.razor
+++ b/src/AntSK/Pages/Setting/AIModel/AddModel.razor
@@ -85,12 +85,13 @@
-
-
-
+
+
+
+
}
@if (context.AIType == AIType.DashScope)
{
diff --git a/src/AntSK/Services/OpenApi/OpenApiService.cs b/src/AntSK/Services/OpenApi/OpenApiService.cs
index 8af06003..8a33490b 100644
--- a/src/AntSK/Services/OpenApi/OpenApiService.cs
+++ b/src/AntSK/Services/OpenApi/OpenApiService.cs
@@ -41,13 +41,14 @@ public async Task Chat(OpenAIModel model, string sk, HttpContext HttpContext)
{
case "chat":
//plain chat
+ history.AddUserMessage(questions);
if (model.stream)
{
OpenAIStreamResult result1 = new OpenAIStreamResult();
result1.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result1.choices = new List<StreamChoicesModel>()
{ new StreamChoicesModel() { delta = new OpenAIMessage() { role = "assistant" } } };
- await SendChatStream(HttpContext, result1, app, questions,history);
+ await SendChatStream(HttpContext, result1, app,history);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result1));
await HttpContext.Response.CompleteAsync();
@@ -59,14 +60,12 @@ public async Task Chat(OpenAIModel model, string sk, HttpContext HttpContext)
result2.created = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
result2.choices = new List<ChoicesModel>()
{ new ChoicesModel() { message = new OpenAIMessage() { role = "assistant" } } };
- result2.choices[0].message.content = await SendChat(questions,history, app);
+ result2.choices[0].message.content = await SendChat(history, app);
HttpContext.Response.ContentType = "application/json";
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result2));
await HttpContext.Response.CompleteAsync();
}
-
break;
-
case "kms":
//knowledge-base Q&A
if (model.stream)
@@ -91,16 +90,15 @@ public async Task Chat(OpenAIModel model, string sk, HttpContext HttpContext)
await HttpContext.Response.WriteAsync(JsonConvert.SerializeObject(result4));
await HttpContext.Response.CompleteAsync();
}
-
break;
}
}
}
- private async Task SendChatStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app,string questions, ChatHistory history)
+ private async Task SendChatStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app, ChatHistory history)
{
HttpContext.Response.Headers.Add("Content-Type", "text/event-stream");
- var chatResult = _chatService.SendChatByAppAsync(app, questions, history);
+ var chatResult = _chatService.SendChatByAppAsync(app, history);
await foreach (var content in chatResult)
{
result.choices[0].delta.content = content.ConvertToString();
@@ -113,7 +111,6 @@ private async Task SendChatStream(HttpContext HttpContext, OpenAIStreamResult re
await HttpContext.Response.WriteAsync("data: [DONE]");
await HttpContext.Response.Body.FlushAsync();
-
await HttpContext.Response.CompleteAsync();
}
@@ -124,49 +121,48 @@ private async Task SendChatStream(HttpContext HttpContext, OpenAIStreamResult re
///
///
///
- private async Task<string> SendChat(string questions, ChatHistory history, Apps app)
+ private async Task<string> SendChat(ChatHistory history, Apps app)
{
- string result = "";
-
- if (string.IsNullOrEmpty(app.Prompt) || !app.Prompt.Contains("{{$input}}"))
- {
- //if the template is empty, use a default prompt
- app.Prompt = app.Prompt.ConvertToString() + "{{$input}}";
- }
- KernelArguments args = new KernelArguments();
- if (history.Count > 10)
- {
- app.Prompt = @"${{ConversationSummaryPlugin.SummarizeConversation $history}}" + app.Prompt;
- args = new() {
- { "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) },
- { "input", questions }
- };
- }
- else
- {
- args = new()
- {
- { "input", $"{string.Join("\n", history.Select(x => x.Role + ": " + x.Content))}{Environment.NewLine} user:{questions}" }
- };
- }
-
var _kernel = _kernelService.GetKernelByApp(app);
- var temperature = app.Temperature / 100; //stored as 0~100, so scale it down
+ var chat = _kernel.GetRequiredService<IChatCompletionService>();
+
+ var temperature = app.Temperature / 100;//stored as 0~100, so scale it down
OpenAIPromptExecutionSettings settings = new() { Temperature = temperature };
+ List<string> completionList = new List<string>();
if (!string.IsNullOrEmpty(app.ApiFunctionList) || !string.IsNullOrEmpty(app.NativeFunctionList))//native function plugins still need to be added here
{
_kernelService.ImportFunctionsByApp(app, _kernel);
- settings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
+ settings.ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions;
+ while (true)
+ {
+ ChatMessageContent result = await chat.GetChatMessageContentAsync(history, settings, _kernel);
+ if (result.Content is not null)
+ {
+ string chunkCompletion = result.Content.ConvertToString();
+ completionList.Add(chunkCompletion);
+ return chunkCompletion;
+ }
+ history.Add(result);
+ IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
+ if (!functionCalls.Any())
+ {
+ break;
+ }
+
+ foreach (var functionCall in functionCalls)
+ {
+ FunctionResultContent resultContent = await functionCall.InvokeAsync(_kernel);
+
+ history.Add(resultContent.ToChatMessage());
+ }
+ }
}
- var func = _kernel.CreateFunctionFromPrompt(app.Prompt, settings);
- var chatResult =await _kernel.InvokeAsync(function: func, arguments: args);
- if (chatResult.IsNotNull())
+ else
{
- string answers = chatResult.GetValue<string>();
- result = answers;
+ ChatMessageContent result = await chat.GetChatMessageContentAsync(history, settings, _kernel);
+ return result.Content.ConvertToString();
}
-
- return result;
+ return "";
}
private async Task SendKmsStream(HttpContext HttpContext, OpenAIStreamResult result, Apps app, string questions,ChatHistory history)
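For completeness, a hedged client-side sketch that consumes the streaming branch above as OpenAI-style server-sent events. The /v1/chat/completions path, the request body shape and the Bearer key are assumptions about how the endpoint is mounted, not taken from this patch.

    using System;
    using System.IO;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text;

    var http = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };
    var request = new HttpRequestMessage(HttpMethod.Post, "/v1/chat/completions")
    {
        Content = new StringContent(
            "{\"model\":\"your-app-id\",\"stream\":true,\"messages\":[{\"role\":\"user\",\"content\":\"hello\"}]}",
            Encoding.UTF8, "application/json")
    };
    request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "your-app-secret");

    using var response = await http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    using var reader = new StreamReader(await response.Content.ReadAsStreamAsync());
    string? line;
    while ((line = await reader.ReadLineAsync()) != null)
    {
        if (line.StartsWith("data: ") && line != "data: [DONE]")
        {
            Console.WriteLine(line.Substring("data: ".Length));       // each payload is a serialized OpenAIStreamResult
        }
    }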
From 0ea52eced956b330e5f4cde59b55244c0904360a Mon Sep 17 00:00:00 2001
From: zyxucp <286513187@qq.com>
Date: Sun, 28 Apr 2024 20:37:37 +0800
Subject: [PATCH 2/2] fix: switch chat to ChatHistory
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../Domain/Service/KernelService.cs | 2 +
src/AntSk.LLM/Mock/MockChatCompletion.cs | 55 +++++
.../SparkDesk/SparkDeskChatCompletion.cs | 231 ++++++++++++++++++
3 files changed, 288 insertions(+)
create mode 100644 src/AntSk.LLM/Mock/MockChatCompletion.cs
create mode 100644 src/AntSk.LLM/SparkDesk/SparkDeskChatCompletion.cs
diff --git a/src/AntSK.Domain/Domain/Service/KernelService.cs b/src/AntSK.Domain/Domain/Service/KernelService.cs
index c0a84e14..3b929c41 100644
--- a/src/AntSK.Domain/Domain/Service/KernelService.cs
+++ b/src/AntSK.Domain/Domain/Service/KernelService.cs
@@ -113,6 +113,7 @@ private void WithTextGenerationByAIType(IKernelBuilder builder,AIModels chatMode
case Model.Enum.AIType.SparkDesk:
var options = new SparkDeskOptions { AppId = chatModel.EndPoint, ApiSecret = chatModel.ModelKey, ApiKey = chatModel.ModelName, ModelVersion = Sdcb.SparkDesk.ModelVersion.V3_5 };
builder.Services.AddKeyedSingleton("spark-desk", new SparkDeskTextCompletion(options, chatModel.Id));
+ builder.Services.AddKeyedSingleton("spark-desk-chat", new SparkDeskChatCompletion(options, chatModel.Id));
break;
case Model.Enum.AIType.DashScope:
@@ -121,6 +122,7 @@ private void WithTextGenerationByAIType(IKernelBuilder builder,AIModels chatMode
case Model.Enum.AIType.Mock:
builder.Services.AddKeyedSingleton("mock", new MockTextCompletion());
+ builder.Services.AddKeyedSingleton("mock-chat", new MockChatCompletion());
break;
case Model.Enum.AIType.LLamaFactory:
builder.AddOpenAIChatCompletion(
diff --git a/src/AntSk.LLM/Mock/MockChatCompletion.cs b/src/AntSk.LLM/Mock/MockChatCompletion.cs
new file mode 100644
index 00000000..371be4d0
--- /dev/null
+++ b/src/AntSk.LLM/Mock/MockChatCompletion.cs
@@ -0,0 +1,55 @@
+using AntSK.LLM.SparkDesk;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel;
+using Sdcb.SparkDesk;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Text.Encodings.Web;
+using System.Text.Json.Serialization;
+using System.Text.Json;
+using System.Text.Unicode;
+using System.Threading.Tasks;
+
+namespace AntSK.LLM.Mock
+{
+ public class MockChatCompletion : IChatCompletionService
+ {
+ private readonly Dictionary<string, object?> _attributes = new();
+ private readonly SparkDeskClient _client;
+ private string _chatId;
+ private readonly SparkDeskOptions _options;
+
+ private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
+ {
+ NumberHandling = JsonNumberHandling.AllowReadingFromString,
+ Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
+ };
+
+ public IReadOnlyDictionary<string, object?> Attributes => _attributes;
+
+ public MockChatCompletion()
+ {
+
+ }
+
+ public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ {
+ StringBuilder sb = new();
+ string result = $"This is a piece of mock data for chat testing. Your message was: {chatHistory.LastOrDefault().ToString()}";
+ return [new(AuthorRole.Assistant, result.ToString())];
+ }
+
+ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ StringBuilder sb = new();
+ string result = $"这是一条Mock数据,便于聊天测试,你的消息是:{chatHistory.LastOrDefault().ToString()}";
+ foreach (var c in result)
+ {
+ yield return new StreamingChatMessageContent(AuthorRole.Assistant, c.ToString());
+ }
+ }
+ }
+}
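A hedged usage sketch for the mock: register it as the unkeyed chat-completion service in a throwaway kernel and stream one reply, which exercises the new ChatHistory path without a real model. AntSK itself registers the class under the "mock-chat" key; the unkeyed registration here is only for the test.

    using System;
    using AntSK.LLM.Mock;
    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.ChatCompletion;

    var builder = Kernel.CreateBuilder();
    builder.Services.AddSingleton<IChatCompletionService>(new MockChatCompletion());
    var kernel = builder.Build();

    var chat = kernel.GetRequiredService<IChatCompletionService>();
    var history = new ChatHistory("You are a test assistant.");
    history.AddUserMessage("ping");

    await foreach (var chunk in chat.GetStreamingChatMessageContentsAsync(history))
    {
        Console.Write(chunk);                                         // the mock echoes the last message character by character
    }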
diff --git a/src/AntSk.LLM/SparkDesk/SparkDeskChatCompletion.cs b/src/AntSk.LLM/SparkDesk/SparkDeskChatCompletion.cs
new file mode 100644
index 00000000..a8f4a625
--- /dev/null
+++ b/src/AntSk.LLM/SparkDesk/SparkDeskChatCompletion.cs
@@ -0,0 +1,231 @@
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel;
+using Sdcb.SparkDesk;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Text.Encodings.Web;
+using System.Text.Json.Serialization;
+using System.Text.Json;
+using System.Text.Unicode;
+using System.Threading.Tasks;
+
+namespace AntSK.LLM.SparkDesk
+{
+ public class SparkDeskChatCompletion : IChatCompletionService
+ {
+ private readonly Dictionary<string, object?> _attributes = new();
+ private readonly SparkDeskClient _client;
+ private string _chatId;
+ private readonly SparkDeskOptions _options;
+
+ private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
+ {
+ NumberHandling = JsonNumberHandling.AllowReadingFromString,
+ Encoder = JavaScriptEncoder.Create(UnicodeRanges.All)
+ };
+
+ public IReadOnlyDictionary<string, object?> Attributes => _attributes;
+
+ public SparkDeskChatCompletion(SparkDeskOptions options, string chatId)
+ {
+ _options = options;
+ _chatId = chatId;
+ _client = new(options.AppId, options.ApiKey, options.ApiSecret);
+ }
+
+ public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ {
+ StringBuilder sb = new();
+ var parameters = new ChatRequestParameters
+ {
+ ChatId = _chatId,
+ };
+
+ OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
+
+ parameters.Temperature = (float)chatExecutionSettings.Temperature;
+ parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
+
+ IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
+ var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
+
+ List<ChatMessage> messages = GetSparkMessage(chatHistory);
+
+ var result = await _client.ChatAsync(_options.ModelVersion, messages.ToArray(), parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken);
+
+ if (result.FunctionCall != null)
+ {
+ var func = functions.Where(x => x.Name == result.FunctionCall.Name).FirstOrDefault();
+
+ if (func == null)
+ {
+ return new List { new(AuthorRole.Assistant, $"插件{result.FunctionCall.Name}未注册") }.AsReadOnly();
+ }
+
+ if (kernel.Plugins.TryGetFunction(func.PluginName, func.Name, out var function))
+ {
+ var arguments = new KernelArguments();
+
+ var JsonElement = JsonDocument.Parse(result.FunctionCall.Arguments).RootElement;
+ foreach (var parameter in func.Parameters)
+ {
+ var error = "";
+ try
+ {
+ if (JsonElement.TryGetProperty(parameter.Name, out var property))
+ {
+ arguments.Add(parameter.Name, property.Deserialize(parameter.ParameterType!, _jsonSerializerOptions));
+ }
+ }
+ catch (Exception ex)
+ {
+ error = $"参数{parameter.Name}解析错误:{ex.Message}";
+ }
+
+ if (!string.IsNullOrEmpty(error))
+ {
+ return new List<ChatMessageContent> { new(AuthorRole.Assistant, error) }.AsReadOnly();
+
+ }
+ }
+ var functionResult = await function.InvokeAsync(kernel, arguments, cancellationToken);
+ messages = [ ChatMessage.FromUser(messages.LastOrDefault().Content),
+ ChatMessage.FromSystem($@"
+ The function call was executed successfully.
+ Function description: {func.Description}
+ Function result: {functionResult}
+ "),
+ ChatMessage.FromUser("Answer my question based on the function call result; do not go beyond what the result returns, and do not add extra commentary:")];
+
+
+ var callResult = await _client.ChatAsync(_options.ModelVersion, messages.ToArray(), parameters, null);
+ ChatMessageContent chatMessageContent = new(AuthorRole.Assistant, callResult.Text.ToString(), modelId: "SparkDesk");
+
+ return new List<ChatMessageContent> { chatMessageContent }.AsReadOnly();
+
+ }
+ return new List { new(AuthorRole.Assistant, "未找到插件") }.AsReadOnly();
+
+ }
+ else
+ {
+
+ ChatMessageContent chatMessageContent = new(AuthorRole.Assistant, result.Text.ToString(), modelId: "SparkDesk");
+
+ return new List<ChatMessageContent> { chatMessageContent }.AsReadOnly();
+ }
+
+ }
+
+
+ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ var parameters = new ChatRequestParameters
+ {
+ ChatId = _chatId,
+ };
+ OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
+
+ parameters.Temperature = (float)chatExecutionSettings.Temperature;
+ parameters.MaxTokens = chatExecutionSettings.MaxTokens ?? parameters.MaxTokens;
+
+ IList<KernelFunctionMetadata> functions = kernel?.Plugins.GetFunctionsMetadata().Where(x => x.PluginName == "AntSkFunctions").ToList() ?? [];
+ var functionDefs = functions.Select(func => new FunctionDef(func.Name, func.Description, func.Parameters.Select(p => new FunctionParametersDef(p.Name, p.ParameterType?.IsClass == true ? "object" : "string", p.Description, p.IsRequired)).ToList())).ToList();
+ List<ChatMessage> messages = GetSparkMessage(chatHistory);
+ await foreach (StreamedChatResponse msg in _client.ChatAsStreamAsync(_options.ModelVersion, messages.ToArray(), parameters, functionDefs.Count > 0 ? [.. functionDefs] : null, cancellationToken: cancellationToken))
+ {
+
+ yield return new StreamingChatMessageContent(AuthorRole.Assistant, msg);
+
+ };
+
+ }
+
+ private static List<ChatMessage> GetSparkMessage(ChatHistory chatHistory)
+ {
+ List<ChatMessage> messages = new List<ChatMessage>();
+ foreach (var msg in chatHistory.ToList())
+ {
+ string role = "";
+ if (msg.Role == AuthorRole.User)
+ {
+ role = "user";
+ }
+ else if (msg.Role == AuthorRole.System)
+ {
+ role = "system";
+ }
+ else
+ {
+ role = "assistant";
+ }
+ messages.Add(new ChatMessage(role, msg.ToString()));
+ }
+
+ return messages;
+ }
+
+
+ private static string? ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior)
+ {
+ if (functionResult is string stringResult)
+ {
+ return stringResult;
+ }
+
+ if (functionResult is ChatMessageContent chatMessageContent)
+ {
+ return chatMessageContent.ToString();
+ }
+
+ return JsonSerializer.Serialize(functionResult, _jsonSerializerOptions);
+ }
+
+ public static Dictionary<string, object> ParseJsonElement(JsonElement element, string propertyName)
+ {
+ Dictionary<string, object> dict = new();
+
+ switch (element.ValueKind)
+ {
+ case JsonValueKind.Object:
+ foreach (JsonProperty property in element.EnumerateObject())
+ {
+ dict.Add(property.Name, ParseJsonElement(property.Value, property.Name));
+ }
+ break;
+
+ case JsonValueKind.Array:
+ List