using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;

using CommunityToolkit.Mvvm.ComponentModel;
using CommunityToolkit.Mvvm.Input;

using LangChain.Chains.LLM;
using LangChain.Memory;
using LangChain.Prompts;
using LangChain.Providers;
using LangChain.Providers.DeepSeek;
using LangChain.Providers.DeepSeek.Predefined;
using LangChain.Providers.OpenAI;
using LangChain.Providers.OpenAI.Predefined;
using LangChain.Schema;

using Markdig;
using Markdig.Wpf.ColorCode;

using tryAGI.OpenAI;

//using static LangChain.Chains.Chain;
namespace WPFUI.Test
{
    /// <summary>
    /// View model for the chat dialogue view: streams replies from the DeepSeek
    /// chat model and mirrors the conversation into <see cref="ChatHistory"/>,
    /// which the view renders as Markdown via <see cref="Pipeline"/>.
    /// </summary>
    public partial class ChatDialogueViewModel : ObservableObject
    {
        // SECURITY(review): API key is hard-coded in source. Move it to secure
        // configuration (user secrets / environment variable) and rotate this key.
        static readonly DeepSeekConfiguration config = new DeepSeekConfiguration()
        {
            ApiKey = "sk-3a3126167f1343228b1a5745bcd0bf01",
            Endpoint = "https://api.deepseek.com",
            ChatSettings = new() { UseStreaming = true }
        };

        // Captured from the view through the Send command's parameter so that
        // streamed deltas can keep the conversation scrolled to the bottom.
        private ScrollViewer scrollViewer;

        /// <summary>
        /// The AI reply currently being streamed, rebuilt delta by delta.
        /// NOTE(review): "Respone" is a typo for "Response", but renaming the
        /// generated observable property would break existing XAML bindings.
        /// </summary>
        [ObservableProperty]
        public partial Message? CurrentRespone { get; set; } = Message.Ai(string.Empty);

        /// <summary>
        /// The accumulated message sent to the AI, including prior conversation
        /// context (grown via <c>+=</c> on each user turn and each full reply).
        /// </summary>
        private Message? CurrentRequest { get; set; } = Message.Empty;

        /// <summary>Markdown pipeline used by the view to render chat messages.</summary>
        public MarkdownPipeline Pipeline { get; set; } = new MarkdownPipelineBuilder().UseAdvancedExtensions().UseColorCodeWpf().Build();

        public ChatDialogueViewModel()
        {
            ChatHistory ??= new ObservableCollection<Message?>();
        }

        /// <summary>
        /// Current user input; re-evaluates <see cref="CanSend"/> on change.
        /// </summary>
        [ObservableProperty]
        [NotifyCanExecuteChangedFor(nameof(SendCommand))]
        public partial string UserInput { get; set; }

        // Send is enabled only when there is something to send.
        private bool CanSend()
        {
            return !string.IsNullOrEmpty(UserInput);
        }

        /// <summary>
        /// One-shot prompt through an <see cref="LlmChain"/> (non-streaming path).
        /// FIX(review): was <c>async void</c> with an empty catch, so any failure
        /// was unobservable; now returns <see cref="Task"/> (the generated command
        /// is still <c>PromptChatCommand</c> — the "Async" suffix is stripped) and
        /// logs exceptions.
        /// </summary>
        [RelayCommand]
        private async Task PromptChatAsync()
        {
            try
            {
                var deepseekLLM = new DeepSeekChatModel(new DeepSeekProvider(config));
                var prompt = new PromptTemplate(new PromptTemplateInput(
                    template: "Revit二次开发中,使用变量doc和uidoc两个变量,构造一个保证可以执行的C#代码块,添加相应注释,不需要方法签名和using命名空间,但使用时需要完整的命名空间。实现{需求}的功能", inputVariables: ["需求"]));
                deepseekLLM.RequestSent += DeepseekLLM_RequestSent;
                var chain = new LlmChain(new LlmChainInput(deepseekLLM, prompt));
                var result = await chain.CallAsync(new ChainValues(new Dictionary<string, object>
                {
                    { "需求", UserInput }
                })).ConfigureAwait(true);

                // The result is an object with a `text` property.
                var respones = result.Value["text"].ToString();
                // FIX(review): the response was previously computed and discarded.
                Debug.WriteLine(respones);
            }
            catch (Exception ex)
            {
                // FIX(review): previously an empty catch silently swallowed errors.
                Debug.WriteLine(ex);
            }
        }

        /// <summary>
        /// Full history of the conversation as shown in the view.
        /// </summary>
        public ObservableCollection<Message?> ChatHistory { get; set; }

        /// <summary>
        /// Sends the current <see cref="UserInput"/> to DeepSeek with streaming.
        /// The reply arrives through <see cref="DeepseekLLM_DeltaReceived"/> and
        /// <see cref="DeepseekLLM_ResponseReceived"/>.
        /// </summary>
        /// <param name="obj">Command parameter; the view passes its ScrollViewer.</param>
        /// <param name="cancellationToken">Supplied by the generated cancel command.</param>
        [RelayCommand(CanExecute = nameof(CanSend), IncludeCancelCommand = true)]
        private async Task SendAsync(object obj, CancellationToken cancellationToken)
        {
            try
            {
                if (obj is ScrollViewer scroll)
                {
                    scrollViewer = scroll;
                }

                // Show the user's message immediately.
                ChatHistory.Add(Message.Human(UserInput));

                #region DeepSeek
                var deepseekLLM = new DeepSeekChatModel(new DeepSeekProvider(config));
                deepseekLLM.ResponseReceived += DeepseekLLM_ResponseReceived;
                deepseekLLM.DeltaReceived += DeepseekLLM_DeltaReceived;
                //deepseekLLM.RequestSent += DeepseekLLM_RequestSent;

                // Fold the new turn into the running context, then clear the box.
                CurrentRequest += Message.Human(UserInput);
                UserInput = string.Empty;
                await deepseekLLM.GenerateAsync(CurrentRequest, cancellationToken: cancellationToken);
                #endregion
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }

        /// <summary>
        /// Demo/spike of the OpenAI provider + LlmChain APIs; not wired to the UI.
        /// </summary>
        private async static Task RunChainAsync()
        {
            try
            {
                // SECURITY(review): hard-coded key — move to secure configuration.
                var client = new OpenAiClient("sk-3a3126167f1343228b1a5745bcd0bf01");
                OpenAiProvider provider = new OpenAiProvider(client);
                var llm = new OpenAiLatestFastChatModel(provider);
                var embeddingModel = new TextEmbeddingV3SmallModel(provider);
                var prompt = new PromptTemplate(new PromptTemplateInput(
                    template: "Revit二次开发中,使用变量doc和uidoc两个变量,构造一个保证可以执行的C#代码块,添加相应注释,不需要方法签名和using命名空间,但使用时需要完整的命名空间。实现{需求}的功能", inputVariables: ["需求"]));
                var chain = new LlmChain(new LlmChainInput(llm, prompt));

                // Single-input, single-output chain can be invoked with RunAsync.
                var result2 = await chain.RunAsync("彩色长筒靴");
                Console.WriteLine(result2);

                var chatPrompt = ChatPromptTemplate.FromPromptMessages([
                    SystemMessagePromptTemplate.FromTemplate(
                        "You are a helpful assistant that translates {input_language} to {output_language}."),
                    HumanMessagePromptTemplate.FromTemplate("{text}")
                ]);

                var chainB = new LlmChain(new LlmChainInput(llm, chatPrompt)
                {
                    Verbose = true
                });

                var resultB = await chainB.CallAsync(new ChainValues(new Dictionary<string, object>(3)
                {
                    {"input_language", "English"},
                    {"output_language", "French"},
                    {"text", "I love programming"},
                }));
                Console.WriteLine(resultB.Value["text"]);
            }
            catch (Exception ex)
            {
                // FIX(review): previously an empty catch silently swallowed errors.
                Debug.WriteLine(ex);
            }
        }

        // Diagnostic hook: dumps the outgoing messages when a request is sent.
        private void DeepseekLLM_RequestSent(object sender, ChatRequest e)
        {
            Debug.WriteLine("-------------RequestSent-------------");
            foreach (var mes in e.Messages)
            {
                Debug.WriteLine($"{mes}");
            }
        }

        // Called when the full response has finished streaming.
        private void DeepseekLLM_ResponseReceived(object sender, ChatResponse e)
        {
            Application.Current.Dispatcher.Invoke(() =>
            {
                // Fold the completed reply into the context and reset the
                // in-progress message for the next turn.
                CurrentRequest += e.LastMessage;
                CurrentRespone = Message.Ai(string.Empty);
            });
        }

        // Streaming delta: append the fragment to the in-progress AI message.
        private void DeepseekLLM_DeltaReceived(object sender, ChatResponseDelta e)
        {
            if (string.IsNullOrEmpty(e.Content))
            {
                return;
            }
            // FIX(review): guard — deltas can arrive before the view has handed
            // us its ScrollViewer via the Send command parameter.
            scrollViewer?.Dispatcher.Invoke(() =>
            {
                // Remove and re-add the partial message so the bound list refreshes.
                ChatHistory.Remove(CurrentRespone);
                Debug.WriteLine($"{e.Content}");
                CurrentRespone += Message.Ai(e.Content);
                ChatHistory.Add(CurrentRespone);
                // FIX(review): dropped fire-and-forget `Task.Delay(1);` — it was
                // never awaited, so it had no effect.

                // Auto-scroll only if the user is already at the bottom.
                // FIX(review): exact `==` on double offsets is fragile; use a tolerance.
                if (Math.Abs(scrollViewer.VerticalOffset - (scrollViewer.ExtentHeight - scrollViewer.ViewportHeight)) < 1.0)
                {
                    scrollViewer.ScrollToEnd();
                }
            });
        }

        /// <summary>
        /// Starts a fresh conversation: clears the visible history and the
        /// accumulated request context.
        /// </summary>
        [RelayCommand]
        private void NewSession()
        {
            ChatHistory?.Clear();
            CurrentRequest = Message.Empty;
        }
    }
}