
Commit

Merge pull request #3008 from JeffreySu/Developer
Developer
JeffreySu committed May 24, 2024
2 parents ec15df4 + d721b11 commit 1ad36e9
Showing 14 changed files with 423 additions and 32 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -63,3 +63,4 @@ node_modules
yarn.lock

/Samples/Work/Senparc.Weixin.Sample.Work/App_Data/WeChat_Work
/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.Development.json
53 changes: 51 additions & 2 deletions Samples with AI/readme.md
@@ -8,7 +8,7 @@

The content will cover:

1. [ ] WeChat Official Account Chat bot (text)
1. [X] WeChat Official Account Chat bot (text) - went live on May 25, 2024
2. [ ] WeChat Official Account Chat bot (images)
3. [ ] WeChat Official Account Chat bot (mixed multimodal)
4. [ ] WeChat Official Account Chat bot with search
@@ -21,4 +21,53 @@

The AI features will be integrated into the [/Samples/All/net8-mvc](../Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/) all-in-one sample.

More details will be added to this document as each feature goes live.

## Development notes: WeChat Official Account Chat bot (text)

1. Develop the WeChat Official Account following the usual steps.
2. In the `OnTextRequestAsync` event, add an activation keyword that switches the user into the AI chat state (to conserve AI usage, keep the user experience focused, and leave the account's regular features intact, we recommend not keeping AI chat active all the time), for example:

```
.Keyword("AI", () => this.StartAIChatAsync().Result)
```

> [View the code](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs#L194-L194)

Here `StartAIChatAsync()` activates the AI chat state for the current user's conversation context.

> [View the code](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs#L41-L41)

3. So that the handler can first check whether the user is already in the AI chat state, add code that attempts the AI conversation before the code above runs (see the combined sketch after this step), for example:

```
var aiResponseMessage = await this.AIChatAsync(requestMessage);
if (aiResponseMessage != null)
{
return aiResponseMessage;
}
```

> [View the code](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs#L179-L179)

Here `AIChatAsync()` contains the logic that tries to send the message to the AI (if the user is not in an AI chat session it returns null and the handler continues with its regular flow).

> [View the code](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs#L41-L41)
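
Putting steps 2 and 3 together, the handler ends up shaped roughly like the sketch below. This is only an illustration of the ordering, not the repository's actual `OnTextRequestAsync`: the plain `if` stands in for the NeuChar keyword chain from step 2, and the fallback branch is placeholder code.

```
public override async Task<IResponseMessageBase> OnTextRequestAsync(RequestMessageText requestMessage)
{
    // Step 3: if the current user is already in an AI chat session, let the AI answer first.
    var aiResponseMessage = await this.AIChatAsync(requestMessage);
    if (aiResponseMessage != null)
    {
        return aiResponseMessage;
    }

    // Step 2: the keyword "AI" switches the current user into the AI chat state.
    if (requestMessage.Content.Equals("AI", StringComparison.OrdinalIgnoreCase))
    {
        return await this.StartAIChatAsync();
    }

    // Regular (non-AI) keyword and default handling of the sample continues here (placeholder).
    var responseMessage = base.CreateResponseMessage<ResponseMessageText>();
    responseMessage.Content = "echo: " + requestMessage.Content;
    return responseMessage;
}
```

The important point is the ordering: the `AIChatAsync` check runs before any regular keyword handling, so an active AI session captures every text message until the user pauses or exits it.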
4. Configure the AI parameters: following “【开发过程】第一步:配置账号” (development process, step 1: configure the account) in the Senparc.AI README, add a `SenparcAiSetting` node to the appsettings.json file ([see here](https://github.com/Senparc/Senparc.AI/blob/main/README.md#%E7%AC%AC%E4%B8%80%E6%AD%A5%E9%85%8D%E7%BD%AE%E8%B4%A6%E5%8F%B7)). Note: you normally only need to configure one of the supported platforms.

5. Reference the Senparc.AI.Kernel package and activate Senparc.AI in the startup code:

```
services.AddSenparcAI(Configuration); // register AI
```

> [View the code](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs#L88-L88)

```
registerService.UseSenparcAI(); // enable AI
```

> [View the code](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs#L452-L452)
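
To see where the two calls from step 5 live, the skeleton below sketches a typical `Startup.cs` layout: `AddSenparcAI` sits with the service registrations, and `UseSenparcAI` is called inside the CO2NET register callback. Apart from `services.AddSenparcAI(Configuration)` and `registerService.UseSenparcAI()`, everything here (including the `UseSenparcGlobal` overload and its parameters) is an illustrative assumption; the linked Startup.cs is authoritative.

```
// (using directives omitted; illustrative sketch only)
public void ConfigureServices(IServiceCollection services)
{
    // ... existing Senparc.Weixin / CO2NET service registrations ...
    services.AddSenparcAI(Configuration); // register AI
}

public void Configure(IApplicationBuilder app, IWebHostEnvironment env, IOptions<SenparcSetting> senparcSetting)
{
    // Illustrative only: the exact UseSenparcGlobal overload varies between sample versions.
    app.UseSenparcGlobal(env, senparcSetting.Value, registerService =>
    {
        // ... existing CO2NET / Weixin registrations ...
        registerService.UseSenparcAI(); // enable AI
    });
}
```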
44 changes: 44 additions & 0 deletions ChatStore.cs
@@ -0,0 +1,44 @@
/*----------------------------------------------------------------
Copyright (C) 2024 Senparc
File name: ChatStore.cs
Description: per-user Chat cache, isolated by user
Created: Senparc - 20240524
----------------------------------------------------------------*/

using Senparc.AI.Kernel.Handlers;

namespace Senparc.Weixin.MP.Sample.CommonService.AI.MessageHandlers
{
/// <summary>
/// Per-user Chat cache, isolated by user
/// </summary>
public class ChatStore
{
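/// <summary>
/// Current state of the user's AI chat session
/// </summary>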
public ChatStatus Status { get; set; }

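/// <summary>
/// Serialized conversation history handed back to Senparc.AI on each turn
/// </summary>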
public string History { get; set; }
}

/// <summary>
/// Chat status
/// </summary>
public enum ChatStatus
{
/// <summary>
/// Default state (may indicate a failed conversion)
/// </summary>
None,
/// <summary>
/// Chatting
/// </summary>
Chat,
/// <summary>
/// Paused
/// </summary>
Paused
}
}
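
For reference, the handler in the next file persists a `ChatStore` in the per-user message context as a JSON string. A minimal sketch of that round trip, using the CO2NET extension methods that the handler file imports (illustrative, not part of the commit):

```
// Both namespaces below are the ones CustomMessageHandler_AI.cs imports for ToJson() / GetObject<T>().
using Senparc.CO2NET.Extensions;
using Senparc.CO2NET.Helpers;
using Senparc.Weixin.MP.Sample.CommonService.AI.MessageHandlers;

// Serialize the chat state to a string, as the handler does before writing StorageData...
var store = new ChatStore { Status = ChatStatus.Chat, History = "" };
string stored = store.ToJson();

// ...and deserialize it again on the next request.
ChatStore restored = stored.GetObject<ChatStore>();
```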
205 changes: 205 additions & 0 deletions CustomMessageHandler_AI.cs
@@ -0,0 +1,205 @@
/*----------------------------------------------------------------
Copyright (C) 2024 Senparc
File name: CustomMessageHandler_AI.cs
Description: custom MessageHandler (AI methods)
Created: Senparc - 20240524
----------------------------------------------------------------*/

using System;
using System.Threading.Tasks;
using Senparc.AI.Entities;
using Senparc.AI.Kernel;
using Senparc.CO2NET.Extensions;
using Senparc.CO2NET.Helpers;
using Senparc.CO2NET.Trace;
using Senparc.NeuChar.Entities;
using Senparc.Weixin.MP.Entities;
using Senparc.Weixin.MP.Sample.CommonService.AI.MessageHandlers;

namespace Senparc.Weixin.Sample.CommonService.CustomMessageHandler
{
/// <summary>
/// Custom MessageHandler (Official Account)
/// </summary>
public partial class CustomMessageHandler
{

const string WELCOME_MESSAGE = @"
输入“p”暂停,可以暂时保留记忆
输入“e”退出,彻底删除记忆
[结果由 AI 生成,仅供参考]";

/// <summary>
/// Start an AI conversation for the current user
/// </summary>
/// <returns></returns>
private async Task<IResponseMessageBase> StartAIChatAsync()
{
var currentMessageContext = await base.GetCurrentMessageContext();


//Create a per-user conversation cache (because CurrentMessageContext is used, users are fully isolated from one another and conversations cannot get mixed up)
var storage = new ChatStore()
{
Status = ChatStatus.Chat,
History = ""
};

currentMessageContext.StorageData = storage.ToJson();//stored as a string to improve compatibility

await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//save to the cache

var responseMessage = base.CreateResponseMessage<ResponseMessageText>();
responseMessage.Content = "小嗨 Bot 已启动!" + WELCOME_MESSAGE;

return responseMessage;
}

/// <summary>
/// Attempt an AI conversation; returns null when the current user is not in an AI chat session
/// </summary>
/// <param name="requestMessage"></param>
/// <returns></returns>
private async Task<IResponseMessageBase> AIChatAsync(RequestMessageBase requestMessage)
{
var currentMessageContext = await base.GetCurrentMessageContext();

if (!(currentMessageContext.StorageData is string chatJson))
{
return null;
}

ChatStore chatStore;

try
{
chatStore = chatJson.GetObject<ChatStore>();
if (chatStore == null || chatStore.Status == ChatStatus.None || chatStore.History == null)
{
return null;
}
}
catch
{
return null;
}

try
{
if (requestMessage is RequestMessageText requestMessageText)
{
string prompt;
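// Commands: "E" ends the session and clears the stored state, "P" pauses it,
// "AI" resumes a paused session; any other text is sent to the model as the prompt.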

if (requestMessageText.Content.Equals("E", StringComparison.OrdinalIgnoreCase))
{
prompt = $"我即将结束对话,请发送一段文字和我告别,并提醒我:输入“AI”可以再次启动对话。";

//clear the stored chat state
await UpdateMessageContext(currentMessageContext, null);
}
else if (requestMessageText.Content.Equals("P", StringComparison.OrdinalIgnoreCase))
{
prompt = $"我即将临时暂停对话,请发送一段文字和我告别,并提醒我:输入“AI”可以再次启动对话。请记住,下次启动会话时,发送再次欢迎我回来的信息。";

// update the stored chat state
chatStore.Status = ChatStatus.Paused;
await UpdateMessageContext(currentMessageContext, chatStore);
}
else if (chatStore.Status == ChatStatus.Paused)
{
if (requestMessageText.Content.Equals("AI", StringComparison.OrdinalIgnoreCase))
{
prompt = @"我将重新开始对话,请发送一段欢迎信息,并且在最后提示我(注意保留换行):" + WELCOME_MESSAGE;

// update the stored chat state
chatStore.Status = ChatStatus.Chat;
await UpdateMessageContext(currentMessageContext, chatStore);
}
else
{
return null;
}
}
else
{
prompt = requestMessageText.Content;
}

#region Call the AI model in the classic Chat mode

/* Model configuration
* Note: the AI model parameters must be configured in the <SenparcAiSetting> node of appsettings.json, otherwise the AI capabilities cannot be used
*/
var setting = (SenparcAiSetting)Senparc.AI.Config.SenparcAiSetting;//can also be left out, in which case it is resolved automatically

//model request parameters
var parameter = new PromptConfigParameter()
{
MaxTokens = 2000,
Temperature = 0.7,
TopP = 0.5,
};

//maximum number of AI chat records to keep
var maxHistoryCount = 10;

//default SystemMessage (modify as needed)
var systemMessage = Senparc.AI.DefaultSetting.DEFAULT_SYSTEM_MESSAGE;

var aiHandler = new SemanticAiHandler(setting);
var iWantToRun = aiHandler.ChatConfig(parameter,
userId: "Jeffrey",
maxHistoryStore: maxHistoryCount,
chatSystemMessage: systemMessage,
senparcAiSetting: setting).iWantToRun;

//inject the chat history (alternatively, cache the iWantToRun object itself; it keeps the history automatically, so it does not need to be read or saved on every turn)
iWantToRun.StoredAiArguments.Context["history"] = chatStore.History;

//run the chat request (note: WeChat requires all text to be returned in a single response, so the streaming output popular with AI services is not used here)
var result = await aiHandler.ChatAsync(iWantToRun, prompt);

#endregion


//save the chat history
chatStore.History = iWantToRun.StoredAiArguments.Context["history"]?.ToString();
await UpdateMessageContext(currentMessageContext, chatStore);

//build the response message
var responseMessage = base.CreateResponseMessage<ResponseMessageText>();
responseMessage.Content = result.OutputString;
return responseMessage;
}
else
{
var responseMessage = base.CreateResponseMessage<ResponseMessageText>();
responseMessage.Content = "暂时不支持此数据格式!";
return responseMessage;
}
}
catch (Exception ex)
{
SenparcTrace.BaseExceptionLog(ex);

var responseMessage = base.CreateResponseMessage<ResponseMessageText>();
responseMessage.Content = "系统忙,请稍后再试!";
return responseMessage;
}

}

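/// <summary>
/// Persist the chat state in the current user's message context (pass null to clear it)
/// </summary>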
private async Task UpdateMessageContext(CustomMessageContext currentMessageContext, ChatStore chatStore)
{
currentMessageContext.StorageData = chatStore == null ? null : chatStore.ToJson();
await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//save to the cache
}
}
}
@@ -28,10 +28,12 @@ public override async Task OnExecutingAsync(CancellationToken cancellationToken)
{
//demo: MessageContext.StorageData

var currentMessageContext = await base.GetUnsafeMessageContext();//this method improves read/write efficiency under distributed caching; use base.GetCurrentMessageContext() when real-time data is needed
if (currentMessageContext.StorageData == null || !(currentMessageContext.StorageData is int))
//var currentMessageContext = await base.GetUnsafeMessageContext();//this method improves read/write efficiency under distributed caching; use base.GetCurrentMessageContext() when real-time data is needed
var currentMessageContext = await base.GetCurrentMessageContext();

if (currentMessageContext.StorageData == null)
{
currentMessageContext.StorageData = (int)0;
currentMessageContext.StorageData = 0;
//await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//save to the cache
}
await base.OnExecutingAsync(cancellationToken);
@@ -41,10 +43,15 @@ public override async Task OnExecutedAsync(CancellationToken cancellationToken)
{
//demo: MessageContext.StorageData

var currentMessageContext = await base.GetUnsafeMessageContext();//this method improves read/write efficiency under distributed caching; use base.GetCurrentMessageContext() when real-time data is needed
currentMessageContext.StorageData = ((int)currentMessageContext.StorageData) + 1;
GlobalMessageContext.UpdateMessageContext(currentMessageContext);//save to the cache
await base.OnExecutedAsync(cancellationToken);
//var currentMessageContext = await base.GetUnsafeMessageContext();//this method improves read/write efficiency under distributed caching; use base.GetCurrentMessageContext() when real-time data is needed
var currentMessageContext = await base.GetCurrentMessageContext();

if (currentMessageContext.StorageData is int data)
{
currentMessageContext.StorageData = data + 1;
GlobalMessageContext.UpdateMessageContext(currentMessageContext);//save to the cache
await base.OnExecutedAsync(cancellationToken);
}
}
}
}
