diff --git a/.gitignore b/.gitignore
index 846abb8aa9..50368c4c13 100644
--- a/.gitignore
+++ b/.gitignore
@@ -63,3 +63,4 @@
 node_modules
 yarn.lock
 /Samples/Work/Senparc.Weixin.Sample.Work/App_Data/WeChat_Work
+/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.Development.json
diff --git a/Samples with AI/readme.md b/Samples with AI/readme.md
index 2f1abeb46f..458147f6ef 100644
--- a/Samples with AI/readme.md
+++ b/Samples with AI/readme.md
@@ -8,7 +8,7 @@ 内容将涵盖:
-1. [ ] 微信公众号 Chat 机器人(文字)
+1. [X] 微信公众号 Chat 机器人(文字) - 已于 2024 年 5 月 25 日上线
 2. [ ] 微信公众号 Chat 机器人(图片)
 3. [ ] 微信公众号 Chat 机器人(多模态混合)
 4. [ ] 微信公众号带搜索功能的 Chat 机器人
@@ -21,4 +21,53 @@ AI 功能将整合在 [/Samples/All/net8-mvc](../Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/) 集成案例中。
-更多说明将在对应功能上线后在本文档中补充。
\ No newline at end of file
+更多说明将在对应功能上线后在本文档中补充。
+
+## 【微信公众号 Chat 机器人(文字)】开发说明
+
+1. 使用常规步骤开发微信公众号
+2. 在 `OnTextRequestAsync` 事件中,加入用于进入 AI 对话状态的激活关键字(从节约 AI 用量、用户体验以及公众号实际功能考虑,建议不要始终保持 AI 对话状态),如:
+
+```
+.Keyword("AI", () => this.StartAIChatAsync().Result)
+```
+
+> [查看代码](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs#L194-L194)
+
+其中 `StartAIChatAsync()` 用于激活当前用户对话上下文的 AI 对话状态。
+
+> [查看代码](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs#L41-L41)
+
+
+3. 为了让系统能够优先判断当前是否处于 AI 对话状态,需要在上述代码执行前加入尝试 AI 对话的代码(二者的组合方式可参考下文的示意代码),如:
+
+```
+var aiResponseMessage = await this.AIChatAsync(requestMessage);
+if (aiResponseMessage != null)
+{
+    return aiResponseMessage;
+}
+```
+
+> [查看代码](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs#L179-L179)
+
+其中 `AIChatAsync()` 方法用于提供尝试向 AI 发送对话消息的业务逻辑(如果不在对话状态则返回 null,程序继续执行常规代码)。
+
+> [查看代码](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs#L41-L41)
+
+4. 配置 AI 参数,请参考 `Senparc.AI 【开发过程】第一步:配置账号`,在 appsettings.json 文件中追加 “SenparcAiSetting” 节点([查看](https://github.com/Senparc/Senparc.AI/blob/main/README.md#%E7%AC%AC%E4%B8%80%E6%AD%A5%E9%85%8D%E7%BD%AE%E8%B4%A6%E5%8F%B7))(注意:通常只需设置其中一种平台的配置)
+
+5. 引用 Senparc.AI.Kernel 包,并在启动代码中激活 Senparc.AI:
+
+```
+services.AddSenparcAI(Configuration) // 注册 AI
+```
+
+> [查看代码](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs#L88-L88)
+
+```
+registerService.UseSenparcAI();// 启用 AI
+```
+
+> [查看代码](https://github.com/JeffreySu/WeiXinMPSDK/blob/f28a5995b3e5f01b3be384b5c7462324ec6f0886/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs#L452-L452)
+
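To make steps 2–3 of the guide above easier to follow, here is a minimal editor's sketch of how the two snippets combine inside `OnTextRequestAsync`. It is an outline only, not the sample's actual code: the real `CustomMessageHandler` routes the "AI" keyword through its `StartHandler()` keyword chain (see the 查看代码 links above), the response types assume the usual Senparc `MessageHandler` signatures, and the default reply text is a placeholder.

```
// Editor's sketch of steps 2-3; the linked CustomMessageHandler.cs is the authoritative version.
// This method would live in the same partial CustomMessageHandler class as StartAIChatAsync()/AIChatAsync().
public override async Task<IResponseMessageBase> OnTextRequestAsync(RequestMessageText requestMessage)
{
    // Step 3: if this user is already in an AI conversation, let the AI answer first.
    var aiResponseMessage = await this.AIChatAsync(requestMessage);
    if (aiResponseMessage != null)
    {
        return aiResponseMessage;
    }

    // Step 2: not chatting yet, so check the activation keyword "AI" before the regular logic
    // (the real sample does this inside its requestMessage.StartHandler() keyword chain).
    if (requestMessage.Content.Equals("AI", StringComparison.OrdinalIgnoreCase))
    {
        return await this.StartAIChatAsync();
    }

    // ...the sample's other keywords and its default reply follow here...
    var defaultResponseMessage = base.CreateResponseMessage<ResponseMessageText>();
    defaultResponseMessage.Content = "输入“AI”即可开始对话";
    return defaultResponseMessage;
}
```

The ordering matters: `AIChatAsync` runs before any keyword matching, so once a user is in the Chat state every text message goes to the model instead of the normal menu logic, and the "AI" keyword only has to handle the initial activation.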
diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs
new file mode 100644
index 0000000000..862957d002
--- /dev/null
+++ b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/ChatStore.cs
@@ -0,0 +1,44 @@
+/*----------------------------------------------------------------
+    Copyright (C) 2024 Senparc
+
+    文件名:ChatStore.cs
+    文件功能描述:按个人信息隔离的 Chat 缓存
+
+
+    创建标识:Senparc - 20240524
+
+----------------------------------------------------------------*/
+
+using Senparc.AI.Kernel.Handlers;
+
+namespace Senparc.Weixin.MP.Sample.CommonService.AI.MessageHandlers
+{
+    /// <summary>
+    /// 按个人信息隔离的 Chat 缓存
+    /// </summary>
+    public class ChatStore
+    {
+        public ChatStatus Status { get; set; }
+
+        public string History { get; set; }
+    }
+
+    /// <summary>
+    /// 聊天状态
+    /// </summary>
+    public enum ChatStatus
+    {
+        /// <summary>
+        /// 默认状态(可能是转换失败)
+        /// </summary>
+        None,
+        /// <summary>
+        /// 聊天中
+        /// </summary>
+        Chat,
+        /// <summary>
+        /// 暂停
+        /// </summary>
+        Paused
+    }
+}
diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs
new file mode 100644
index 0000000000..742a2050e5
--- /dev/null
+++ b/Samples/All/Senparc.Weixin.Sample.CommonService/AI/MessageHandlers/CustomMessageHandler_AI.cs
@@ -0,0 +1,205 @@
+/*----------------------------------------------------------------
+    Copyright (C) 2024 Senparc
+
+    文件名:CustomMessageHandler_AI.cs
+    文件功能描述:自定义MessageHandler(AI 方法)
+
+
+    创建标识:Senparc - 20240524
+
+----------------------------------------------------------------*/
+
+using System;
+using System.Threading.Tasks;
+using Senparc.AI.Entities;
+using Senparc.AI.Kernel;
+using Senparc.CO2NET.Extensions;
+using Senparc.CO2NET.Helpers;
+using Senparc.CO2NET.Trace;
+using Senparc.NeuChar.Entities;
+using Senparc.Weixin.MP.Entities;
+using Senparc.Weixin.MP.Sample.CommonService.AI.MessageHandlers;
+
+namespace Senparc.Weixin.Sample.CommonService.CustomMessageHandler
+{
+    /// <summary>
+    /// 自定义MessageHandler(公众号)
+    /// </summary>
+    public partial class CustomMessageHandler
+    {
+
+        const string WELCOME_MESSAGE = @"
+
+输入“p”暂停,可以暂时保留记忆
+输入“e”退出,彻底删除记忆
+
+[结果由 AI 生成,仅供参考]";
+
+        /// <summary>
+        /// 开始 AI 对话
+        /// </summary>
+        /// <returns></returns>
+        private async Task StartAIChatAsync()
+        {
+            var currentMessageContext = await base.GetCurrentMessageContext();
+
+
+            //新建个人对话缓存(由于使用了 CurrentMessageContext,多用户之间完全隔离,对话不会串)
+            var storage = new ChatStore()
+            {
+                Status = ChatStatus.Chat,
+                History = ""
+            };
+
+            currentMessageContext.StorageData = storage.ToJson();//为了提升兼容性,采用字符串格式
+
+            await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存
+
+            var responseMessage = base.CreateResponseMessage();
+            responseMessage.Content = "小嗨 Bot 已启动!"
+ WELCOME_MESSAGE; + + return responseMessage; + } + + /// + /// 开始 AI 对话 + /// + /// + /// + private async Task AIChatAsync(RequestMessageBase requestMessage) + { + var currentMessageContext = await base.GetCurrentMessageContext(); + + if (!(currentMessageContext.StorageData is string chatJson)) + { + return null; + } + + ChatStore chatStore; + + try + { + chatStore = chatJson.GetObject(); + if (chatStore == null || chatStore.Status == ChatStatus.None || chatStore.History == null) + { + return null; + } + } + catch + { + return null; + } + + try + { + if (requestMessage is RequestMessageText requestMessageText) + { + string prompt; + + if (requestMessageText.Content.Equals("E", StringComparison.OrdinalIgnoreCase)) + { + prompt = $"我即将结束对话,请发送一段文字和我告别,并提醒我:输入“AI”可以再次启动对话。"; + + //消除状态记录 + await UpdateMessageContext(currentMessageContext, null); + } + else if (requestMessageText.Content.Equals("P", StringComparison.OrdinalIgnoreCase)) + { + prompt = $"我即将临时暂停对话,请发送一段文字和我告别,并提醒我:输入“AI”可以再次启动对话。请记住,下次启动会话时,发送再次欢迎我回来的信息。"; + + // 修改状态记录 + chatStore.Status = ChatStatus.Paused; + await UpdateMessageContext(currentMessageContext, chatStore); + } + else if (chatStore.Status == ChatStatus.Paused) + { + if (requestMessageText.Content.Equals("AI", StringComparison.OrdinalIgnoreCase)) + { + prompt = @"我将重新开始对话,请发送一段欢迎信息,并且在最后提示我(注意保留换行):" + WELCOME_MESSAGE; + + // 修改状态记录 + chatStore.Status = ChatStatus.Chat; + await UpdateMessageContext(currentMessageContext, chatStore); + } + else + { + return null; + } + } + else + { + prompt = requestMessageText.Content; + } + + #region 请求 AI 模型进入 Chat 的经典模式 + + /* 模型配置 + * 注意:需要在 appsettings.json 中的 节点配置 AI 模型参数,否则无法使用 AI 能力 + */ + var setting = (SenparcAiSetting)Senparc.AI.Config.SenparcAiSetting;//也可以留空,将自动获取 + + //模型请求参数 + var parameter = new PromptConfigParameter() + { + MaxTokens = 2000, + Temperature = 0.7, + TopP = 0.5, + }; + + //最大保存 AI 对话记录数 + var maxHistoryCount = 10; + + //默认 SystemMessage(可根据自己需要修改) + var systemMessage = Senparc.AI.DefaultSetting.DEFAULT_SYSTEM_MESSAGE; + + var aiHandler = new SemanticAiHandler(setting); + var iWantToRun = aiHandler.ChatConfig(parameter, + userId: "Jeffrey", + maxHistoryStore: maxHistoryCount, + chatSystemMessage: systemMessage, + senparcAiSetting: setting).iWantToRun; + + //注入历史记录(也可以把 iWantToRun 对象缓存起来,其中会自动包含 history,不需要每次读取或者保存) + iWantToRun.StoredAiArguments.Context["history"] = chatStore.History; + + //获取请求(注意:因为微信需要一次返回所有文本,所以此处不使用 AI 流行的 Stream(流式)输出 + var result = await aiHandler.ChatAsync(iWantToRun, prompt); + + #endregion + + + //保存历史记录 + chatStore.History = iWantToRun.StoredAiArguments.Context["history"]?.ToString(); + await UpdateMessageContext(currentMessageContext, chatStore); + + //组织返回消息 + var responseMessage = base.CreateResponseMessage(); + responseMessage.Content = result.OutputString; + return responseMessage; + } + else + { + var responseMessage = base.CreateResponseMessage(); + responseMessage.Content = "暂时不支持此数据格式!"; + return responseMessage; + } + } + catch (Exception ex) + { + SenparcTrace.BaseExceptionLog(ex); + + var responseMessage = base.CreateResponseMessage(); + responseMessage.Content = "系统忙,请稍后再试!"; + return responseMessage; + } + + } + + private async Task UpdateMessageContext(CustomMessageContext currentMessageContext, ChatStore chatStore) + { + currentMessageContext.StorageData = chatStore==null?null : chatStore.ToJson(); + await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存 + } + } +} \ No newline at end of file diff --git 
a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs index 8c356ede35..86b96a8275 100644 --- a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs +++ b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/Async/CustomMessageHandlerAsync.cs @@ -28,10 +28,12 @@ public override async Task OnExecutingAsync(CancellationToken cancellationToken) { //演示:MessageContext.StorageData - var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext() - if (currentMessageContext.StorageData == null || !(currentMessageContext.StorageData is int)) + //var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext() + var currentMessageContext = await base.GetCurrentMessageContext(); + + if (currentMessageContext.StorageData == null) { - currentMessageContext.StorageData = (int)0; + currentMessageContext.StorageData = 0; //await GlobalMessageContext.UpdateMessageContextAsync(currentMessageContext);//储存到缓存 } await base.OnExecutingAsync(cancellationToken); @@ -41,10 +43,15 @@ public override async Task OnExecutedAsync(CancellationToken cancellationToken) { //演示:MessageContext.StorageData - var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext() - currentMessageContext.StorageData = ((int)currentMessageContext.StorageData) + 1; - GlobalMessageContext.UpdateMessageContext(currentMessageContext);//储存到缓存 - await base.OnExecutedAsync(cancellationToken); + //var currentMessageContext = await base.GetUnsafeMessageContext();//为了在分布式缓存下提高读写效率,使用此方法,如果需要获取实时数据,应该使用 base.GetCurrentMessageContext() + var currentMessageContext = await base.GetCurrentMessageContext(); + + if (currentMessageContext.StorageData is int data) + { + currentMessageContext.StorageData = data + 1; + GlobalMessageContext.UpdateMessageContext(currentMessageContext);//储存到缓存 + await base.OnExecutedAsync(cancellationToken); + } } } } \ No newline at end of file diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs index 550be225fc..97cbc32ad2 100644 --- a/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs +++ b/Samples/All/Senparc.Weixin.Sample.CommonService/MessageHandlers/CustomMessageHandler/CustomMessageHandler.cs @@ -127,7 +127,7 @@ public override async Task OnTextRequestAsync(RequestMessa { //说明:实际项目中这里的逻辑可以交给Service处理具体信息,参考OnLocationRequest方法或/Service/LocationSercice.cs - #region 书中例子 + #region 《微信开发深度解析》书中例子 //if (requestMessage.Content == "你好") //{ // var responseMessage = base.CreateResponseMessage(); @@ -173,10 +173,27 @@ public override async Task OnTextRequestAsync(RequestMessa #endregion + + #region 查看是否需要进入 AI 对话 + + var aiResponseMessage = await this.AIChatAsync(requestMessage); + if (aiResponseMessage != null) + { + return aiResponseMessage; + } + + #endregion + + var defaultResponseMessage = base.CreateResponseMessage(); var requestHandler = await requestMessage.StartHandler() - 
//关键字不区分大小写,按照顺序匹配成功后将不再运行下面的逻辑 + /* 关键字不区分大小写,按照顺序匹配成功后将不再运行下面的逻辑 */ + + //启动 AI 对话 + .Keyword("AI", () => this.StartAIChatAsync().Result) + + //测试浏览器约束 .Keyword("约束", () => { defaultResponseMessage.Content = @@ -185,9 +202,10 @@ public override async Task OnTextRequestAsync(RequestMessa 或: 点击这里进行客户端约束测试(地址:https://sdk.weixin.senparc.com/FilterTest/Redirect),如果在微信外打开将重定向一次URL。"; return defaultResponseMessage; - }). + }) + //匹配任一关键字 - Keywords(new[] { "托管", "代理" }, () => + .Keywords(new[] { "托管", "代理" }, () => { //开始用代理托管,把请求转到其他服务器上去,然后拿回结果 //甚至也可以将所有请求在DefaultResponseMessage()中托管到外部。 @@ -245,6 +263,8 @@ public override async Task OnTextRequestAsync(RequestMessa } return agentResponseMessage;//可能出现多种类型,直接在这里返回 }) + + //命中任意一个关键字即可触发 .Keywords(new[] { "测试", "退出" }, () => { /* @@ -375,8 +395,6 @@ public override async Task OnTextRequestAsync(RequestMessa defaultResponseMessage.Content = sb.ToString(); return defaultResponseMessage; }) - - //选择菜单,关键字:101(微信服务器端最终格式:id="s:101",content="满意") .SelectMenuKeyword("101", () => { @@ -449,8 +467,7 @@ public override async Task OnTextRequestAsync(RequestMessa var currentMessageContext = await base.GetCurrentMessageContext(); if (currentMessageContext.RequestMessages.Count > 1) { - result.AppendFormat("您此前还发送了如下消息({0}/{1}):\r\n", currentMessageContext.RequestMessages.Count, - currentMessageContext.StorageData); + result.AppendFormat("您此前还发送了如下消息({0}):\r\n", currentMessageContext.RequestMessages.Count); for (int i = currentMessageContext.RequestMessages.Count - 2; i >= 0; i--) { var historyMessage = currentMessageContext.RequestMessages[i]; diff --git a/Samples/All/Senparc.Weixin.Sample.CommonService/Senparc.Weixin.Net8Sample.CommonService.csproj b/Samples/All/Senparc.Weixin.Sample.CommonService/Senparc.Weixin.Net8Sample.CommonService.csproj index 37db2bc5ac..e223ea5e63 100644 --- a/Samples/All/Senparc.Weixin.Sample.CommonService/Senparc.Weixin.Net8Sample.CommonService.csproj +++ b/Samples/All/Senparc.Weixin.Sample.CommonService/Senparc.Weixin.Net8Sample.CommonService.csproj @@ -19,6 +19,8 @@ + + diff --git a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj index 82580d729d..2bf6729490 100644 --- a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj +++ b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Senparc.Weixin.Sample.net8.csproj @@ -3,11 +3,26 @@ net8.0 6.9.0 + enable 35e58786-0820-4cde-b1ff-f4c6198d00f7 false + + + + + + + + + + + + + + @@ -47,12 +62,28 @@ Always + + Never + + + Never + + + Never + + + Never + + + + + + - - - + + diff --git a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs index 8eeb35369e..18551545a0 100644 --- a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs +++ b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/Startup.cs @@ -8,6 +8,7 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Options; using NuGet.Protocol; +using Senparc.AI.Kernel; using Senparc.CO2NET; using Senparc.CO2NET.AspNet; using Senparc.CO2NET.Cache; @@ -84,6 +85,7 @@ public void ConfigureServices(IServiceCollection services) services.AddSenparcWeixinServices(Configuration, Env)//Senparc.Weixin 注册(必须) .AddSenparcWebSocket() //Senparc.WebSocket 注册(按需) -- DPBMARK WebSocket DPBMARK_END + .AddSenparcAI(Configuration) //注册 Senparc.AI,提供 AI 能力(可选) ; //启用 WebApi(可选) @@ -445,6 +447,12 @@ public void 
ConfigureServices(IServiceCollection services) #endregion + #region AI + + registerService.UseSenparcAI();//启用 AI(可选) + + #endregion + app.UseAuthorization();//需要在注册微信 SDK 之后执行 app.UseEndpoints(endpoints => diff --git a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.Development.json b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.Development.json deleted file mode 100644 index e203e9407e..0000000000 --- a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.Development.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Debug", - "System": "Information", - "Microsoft": "Information" - } - } -} diff --git a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.json b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.json index ca3f8340a8..cd1aee9057 100644 --- a/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.json +++ b/Samples/All/net8-mvc/Senparc.Weixin.Sample.Net8/appsettings.json @@ -149,5 +149,32 @@ } //更多,任意数量、任意模块、任意组合... } + }, + //Senparc.AI 设置 + "SenparcAiSetting": { + "IsDebug": true, + "AiPlatform": "AzureOpenAI", //注意修改为自己平台对应的枚举值 + "NeuCharAIKeys": { + "ApiKey": "", //在 https://www.neuchar.com/Developer/AiApp 申请 + "NeuCharEndpoint": "https://www.neuchar.com/", //查看 ApiKey 时可看到 DeveloperId + "ModelName": { + "Chat": "gpt-35-turbo" + } + }, + "AzureOpenAIKeys": { + "ApiKey": "", //TODO:加密 + "AzureEndpoint": "", //https://xxxx.openai.azure.com/ + "AzureOpenAIApiVersion": "2022-12-01", //调用限制请参考:https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quotas-limits + "ModelName": { + "Chat": "gpt-35-turbo" + } + }, + "OpenAIKeys": { + "ApiKey": "", //TODO:加密 + "OrganizationId": "", + "ModelName": { + "Chat": "gpt-35-turbo" + } + } } } diff --git a/readme.en.md b/readme.en.md index 7614f09b3f..a7df7bff0c 100644 --- a/readme.en.md +++ b/readme.en.md @@ -39,8 +39,9 @@ Since its inception in 2013, we have been continuously updating the project and If you like this project and want us to continue improving it, please give us a ★Star :) ## 🔔 Announcement - ->
+ +> 🔥 AI chatbot wechat integration Sample is online! [click here to view](/Samples%20with%20AI)
+> 📺
At the 2024 Microsoft MVP Global Summit, I had the privilege of interviewing Scott Hanselman and discussed a range of topics related to AI. We will continue our conversation on April 13, 2024, during the "Senparc 3.14 Technology Open Day" event, where we will delve deeper into the impact of AI and Agents on the future of the software industry and open-source software. Everyone is welcome to follow along! [Watch the video](https://github.com/JeffreySu/WeiXinMPSDK/wiki/%E5%BE%AE%E8%BD%AF-MVP-%E5%85%A8%E7%90%83%E5%B3%B0%E4%BC%9A%E9%87%87%E8%AE%BF-Scott-Hanselman)
> ⚡ Sample now supports .NET 8.0 (backward compatible), [click here to view](/Samples/All/net8-mvc)!
> 🔒 [Wechat Pay V3 module (V1.0)](https://github.com/JeffreySu/WeiXinMPSDK/tree/master/src/Senparc.Weixin.TenPay/Senparc.Weixin.TenPayV3) is now online! [Nuget](https://www.nuget.org/packages/Senparc.Weixin.TenPayV3)
diff --git a/readme.md b/readme.md index 84bd63797c..3fd2afea62 100644 --- a/readme.md +++ b/readme.md @@ -41,7 +41,8 @@ Senparc.Weixin SDK 是目前使用率最高的微信 .NET SDK,也是国内最 ## 🔔 公告 ->
+> 🔥 AI 聊天机器人微信集成 Sample 已上线![查看](/Samples%20with%20AI)
+> 📺
在 2024 微软 MVP 全球峰会上有幸采访到 Scott Hanselman,并交流了一部分有关 AI 的话题,我们将会在 2024 年 4 月 13 日的“盛派 3.14 技术开放日”活动上再次进行对话,深入交流关于 AI 和 Agent 对未来软件行业以及开源软件的影响,欢迎大家关注![查看视频](https://github.com/JeffreySu/WeiXinMPSDK/wiki/%E5%BE%AE%E8%BD%AF-MVP-%E5%85%A8%E7%90%83%E5%B3%B0%E4%BC%9A%E9%87%87%E8%AE%BF-Scott-Hanselman)
> ⚡ Sample 已支持 .NET 8.0(向下兼容),[点击这里查看](/Samples/All/net8-mvc)!
> 🔒 [微信支付 V3 模块(V1.0)](https://github.com/JeffreySu/WeiXinMPSDK/tree/master/src/Senparc.Weixin.TenPay/Senparc.Weixin.TenPayV3)已上线![Nuget](https://www.nuget.org/packages/Senparc.Weixin.TenPayV3)
diff --git a/src/Senparc.Weixin.AspNet/RegisterServices/SenparcWeixinRegisterServiceExtension.cs b/src/Senparc.Weixin.AspNet/RegisterServices/SenparcWeixinRegisterServiceExtension.cs
index ef02a278be..9592c680d5 100644
--- a/src/Senparc.Weixin.AspNet/RegisterServices/SenparcWeixinRegisterServiceExtension.cs
+++ b/src/Senparc.Weixin.AspNet/RegisterServices/SenparcWeixinRegisterServiceExtension.cs
@@ -68,6 +68,13 @@ public static class RegisterServiceExtension
         ///
         public static IServiceCollection AddSenparcWeixinServices(this IServiceCollection serviceCollection, IConfiguration configuration, IHostEnvironment env, bool autoCreateApi = false)
         {
+            //// 加载 appsettings.json
+            //configuration.AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
+            //             .AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true, reloadOnChange: true);
+
+            //// 加载环境变量
+            //configuration.AddEnvironmentVariables();
+
             serviceCollection.Configure<SenparcWeixinSetting>(configuration.GetSection("SenparcWeixinSetting"));
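For readers unfamiliar with the pattern in the context line above, `Configure<TSetting>(configuration.GetSection("..."))` is the standard .NET options binding; the new "SenparcAiSetting" node added to appsettings.json in this PR is read in the same spirit by `AddSenparcAI(Configuration)`. The snippet below is a self-contained, generic illustration of that mechanism only; `DemoSetting` is a hypothetical stand-in, not a type from the SDK.

```
// Generic illustration of the Configure<T>(configuration.GetSection(...)) binding pattern.
// DemoSetting is a hypothetical stand-in for a Senparc setting class such as SenparcWeixinSetting.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

public class DemoSetting
{
    public bool IsDebug { get; set; }
    public string Token { get; set; }
}

public static class OptionsBindingDemo
{
    public static void Main()
    {
        // Stand-in for appsettings.json content (a "DemoSetting" section with two values).
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string>
            {
                ["DemoSetting:IsDebug"] = "true",
                ["DemoSetting:Token"] = "demo-token"
            })
            .Build();

        var services = new ServiceCollection();

        // Same call shape as the context line above:
        // serviceCollection.Configure<SenparcWeixinSetting>(configuration.GetSection("SenparcWeixinSetting"));
        services.Configure<DemoSetting>(configuration.GetSection("DemoSetting"));

        using var provider = services.BuildServiceProvider();
        var setting = provider.GetRequiredService<IOptions<DemoSetting>>().Value;
        Console.WriteLine($"IsDebug={setting.IsDebug}, Token={setting.Token}");
    }
}
```

Once the corresponding JSON nodes exist, the bound objects can be resolved through `IOptions<T>`; in the SDK this is how `SenparcWeixinSetting` becomes available, and the "SenparcAiSetting" node added by this PR lives in the same appsettings.json consumed by the AI registration shown in the Startup.cs diff above.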