Example79_ChatCompletionAgent.cs
// Copyright (c) Microsoft. All rights reserved.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Experimental.Agents;
using Xunit;
using Xunit.Abstractions;
namespace Examples;
public class Example79_ChatCompletionAgent : BaseTest
{
/// <summary>
/// This example demonstrates a chat with a chat completion agent that uses the SK ChatCompletion API to communicate with the LLM.
/// </summary>
[Fact]
public async Task ChatWithAgentAsync()
{
var agent = new ChatCompletionAgent(
kernel: this._kernel,
instructions: "You act as a professional financial adviser. However, clients may not know the terminology, so please provide a simple explanation.",
description: "Financial Adviser",
executionSettings: new OpenAIPromptExecutionSettings
{
MaxTokens = 500,
Temperature = 0.7,
TopP = 1.0,
PresencePenalty = 0.0,
FrequencyPenalty = 0.0,
}
);
var prompt = PrintPrompt("I need help with my investment portfolio. Please guide me.");
PrintConversation(await agent.InvokeAsync(new[] { new ChatMessageContent(AuthorRole.User, prompt) }));
}
/// <summary>
/// This example demonstrates a round-robin chat between two chat completion agents using the TurnBasedChat collaboration experience.
/// </summary>
[Fact]
public async Task TurnBasedAgentsChatAsync()
{
var settings = new OpenAIPromptExecutionSettings
{
MaxTokens = 1500,
Temperature = 0.7,
TopP = 1.0,
PresencePenalty = 0.0,
FrequencyPenalty = 0.0,
};
var fitnessTrainer = new ChatCompletionAgent(
kernel: this._kernel,
instructions: "As a fitness trainer, suggest workout routines, and exercises for beginners. " +
"You are not a stress management expert, so refrain from recommending stress management strategies. " +
"Collaborate with the stress management expert to create a holistic wellness plan." +
"Always incorporate stress reduction techniques provided by the stress management expert into the fitness plan." +
"Always include your role at the beginning of each response, such as 'As a fitness trainer.",
description: "Fitness Trainer",
executionSettings: settings
);
var stressManagementExpert = new ChatCompletionAgent(
kernel: this._kernel,
instructions: "As a stress management expert, provide guidance on stress reduction strategies. " +
"Collaborate with the fitness trainer to create a simple and holistic wellness plan." +
"You are not a fitness expert; therefore, avoid recommending fitness exercises." +
"If the plan is not aligned with recommended stress reduction plan, ask the fitness trainer to rework it to incorporate recommended stress reduction techniques. " +
"Only you can stop the conversation by saying WELLNESS_PLAN_COMPLETE if suggested fitness plan is good." +
"Always include your role at the beginning of each response such as 'As a stress management expert.",
description: "Stress Management Expert",
executionSettings: settings
);
var chat = new TurnBasedChat(new[] { fitnessTrainer, stressManagementExpert }, (chatHistory, replies, turn) =>
turn >= 10 || // Limit the number of turns to 10
replies.Any(
message => message.Role == AuthorRole.Assistant &&
message.Items.OfType<TextContent>().Any(c => c.Text!.Contains("WELLNESS_PLAN_COMPLETE", StringComparison.InvariantCulture)))); // Exit when the message "WELLNESS_PLAN_COMPLETE" received from agent
var prompt = "I need help creating a simple wellness plan for a beginner. Please guide me.";
PrintConversation(await chat.SendMessageAsync(prompt));
}
/// <summary>
/// This example demonstrates the auto function invocation capability of the chat completion agent.
/// </summary>
[Fact]
public async Task AgentAutoFunctionInvocationAsync()
{
this._kernel.Plugins.AddFromType<CRM>();
var settings = new OpenAIPromptExecutionSettings
{
MaxTokens = 1500,
Temperature = 0.7,
TopP = 1.0,
PresencePenalty = 0.0,
FrequencyPenalty = 0.0,
ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
};
var agent = new ChatCompletionAgent(
kernel: this._kernel,
instructions: "As a fitness trainer, suggest workout routines, and exercises for beginners.",
description: "Fitness Trainer",
executionSettings: settings);
var prompt = PrintPrompt("I need help creating a simple wellness plan for my client James that is appropriate for his age. Please guide me.");
PrintConversation(await agent.InvokeAsync(new[] { new ChatMessageContent(AuthorRole.User, prompt) }));
}
/// <summary>
/// This example demonstrates the manual function invocation capability of the chat completion agent.
/// </summary>
[Fact]
public async Task AgentManualFunctionInvocationAsync()
{
this._kernel.Plugins.AddFromType<CRM>();
var settings = new OpenAIPromptExecutionSettings
{
MaxTokens = 1500,
Temperature = 0.7,
TopP = 1.0,
PresencePenalty = 0.0,
FrequencyPenalty = 0.0,
ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions,
ResultsPerPrompt = 1
};
KernelAgent agent = new ChatCompletionAgent(
kernel: this._kernel,
instructions: "As a fitness trainer, suggest workout routines, and exercises for beginners.",
description: "Fitness Trainer",
executionSettings: settings);
// Register a post-processor to handle the agent's response to manually invoke the CRM function.
agent = new AgentDecorator(agent, postProcessor: async messages =>
{
var message = messages.Single();
if (message is not OpenAIChatMessageContent openAIChatMessageContent)
{
return messages;
}
var toolCalls = openAIChatMessageContent.ToolCalls.OfType<ChatCompletionsFunctionToolCall>().ToList();
if (toolCalls.Count == 0)
{
return messages;
}
var result = new List<ChatMessageContent>(messages); // The original tool-calling "request" from the LLM is already included in the messages list.
if (message.Source is not KernelAgent kernelAgent)
{
throw new KernelException("The kernel agent is not available in the message metadata.");
}
foreach (var toolCall in toolCalls)
{
string content = "Unable to find function. Please try again!";
if (kernelAgent.Kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments))
{
var functionResult = await function.InvokeAsync(kernelAgent.Kernel, arguments);
// Custom logic can be added here to interpret the function's result and update, remove, or replace the agent's message.
content = JsonSerializer.Serialize(functionResult.GetValue<object>());
}
result.Add(new ChatMessageContent(
AuthorRole.Tool,
content,
metadata: new Dictionary<string, object?>(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }));
}
return result;
});
var prompt = PrintPrompt("I need help creating a simple wellness plan for my client James that is appropriate for his age. Please guide me.");
PrintConversation(await agent.InvokeAsync(new[] { new ChatMessageContent(AuthorRole.User, prompt) }));
}
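// Writes the prompt to the test output and returns it unchanged so it can be forwarded to the agent.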
private string PrintPrompt(string prompt)
{
this.WriteLine($"Prompt: {prompt}");
return prompt;
}
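// Writes every message in the conversation to the test output, grouped under its author role.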
private void PrintConversation(IEnumerable<ChatMessageContent> messages)
{
foreach (var message in messages)
{
this.WriteLine($"------------------------------- {message.Role} ------------------------------");
foreach (var textContent in message.Items.OfType<TextContent>())
{
this.WriteLine(textContent.Text);
}
this.WriteLine();
this.WriteLine();
}
this.WriteLine();
}
/// <summary>
/// The turn-based chat. For demonstration purposes only.
/// </summary>
private sealed class TurnBasedChat
{
public TurnBasedChat(IEnumerable<KernelAgent> agents, Func<IReadOnlyList<ChatMessageContent>, IEnumerable<ChatMessageContent>, int, bool> exitPredicate)
{
this._agents = agents.ToArray();
this._exitCondition = exitPredicate;
}
public async Task<IReadOnlyList<ChatMessageContent>> SendMessageAsync(string message, CancellationToken cancellationToken = default)
{
var chat = new List<ChatMessageContent>();
chat.Add(new ChatMessageContent(AuthorRole.User, message));
IReadOnlyList<ChatMessageContent> result = new List<ChatMessageContent>();
var turn = 0;
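// Round-robin loop: each agent takes a turn, its replies are appended to the shared chat history,
// and the loop stops as soon as the exit condition reports the conversation is complete.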
do
{
var agent = this._agents[turn % this._agents.Length];
result = await agent.InvokeAsync(chat, cancellationToken: cancellationToken);
chat.AddRange(result);
turn++;
}
while (!this._exitCondition(chat, result, turn));
return chat;
}
private readonly KernelAgent[] _agents;
private readonly Func<IReadOnlyList<ChatMessageContent>, IEnumerable<ChatMessageContent>, int, bool> _exitCondition;
}
/// <summary>
/// The agent decorator for pre/post-processing agent messages. This is for demonstration purposes only.
/// </summary>
private sealed class AgentDecorator : KernelAgent
{
private readonly Func<IReadOnlyList<ChatMessageContent>, Task<IReadOnlyList<ChatMessageContent>>>? _preProcessor;
private readonly Func<IReadOnlyList<ChatMessageContent>, Task<IReadOnlyList<ChatMessageContent>>>? _postProcessor;
private readonly KernelAgent _agent;
public AgentDecorator(
KernelAgent agent,
Func<IReadOnlyList<ChatMessageContent>, Task<IReadOnlyList<ChatMessageContent>>>? preProcessor = null,
Func<IReadOnlyList<ChatMessageContent>, Task<IReadOnlyList<ChatMessageContent>>>? postProcessor = null) : base(agent.Kernel, agent.Description)
{
this._agent = agent;
this._preProcessor = preProcessor;
this._postProcessor = postProcessor;
}
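// Runs the optional pre-processor on the incoming messages, delegates to the wrapped agent,
// and then runs the optional post-processor on the agent's replies.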
public override async Task<IReadOnlyList<ChatMessageContent>> InvokeAsync(IReadOnlyList<ChatMessageContent> messages, PromptExecutionSettings? executionSettings = null, CancellationToken cancellationToken = default)
{
if (this._preProcessor != null)
{
messages = await this._preProcessor(messages);
}
var result = await this._agent.InvokeAsync(messages, executionSettings, cancellationToken);
if (this._postProcessor != null)
{
result = await this._postProcessor(result);
}
return result;
}
}
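/// <summary>
/// A mock CRM plugin that supplies client details for the function invocation examples.
/// </summary>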
private sealed class CRM
{
[KernelFunction, Description("Returns client details")]
public static ClientDetails GetClientDetails(string name)
{
return name switch
{
"James" => new ClientDetails { Name = name, Age = 60 },
_ => throw new NotSupportedException($"Unknown client '{name}'."),
};
}
}
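/// <summary>
/// Client details returned by the <see cref="CRM"/> plugin.
/// </summary>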
private sealed class ClientDetails
{
public string Name { get; set; }
public byte Age { get; set; }
}
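/// <summary>
/// Builds the kernel with an OpenAI chat completion service shared by all examples in this class.
/// </summary>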
public Example79_ChatCompletionAgent(ITestOutputHelper output) : base(output)
{
this._kernel = Kernel.CreateBuilder()
.AddOpenAIChatCompletion("gpt-4-1106-preview", TestConfiguration.OpenAI.ApiKey)
.Build();
}
private readonly Kernel _kernel;
}