// Example59_OpenAIFunctionCalling.cs — 144 lines (125 loc) · 6.12 KB
// Copyright (c) Microsoft. All rights reserved.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Xunit;
using Xunit.Abstractions;
namespace Examples;
// This example shows how to use OpenAI's tool calling capability via the chat completions interface.
public class Example59_OpenAIFunctionCalling : BaseTest
{
    public Example59_OpenAIFunctionCalling(ITestOutputHelper output) : base(output)
    {
    }

    [Fact]
    public async Task RunAsync()
    {
        // The same question is used by every example below.
        const string SkyQuestion = "Given the current time of day and weather, what is the likely color of the sky in Boston?";

        // Build a kernel with an OpenAI chat completion service and console trace logging.
        // We recommend the usage of OpenAI latest models for the best experience with tool calling.
        // i.e. gpt-3.5-turbo-1106 or gpt-4-1106-preview
        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
        kernelBuilder.AddOpenAIChatCompletion("gpt-3.5-turbo-1106", TestConfiguration.OpenAI.ApiKey);
        kernelBuilder.Services.AddLogging(services => services.AddConsole().SetMinimumLevel(LogLevel.Trace));
        Kernel kernel = kernelBuilder.Build();

        // Register a plugin with some helper functions we want to allow the model to utilize.
        KernelFunction currentTimeFunction = kernel.CreateFunctionFromMethod(
            () => DateTime.UtcNow.ToString("R"),
            "GetCurrentUtcTime",
            "Retrieves the current time in UTC.");
        KernelFunction cityWeatherFunction = kernel.CreateFunctionFromMethod(
            (string cityName) => cityName switch
            {
                "Boston" => "61 and rainy",
                "London" => "55 and cloudy",
                "Miami" => "80 and sunny",
                "Paris" => "60 and rainy",
                "Tokyo" => "50 and sunny",
                "Sydney" => "75 and sunny",
                "Tel Aviv" => "80 and sunny",
                _ => "31 and snowing",
            },
            "Get_Weather_For_City",
            "Gets the current weather for the specified city");
        kernel.ImportPluginFromFunctions("HelperFunctions", new[] { currentTimeFunction, cityWeatherFunction });

        WriteLine("======== Example 1: Use automated function calling with a non-streaming prompt ========");
        {
            // AutoInvokeKernelFunctions: the connector invokes any requested functions and loops until done.
            OpenAIPromptExecutionSettings executionSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
            WriteLine(await kernel.InvokePromptAsync(SkyQuestion, new(executionSettings)));
            WriteLine();
        }

        WriteLine("======== Example 2: Use automated function calling with a streaming prompt ========");
        {
            OpenAIPromptExecutionSettings executionSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
            await foreach (var streamingChunk in kernel.InvokePromptStreamingAsync(SkyQuestion, new(executionSettings)))
            {
                Write(streamingChunk);
            }
            WriteLine();
        }

        WriteLine("======== Example 3: Use manual function calling with a non-streaming prompt ========");
        {
            var chatService = kernel.GetRequiredService<IChatCompletionService>();
            var history = new ChatHistory();

            // EnableKernelFunctions: the model may request tool calls, but invoking them is our job.
            OpenAIPromptExecutionSettings executionSettings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
            history.AddUserMessage(SkyQuestion);

            while (true)
            {
                var reply = (OpenAIChatMessageContent)await chatService.GetChatMessageContentAsync(history, executionSettings, kernel);
                if (reply.Content is not null)
                {
                    Write(reply.Content);
                }

                // No pending tool calls means the model has produced its final answer.
                List<ChatCompletionsFunctionToolCall> functionCalls = reply.ToolCalls.OfType<ChatCompletionsFunctionToolCall>().ToList();
                if (functionCalls.Count == 0)
                {
                    break;
                }

                history.Add(reply);

                // Invoke each requested function and feed the result back as a tool message.
                foreach (var functionCall in functionCalls)
                {
                    string toolResult;
                    if (kernel.Plugins.TryGetFunctionAndArguments(functionCall, out KernelFunction? function, out KernelArguments? arguments))
                    {
                        var invocationResult = await function.InvokeAsync(kernel, arguments);
                        toolResult = JsonSerializer.Serialize(invocationResult.GetValue<object>());
                    }
                    else
                    {
                        toolResult = "Unable to find function. Please try again!";
                    }

                    // The tool-call id must accompany the result so the model can correlate them.
                    history.Add(new ChatMessageContent(
                        AuthorRole.Tool,
                        toolResult,
                        metadata: new Dictionary<string, object?>(1) { { OpenAIChatMessageContent.ToolIdProperty, functionCall.Id } }));
                }
            }
            WriteLine();
        }

        /* Uncomment this to try in a console chat loop.
        Console.WriteLine("======== Example 4: Use automated function calling with a streaming chat ========");
        {
            OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
            var chat = kernel.GetRequiredService<IChatCompletionService>();
            var chatHistory = new ChatHistory();
            while (true)
            {
                Console.Write("Question (Type \"quit\" to leave): ");
                string question = Console.ReadLine() ?? string.Empty;
                if (question == "quit")
                {
                    break;
                }
                chatHistory.AddUserMessage(question);
                StringBuilder sb = new();
                await foreach (var update in chat.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
                {
                    if (update.Content is not null)
                    {
                        Console.Write(update.Content);
                        sb.Append(update.Content);
                    }
                }
                chatHistory.AddAssistantMessage(sb.ToString());
                Console.WriteLine();
            }
        }*/
    }
}