.Net: Function call content types (#5800)
Today, LLM function calling in SK is supported exclusively by the OpenAI
connector, and the function-calling model is specific to that connector.
The new AI connectors being added to SK that support function calling
would each introduce their own connector-specific model classes. That
design does not scale well from the connector-development perspective,
and it prevents SK consumer code from using connectors polymorphically.

This ADR describes the high-level details of the service-agnostic
function-calling model classes, while leaving the low-level details to
the implementation phase. Additionally, this ADR outlines the identified
options for various aspects of the design.
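
To make the intent concrete, here is a minimal sketch of the connector-agnostic manual function-calling loop these types enable. It is illustrative only, not code from this change: the `ManualFunctionCallingSketch` helper and its setup are assumptions, while the `FunctionCallContent`/`FunctionResultContent` calls mirror the usage shown in the diff below. Note that nothing in the loop references a concrete connector; it depends only on `IChatCompletionService` and the base `PromptExecutionSettings`.

```csharp
// Illustrative sketch only (not part of this change): a connector-agnostic
// manual function-calling loop built on the new content types.
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

public static class ManualFunctionCallingSketch
{
    public static async Task RunAsync(IChatCompletionService chat, Kernel kernel, PromptExecutionSettings settings)
    {
        var chatHistory = new ChatHistory();
        chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");

        while (true)
        {
            ChatMessageContent result = await chat.GetChatMessageContentAsync(chatHistory, settings, kernel);

            // Service-agnostic extraction: each connector maps its native tool-call
            // representation to FunctionCallContent items on the returned message.
            IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
            if (!functionCalls.Any())
            {
                break; // No more function calls; the model has produced its final answer.
            }

            // The message containing the call requests must precede their results in history.
            chatHistory.Add(result);

            foreach (FunctionCallContent functionCall in functionCalls)
            {
                FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel);
                chatHistory.Add(resultContent.ToChatMessage());
            }
        }
    }
}
```

The only connector-specific piece left is the choice of execution settings (for OpenAI, `ToolCallBehavior.EnableKernelFunctions`, as in the updated sample below).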

Requirements - #5153

### Description
ADR PR:  #5696

### Contribution Checklist


- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone 😄

---------

Co-authored-by: Stephen Toub <stoub@microsoft.com>
Co-authored-by: Chris <66376200+crickman@users.noreply.github.com>
Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
4 people committed Apr 17, 2024
1 parent c72080d commit c8ce249
Showing 18 changed files with 1,552 additions and 50 deletions.
dotnet/SK-dotnet.sln: 7 additions, 0 deletions

@@ -127,6 +127,7 @@ EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "System", "System", "{3CDE10B2-AE8F-4FC4-8D55-92D4AD32E144}"
 	ProjectSection(SolutionItems) = preProject
 		src\InternalUtilities\src\System\EnvExtensions.cs = src\InternalUtilities\src\System\EnvExtensions.cs
+		src\InternalUtilities\src\System\IListExtensions.cs = src\InternalUtilities\src\System\IListExtensions.cs
 		src\InternalUtilities\src\System\InternalTypeConverter.cs = src\InternalUtilities\src\System\InternalTypeConverter.cs
 		src\InternalUtilities\src\System\NonNullCollection.cs = src\InternalUtilities\src\System\NonNullCollection.cs
 		src\InternalUtilities\src\System\TypeConverterFactory.cs = src\InternalUtilities\src\System\TypeConverterFactory.cs
@@ -252,6 +253,11 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AgentSyntaxExamples", "samp
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Agents.Core", "src\Agents\Core\Agents.Core.csproj", "{91B8BEAF-4ADC-4014-AC6B-C563F41A8DD1}"
 EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Functions", "Functions", "{4DFB3897-0319-4DF2-BCFE-E6E0648297D2}"
+	ProjectSection(SolutionItems) = preProject
+		src\InternalUtilities\src\Functions\FunctionName.cs = src\InternalUtilities\src\Functions\FunctionName.cs
+	EndProjectSection
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -691,6 +697,7 @@ Global
 		{F238CE75-C17C-471A-AC9A-6C94D3D946FD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9}
 		{9753B382-8E17-4B03-B0D3-790F3466CB7D} = {FA3720F1-C99A-49B2-9577-A940257098BF}
 		{91B8BEAF-4ADC-4014-AC6B-C563F41A8DD1} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9}
+		{4DFB3897-0319-4DF2-BCFE-E6E0648297D2} = {958AD708-F048-4FAF-94ED-D2F2B92748B9}
 	EndGlobalSection
 	GlobalSection(ExtensibilityGlobals) = postSolution
 		SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
(Second changed file; file path not captured in this view.)

@@ -3,9 +3,7 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
-using System.Text.Json;
 using System.Threading.Tasks;
-using Azure.AI.OpenAI;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel;
@@ -70,44 +68,96 @@ await foreach (var update in kernel.InvokePromptStreamingAsync("Given the curren
 WriteLine("======== Example 3: Use manual function calling with a non-streaming prompt ========");
 {
     var chat = kernel.GetRequiredService<IChatCompletionService>();
-    var chatHistory = new ChatHistory();

     OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };

+    var chatHistory = new ChatHistory();
     chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");

     while (true)
     {
-        var result = (OpenAIChatMessageContent)await chat.GetChatMessageContentAsync(chatHistory, settings, kernel);
-
+        ChatMessageContent result = await chat.GetChatMessageContentAsync(chatHistory, settings, kernel);
         if (result.Content is not null)
         {
             Write(result.Content);
         }

-        List<ChatCompletionsFunctionToolCall> toolCalls = result.ToolCalls.OfType<ChatCompletionsFunctionToolCall>().ToList();
-        if (toolCalls.Count == 0)
+        IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
+        if (!functionCalls.Any())
         {
             break;
         }

-        chatHistory.Add(result);
-        foreach (var toolCall in toolCalls)
+        chatHistory.Add(result); // Adding LLM response containing function calls (requests) to chat history as it's required by LLMs.
+
+        foreach (var functionCall in functionCalls)
         {
-            string content = kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments) ?
-                JsonSerializer.Serialize((await function.InvokeAsync(kernel, arguments)).GetValue<object>()) :
-                "Unable to find function. Please try again!";
-
-            chatHistory.Add(new ChatMessageContent(
-                AuthorRole.Tool,
-                content,
-                metadata: new Dictionary<string, object?>(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }));
+            try
+            {
+                FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); // Executing each function.
+
+                chatHistory.Add(resultContent.ToChatMessage());
+            }
+            catch (Exception ex)
+            {
+                chatHistory.Add(new FunctionResultContent(functionCall, ex).ToChatMessage()); // Adding exception to chat history.
+                // or
+                //string message = "Error details that LLM can reason about.";
+                //chatHistory.Add(new FunctionResultContent(functionCall, message).ToChatMessageContent()); // Adding function result to chat history.
+            }
         }

         WriteLine();
     }
 }

 WriteLine();
+WriteLine("======== Example 4: Simulated function calling with a non-streaming prompt ========");
+{
+    var chat = kernel.GetRequiredService<IChatCompletionService>();
+
+    OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+
+    var chatHistory = new ChatHistory();
+    chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+    while (true)
+    {
+        ChatMessageContent result = await chat.GetChatMessageContentAsync(chatHistory, settings, kernel);
+        if (result.Content is not null)
+        {
+            Write(result.Content);
+        }
+
+        chatHistory.Add(result); // Adding LLM response containing function calls (requests) to chat history as it's required by LLMs.
+
+        IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
+        if (!functionCalls.Any())
+        {
+            break;
+        }
+
+        foreach (var functionCall in functionCalls)
+        {
+            FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); // Executing each function.
+
+            chatHistory.Add(resultContent.ToChatMessage());
+        }
+
+        // Adding a simulated function call to the connector response message.
+        var simulatedFunctionCall = new FunctionCallContent("weather-alert", id: "call_123");
+        result.Items.Add(simulatedFunctionCall);
+
+        // Adding a simulated function result to chat history.
+        var simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. Stay informed and follow safety instructions from authorities.";
+        chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage());
+
+        WriteLine();
+    }
+}

 /* Uncomment this to try in a console chat loop.
-Console.WriteLine("======== Example 4: Use automated function calling with a streaming chat ========");
+Console.WriteLine("======== Example 5: Use automated function calling with a streaming chat ========");
 {
     OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
     var chat = kernel.GetRequiredService<IChatCompletionService>();
(Diffs for the remaining 16 changed files are not shown.)
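
For the connector-development side of the argument, here is a hypothetical sketch, not code from this commit, of how a new connector could surface its native tool calls as the service-agnostic model. `NativeToolCall`, `ToolCallMapper`, and the `-` plugin/function name separator are illustrative assumptions; the commit does add a `FunctionName` helper (src\InternalUtilities\src\Functions\FunctionName.cs) intended for this kind of fully qualified name handling.

```csharp
// Hypothetical connector-side sketch, not part of this commit: translating a
// connector's native tool calls into service-agnostic FunctionCallContent items.
// NativeToolCall and the "-" name separator are assumptions for illustration.
using System.Collections.Generic;
using System.Text.Json;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

internal sealed record NativeToolCall(string Id, string QualifiedName, string ArgumentsJson);

internal static class ToolCallMapper
{
    public static ChatMessageContent ToChatMessage(string? text, IEnumerable<NativeToolCall> toolCalls)
    {
        var message = new ChatMessageContent(AuthorRole.Assistant, text);

        foreach (NativeToolCall call in toolCalls)
        {
            // Split a fully qualified "MyPlugin-MyFunction" name into its parts.
            int separator = call.QualifiedName.IndexOf('-');
            string? pluginName = separator > 0 ? call.QualifiedName[..separator] : null;
            string functionName = separator > 0 ? call.QualifiedName[(separator + 1)..] : call.QualifiedName;

            // Parse the JSON argument payload into KernelArguments.
            var arguments = new KernelArguments();
            var parsed = JsonSerializer.Deserialize<Dictionary<string, object?>>(call.ArgumentsJson)
                         ?? new Dictionary<string, object?>();
            foreach (KeyValuePair<string, object?> pair in parsed)
            {
                arguments[pair.Key] = pair.Value;
            }

            // Attach the service-agnostic call request to the assistant message; consumer
            // code can then retrieve it via FunctionCallContent.GetFunctionCalls(message).
            message.Items.Add(new FunctionCallContent(functionName, pluginName, call.Id, arguments));
        }

        return message;
    }
}
```

With a mapping like this in place, the manual function-calling loop shown earlier works unchanged against the new connector.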
