
Commit 17a2e41

.Net - Agents: Support assistant tool content generation (microsoft#6627)
### Motivation and Context

Emit kernel content for function-calling and code-interpreter steps in `OpenAIAssistantAgent` to maintain parity with `ChatCompletionAgent`.

### Description

- Emit `FunctionCallContent` prior to executing the function and processing its result (`OpenAIAssistantAgent`)
- Emit `FunctionResultContent` when processing a completed function tool-step (`OpenAIAssistantAgent`)
- Emit code-interpreter tool content, i.e. the generated code (`OpenAIAssistantAgent`)
- Enhance samples to demonstrate code-interpreter content
- Enhance the base-class `WriteLine` to avoid a `FormatException`

### Contribution Checklist

- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone 😄
1 parent a9d361a commit 17a2e41

17 files changed (+1039, -239 lines)
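
For orientation, here is a minimal, hypothetical sketch (not part of this commit) of how a caller might observe the `FunctionCallContent` and `FunctionResultContent` items that `OpenAIAssistantAgent` now emits, matching what `ChatCompletionAgent` already surfaces. The `MenuPlugin` class and the configuration values are illustrative assumptions; the agent setup follows the pattern of the samples below.

```csharp
// Hypothetical sketch, not part of this commit: observe tool content emitted by
// OpenAIAssistantAgent during invocation. Assumes implicit usings plus the
// sample project's TestConfiguration; MenuPlugin is a made-up plugin.
using System.ComponentModel;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;

Kernel kernel = new();
kernel.Plugins.AddFromType<MenuPlugin>();

OpenAIAssistantAgent agent =
    await OpenAIAssistantAgent.CreateAsync(
        kernel,
        config: new(TestConfiguration.OpenAI.ApiKey),
        new()
        {
            Instructions = "Answer questions about the menu.",
            ModelId = TestConfiguration.OpenAI.ChatModelId,
        });

AgentGroupChat chat = new();
chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "What is the special soup?"));

await foreach (ChatMessageContent message in chat.InvokeAsync(agent))
{
    // Function-calling steps now surface the call before the function executes...
    foreach (FunctionCallContent call in message.Items.OfType<FunctionCallContent>())
    {
        Console.WriteLine($"CALL: {call.PluginName}.{call.FunctionName}");
    }

    // ...and the result once the completed tool-step is processed.
    foreach (FunctionResultContent result in message.Items.OfType<FunctionResultContent>())
    {
        Console.WriteLine($"RESULT: {result.Result}");
    }

    Console.WriteLine($"# {message.Role}: '{message.Content}'");
}

await agent.DeleteAsync();

// Hypothetical plugin used only for this sketch.
public sealed class MenuPlugin
{
    [KernelFunction, Description("Provides the special soup of the day.")]
    public string GetSpecialSoup() => "Clam chowder";
}
```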

dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs

Lines changed: 1 addition & 1 deletion
```diff
@@ -94,7 +94,7 @@ Select which participant will take the next turn based on the conversation histo
         """;
 
     [Fact]
-    public async Task RunAsync()
+    public async Task NestedChatWithAggregatorAgentAsync()
     {
         Console.WriteLine($"! {Model}");
 
```

dotnet/samples/Concepts/Agents/MixedChat_Agents.cs

Lines changed: 2 additions & 2 deletions
```diff
@@ -33,7 +33,7 @@ Consider suggestions when refining an idea.
         """;
 
     [Fact]
-    public async Task RunAsync()
+    public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync()
     {
         // Define the agents: one of each type
         ChatCompletionAgent agentReviewer =
@@ -55,7 +55,7 @@ await OpenAIAssistantAgent.CreateAsync(
                     ModelId = this.Model,
                 });
 
-        // Create a nexus for agent interaction.
+        // Create a chat for agent interaction.
        var chat =
            new AgentGroupChat(agentWriter, agentReviewer)
            {
```

dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs

Lines changed: 2 additions & 2 deletions
```diff
@@ -21,7 +21,7 @@ public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseTest(out
     private const string AgentInstructions = "Create charts as requested without explanation.";
 
     [Fact]
-    public async Task RunAsync()
+    public async Task GenerateChartWithOpenAIAssistantAgentAsync()
     {
         // Define the agent
         OpenAIAssistantAgent agent =
@@ -77,7 +77,7 @@ async Task InvokeAgentAsync(string input)
 
             foreach (var fileReference in message.Items.OfType<FileReferenceContent>())
             {
-                Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: #{fileReference.FileId}");
+                Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: @{fileReference.FileId}");
            }
        }
    }
```
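
The updated sample only prints the referenced file id. As a hypothetical follow-on (not in this diff), the chart itself could be downloaded with `OpenAIFileService`, the same service the other samples in this PR use; saving with a `.png` extension is an assumption about the generated image format.

```csharp
// Hypothetical follow-on, not in this diff: fetch the generated chart by its file id.
// Uses fileReference from the loop above; ".png" is an assumed extension.
OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
BinaryContent fileContent = await fileService.GetFileContentAsync(fileReference.FileId);
await File.WriteAllBytesAsync($"{fileReference.FileId}.png", fileContent.Data?.ToArray() ?? []);
```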

dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs

Lines changed: 4 additions & 3 deletions
```diff
@@ -11,8 +11,10 @@ namespace Agents;
 /// </summary>
 public class OpenAIAssistant_CodeInterpreter(ITestOutputHelper output) : BaseTest(output)
 {
+    protected override bool ForceOpenAI => true;
+
     [Fact]
-    public async Task RunAsync()
+    public async Task UseCodeInterpreterToolWithOpenAIAssistantAgentAsync()
     {
         // Define the agent
         OpenAIAssistantAgent agent =
@@ -31,8 +33,7 @@ await OpenAIAssistantAgent.CreateAsync(
         // Respond to user input
         try
         {
-            await InvokeAgentAsync("What is the solution to `3x + 2 = 14`?");
-            await InvokeAgentAsync("What is the fibinacci sequence until 101?");
+            await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that are less than 101.");
         }
         finally
         {
```
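
With this change the code-interpreter's generated code also surfaces through `chat.InvokeAsync(agent)`. Below is a hedged sketch of how the sample's invocation loop could distinguish those messages, reusing the `chat` and `agent` variables from the sample; the `"code"` metadata key is an assumption about how such messages are tagged, not something this diff confirms.

```csharp
// Sketch, not in this diff: the agent now also emits the code-interpreter's
// generated code as intermediate messages. The "code" metadata key used to
// identify them here is an assumption.
await foreach (ChatMessageContent message in chat.InvokeAsync(agent))
{
    bool isGeneratedCode = message.Metadata?.ContainsKey("code") ?? false;
    Console.WriteLine($"# {message.Role}{(isGeneratedCode ? " (generated code)" : string.Empty)}: '{message.Content}'");
}
```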
dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs

Lines changed: 83 additions & 0 deletions
```diff
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Text;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Resources;
+
+namespace Agents;
+
+/// <summary>
+/// Demonstrate using code-interpreter to manipulate and generate csv files with <see cref="OpenAIAssistantAgent"/>.
+/// </summary>
+public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseTest(output)
+{
+    /// <summary>
+    /// Target OpenAI services.
+    /// </summary>
+    protected override bool ForceOpenAI => true;
+
+    [Fact]
+    public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync()
+    {
+        OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+
+        OpenAIFileReference uploadFile =
+            await fileService.UploadContentAsync(
+                new BinaryContent(await EmbeddedResource.ReadAllAsync("sales.csv"), mimeType: "text/plain"),
+                new OpenAIFileUploadExecutionSettings("sales.csv", OpenAIFilePurpose.Assistants));
+
+        Console.WriteLine(this.ApiKey);
+
+        // Define the agent
+        OpenAIAssistantAgent agent =
+            await OpenAIAssistantAgent.CreateAsync(
+                kernel: new(),
+                config: new(this.ApiKey, this.Endpoint),
+                new()
+                {
+                    EnableCodeInterpreter = true, // Enable code-interpreter
+                    ModelId = this.Model,
+                    FileIds = [uploadFile.Id] // Associate uploaded file
+                });
+
+        // Create a chat for agent interaction.
+        var chat = new AgentGroupChat();
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Which segment had the most sales?");
+            await InvokeAgentAsync("List the top 5 countries that generated the most profit.");
+            await InvokeAgentAsync("Create a tab delimited file report of profit by each country per month.");
+        }
+        finally
+        {
+            await agent.DeleteAsync();
+            await fileService.DeleteFileAsync(uploadFile.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+
+            Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+
+            await foreach (var content in chat.InvokeAsync(agent))
+            {
+                Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+
+                foreach (var annotation in content.Items.OfType<AnnotationContent>())
+                {
+                    Console.WriteLine($"\n* '{annotation.Quote}' => {annotation.FileId}");
+                    BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!);
+                    byte[] byteContent = fileContent.Data?.ToArray() ?? [];
+                    Console.WriteLine(Encoding.Default.GetString(byteContent));
+                }
+            }
+        }
+    }
+}
```
dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs

Lines changed: 69 additions & 0 deletions
```diff
@@ -0,0 +1,69 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.AI.OpenAI.Assistants;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Resources;
+
+namespace Agents;
+
+/// <summary>
+/// Demonstrate uploading and retrieving files with <see cref="OpenAIFileService"/>.
+/// </summary>
+public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(output)
+{
+    /// <summary>
+    /// Retrieval tool not supported on Azure OpenAI.
+    /// </summary>
+    protected override bool ForceOpenAI => true;
+
+    [Fact]
+    public async Task UploadAndRetrieveFilesAsync()
+    {
+        var openAIClient = new AssistantsClient(TestConfiguration.OpenAI.ApiKey);
+        OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+
+        BinaryContent[] files = [
+            new AudioContent(await EmbeddedResource.ReadAllAsync("test_audio.wav")!, mimeType: "audio/wav") { InnerContent = "test_audio.wav" },
+            new ImageContent(await EmbeddedResource.ReadAllAsync("sample_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "sample_image.jpg" },
+            new ImageContent(await EmbeddedResource.ReadAllAsync("test_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "test_image.jpg" },
+            new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" }
+        ];
+
+        var fileIds = new Dictionary<string, BinaryContent>();
+        foreach (var file in files)
+        {
+            var result = await openAIClient.UploadFileAsync(new BinaryData(file.Data), Azure.AI.OpenAI.Assistants.OpenAIFilePurpose.FineTune);
+            fileIds.Add(result.Value.Id, file);
+        }
+
+        foreach (var file in (await openAIClient.GetFilesAsync(Azure.AI.OpenAI.Assistants.OpenAIFilePurpose.FineTune)).Value)
+        {
+            if (!fileIds.ContainsKey(file.Id))
+            {
+                continue;
+            }
+
+            var data = (await openAIClient.GetFileContentAsync(file.Id)).Value;
+
+            var mimeType = fileIds[file.Id].MimeType;
+            var fileName = fileIds[file.Id].InnerContent!.ToString();
+            var metadata = new Dictionary<string, object?> { ["id"] = file.Id };
+            var uri = new Uri($"https://api.openai.com/v1/files/{file.Id}/content");
+            var content = mimeType switch
+            {
+                "image/jpeg" => new ImageContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata },
+                "audio/wav" => new AudioContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata },
+                _ => new BinaryContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata }
+            };
+
+            // Display the file name and MIME type for each content type.
+            Console.WriteLine($"File: {fileName} - {mimeType}");
+
+            // Display each content type name.
+            Console.WriteLine($"Type: {content}");
+
+            // Delete the test file remotely
+            await openAIClient.DeleteFileAsync(file.Id);
+        }
+    }
+}
```

dotnet/samples/Concepts/Agents/OpenAIAssistant_MultipleContents.cs

Lines changed: 0 additions & 146 deletions
This file was deleted.

dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs

Lines changed: 1 addition & 1 deletion
```diff
@@ -19,7 +19,7 @@ public class OpenAIAssistant_Retrieval(ITestOutputHelper output) : BaseTest(outp
     protected override bool ForceOpenAI => true;
 
     [Fact]
-    public async Task RunAsync()
+    public async Task UseRetrievalToolWithOpenAIAssistantAgentAsync()
     {
         OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
 
```
