.Net: Sample Code Showcasing Usage of Reasoning Models in OpenAI and AzureOpenAI (#10558)

### Motivation and Context
1. Required: show usage of reasoning effort.
2. Problem: there was no sample demonstrating how to control reasoning effort.
3. Scenario: use reasoning effort to benefit from the new amazing models :)

### Description
This pull request adds sample code that demonstrates how to leverage reasoning models in a chat completion on Azure OpenAI and OpenAI. The implementation not only shows how to leverage LLM reasoning capabilities but also complements the phenomenal code from Roger Barreto (@RogerBarreto), further strengthening the overall Semantic Kernel solution.

### Contribution Checklist
- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone 😄

---------

Co-authored-by: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
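At a glance, the samples in this commit hinge on one execution-settings property per connector. The following is a minimal sketch, not an additional committed file; the values simply mirror the two samples below.

// Minimal sketch of the two settings shapes used by the samples in this commit.
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI.Chat;

// OpenAI: reasoning effort is a single execution-settings property.
var openAISettings = new OpenAIPromptExecutionSettings
{
    MaxTokens = 2000,
    ReasoningEffort = ChatReasoningEffortLevel.Low // Low, Medium, or High
};

// Azure OpenAI: reasoning models also need the newer max_completion_tokens field,
// which the connector sends when SetNewMaxCompletionTokensEnabled is true.
var azureSettings = new AzureOpenAIPromptExecutionSettings
{
    SetNewMaxCompletionTokensEnabled = true,
    MaxTokens = 2000,
    ReasoningEffort = ChatReasoningEffortLevel.Low
};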
1 parent fd27470, commit 4fdaf67
Showing 4 changed files with 216 additions and 39 deletions.
dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion_WithReasoning.cs
100 changes: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using OpenAI.Chat;

namespace ChatCompletion;

// The following example shows how to use Semantic Kernel with the Azure OpenAI API.
public class AzureOpenAI_ChatCompletion_WithReasoning(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// Sample showing how to use <see cref="Kernel"/> with chat completion and chat prompt syntax.
    /// </summary>
    [Fact]
    public async Task ChatPromptWithReasoningAsync()
    {
        Console.WriteLine("======== Azure OpenAI - Chat Completion with Reasoning ========");

        Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName);
        Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint);
        Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey);

        var kernel = Kernel.CreateBuilder()
            .AddAzureOpenAIChatCompletion(
                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
            .Build();

        // Create execution settings with low reasoning effort.
        var executionSettings = new AzureOpenAIPromptExecutionSettings
        {
            // Flags the Azure SDK to use the new max_completion_tokens property.
            SetNewMaxCompletionTokensEnabled = true,
            MaxTokens = 2000,
            // Note: reasoning effort is only available for reasoning models (at this moment the o3-mini and o1 models).
            ReasoningEffort = ChatReasoningEffortLevel.Low
        };

        // Create KernelArguments using the execution settings.
        var kernelArgs = new KernelArguments(executionSettings);

        StringBuilder chatPrompt = new("""
            <message role="developer">You are an expert software engineer, specialized in the Semantic Kernel SDK and the .NET framework.</message>
            <message role="user">Hi, please craft me example code in .NET using Semantic Kernel that implements a chat loop.</message>
            """);

        // Invoke the prompt with the reasoning execution settings.
        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString(), kernelArgs);

        Console.WriteLine(reply);
    }

    /// <summary>
    /// Sample showing how to use <see cref="IChatCompletionService"/> directly with a <see cref="ChatHistory"/>.
    /// </summary>
    [Fact]
    public async Task ServicePromptWithReasoningAsync()
    {
        Console.WriteLine("======== Azure OpenAI - Chat Completion Service with Reasoning ========");

        Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName);
        Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint);
        Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey);

        IChatCompletionService chatCompletionService = new AzureOpenAIChatCompletionService(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        // Create execution settings with low reasoning effort.
        var executionSettings = new AzureOpenAIPromptExecutionSettings
        {
            // Flags the Azure SDK to use the new max_completion_tokens property.
            SetNewMaxCompletionTokensEnabled = true,
            MaxTokens = 2000,
            // Note: reasoning effort is only available for reasoning models (at this moment the o3-mini and o1 models).
            ReasoningEffort = ChatReasoningEffortLevel.Low
        };

        // Create a ChatHistory and add messages.
        var chatHistory = new ChatHistory();
        chatHistory.AddDeveloperMessage(
            "You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework.");
        chatHistory.AddUserMessage(
            "Hi, please craft me example code in .NET using Semantic Kernel that implements a chat loop.");

        // Instead of a prompt string, call GetChatMessageContentAsync with the chat history.
        var reply = await chatCompletionService.GetChatMessageContentAsync(
            chatHistory: chatHistory,
            executionSettings: executionSettings);

        Console.WriteLine(reply);
    }
}
dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion_WithReasoning.cs
86 changes: 86 additions & 0 deletions
@@ -0,0 +1,86 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI.Chat;

namespace ChatCompletion;

// The following example shows how to use Semantic Kernel with the OpenAI API.
public class OpenAI_ChatCompletion_WithReasoning(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// Sample showing how to use <see cref="Kernel"/> with chat completion and chat prompt syntax.
    /// </summary>
    [Fact]
    public async Task ChatPromptWithReasoningAsync()
    {
        Console.WriteLine("======== OpenAI - Chat Completion with Reasoning ========");

        Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
        Assert.NotNull(TestConfiguration.OpenAI.ApiKey);

        var kernel = Kernel.CreateBuilder()
            .AddOpenAIChatCompletion(
                modelId: TestConfiguration.OpenAI.ChatModelId,
                apiKey: TestConfiguration.OpenAI.ApiKey)
            .Build();

        // Create execution settings with low reasoning effort.
        var executionSettings = new OpenAIPromptExecutionSettings
        {
            MaxTokens = 2000,
            ReasoningEffort = ChatReasoningEffortLevel.Low // Only available for reasoning models (e.g. o3-mini, o1, ...)
        };

        // Create KernelArguments using the execution settings.
        var kernelArgs = new KernelArguments(executionSettings);

        StringBuilder chatPrompt = new("""
            <message role="developer">You are an expert software engineer, specialized in the Semantic Kernel SDK and the .NET framework.</message>
            <message role="user">Hi, please craft me example code in .NET using Semantic Kernel that implements a chat loop.</message>
            """);

        // Invoke the prompt with the reasoning execution settings.
        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString(), kernelArgs);

        Console.WriteLine(reply);
    }

    /// <summary>
    /// Sample showing how to use <see cref="IChatCompletionService"/> directly with a <see cref="ChatHistory"/>.
    /// </summary>
    [Fact]
    public async Task ServicePromptWithReasoningAsync()
    {
        Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
        Assert.NotNull(TestConfiguration.OpenAI.ApiKey);

        Console.WriteLine("======== OpenAI - Chat Completion Service with Reasoning ========");

        OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);

        // Create execution settings with low reasoning effort.
        var executionSettings = new OpenAIPromptExecutionSettings
        {
            MaxTokens = 2000,
            ReasoningEffort = ChatReasoningEffortLevel.Low // Only available for reasoning models (e.g. o3-mini, o1, ...)
        };

        // Create a ChatHistory and add messages.
        var chatHistory = new ChatHistory();
        chatHistory.AddDeveloperMessage(
            "You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework.");
        chatHistory.AddUserMessage(
            "Hi, please craft me example code in .NET using Semantic Kernel that implements a chat loop.");

        // Instead of a prompt string, call GetChatMessageContentAsync with the chat history.
        var reply = await chatCompletionService.GetChatMessageContentAsync(
            chatHistory: chatHistory,
            executionSettings: executionSettings);

        Console.WriteLine(reply);
    }
}
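As a usage note, the service-based pattern above extends naturally into the kind of interactive chat loop the sample prompt asks the model to generate. The following is a hypothetical sketch and is not part of this commit; the model id and the environment variable name are placeholders.

// Hypothetical chat loop built on the same service and settings shown in the samples above.
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI.Chat;

var service = new OpenAIChatCompletionService(
    modelId: "o3-mini",                                             // placeholder reasoning model id
    apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")!); // placeholder key source

var settings = new OpenAIPromptExecutionSettings
{
    MaxTokens = 2000,
    ReasoningEffort = ChatReasoningEffortLevel.Low
};

var history = new ChatHistory();
history.AddDeveloperMessage("You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework.");

while (true)
{
    Console.Write("User > ");
    var input = Console.ReadLine();
    if (string.IsNullOrWhiteSpace(input)) { break; }

    history.AddUserMessage(input);

    // The same execution settings are reused on every turn, so the reasoning effort applies to each reply.
    var reply = await service.GetChatMessageContentAsync(history, settings);
    history.AddAssistantMessage(reply.Content ?? string.Empty);

    Console.WriteLine($"Assistant > {reply.Content}");
}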