Skip to content

Commit f9a53a1

Browse files
.Net Simplify configuration by ServiceId on Multi Model Scenarios. (microsoft#6416)
### Motivation and Context Setting multiple execution settings is not simple and demands creating a dictionary on the caller side to set directly into the `ExecutionSettings` setter property. This change adds a `ServiceId` property to the execution settings which will be used during the initialization and deserialization to set the expected `Key` in the dictionary as well as the setting for filtering and executing a service specific function invocation. With this change, new constructors were also added for `PromptTemplateConfig` and `KernelArguments` accepting multiple `PromptExecutionSettings`, as well as new overloads for `Kernel.CreateFunctionFromPrompt` and `KernelFunctionFromPrompt.Create`. ### ServiceId Settings Before: ```csharp KernelArguments arguments = []; arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() { { serviceId, new PromptExecutionSettings() } }; var result = await kernel.InvokePromptAsync(prompt, arguments); ``` After: ```csharp var result = await kernel.InvokePromptAsync(prompt, new(new PromptExecutionSettings { ServiceId = serviceId })); ``` ### ModelIds Settings Before: ```csharp string[] modelIds = ["model1", "model2", ...]; var modelSettings = new Dictionary<string, PromptExecutionSettings>(); foreach (var modelId in modelIds) { modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId }); } var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings }; var function = kernel.CreateFunctionFromPrompt(promptConfig); ``` After: ```csharp string[] modelIds = ["model1", "model2", ...]; var function = kernel.CreateFunctionFromPrompt(prompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); ``` The same can be done for ServiceId settings: ```csharp string[] serviceIds = ["service1", "service2"... 
]; var function = kernel.CreateFunctionFromPrompt(prompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); ``` --------- Co-authored-by: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com>
1 parent 8d7845d commit f9a53a1

File tree

13 files changed

+734
-58
lines changed

13 files changed

+734
-58
lines changed
Lines changed: 150 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1,82 +1,185 @@
11
// Copyright (c) Microsoft. All rights reserved.
22

33
using Microsoft.SemanticKernel;
4-
using xRetry;
54

65
namespace ChatCompletion;
76

87
public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output)
98
{
10-
/// <summary>
11-
/// Show how to run a prompt function and specify a specific service to use.
12-
/// </summary>
13-
[RetryFact(typeof(HttpOperationException))]
14-
public async Task RunAsync()
9+
private const string ChatPrompt = "Hello AI, what can you do for me?";
10+
11+
private static Kernel BuildKernel()
1512
{
16-
Kernel kernel = Kernel.CreateBuilder()
17-
.AddAzureOpenAIChatCompletion(
18-
deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
19-
endpoint: TestConfiguration.AzureOpenAI.Endpoint,
20-
apiKey: TestConfiguration.AzureOpenAI.ApiKey,
21-
serviceId: "AzureOpenAIChat",
22-
modelId: TestConfiguration.AzureOpenAI.ChatModelId)
23-
.AddOpenAIChatCompletion(
24-
modelId: TestConfiguration.OpenAI.ChatModelId,
25-
apiKey: TestConfiguration.OpenAI.ApiKey,
26-
serviceId: "OpenAIChat")
27-
.Build();
28-
29-
await RunByServiceIdAsync(kernel, "AzureOpenAIChat");
30-
await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId);
31-
await RunByFirstModelIdAsync(kernel, "gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId);
13+
return Kernel.CreateBuilder()
14+
.AddAzureOpenAIChatCompletion(
15+
deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
16+
endpoint: TestConfiguration.AzureOpenAI.Endpoint,
17+
apiKey: TestConfiguration.AzureOpenAI.ApiKey,
18+
serviceId: "AzureOpenAIChat",
19+
modelId: TestConfiguration.AzureOpenAI.ChatModelId)
20+
.AddOpenAIChatCompletion(
21+
modelId: TestConfiguration.OpenAI.ChatModelId,
22+
apiKey: TestConfiguration.OpenAI.ApiKey,
23+
serviceId: "OpenAIChat")
24+
.Build();
3225
}
3326

34-
private async Task RunByServiceIdAsync(Kernel kernel, string serviceId)
27+
/// <summary>
28+
/// Shows how to invoke a prompt and specify the service id of the preferred AI service. When the prompt is executed the AI Service with the matching service id will be selected.
29+
/// </summary>
30+
/// <param name="serviceId">Service Id</param>
31+
[Theory]
32+
[InlineData("AzureOpenAIChat")]
33+
public async Task InvokePromptByServiceIdAsync(string serviceId)
3534
{
35+
var kernel = BuildKernel();
3636
Console.WriteLine($"======== Service Id: {serviceId} ========");
3737

38-
var prompt = "Hello AI, what can you do for me?";
38+
var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }));
3939

40-
KernelArguments arguments = [];
41-
arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>()
42-
{
43-
{ serviceId, new PromptExecutionSettings() }
44-
};
45-
var result = await kernel.InvokePromptAsync(prompt, arguments);
4640
Console.WriteLine(result.GetValue<string>());
4741
}
4842

49-
private async Task RunByModelIdAsync(Kernel kernel, string modelId)
43+
/// <summary>
44+
/// Shows how to invoke a prompt and specify the model id of the preferred AI service. When the prompt is executed the AI Service with the matching model id will be selected.
45+
/// </summary>
46+
[Fact]
47+
private async Task InvokePromptByModelIdAsync()
5048
{
49+
var modelId = TestConfiguration.OpenAI.ChatModelId;
50+
var kernel = BuildKernel();
5151
Console.WriteLine($"======== Model Id: {modelId} ========");
5252

53-
var prompt = "Hello AI, what can you do for me?";
53+
var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId }));
5454

55-
var result = await kernel.InvokePromptAsync(
56-
prompt,
57-
new(new PromptExecutionSettings()
58-
{
59-
ModelId = modelId
60-
}));
6155
Console.WriteLine(result.GetValue<string>());
6256
}
6357

64-
private async Task RunByFirstModelIdAsync(Kernel kernel, params string[] modelIds)
58+
/// <summary>
59+
/// Shows how to invoke a prompt and specify the service ids of the preferred AI services.
60+
/// When the prompt is executed the AI Service will be selected based on the order of the provided service ids.
61+
/// </summary>
62+
[Fact]
63+
public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync()
64+
{
65+
string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
66+
var kernel = BuildKernel();
67+
Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");
68+
69+
var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })));
70+
71+
Console.WriteLine(result.GetValue<string>());
72+
}
73+
74+
/// <summary>
75+
/// Shows how to invoke a prompt and specify the model ids of the preferred AI services.
76+
/// When the prompt is executed the AI Service will be selected based on the order of the provided model ids.
77+
/// </summary>
78+
[Fact]
79+
public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync()
6580
{
81+
string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
82+
var kernel = BuildKernel();
6683
Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");
6784

68-
var prompt = "Hello AI, what can you do for me?";
85+
var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })));
6986

70-
var modelSettings = new Dictionary<string, PromptExecutionSettings>();
71-
foreach (var modelId in modelIds)
72-
{
73-
modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId });
74-
}
75-
var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings };
87+
Console.WriteLine(result.GetValue<string>());
88+
}
89+
90+
/// <summary>
91+
/// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services.
92+
/// When the function is invoked the AI Service will be selected based on the order of the provided service ids.
93+
/// </summary>
94+
[Fact]
95+
public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync()
96+
{
97+
string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
98+
var kernel = BuildKernel();
99+
Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");
100+
101+
var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }));
102+
var result = await kernel.InvokeAsync(function);
76103

77-
var function = kernel.CreateFunctionFromPrompt(promptConfig);
104+
Console.WriteLine(result.GetValue<string>());
105+
}
106+
107+
/// <summary>
108+
/// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services.
109+
/// When the function is invoked the AI Service will be selected based on the order of the provided model ids.
110+
/// </summary>
111+
[Fact]
112+
public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync()
113+
{
114+
string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
115+
var kernel = BuildKernel();
116+
117+
Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");
78118

119+
var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }));
79120
var result = await kernel.InvokeAsync(function);
121+
80122
Console.WriteLine(result.GetValue<string>());
81123
}
124+
125+
/// <summary>
126+
/// Shows how to invoke a KernelFunction and specify the model id of the AI Service the function will use.
127+
/// </summary>
128+
[Fact]
129+
public async Task InvokePreconfiguredFunctionByModelIdAsync()
130+
{
131+
var modelId = TestConfiguration.OpenAI.ChatModelId;
132+
var kernel = BuildKernel();
133+
Console.WriteLine($"======== Model Id: {modelId} ========");
134+
135+
var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
136+
var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId }));
137+
138+
Console.WriteLine(result.GetValue<string>());
139+
}
140+
141+
/// <summary>
142+
/// Shows how to invoke a KernelFunction and specify the service id of the AI Service the function will use.
143+
/// </summary>
144+
/// <param name="serviceId">Service Id</param>
145+
[Theory]
146+
[InlineData("AzureOpenAIChat")]
147+
public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId)
148+
{
149+
var kernel = BuildKernel();
150+
Console.WriteLine($"======== Service Id: {serviceId} ========");
151+
152+
var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
153+
var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId }));
154+
155+
Console.WriteLine(result.GetValue<string>());
156+
}
157+
158+
/// <summary>
159+
/// Shows when specifying a non-existent ServiceId the kernel throws an exception.
160+
/// </summary>
161+
/// <param name="serviceId">Service Id</param>
162+
[Theory]
163+
[InlineData("NotFound")]
164+
public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId)
165+
{
166+
var kernel = BuildKernel();
167+
Console.WriteLine($"======== Service Id: {serviceId} ========");
168+
169+
await Assert.ThrowsAsync<KernelException>(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })));
170+
}
171+
172+
/// <summary>
173+
/// Shows how in the execution settings when no model id is found it falls back to the default service.
174+
/// </summary>
175+
/// <param name="modelId">Model Id</param>
176+
[Theory]
177+
[InlineData("NotFound")]
178+
public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId)
179+
{
180+
var kernel = BuildKernel();
181+
Console.WriteLine($"======== Model Id: {modelId} ========");
182+
183+
await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId }));
184+
}
82185
}

dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,6 @@ public async Task<FunctionResult> ExecuteFlowAsync(
7373
}
7474

7575
var executor = new FlowExecutor(this._kernelBuilder, this._flowStatusProvider, this._globalPluginCollection, this._config);
76-
return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments(null)).ConfigureAwait(false);
76+
return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments()).ConfigureAwait(false);
7777
}
7878
}

dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
using System;
44
using System.Collections.Generic;
55
using System.Collections.ObjectModel;
6+
using System.Diagnostics.CodeAnalysis;
67
using System.Text.Json.Serialization;
78
using Microsoft.SemanticKernel.ChatCompletion;
89
using Microsoft.SemanticKernel.TextGeneration;
@@ -27,6 +28,27 @@ public class PromptExecutionSettings
2728
/// </remarks>
2829
public static string DefaultServiceId => "default";
2930

31+
/// <summary>
32+
/// Service identifier.
33+
/// This identifies the service these settings are configured for e.g., azure_openai_eastus, openai, ollama, huggingface, etc.
34+
/// </summary>
35+
/// <remarks>
36+
/// When provided, this service identifier will be the key in a dictionary collection of execution settings for both <see cref="KernelArguments"/> and <see cref="PromptTemplateConfig"/>.
37+
/// If not provided the service identifier will be the default value in <see cref="DefaultServiceId"/>.
38+
/// </remarks>
39+
[Experimental("SKEXP0001")]
40+
[JsonPropertyName("service_id")]
41+
public string? ServiceId
42+
{
43+
get => this._serviceId;
44+
45+
set
46+
{
47+
this.ThrowIfFrozen();
48+
this._serviceId = value;
49+
}
50+
}
51+
3052
/// <summary>
3153
/// Model identifier.
3254
/// This identifies the AI model these settings are configured for e.g., gpt-4, gpt-3.5-turbo
@@ -93,6 +115,7 @@ public virtual PromptExecutionSettings Clone()
93115
return new()
94116
{
95117
ModelId = this.ModelId,
118+
ServiceId = this.ServiceId,
96119
ExtensionData = this.ExtensionData is not null ? new Dictionary<string, object>(this.ExtensionData) : null
97120
};
98121
}
@@ -113,6 +136,7 @@ protected void ThrowIfFrozen()
113136

114137
private string? _modelId;
115138
private IDictionary<string, object>? _extensionData;
139+
private string? _serviceId;
116140

117141
#endregion
118142
}

dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs

Lines changed: 51 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ public sealed class KernelArguments : IDictionary<string, object?>, IReadOnlyDic
2121
{
2222
/// <summary>Dictionary of name/values for all the arguments in the instance.</summary>
2323
private readonly Dictionary<string, object?> _arguments;
24+
private IReadOnlyDictionary<string, PromptExecutionSettings>? _executionSettings;
2425

2526
/// <summary>
2627
/// Initializes a new instance of the <see cref="KernelArguments"/> class with the specified AI execution settings.
@@ -36,12 +37,36 @@ public KernelArguments()
3637
/// </summary>
3738
/// <param name="executionSettings">The prompt execution settings.</param>
3839
public KernelArguments(PromptExecutionSettings? executionSettings)
40+
: this(executionSettings is null ? null : [executionSettings])
3941
{
40-
this._arguments = new(StringComparer.OrdinalIgnoreCase);
42+
}
4143

44+
/// <summary>
45+
/// Initializes a new instance of the <see cref="KernelArguments"/> class with the specified AI execution settings.
46+
/// </summary>
47+
/// <param name="executionSettings">The prompt execution settings.</param>
48+
public KernelArguments(IEnumerable<PromptExecutionSettings>? executionSettings)
49+
{
50+
this._arguments = new(StringComparer.OrdinalIgnoreCase);
4251
if (executionSettings is not null)
4352
{
44-
this.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() { { PromptExecutionSettings.DefaultServiceId, executionSettings } };
53+
var newExecutionSettings = new Dictionary<string, PromptExecutionSettings>();
54+
foreach (var settings in executionSettings)
55+
{
56+
var targetServiceId = settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId;
57+
if (newExecutionSettings.ContainsKey(targetServiceId))
58+
{
59+
var exceptionMessage = (targetServiceId == PromptExecutionSettings.DefaultServiceId)
60+
? $"Multiple prompt execution settings with the default service id '{PromptExecutionSettings.DefaultServiceId}' or no service id have been provided. Specify a single default prompt execution settings and provide a unique service id for all other instances."
61+
: $"Multiple prompt execution settings with the service id '{targetServiceId}' have been provided. Provide a unique service id for all instances.";
62+
63+
throw new ArgumentException(exceptionMessage, nameof(executionSettings));
64+
}
65+
66+
newExecutionSettings[targetServiceId] = settings;
67+
}
68+
69+
this.ExecutionSettings = newExecutionSettings;
4570
}
4671
}
4772

@@ -65,7 +90,30 @@ public KernelArguments(IDictionary<string, object?> source, Dictionary<string, P
6590
/// <summary>
6691
/// Gets or sets the prompt execution settings.
6792
/// </summary>
68-
public IReadOnlyDictionary<string, PromptExecutionSettings>? ExecutionSettings { get; set; }
93+
/// <remarks>
94+
/// The settings dictionary is keyed by the service ID, or <see cref="PromptExecutionSettings.DefaultServiceId"/> for the default execution settings.
95+
/// When setting, the service id of each <see cref="PromptExecutionSettings"/> must match the key in the dictionary.
96+
/// </remarks>
97+
public IReadOnlyDictionary<string, PromptExecutionSettings>? ExecutionSettings
98+
{
99+
get => this._executionSettings;
100+
set
101+
{
102+
if (value is { Count: > 0 })
103+
{
104+
foreach (var kv in value!)
105+
{
106+
// Ensures that if a service id is specified it needs to match to the current key in the dictionary.
107+
if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId)
108+
{
109+
throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings));
110+
}
111+
}
112+
}
113+
114+
this._executionSettings = value;
115+
}
116+
}
69117

70118
/// <summary>
71119
/// Gets the number of arguments contained in the <see cref="KernelArguments"/>.

0 commit comments

Comments
 (0)