 // Copyright (c) Microsoft. All rights reserved.

 using Microsoft.SemanticKernel;
-using xRetry;

 namespace ChatCompletion;

 public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output)
 {
-    /// <summary>
-    /// Show how to run a prompt function and specify a specific service to use.
-    /// </summary>
-    [RetryFact(typeof(HttpOperationException))]
-    public async Task RunAsync()
+    private const string ChatPrompt = "Hello AI, what can you do for me?";
+
+    private static Kernel BuildKernel()
     {
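+        // Two chat completion services are registered side by side, each with its own
+        // serviceId and modelId, so the samples below can select between them.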
-        Kernel kernel = Kernel.CreateBuilder()
-            .AddAzureOpenAIChatCompletion(
-                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
-                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
-                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
-                serviceId: "AzureOpenAIChat",
-                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
-            .AddOpenAIChatCompletion(
-                modelId: TestConfiguration.OpenAI.ChatModelId,
-                apiKey: TestConfiguration.OpenAI.ApiKey,
-                serviceId: "OpenAIChat")
-            .Build();
-
-        await RunByServiceIdAsync(kernel, "AzureOpenAIChat");
-        await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId);
-        await RunByFirstModelIdAsync(kernel, "gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId);
+        return Kernel.CreateBuilder()
+            .AddAzureOpenAIChatCompletion(
+                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+                serviceId: "AzureOpenAIChat",
+                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
+            .AddOpenAIChatCompletion(
+                modelId: TestConfiguration.OpenAI.ChatModelId,
+                apiKey: TestConfiguration.OpenAI.ApiKey,
+                serviceId: "OpenAIChat")
+            .Build();
     }

-    private async Task RunByServiceIdAsync(Kernel kernel, string serviceId)
+    /// <summary>
+    /// Shows how to invoke a prompt and specify the service id of the preferred AI service.
+    /// When the prompt is executed, the AI service with the matching service id is selected.
+    /// </summary>
+    /// <param name="serviceId">Service Id</param>
+    [Theory]
+    [InlineData("AzureOpenAIChat")]
+    public async Task InvokePromptByServiceIdAsync(string serviceId)
     {
+        var kernel = BuildKernel();
         Console.WriteLine($"======== Service Id: {serviceId} ========");

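+        // KernelArguments can be constructed directly from PromptExecutionSettings;
+        // setting ServiceId routes the call to the service registered under that id.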
-        var prompt = "Hello AI, what can you do for me?";
+        var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }));

-        KernelArguments arguments = [];
-        arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>()
-        {
-            { serviceId, new PromptExecutionSettings() }
-        };
-        var result = await kernel.InvokePromptAsync(prompt, arguments);
         Console.WriteLine(result.GetValue<string>());
     }

-    private async Task RunByModelIdAsync(Kernel kernel, string modelId)
+    /// <summary>
+    /// Shows how to invoke a prompt and specify the model id of the preferred AI service.
+    /// When the prompt is executed, the AI service with the matching model id is selected.
+    /// </summary>
+    [Fact]
+    public async Task InvokePromptByModelIdAsync()
     {
+        var modelId = TestConfiguration.OpenAI.ChatModelId;
+        var kernel = BuildKernel();
         Console.WriteLine($"======== Model Id: {modelId} ========");

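+        // Only the model id is specified here; the kernel matches it against the
+        // model ids the registered services were configured with.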
-        var prompt = "Hello AI, what can you do for me?";
+        var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId }));

-        var result = await kernel.InvokePromptAsync(
-            prompt,
-            new(new PromptExecutionSettings()
-            {
-                ModelId = modelId
-            }));
         Console.WriteLine(result.GetValue<string>());
     }

-    private async Task RunByFirstModelIdAsync(Kernel kernel, params string[] modelIds)
+    /// <summary>
+    /// Shows how to invoke a prompt and specify the service ids of the preferred AI services.
+    /// When the prompt is executed, the AI service is selected based on the order of the provided service ids.
+    /// </summary>
+    [Fact]
+    public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync()
+    {
+        string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
+        var kernel = BuildKernel();
+        Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");
+
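+        // "NotFound" matches no registered service, so selection falls through to
+        // the first service id in the list that does.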
+        var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })));
+
+        Console.WriteLine(result.GetValue<string>());
+    }
+
+    /// <summary>
+    /// Shows how to invoke a prompt and specify the model ids of the preferred AI services.
+    /// When the prompt is executed, the AI service is selected based on the order of the provided model ids.
+    /// </summary>
+    [Fact]
+    public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync()
     {
+        string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
+        var kernel = BuildKernel();
         Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");

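+        // Each PromptExecutionSettings entry needs a unique ServiceId key, so
+        // index-based ids are synthesized to preserve the order of the model ids.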
-        var prompt = "Hello AI, what can you do for me?";
+        var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })));

-        var modelSettings = new Dictionary<string, PromptExecutionSettings>();
-        foreach (var modelId in modelIds)
-        {
-            modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId });
-        }
-        var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings };
+        Console.WriteLine(result.GetValue<string>());
+    }
+
+    /// <summary>
+    /// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services.
+    /// When the function is invoked, the AI service is selected based on the order of the provided service ids.
+    /// </summary>
+    [Fact]
+    public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync()
+    {
+        string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
+        var kernel = BuildKernel();
+        Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");
+
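+        // The execution settings are baked into the function at creation time, so
+        // no settings need to be passed when the function is invoked.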
+        var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }));
+        var result = await kernel.InvokeAsync(function);

-        var function = kernel.CreateFunctionFromPrompt(promptConfig);
+        Console.WriteLine(result.GetValue<string>());
+    }
+
+    /// <summary>
+    /// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services.
+    /// When the function is invoked, the AI service is selected based on the order of the provided model ids.
+    /// </summary>
+    [Fact]
+    public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync()
+    {
+        string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
+        var kernel = BuildKernel();
+
+        Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");

+        var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }));
         var result = await kernel.InvokeAsync(function);
+
         Console.WriteLine(result.GetValue<string>());
     }
+
+    /// <summary>
+    /// Shows how to invoke a KernelFunction and specify the model id of the AI service the function will use.
+    /// </summary>
+    [Fact]
+    public async Task InvokePreconfiguredFunctionByModelIdAsync()
+    {
+        var modelId = TestConfiguration.OpenAI.ChatModelId;
+        var kernel = BuildKernel();
+        Console.WriteLine($"======== Model Id: {modelId} ========");
+
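+        // The function is created without execution settings; the preferred model id
+        // is supplied per invocation instead.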
+        var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
+        var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId }));
+
+        Console.WriteLine(result.GetValue<string>());
+    }
+
+    /// <summary>
+    /// Shows how to invoke a KernelFunction and specify the service id of the AI service the function will use.
+    /// </summary>
+    /// <param name="serviceId">Service Id</param>
+    [Theory]
+    [InlineData("AzureOpenAIChat")]
+    public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId)
+    {
+        var kernel = BuildKernel();
+        Console.WriteLine($"======== Service Id: {serviceId} ========");
+
+        var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
+        var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId }));
+
+        Console.WriteLine(result.GetValue<string>());
+    }
+
+    /// <summary>
+    /// Shows that the kernel throws a KernelException when a non-existent service id is specified.
+    /// </summary>
+    /// <param name="serviceId">Service Id</param>
+    [Theory]
+    [InlineData("NotFound")]
+    public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId)
+    {
+        var kernel = BuildKernel();
+        Console.WriteLine($"======== Service Id: {serviceId} ========");
+
+        await Assert.ThrowsAsync<KernelException>(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })));
+    }
+
+    /// <summary>
+    /// Shows that when the model id specified in the execution settings is not found, the kernel falls back to the default service.
+    /// </summary>
+    /// <param name="modelId">Model Id</param>
+    [Theory]
+    [InlineData("NotFound")]
+    public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId)
+    {
+        var kernel = BuildKernel();
+        Console.WriteLine($"======== Model Id: {modelId} ========");
+
+        await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId }));
+    }
 }