Skip to content

Commit

Permalink
.Net Updating OpenAI/AzureOpenAI ModelResults to be per Choice (micro…
Browse files Browse the repository at this point in the history
…soft#2214)

### Motivation and Context

Prior to this change, when using the ModelResult in a multiple-results
scenario, the choices were repeated for each ModelResult object, making
it unclear which Choice was related to which Result.

Resolves microsoft#1658
Closes microsoft#1658

### Description

Added two new representations for both `ChatResult` and `TextResult`
that have a one-to-one reference to the `choice` details coming from
the LLM.

Updated the GetModelResult example to demonstrate the expected behavior,
using a simpler prompt that makes the per-choice results easier to follow.

### Contribution Checklist

- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone 😄
  • Loading branch information
RogerBarreto authored and SOE-YoungS committed Oct 31, 2023
1 parent 450b253 commit 6629c82
Show file tree
Hide file tree
Showing 6 changed files with 104 additions and 22 deletions.
23 changes: 7 additions & 16 deletions dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion;
using RepoUtils;

#pragma warning disable RCS1192 // (Unnecessary usage of verbatim string literal)

// ReSharper disable once InconsistentNaming
public static class Example43_GetModelResult
{
Expand All @@ -25,29 +27,18 @@ public static async Task RunAsync()
.Build();

// Function defined using few-shot design pattern
const string FunctionDefinition = @"
Generate a creative reason or excuse for the given event.
Be creative and be funny. Let your imagination run wild.
Event: I am running late.
Excuse: I was being held ransom by giraffe gangsters.
Event: I haven't been to the gym for a year
Excuse: I've been too busy training my pet dragon.
Event: {{$input}}
";
const string FunctionDefinition = "Hi, give me 5 book suggestions about: {{$input}}";

var excuseFunction = kernel.CreateSemanticFunction(FunctionDefinition, maxTokens: 100, temperature: 0.4, topP: 1);
var myFunction = kernel.CreateSemanticFunction(FunctionDefinition);

// Using InvokeAsync with 3 results (Currently invoke only supports 1 result, but you can get the other results from the ModelResults)
var textResult = await excuseFunction.InvokeAsync("I missed the F1 final race", new CompleteRequestSettings { ResultsPerPrompt = 3 });
var textResult = await myFunction.InvokeAsync("Sci-fi", new CompleteRequestSettings { ResultsPerPrompt = 3, MaxTokens = 500, Temperature = 1, TopP = 0.5 });
Console.WriteLine(textResult);
Console.WriteLine(textResult.ModelResults.Select(result => result.GetOpenAITextResult()).AsJson());
Console.WriteLine();

// Using the Kernel RunAsync
textResult = await kernel.RunAsync("sorry I forgot your birthday", excuseFunction);
textResult = await kernel.RunAsync("sorry I forgot your birthday", myFunction);
Console.WriteLine(textResult);
Console.WriteLine(textResult.ModelResults.LastOrDefault()?.GetOpenAITextResult()?.Usage.AsJson());
Console.WriteLine();
Expand All @@ -58,7 +49,7 @@ Be creative and be funny. Let your imagination run wild.
apiKey: TestConfiguration.OpenAI.ApiKey);
var prompt = FunctionDefinition.Replace("{{$input}}", $"Translate this date {DateTimeOffset.Now:f} to French format", StringComparison.InvariantCultureIgnoreCase);

IReadOnlyList<ITextResult> completionResults = await chatCompletion.GetCompletionsAsync(prompt, new CompleteRequestSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 });
IReadOnlyList<ITextResult> completionResults = await chatCompletion.GetCompletionsAsync(prompt, new CompleteRequestSettings() { MaxTokens = 500, Temperature = 1, TopP = 0.5 });

Console.WriteLine(await completionResults[0].GetCompletionAsync());
Console.WriteLine(completionResults[0].ModelResult.GetOpenAIChatResult().Usage.AsJson());
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using Azure.AI.OpenAI;

namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk;

/// <summary> Represents a singular result of a chat completion.</summary>
public sealed class ChatModelResult
{
    /// <summary> A unique identifier associated with this chat completion response. </summary>
    public string Id { get; }

    /// <summary>
    /// The first timestamp associated with generation activity for this completions response,
    /// represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970.
    /// </summary>
    public DateTimeOffset Created { get; }

    /// <summary>
    /// Content filtering results for zero or more prompts in the request.
    /// </summary>
    public IReadOnlyList<PromptFilterResult> PromptFilterResults { get; }

    /// <summary>
    /// The completion choice associated with this completion result.
    /// </summary>
    public ChatChoice Choice { get; }

    /// <summary> Usage information for tokens processed and generated as part of this completions operation. </summary>
    public CompletionsUsage Usage { get; }

    /// <summary> Initializes a new instance of <see cref="ChatModelResult"/>. </summary>
    /// <param name="completionsData"> A completions response object to populate the fields relative to the response.</param>
    /// <param name="choiceData"> A choice object to populate the fields relative to the resulting choice.</param>
    internal ChatModelResult(ChatCompletions completionsData, ChatChoice choiceData)
    {
        this.Id = completionsData.Id;
        this.Created = completionsData.Created;
        this.PromptFilterResults = completionsData.PromptFilterResults;
        this.Choice = choiceData;
        this.Usage = completionsData.Usage;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public ChatResult(ChatCompletions resultData, ChatChoice choice)
{
Verify.NotNull(choice);
this._choice = choice;
this.ModelResult = new ModelResult(resultData);
this.ModelResult = new(new ChatModelResult(resultData, choice));
}

public ModelResult ModelResult { get; }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using Azure.AI.OpenAI;

namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk;

/// <summary> Represents a singular result of a text completion.</summary>
public sealed class TextModelResult
{
    /// <summary> A unique identifier associated with this text completion response. </summary>
    public string Id { get; }

    /// <summary>
    /// The first timestamp associated with generation activity for this completions response,
    /// represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970.
    /// </summary>
    public DateTimeOffset Created { get; }

    /// <summary>
    /// Content filtering results for zero or more prompts in the request.
    /// </summary>
    public IReadOnlyList<PromptFilterResult> PromptFilterResults { get; }

    /// <summary>
    /// The completion choice associated with this completion result.
    /// </summary>
    public Choice Choice { get; }

    /// <summary> Usage information for tokens processed and generated as part of this completions operation. </summary>
    public CompletionsUsage Usage { get; }

    /// <summary> Initializes a new instance of <see cref="TextModelResult"/>. </summary>
    /// <param name="completionsData"> A completions response object to populate the fields relative to the response.</param>
    /// <param name="choiceData"> A choice object to populate the fields relative to the resulting choice.</param>
    internal TextModelResult(Completions completionsData, Choice choiceData)
    {
        this.Id = completionsData.Id;
        this.Created = completionsData.Created;
        this.PromptFilterResults = completionsData.PromptFilterResults;
        this.Choice = choiceData;
        this.Usage = completionsData.Usage;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ internal sealed class TextResult : ITextResult

public TextResult(Completions resultData, Choice choice)
{
this._modelResult = new ModelResult(resultData);
this._modelResult = new(new TextModelResult(resultData, choice));
this._choice = choice;
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.

using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk;
using Microsoft.SemanticKernel.Orchestration;

#pragma warning disable IDE0130
Expand All @@ -14,18 +15,18 @@ public static class OpenAIModelResultExtension
/// </summary>
/// <param name="resultBase">Current context</param>
/// <returns>OpenAI / AzureOpenAI result<see cref="Completions"/></returns>
public static Completions GetOpenAITextResult(this ModelResult resultBase)
public static TextModelResult GetOpenAITextResult(this ModelResult resultBase)
{
return resultBase.GetResult<Completions>();
return resultBase.GetResult<TextModelResult>();
}

/// <summary>
/// Retrieves a typed <see cref="ChatCompletions"/> OpenAI / AzureOpenAI result from chat completion prompt.
/// </summary>
/// <param name="resultBase">Current context</param>
/// <returns>OpenAI / AzureOpenAI result<see cref="ChatCompletions"/></returns>
public static ChatCompletions GetOpenAIChatResult(this ModelResult resultBase)
public static ChatModelResult GetOpenAIChatResult(this ModelResult resultBase)
{
return resultBase.GetResult<ChatCompletions>();
return resultBase.GetResult<ChatModelResult>();
}
}

0 comments on commit 6629c82

Please sign in to comment.