
Commit

Changing description and pattern to match existing for TextCompletion Samples
RogerBarreto authored and shawncal committed May 5, 2023
1 parent 4fb856c commit 0de719e
Showing 2 changed files with 47 additions and 42 deletions.
8 changes: 4 additions & 4 deletions samples/dotnet/kernel-syntax-examples/Example17_ChatGPT.cs
@@ -54,7 +54,7 @@ public static async Task RunAsync()

private static async Task OpenAIChatSampleAsync()
{
Console.WriteLine("======== Open AI ChatGPT ========");
Console.WriteLine("======== Open AI - ChatGPT ========");

IKernel kernel = new KernelBuilder().WithLogger(ConsoleLogger.Log).Build();

@@ -75,7 +75,7 @@ private static async Task OpenAIChatSampleAsync()

private static async Task AzureOpenAIChatSampleAsync()
{
Console.WriteLine("======== SK with ChatGPT ========");
Console.WriteLine("======== Azure Open AI - ChatGPT ========");

IKernel kernel = new KernelBuilder().WithLogger(ConsoleLogger.Log).Build();

@@ -99,7 +99,7 @@ private static async Task AzureOpenAIChatSampleAsync()

private static async Task OpenAIChatStreamSampleAsync()
{
Console.WriteLine("======== Open AI ChatGPT - Stream ========");
Console.WriteLine("======== Open AI - ChatGPT Streaming ========");

IKernel kernel = new KernelBuilder().WithLogger(ConsoleLogger.Log).Build();

@@ -118,7 +118,7 @@ await foreach (var message in StreamingChatAsync(chatGPT))

private static async Task AzureOpenAIChatStreamSampleAsync()
{
Console.WriteLine("======== Azure OpenAI ChatGPT - Stream ========");
Console.WriteLine("======== Azure Open AI - ChatGPT Streaming ========");

IKernel kernel = new KernelBuilder().WithLogger(ConsoleLogger.Log).Build();

81 changes: 43 additions & 38 deletions samples/dotnet/kernel-syntax-examples/Example33_CustomChat.cs
@@ -11,8 +11,48 @@
using RepoUtils;

/**
-* The following example shows how to use Semantic Kernel with OpenAI ChatGPT API
+* The following example shows how to plug in a custom chat completion model.
+*
+* This might be useful in a few scenarios, for example:
+* - You are not using OpenAI or Azure OpenAI models
+* - You are using OpenAI/Azure OpenAI models but the models are behind a web service with a different API schema
+* - You want to use a local model
*/
+public sealed class MyChatCompletionService : IChatCompletion
+{
+private const string OutputAssistantResult = "Hi I'm your SK Custom Assistant and I'm here to help you to create custom chats like this. :)";
+
+public ChatHistory CreateNewChat(string instructions = "")
+{
+var chatHistory = new ChatHistory();
+
+if (!string.IsNullOrWhiteSpace(instructions))
+{
+chatHistory.AddMessage(ChatHistory.AuthorRoles.System, instructions);
+}
+
+return chatHistory;
+}
+
+public Task<string> GenerateMessageAsync(ChatHistory chat, ChatRequestSettings? requestSettings = null, CancellationToken cancellationToken = default)
+{
+return Task.FromResult(OutputAssistantResult);
+}
+
+public async IAsyncEnumerable<string> GenerateMessageStreamAsync(
+ChatHistory chat,
+ChatRequestSettings? requestSettings = null,
+[EnumeratorCancellation] CancellationToken cancellationToken = default)
+{
+var streamedOutput = OutputAssistantResult.Split(' ');
+foreach (string word in streamedOutput)
+{
+await Task.Delay(200, cancellationToken);
+yield return $"{word} ";
+}
+}
+}
+
// ReSharper disable once InconsistentNaming
public static class Example33_CustomChat
{
@@ -50,7 +90,7 @@ await foreach (string message in chatGPT.GenerateMessageStreamAsync(chatHistory)

private static async Task CustomChatSampleAsync()
{
Console.WriteLine("======== Custom Chat ========");
Console.WriteLine("======== Custom LLM - Chat Completion ========");

IKernel kernel = new KernelBuilder().WithLogger(ConsoleLogger.Log).Build();

@@ -86,7 +126,7 @@ private static async Task<ChatHistory> CustomPrepareChatHistoryAsync(IChatComple

private static async Task CustomChatStreamSampleAsync()
{
Console.WriteLine("======== Custom Chat - Stream ========");
Console.WriteLine("======== Custom LLM - Chat Completion Streaming ========");

IKernel kernel = new KernelBuilder().WithLogger(ConsoleLogger.Log).Build();

@@ -124,39 +164,4 @@ string MessageToString(ChatHistory.Message message)
return $"{message.AuthorRole}: {message.Content}\n------------------------\n";
}
}
-
-private sealed class MyChatCompletionService : IChatCompletion
-{
-private readonly string _outputAssistantResult = "Hi I'm your SK Custom Assistant and I'm here to help you to create custom chats like this. :)";
-
-public ChatHistory CreateNewChat(string instructions = "")
-{
-var chatHistory = new ChatHistory();
-
-if (!string.IsNullOrWhiteSpace(instructions))
-{
-chatHistory.AddMessage(ChatHistory.AuthorRoles.System, instructions);
-}
-
-return chatHistory;
-}
-
-public Task<string> GenerateMessageAsync(ChatHistory chat, ChatRequestSettings? requestSettings = null, CancellationToken cancellationToken = default)
-{
-return Task.FromResult(this._outputAssistantResult);
-}
-
-public async IAsyncEnumerable<string> GenerateMessageStreamAsync(
-ChatHistory chat,
-ChatRequestSettings? requestSettings = null,
-[EnumeratorCancellation] CancellationToken cancellationToken = default)
-{
-var streamedOutput = this._outputAssistantResult.Split(' ');
-foreach (string word in streamedOutput)
-{
-await Task.Delay(200, cancellationToken);
-yield return $"{word} ";
-}
-}
-}
}
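
The diff above promotes MyChatCompletionService to a public top-level type, but the code that consumes it stays inside the collapsed hunks. As a rough illustration only, not code from this commit, the sketch below drives the custom service directly through the IChatCompletion members that do appear in the diff (CreateNewChat, AddMessage, GenerateMessageAsync, GenerateMessageStreamAsync). The using directives and the harness class name are assumptions based on the pre-1.0 Semantic Kernel API of this period.

// Hypothetical harness (not part of this commit) exercising MyChatCompletionService.
// The namespace below is assumed for the Semantic Kernel packages of this era.
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.AI.ChatCompletion;

public static class CustomChatSmokeTest
{
    public static async Task RunAsync()
    {
        IChatCompletion chat = new MyChatCompletionService();

        // Instructions become the initial system message (see CreateNewChat in the diff).
        ChatHistory history = chat.CreateNewChat("You are a helpful assistant.");
        history.AddMessage(ChatHistory.AuthorRoles.User, "Hello!");

        // Non-streaming call: the custom service returns its canned assistant reply.
        string reply = await chat.GenerateMessageAsync(history);
        Console.WriteLine(reply);

        // Streaming call: words arrive one at a time, as implemented in GenerateMessageStreamAsync.
        await foreach (string token in chat.GenerateMessageStreamAsync(history))
        {
            Console.Write(token);
        }
        Console.WriteLine();
    }
}

Because the sample code only depends on IChatCompletion, the same calls work whether the service wraps OpenAI, a web service with a different API schema, or a local model, which is the point of the scenarios listed in the file's header comment.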
