Merge commit '10fa20b8bc2c1e35e4776bf7d88fa99701ac66e0'
lindexi committed Jun 18, 2024
2 parents 6065ca8 + 10fa20b commit 39b3443
Showing 3 changed files with 112 additions and 0 deletions.
18 changes: 18 additions & 0 deletions SemanticKernelSamples/BemjawhufawJairkihawyawkerene/BemjawhufawJairkihawyawkerene.csproj
@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.2.0-rc7" />
    <PackageReference Include="feiyun0112.SemanticKernel.Connectors.OnnxRuntimeGenAI.DirectML" Version="1.0.0" />

    <PackageReference Include="Microsoft.Extensions.Configuration.UserSecrets" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.0" />
    <PackageReference Include="Microsoft.SemanticKernel" Version="1.13.0" />
  </ItemGroup>
</Project>
25 changes: 25 additions & 0 deletions (Visual Studio solution file, .sln)
@@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.9.34607.119
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BemjawhufawJairkihawyawkerene", "BemjawhufawJairkihawyawkerene.csproj", "{106E7417-736B-416C-B158-0F4F0F0EE295}"
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|Any CPU = Debug|Any CPU
		Release|Any CPU = Release|Any CPU
	EndGlobalSection
	GlobalSection(ProjectConfigurationPlatforms) = postSolution
		{106E7417-736B-416C-B158-0F4F0F0EE295}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
		{106E7417-736B-416C-B158-0F4F0F0EE295}.Debug|Any CPU.Build.0 = Debug|Any CPU
		{106E7417-736B-416C-B158-0F4F0F0EE295}.Release|Any CPU.ActiveCfg = Release|Any CPU
		{106E7417-736B-416C-B158-0F4F0F0EE295}.Release|Any CPU.Build.0 = Release|Any CPU
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
	EndGlobalSection
	GlobalSection(ExtensibilityGlobals) = postSolution
		SolutionGuid = {2215563E-750D-4320-AD33-AACC36E7D05A}
	EndGlobalSection
EndGlobal
69 changes: 69 additions & 0 deletions SemanticKernelSamples/BemjawhufawJairkihawyawkerene/Program.cs
@@ -0,0 +1,69 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.ML.OnnxRuntimeGenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

var modelPath = @"C:\lindexi\Phi3\directml-int4-awq-block-128";

// create kernel
var builder = Kernel.CreateBuilder();
//builder.Services.AddSingleton<IChatCompletionService>(new Phi3ChatCompletionService(modelPath));
builder.AddOnnxRuntimeGenAIChatCompletion(modelPath);
var kernel = builder.Build();

// create chat
var chat = kernel.GetRequiredService<IChatCompletionService>();
var history = new ChatHistory();

// run chat
while (true)
{
Console.Write("Q: ");
var userQ = Console.ReadLine();
if (string.IsNullOrEmpty(userQ))
{
break;
}
history.AddUserMessage(userQ);

Console.Write($"Phi3: ");
var response = "";
var result = chat.GetStreamingChatMessageContentsAsync(history);
await foreach (var message in result)
{
Console.Write(message.Content);
response += message.Content;
}
history.AddAssistantMessage(response);
Console.WriteLine("");
}


class Phi3ChatCompletionService : IChatCompletionService
{
public Phi3ChatCompletionService(string modelPath)
{
var model = new Model(modelPath);
var tokenizer = new Tokenizer(model);

Model = model;
Tokenizer = tokenizer;
}

public IReadOnlyDictionary<string, object?> Attributes { get; set; } = new Dictionary<string, object?>();
public Model Model { get; }
public Tokenizer Tokenizer { get; }

public Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null,
Kernel? kernel = null, CancellationToken cancellationToken = new CancellationToken())
{
throw new NotImplementedException();
}

public IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory,
PromptExecutionSettings? executionSettings = null, Kernel? kernel = null,
CancellationToken cancellationToken = new CancellationToken())
{
throw new NotImplementedException();
}
}
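Note: Phi3ChatCompletionService is committed only as a stub; its registration is commented out in favor of the feiyun0112.SemanticKernel.Connectors.OnnxRuntimeGenAI.DirectML connector's AddOnnxRuntimeGenAIChatCompletion. For reference, a minimal sketch of how the streaming method could be completed with the Microsoft.ML.OnnxRuntimeGenAI 0.2.0 generation loop is shown below; the Phi-3 prompt template and the max_length value are assumptions, not part of this commit.

// Sketch only (not part of the commit): a possible GetStreamingChatMessageContentsAsync body
// for Phi3ChatCompletionService, assuming the Microsoft.ML.OnnxRuntimeGenAI 0.2.0 API surface.
// Requires: using System.Runtime.CompilerServices; (the other usings are already in Program.cs).
public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(
    ChatHistory chatHistory,
    PromptExecutionSettings? executionSettings = null,
    Kernel? kernel = null,
    [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
    // Flatten the history into the Phi-3 chat template (assumed format).
    var prompt = string.Concat(chatHistory.Select(m => $"<|{m.Role.Label}|>\n{m.Content}<|end|>\n")) + "<|assistant|>\n";

    var sequences = Tokenizer.Encode(prompt);

    using var generatorParams = new GeneratorParams(Model);
    generatorParams.SetSearchOption("max_length", 2048); // assumed limit
    generatorParams.SetInputSequences(sequences);

    using var tokenizerStream = Tokenizer.CreateStream();
    using var generator = new Generator(Model, generatorParams);

    // Generate one token at a time and stream the decoded text back to Semantic Kernel.
    while (!generator.IsDone())
    {
        cancellationToken.ThrowIfCancellationRequested();
        generator.ComputeLogits();
        generator.GenerateNextToken();
        var newToken = generator.GetSequence(0)[^1];
        yield return new StreamingChatMessageContent(AuthorRole.Assistant, tokenizerStream.Decode(newToken));
        await Task.Yield(); // keep the iterator asynchronous; generation itself runs synchronously here
    }
}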
