.Net: Add API to create a PromptTemplateConfig from a Prompty template #6414
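This PR moves the Prompty parsing logic out of PromptyKernelExtensions into a new public KernelFunctionPrompty.ToPromptTemplateConfig method, so callers can obtain a PromptTemplateConfig from a Prompty template without also creating a KernelFunction. A minimal sketch of the new flow, condensed from the RenderPromptAsync sample added below (the kernel setup and prompty string are placeholders):

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.PromptTemplates.Liquid;
using Microsoft.SemanticKernel.Prompty;

Kernel kernel = Kernel.CreateBuilder().Build();

string promptyTemplate = """
    ---
    name: Contoso_Prompt
    description: A sample prompt that responds with what Seattle is.
    model:
      api: chat
    ---
    What is Seattle?
    """;

// New API: parse the Prompty text (YAML front matter + body) into a PromptTemplateConfig.
var promptConfig = KernelFunctionPrompty.ToPromptTemplateConfig(promptyTemplate);

// Prompty defaults to the Liquid template format, so render with the Liquid factory.
var promptTemplate = new LiquidPromptTemplateFactory().Create(promptConfig);
string prompt = await promptTemplate.RenderAsync(kernel);
Console.WriteLine(prompt); // prints the rendered prompt: "What is Seattle?"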

dotnet/samples/Concepts/{Prompty → PromptTemplates}/PromptyFunction.cs
@@ -1,8 +1,11 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.PromptTemplates.Liquid;
using Microsoft.SemanticKernel.Prompty;

namespace Prompty;
namespace PromptTemplates;

public class PromptyFunction(ITestOutputHelper output) : BaseTest(output)
{
@@ -101,4 +104,36 @@ public async Task InlineFunctionWithVariablesAsync()
var result = await kernel.InvokeAsync(function, arguments);
Console.WriteLine(result);
}

[Fact]
public async Task RenderPromptAsync()
{
Kernel kernel = Kernel.CreateBuilder()
.AddOpenAIChatCompletion(
modelId: TestConfiguration.OpenAI.ChatModelId,
apiKey: TestConfiguration.OpenAI.ApiKey)
.Build();

string promptyTemplate = """
    ---
    name: Contoso_Prompt
    description: A sample prompt that responds with what Seattle is.
    authors:
      - ????
    model:
      api: chat
    ---
    What is Seattle?
    """;

var promptConfig = KernelFunctionPrompty.ToPromptTemplateConfig(promptyTemplate);
var promptTemplateFactory = new LiquidPromptTemplateFactory();
var promptTemplate = promptTemplateFactory.Create(promptConfig);
var prompt = await promptTemplate.RenderAsync(kernel);

var chatService = kernel.GetRequiredService<IChatCompletionService>();
var result = await chatService.GetChatMessageContentAsync(prompt);

Console.WriteLine(result);
}
}
5 changes: 1 addition & 4 deletions dotnet/samples/Concepts/README.md
@@ -126,10 +126,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
- [MultiplePromptTemplates](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/MultiplePromptTemplates.cs)
- [PromptFunctionsWithChatGPT](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/PromptFunctionsWithChatGPT.cs)
- [TemplateLanguage](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/TemplateLanguage.cs)

## Prompty - Using Prompty file format to [import prompt functions](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs)

- [PromptyFunction](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Prompty/PromptyFunction.cs)
- [PromptyFunction](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/PromptyFunction.cs)

## RAG - Retrieval-Augmented Generation

dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs
@@ -1,42 +1,16 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
using Microsoft.SemanticKernel.PromptTemplates.Liquid;
using Microsoft.SemanticKernel.Prompty.Core;
using YamlDotNet.Serialization;
using Microsoft.SemanticKernel.Prompty;

namespace Microsoft.SemanticKernel;

/// <summary>
/// Provides extension methods for creating <see cref="KernelFunction"/>s from the Prompty template format.
/// </summary>
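/// <example>
/// Typical usage (a sketch; assumes the string-based CreateFunctionFromPrompty overload defined below):
/// <code>
/// KernelFunction function = kernel.CreateFunctionFromPrompty(promptyTemplate);
/// FunctionResult result = await kernel.InvokeAsync(function);
/// </code>
/// </example>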
public static partial class PromptyKernelExtensions
public static class PromptyKernelExtensions
{
/// <summary>Default template factory to use when none is provided.</summary>
private static readonly AggregatorPromptTemplateFactory s_defaultTemplateFactory =
new(new LiquidPromptTemplateFactory(), new HandlebarsPromptTemplateFactory());

private const string PromptyPattern = /* lang=regex */ """
^---\s*$\n # Start of YAML front matter, a line beginning with "---" followed by optional whitespace
(?<header>.*?) # Capture the YAML front matter, everything up to the next "---" line
^---\s*$\n # End of YAML front matter, a line beginning with "---" followed by optional whitespace
(?<content>.*) # Capture the content after the YAML front matter
""";

/// <summary>Regex for parsing the YAML frontmatter and content from the prompty template.</summary>
#if NET
[GeneratedRegex(PromptyPattern, RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace)]
private static partial Regex PromptyRegex();
#else
private static Regex PromptyRegex() => s_promptyRegex;
private static readonly Regex s_promptyRegex = new(PromptyPattern, RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace | RegexOptions.Compiled);
#endif
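
// Illustrative only (not part of this PR): a hypothetical helper showing what the
// pattern above captures for a minimal prompty string. "header" is the YAML front
// matter between the two "---" fences; "content" is everything after the closing fence.
private static void DemoPromptyRegex()
{
    string prompty = "---\nname: Demo\n---\nWhat is Seattle?";
    Match m = PromptyRegex().Match(prompty);
    Console.WriteLine(m.Groups["header"].Value);  // "name: Demo\n"
    Console.WriteLine(m.Groups["content"].Value); // "What is Seattle?"
}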

/// <summary>
/// Create a <see cref="KernelFunction"/> from a prompty template file.
/// </summary>
@@ -83,150 +57,11 @@ public static partial class PromptyKernelExtensions
Verify.NotNull(kernel);
Verify.NotNullOrWhiteSpace(promptyTemplate);

// Step 1:
// Create PromptTemplateConfig from text.
// Retrieve the header, which is in YAML format and placed between "---" lines,
// e.g.
// file: chat.prompty
// ---
// name: Contoso Chat Prompt
// description: A retail assistant for Contoso Outdoors products retailer.
// authors:
//   - XXXX
// model:
//   api: chat
//   configuration:
//     type: azure_openai
//     azure_deployment: gpt-35-turbo
//     api_version: 2023-07-01-preview
//   parameters:
//     tools_choice: auto
//     tools:
//       - type: function
//         function:
//           name: test
//           description: test function
//           parameters:
//             properties:
//               location:
//                 description: The city and state or city and country, e.g. San Francisco, CA or Tokyo, Japan
// ---
// ... (rest of the prompty content)

// Parse the YAML frontmatter and content from the prompty template
Match m = PromptyRegex().Match(promptyTemplate);
if (!m.Success)
{
throw new ArgumentException("Invalid prompty template. Header and content could not be parsed.");
}

var header = m.Groups["header"].Value;
var content = m.Groups["content"].Value;

var prompty = new DeserializerBuilder().Build().Deserialize<PromptyYaml>(header) ??
throw new ArgumentException("Invalid prompty template. Header could not be parsed.");

// Step 2:
// Create a prompt template config from the prompty data.
var promptTemplateConfig = new PromptTemplateConfig
{
Name = prompty.Name, // TODO: sanitize name
Description = prompty.Description,
Template = content,
};

PromptExecutionSettings? defaultExecutionSetting = null;
if (prompty.Model?.ModelConfiguration?.ModelType is ModelType.azure_openai or ModelType.openai)
{
defaultExecutionSetting = new PromptExecutionSettings
{
ModelId = prompty.Model.ModelConfiguration.ModelType is ModelType.azure_openai ?
prompty.Model.ModelConfiguration.AzureDeployment :
prompty.Model.ModelConfiguration.Name
};

var extensionData = new Dictionary<string, object>();

if (prompty.Model?.Parameters?.Temperature is double temperature)
{
extensionData.Add("temperature", temperature);
}

if (prompty.Model?.Parameters?.TopP is double topP)
{
extensionData.Add("top_p", topP);
}

if (prompty.Model?.Parameters?.MaxTokens is int maxTokens)
{
extensionData.Add("max_tokens", maxTokens);
}

if (prompty.Model?.Parameters?.Seed is int seed)
{
extensionData.Add("seed", seed);
}

if (prompty.Model?.Parameters?.FrequencyPenalty is double frequencyPenalty)
{
extensionData.Add("frequency_penalty", frequencyPenalty);
}

if (prompty.Model?.Parameters?.PresencePenalty is double presencePenalty)
{
extensionData.Add("presence_penalty", presencePenalty);
}

if (prompty.Model?.Parameters?.Stop is List<string> stop)
{
extensionData.Add("stop_sequences", stop);
}

if (prompty.Model?.Parameters?.ResponseFormat == "json_object")
{
extensionData.Add("response_format", "json_object");
}

defaultExecutionSetting.ExtensionData = extensionData;
promptTemplateConfig.AddExecutionSettings(defaultExecutionSetting);
}

// Step 3:
// Add input and output variables.
if (prompty.Inputs is not null)
{
foreach (var input in prompty.Inputs)
{
if (input.Value is string description)
{
promptTemplateConfig.InputVariables.Add(new()
{
Name = input.Key,
Description = description,
});
}
}
}

if (prompty.Outputs is not null)
{
// PromptTemplateConfig supports only a single output variable. If the prompty template
// contains one and only one, use it. Otherwise, ignore any outputs.
if (prompty.Outputs.Count == 1 &&
prompty.Outputs.First().Value is string description)
{
promptTemplateConfig.OutputVariable = new() { Description = description };
}
}

// Step 4:
// Update template format. If not provided, use Liquid as default.
promptTemplateConfig.TemplateFormat = prompty.Template ?? LiquidPromptTemplateFactory.LiquidTemplateFormat;
var promptTemplateConfig = KernelFunctionPrompty.ToPromptTemplateConfig(promptyTemplate);

return KernelFunctionFactory.CreateFromPrompt(
promptTemplateConfig,
promptTemplateFactory ?? s_defaultTemplateFactory,
promptTemplateFactory ?? KernelFunctionPrompty.s_defaultTemplateFactory,
kernel.LoggerFactory);
}
}