-
Notifications
You must be signed in to change notification settings - Fork 0
/
Program.cs
130 lines (109 loc) · 5.06 KB
/
Program.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
using EmbeddingSample;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
// Builds a serverless (in-process) Kernel Memory instance that handles document ingestion,
// chunking, embedding generation and retrieval-augmented answering.
var kernelMemory = new KernelMemoryBuilder()
// If you want to use OpenAI, you need to call .WithOpenAITextEmbeddingGeneration (with corresponding parameters).
// Azure OpenAI deployment used to compute embeddings for document chunks and questions.
.WithAzureOpenAITextEmbeddingGeneration(new()
{
APIKey = AppConstants.Embedding.ApiKey,
Auth = AzureOpenAIConfig.AuthTypes.APIKey,
Deployment = AppConstants.Embedding.Deployment,
Endpoint = AppConstants.Embedding.Endpoint,
APIType = AzureOpenAIConfig.APITypes.EmbeddingGeneration,
MaxTokenTotal = AppConstants.Embedding.MaxTokens
})
// If you want to use OpenAI, you need to call .WithOpenAITextGeneration (with corresponding parameters).
// Azure OpenAI chat deployment used by Kernel Memory to generate the final answer from retrieved chunks.
.WithAzureOpenAITextGeneration(new()
{
APIKey = AppConstants.ChatCompletion.ApiKey,
Auth = AzureOpenAIConfig.AuthTypes.APIKey,
Deployment = AppConstants.ChatCompletion.Deployment,
Endpoint = AppConstants.ChatCompletion.Endpoint,
APIType = AzureOpenAIConfig.APITypes.ChatCompletion,
MaxTokenTotal = AppConstants.ChatCompletion.MaxTokens
})
// Retrieval settings: the fallback answer when no relevant chunk is found,
// how many matches to feed the model, and the answer token budget.
.WithSearchClientConfig(new()
{
EmptyAnswer = "I'm sorry, I haven't found any relevant information that can be used to answer your question",
MaxMatchesCount = 25,
AnswerTokens = 800
})
.WithCustomTextPartitioningOptions(new()
{
// Defines the properties that are used to split the documents in chunks.
MaxTokensPerParagraph = 1000,
MaxTokensPerLine = 300,
OverlappingTokens = 100
})
//.WithSimpleFileStorage(AppConstants.Memory.ContentStoragePath) // Uncomment to use persistent Content Storage on file system.
.WithSqlServerMemoryDb(AppConstants.Memory.ConnectionString) // Use SQL Server as Vector Storage for embeddings.
.Build<MemoryServerless>();
// Configures Semantic Kernel with the Azure OpenAI chat completion service used
// (separately from Kernel Memory) to reformulate questions based on the chat history.
var builder = Kernel.CreateBuilder();
// FIX: the lambda parameter was named "builder", which conflicts with the enclosing
// top-level local of the same name (compiler error CS0136). Renamed to loggingBuilder.
builder.Services.AddLogging(loggingBuilder => loggingBuilder.AddConsole());
builder.Services
// If you want to use OpenAI, you need to call .AddOpenAIChatCompletion (with corresponding parameters).
.AddAzureOpenAIChatCompletion(AppConstants.ChatCompletion.Deployment, AppConstants.ChatCompletion.Endpoint, AppConstants.ChatCompletion.ApiKey);
var kernel = builder.Build();
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
// Import documents and web pages into Kernel Memory. The following instructions read, split in chunks and store the embeddings of the documents
// into Kernel Memory Vector Storage (SQL Server in this example, but other destinations are available). The embeddings are persisted, so you need to
// import the documents only once (unless you want to update the embeddings).
//await kernelMemory.ImportDocumentAsync(@"Taggia.pdf");

// Interactive Q&A loop: read a question, reformulate it with chat context,
// answer it via Kernel Memory embedding search. An empty line (or end of input) exits.
var chat = new ChatHistory();
// FIX: Console.ReadLine() returns string? (null on end-of-stream); the variable must be
// nullable to avoid a CS8600 warning under nullable reference types. Null is handled by
// the IsNullOrWhiteSpace guard below.
string? question;
do
{
    Console.ForegroundColor = ConsoleColor.Green;
    Console.Write("\n> Question: ");
    Console.ResetColor();
    question = Console.ReadLine();
    if (string.IsNullOrWhiteSpace(question))
    {
        break;
    }

    // Rewrite the question as a self-contained query using the chat history,
    // so follow-up questions keep their context in the embedding search.
    question = await CreateQuestionAsync(question);

    // Asks using the embedding search via Kernel Memory and the reformulated question.
    var answer = await kernelMemory.AskAsync(question);
    if (!answer.NoResult)
    {
        // The answer has been found. Adds it to the chat so that it can be used to reformulate next questions.
        chat.AddUserMessage(question);
        chat.AddAssistantMessage(answer.Result);

        Console.ForegroundColor = ConsoleColor.Green;
        Console.Write("> Answer: ");
        Console.ResetColor();
        Console.WriteLine(answer.Result);

        // List the documents the answer was grounded on.
        Console.WriteLine("Sources:");
        foreach (var source in answer.RelevantSources)
        {
            Console.WriteLine($"- {source.SourceName}");
        }
    }
    else
    {
        // No relevant chunk found: answer.Result holds the configured EmptyAnswer message.
        Console.ForegroundColor = ConsoleColor.DarkYellow;
        Console.WriteLine(answer.Result);
        Console.ResetColor();
    }

    Console.WriteLine();
} while (!string.IsNullOrWhiteSpace(question));
// Reformulates the user's question into a self-contained query, using the chat history
// as context, so that the embedding search works for follow-up questions.
// Returns the reformulated question (or the original question when the model
// returns a message without text content).
async Task<string> CreateQuestionAsync(string question)
{
    // To be sure to keep the context of the chat when generating embeddings, we need to reformulate the question based on previous messages.
    var embeddingQuestion = $"""
Reformulate the following question taking into account the context of the chat to perform embeddings search:
---
{question}
---
You must reformulate the question in the same language of the user's question.
Never add "in this chat", "in the context of this chat", "in the context of our conversation", "search for" or something like that in your answer.
""";

    // NOTE(review): the full reformulation prompt (not just the raw question) is kept in the
    // chat history and will be visible to subsequent reformulation calls — confirm this is intended.
    chat.AddUserMessage(embeddingQuestion);
    var reformulatedQuestion = await chatCompletionService.GetChatMessageContentAsync(chat);

    // FIX: ChatMessageContent.Content is nullable; fall back to the original question instead
    // of storing null in the history and returning null from a Task<string>.
    var reformulated = reformulatedQuestion.Content ?? question;
    chat.AddAssistantMessage(reformulated);
    return reformulated;
}