Skip to content

Commit

Permalink
Merge pull request #23 from AndreaPic/develop
Browse files Browse the repository at this point in the history
dev 2 master
  • Loading branch information
AndreaPic committed May 20, 2023
2 parents 7ee4fa2 + 5b78024 commit 3abde79
Show file tree
Hide file tree
Showing 9 changed files with 296 additions and 8 deletions.
20 changes: 20 additions & 0 deletions DevExtremeAI/OpenAIClient/IOpenAIAPIClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,34 @@ public interface IOpenAIAPIClient
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateCompletionRequest is forced false</remarks>
public Task<ResponseDTO<CreateCompletionResponse>> CreateCompletionAsync(CreateCompletionRequest request);

/// <summary>
/// Creates a completion for the provided prompt and parameters in stream way.
/// Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateCompletionRequest is forced true</remarks>
public IAsyncEnumerable<ResponseDTO<CreateCompletionResponse>> CreateCompletionStreamAsync(
CreateCompletionRequest request);
/// <summary>
/// Creates a model response for the given chat conversation.
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateChatCompletionRequest is forced false</remarks>
public Task<ResponseDTO<CreateChatCompletionResponse>> CreateChatCompletionAsync(CreateChatCompletionRequest request);
/// <summary>
/// Creates a model response for the given chat conversation in stream way.
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateChatCompletionRequest is forced true</remarks>
public IAsyncEnumerable<ResponseDTO<CreateChatCompletionResponse>> CreateChatCompletionStreamAsync(
CreateChatCompletionRequest request);
/// <summary>
/// Given a prompt and an instruction, the model will return an edited version of the prompt.
/// Creates a new edit for the provided input, instruction, and parameters.
/// </summary>
Expand Down
79 changes: 79 additions & 0 deletions DevExtremeAI/OpenAIClient/OpenAIAPIClient.Chat.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Json;
using System.Text.Json;
using DevExtremeAI.OpenAIDTO;

namespace DevExtremeAI.OpenAIClient
Expand All @@ -15,12 +16,14 @@ partial class OpenAIAPIClient
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateChatCompletionRequest is forced false</remarks>
public async Task<ResponseDTO<CreateChatCompletionResponse>> CreateChatCompletionAsync(CreateChatCompletionRequest request)
{
ResponseDTO<CreateChatCompletionResponse> ret = new ResponseDTO<CreateChatCompletionResponse>();
HttpClient httpClient = CreateHttpClient(out bool doDispose);
try
{
request.Stream = false;
FillBaseAddress(httpClient);
FillAuthRequestHeaders(httpClient.DefaultRequestHeaders);

Expand All @@ -47,5 +50,81 @@ public async Task<ResponseDTO<CreateChatCompletionResponse>> CreateChatCompletio
}
}

/// <summary>
/// Creates a model response for the given chat conversation in stream way.
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateChatCompletionRequest is forced true</remarks>
public async IAsyncEnumerable<ResponseDTO<CreateChatCompletionResponse>> CreateChatCompletionStreamAsync(CreateChatCompletionRequest request)
{
    HttpClient httpClient = CreateHttpClient(out bool doDispose);
    try
    {
        // Streaming requires the server-sent-events response format.
        request.Stream = true;
        FillBaseAddress(httpClient);
        FillAuthRequestHeaders(httpClient.DefaultRequestHeaders);

        var jsonContent = CreateJsonStringContent(request);

        var httpResponse = await httpClient.PostAsync($"chat/completions", jsonContent);
        if (httpResponse.IsSuccessStatusCode)
        {
            await using var stream = await httpResponse.Content.ReadAsStreamAsync();
            using var reader = new StreamReader(stream);

            bool stop = false;
            while (!stop)
            {
                var line = await reader.ReadLineAsync();
                if (line == null)
                {
                    // End of the HTTP stream reached without a [DONE] marker:
                    // stop instead of spinning forever on a closed stream.
                    stop = true;
                }
                else if (line.StartsWith(streamLineBegin))
                {
                    var payload = line.Substring(streamLineBegin.Length);
                    if (payload == streamDoneLine)
                    {
                        // Server signalled the end of the completion stream.
                        stop = true;
                    }
                    else
                    {
                        // Each "data: " line carries one JSON-encoded partial response.
                        var ret = new ResponseDTO<CreateChatCompletionResponse>();
                        ret.OpenAIResponse = JsonSerializer.Deserialize<CreateChatCompletionResponse>(payload);
                        yield return ret;
                    }
                }
                // Blank separator lines and any non-"data:" SSE lines are skipped.
            }
        }
        else
        {
            // Non-success status: surface the error payload (or a default error) to the caller.
            var ret = new ResponseDTO<CreateChatCompletionResponse>();
            ret.ErrorResponse = await httpResponse.Content.ReadFromJsonAsync<ErrorResponse>() ??
                               ErrorResponse.CreateDefaultErrorResponse();
            yield return ret;
        }
    }
    finally
    {
        if (doDispose)
        {
            httpClient.Dispose();
        }
    }
}

}
}
78 changes: 78 additions & 0 deletions DevExtremeAI/OpenAIClient/OpenAIAPIClient.Completions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
using System.Linq;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;
using DevExtremeAI.OpenAIDTO;

namespace DevExtremeAI.OpenAIClient
Expand All @@ -16,12 +17,14 @@ partial class OpenAIAPIClient
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateCompletionRequest is forced false</remarks>
public async Task<ResponseDTO<CreateCompletionResponse>> CreateCompletionAsync(CreateCompletionRequest request)
{
ResponseDTO<CreateCompletionResponse> ret = new ResponseDTO<CreateCompletionResponse>();
HttpClient httpClient = CreateHttpClient(out bool doDispose);
try
{
request.Stream = false;
FillBaseAddress(httpClient);
FillAuthRequestHeaders(httpClient.DefaultRequestHeaders);

Expand All @@ -48,5 +51,80 @@ public async Task<ResponseDTO<CreateCompletionResponse>> CreateCompletionAsync(C
}


/// <summary>
/// Creates a completion for the provided prompt and parameters in stream way.
/// Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.
/// </summary>
/// <param name="request">DTO with request specs.</param>
/// <returns>OpenAIResponse property contains the AI response, if an error occurs HasError is true and the Error property contains the complete error details.</returns>
/// <remarks>With this method the Stream property of CreateCompletionRequest is forced true</remarks>
public async IAsyncEnumerable<ResponseDTO<CreateCompletionResponse>> CreateCompletionStreamAsync(CreateCompletionRequest request)
{
    HttpClient httpClient = CreateHttpClient(out bool doDispose);
    try
    {
        // Streaming requires the server-sent-events response format.
        request.Stream = true;
        FillBaseAddress(httpClient);
        FillAuthRequestHeaders(httpClient.DefaultRequestHeaders);

        var jsonContent = CreateJsonStringContent(request);

        var httpResponse = await httpClient.PostAsync($"completions", jsonContent);
        if (httpResponse.IsSuccessStatusCode)
        {
            await using var stream = await httpResponse.Content.ReadAsStreamAsync();
            using var reader = new StreamReader(stream);

            bool stop = false;
            while (!stop)
            {
                var line = await reader.ReadLineAsync();
                if (line == null)
                {
                    // End of the HTTP stream reached without a [DONE] marker:
                    // stop instead of spinning forever on a closed stream.
                    stop = true;
                }
                else if (line.StartsWith(streamLineBegin))
                {
                    var payload = line.Substring(streamLineBegin.Length);
                    if (payload == streamDoneLine)
                    {
                        // Server signalled the end of the completion stream.
                        stop = true;
                    }
                    else
                    {
                        // Each "data: " line carries one JSON-encoded partial response.
                        var ret = new ResponseDTO<CreateCompletionResponse>();
                        ret.OpenAIResponse = JsonSerializer.Deserialize<CreateCompletionResponse>(payload);
                        yield return ret;
                    }
                }
                // Blank separator lines and any non-"data:" SSE lines are skipped.
            }
        }
        else
        {
            // Non-success status: surface the error payload (or a default error) to the caller.
            var ret = new ResponseDTO<CreateCompletionResponse>();
            ret.ErrorResponse = await httpResponse.Content.ReadFromJsonAsync<ErrorResponse>() ??
                               ErrorResponse.CreateDefaultErrorResponse();
            yield return ret;
        }
    }
    finally
    {
        if (doDispose)
        {
            httpClient.Dispose();
        }
    }
}

}
}
4 changes: 1 addition & 3 deletions DevExtremeAI/OpenAIClient/OpenAIAPIClient.FineTune.cs
Original file line number Diff line number Diff line change
Expand Up @@ -135,8 +135,6 @@ public async Task<ResponseDTO<GetFineTuneEventListResponse>> GetFineTuneEventLis
return ret;
}

const string streamLineBegin = "data: ";
private const string streamDoneLine = "[DONE]";

/// <summary>
/// Get fine-grained status updates for a fine-tune job.
Expand Down Expand Up @@ -166,7 +164,7 @@ public async IAsyncEnumerable<Event> GetFineTuneEventStreamAsync(FineTuneRequest
var line = await reader.ReadLineAsync();
if (string.IsNullOrWhiteSpace(line))
{
stop = true;
stop = false;
}
else
{
Expand Down
3 changes: 3 additions & 0 deletions DevExtremeAI/OpenAIClient/OpenAIAPIClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@ namespace DevExtremeAI.OpenAIClient
public sealed partial class OpenAIAPIClient : IOpenAIAPIClient
{

private const string streamLineBegin = "data: ";
private const string streamDoneLine = "[DONE]";

private readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
//PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
Expand Down
16 changes: 16 additions & 0 deletions DevExtremeAI/OpenAIDTO/ChatDTO.cs
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,11 @@ public class CreateChatCompletionRequest
[JsonPropertyName("messages")]
public List<ChatCompletionRequestMessage> Messages { get; private set; } = new List<ChatCompletionRequestMessage>();

/// <summary>
/// Appends a message to the chat conversation sent with this request.
/// </summary>
/// <param name="message">The chat message to append to <see cref="Messages"/>.</param>
public void AddMessage(ChatCompletionRequestMessage message) => Messages.Add(message);

/// <summary>
/// What sampling temperature to use, between 0 and 2.
/// Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.
Expand Down Expand Up @@ -64,6 +69,13 @@ public class CreateChatCompletionRequest

private List<string> stops { get; set; } = new List<string>();

/// <summary>
/// Add a sequence where the API will stop generating further tokens.
/// </summary>
/// <param name="stop">sequence where the API will stop generating further tokens</param>
/// <remarks>
/// Are allowed up to 4 sequence.
/// </remarks>
public void AddStop(string stop)
{
stops.Add(stop);
Expand Down Expand Up @@ -175,8 +187,12 @@ public class CreateChatCompletionResponseChoicesInner
[JsonPropertyName("index")]
public double? Index { get; set; }

[JsonPropertyName("message")]
public ChatCompletionResponseMessage? Message { get; set; }

[JsonPropertyName("delta")]
public ChatCompletionResponseMessage? Delta { get; set; }


[JsonPropertyName("finish_reason")]
public string? FinishReason { get; set; }
Expand Down
46 changes: 45 additions & 1 deletion DevExtremeAILibTest/AIChatCompletionTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,49 @@ public AIChatCompletionTests(TestApplication factory)
_factory = factory;
}



// Integration test: exercises the streaming chat-completion API end to end
// against the live OpenAI service (requires valid credentials in the test host).
[Theory]
//[InlineData("text-davinci-003")]
[InlineData("gpt-3.5-turbo")]
public async Task CreateChatCompletionStreamTest(string modelID)
{

    using (var scope = _factory.Services.CreateScope())
    {
        var openAiapiClient = scope.ServiceProvider.GetService<IOpenAIAPIClient>();
        CreateChatCompletionRequest createCompletionRequest = new CreateChatCompletionRequest();
        createCompletionRequest.Model = modelID;
        createCompletionRequest.Temperature = 0.9;

        createCompletionRequest.Messages.Add(new ChatCompletionRequestMessage()
        {
            Role = ChatCompletionMessageRoleEnum.User,
            Content = "I'm getting bored, what can you do for me?"
        });
        // NOTE(review): 22-second pause — presumably to stay under the OpenAI
        // rate limit when the whole test suite runs; confirm and consider a
        // shared throttling fixture instead of a hard-coded delay.
        await Task.Delay(22000);

        try
        {
            // Each streamed chunk must be a well-formed partial response with
            // at least one choice; the incremental text arrives in Delta.
            await foreach (var response in openAiapiClient.CreateChatCompletionStreamAsync(
                createCompletionRequest))
            {
                Assert.False(response.HasError, response?.ErrorResponse?.Error?.Message);
                Assert.NotNull(response?.OpenAIResponse);
                Assert.NotNull(response?.OpenAIResponse.Choices);
                Assert.True(response?.OpenAIResponse.Choices.Count > 0);
                Debug.WriteLine(response?.OpenAIResponse?.Choices[0]?.Delta?.Content);

            }
        }
        catch (Exception ex)
        {
            // NOTE(review): catch-all turns any exception into a test failure but
            // loses the stack trace; xUnit would report an unhandled exception
            // with full details if this try/catch were removed.
            Assert.True(false,ex.Message);
        }

    }
}


[Theory]
//[InlineData("text-davinci-003")]
[InlineData("gpt-3.5-turbo")]
Expand Down Expand Up @@ -64,6 +106,8 @@ public async Task CreateChatCompletionTest(string modelID)
}
}



[Theory]
[InlineData("gpt-3.5-turbo")]
public async Task CreateChatCompletionITATest(string modelID)
Expand Down
Loading

0 comments on commit 3abde79

Please sign in to comment.