Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
94 changes: 9 additions & 85 deletions src/AI.Tests/Extensions/PipelineTestOutput.cs
Original file line number Diff line number Diff line change
@@ -1,97 +1,21 @@
using System.ClientModel.Primitives;
using System.Text.Json;
using System.Text.Json.Nodes;

namespace Devlooped.Extensions.AI;

public static class PipelineTestOutput
{
/// <summary>
/// Sets a <see cref="ClientPipelineOptions.Transport"/> that renders HTTP messages to the
/// console using Spectre.Console rich JSON formatting, but only if the console is interactive.
/// </summary>
/// <typeparam name="TOptions">The options type to configure for HTTP logging.</typeparam>
/// <param name="pipelineOptions">The options instance to configure.</param>
/// <param name="output">The test output helper to write to.</param>
/// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
/// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
/// <remarks>
/// NOTE: this is the lowest-level logging after all chat pipeline processing has been done.
/// <para>
/// If the options already provide a transport, it will be wrapped with the console
/// logging transport to minimize the impact on existing configurations.
/// </para>
/// </remarks>
public static TOptions WriteTo<TOptions>(this TOptions pipelineOptions, ITestOutputHelper? output = default, Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
where TOptions : ClientPipelineOptions
{
pipelineOptions.AddPolicy(new TestOutputPolicy(output ?? NullTestOutputHelper.Default, onRequest, onResponse), PipelinePosition.BeforeTransport);
return pipelineOptions;
}

class NullTestOutputHelper : ITestOutputHelper
static readonly JsonSerializerOptions options = new(JsonSerializerDefaults.General)
{
public static ITestOutputHelper Default { get; } = new NullTestOutputHelper();
NullTestOutputHelper() { }
public void WriteLine(string message) { }
public void WriteLine(string format, params object[] args) { }
}
WriteIndented = true,
};

class TestOutputPolicy(ITestOutputHelper output, Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default) : PipelinePolicy
public static TOptions WriteTo<TOptions>(this TOptions pipelineOptions, ITestOutputHelper output = default)

Check warning on line 13 in src/AI.Tests/Extensions/PipelineTestOutput.cs

View workflow job for this annotation

GitHub Actions / build-ubuntu-latest

Cannot convert null literal to non-nullable reference type.

Check warning on line 13 in src/AI.Tests/Extensions/PipelineTestOutput.cs

View workflow job for this annotation

GitHub Actions / build-ubuntu-latest

Cannot convert null literal to non-nullable reference type.

Check warning on line 13 in src/AI.Tests/Extensions/PipelineTestOutput.cs

View workflow job for this annotation

GitHub Actions / build-ubuntu-latest

Cannot convert null literal to non-nullable reference type.

Check warning on line 13 in src/AI.Tests/Extensions/PipelineTestOutput.cs

View workflow job for this annotation

GitHub Actions / build-ubuntu-latest

Cannot convert null literal to non-nullable reference type.
where TOptions : ClientPipelineOptions
{
static readonly JsonSerializerOptions options = new JsonSerializerOptions(JsonSerializerDefaults.General)
{
WriteIndented = true,
};

public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
{
message.BufferResponse = true;
ProcessNext(message, pipeline, currentIndex);

if (message.Request.Content is not null)
{
using var memory = new MemoryStream();
message.Request.Content.WriteTo(memory);
memory.Position = 0;
using var reader = new StreamReader(memory);
var content = reader.ReadToEnd();
var node = JsonNode.Parse(content);
onRequest?.Invoke(node!);
output?.WriteLine(node!.ToJsonString(options));
}

if (message.Response != null)
{
var node = JsonNode.Parse(message.Response.Content.ToString());
onResponse?.Invoke(node!);
output?.WriteLine(node!.ToJsonString(options));
}
}

public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
{
message.BufferResponse = true;
await ProcessNextAsync(message, pipeline, currentIndex);

if (message.Request.Content is not null)
{
using var memory = new MemoryStream();
message.Request.Content.WriteTo(memory);
memory.Position = 0;
using var reader = new StreamReader(memory);
var content = await reader.ReadToEndAsync();
var node = JsonNode.Parse(content);
onRequest?.Invoke(node!);
output?.WriteLine(node!.ToJsonString(options));
}

if (message.Response != null)
{
var node = JsonNode.Parse(message.Response.Content.ToString());
onResponse?.Invoke(node!);
output?.WriteLine(node!.ToJsonString(options));
}
}
return pipelineOptions.Observe(
request => output.WriteLine(request.ToJsonString(options)),
response => output.WriteLine(response.ToJsonString(options))
);
}
}
8 changes: 5 additions & 3 deletions src/AI.Tests/GrokTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ public async Task GrokInvokesToolAndSearch()
var responses = new List<JsonNode>();

var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3",
new OpenAI.OpenAIClientOptions().WriteTo(output, requests.Add, responses.Add))
ClientOptions.Observable(requests.Add, responses.Add)
.WriteTo(output))
.AsBuilder()
.UseFunctionInvocation()
.Build();
Expand Down Expand Up @@ -105,7 +106,8 @@ public async Task GrokInvokesHostedSearchTool()
var responses = new List<JsonNode>();

var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3",
new OpenAI.OpenAIClientOptions().WriteTo(output, requests.Add, responses.Add));
ClientOptions.Observable(requests.Add, responses.Add)
.WriteTo(output));

var options = new ChatOptions
{
Expand Down Expand Up @@ -169,4 +171,4 @@ public async Task GrokThinksHard()
// different model and the grok client honors that choice.
Assert.StartsWith("grok-3-mini", response.ModelId);
}
}
}
7 changes: 4 additions & 3 deletions src/AI.Tests/OpenAITests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ public async Task OpenAISwitchesModel()
{ "user", "What products does Tesla make?" },
};

var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1-nano", new OpenAI.OpenAIClientOptions().WriteTo(output));
var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1-nano",
new OpenAI.OpenAIClientOptions().WriteTo(output));

var options = new ChatOptions
{
Expand All @@ -39,8 +40,8 @@ public async Task OpenAIThinks()

var requests = new List<JsonNode>();

var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "o3-mini", new OpenAI.OpenAIClientOptions()
.WriteTo(output, requests.Add));
var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "o3-mini",
ClientOptions.Observable(requests.Add).WriteTo(output));

var options = new ChatOptions
{
Expand Down
13 changes: 4 additions & 9 deletions src/AI.Tests/RetrievalTests.cs
Original file line number Diff line number Diff line change
@@ -1,22 +1,17 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.AI;
using OpenAI.Responses;
using static ConfigurationExtensions;

namespace Devlooped.Extensions.AI;

public class RetrievalTests(ITestOutputHelper output)
{
[SecretsTheory("OpenAI:Key")]
[SecretsTheory("OPENAI_API_KEY")]
[InlineData("gpt-4.1-nano", "Qué es la rebeldía en el Código Procesal Civil y Comercial Nacional?")]
[InlineData("gpt-4.1-nano", "What's the battery life in an iPhone 15?", true)]
public async Task CanRetrieveContent(string model, string question, bool empty = false)
{
var client = new OpenAI.OpenAIClient(Configuration["OpenAI:Key"]);
var client = new OpenAI.OpenAIClient(Configuration["OPENAI_API_KEY"]);
var store = client.GetVectorStoreClient().CreateVectorStore(true);
try
{
Expand All @@ -25,7 +20,7 @@ public async Task CanRetrieveContent(string model, string question, bool empty =
{
client.GetVectorStoreClient().AddFileToVectorStore(store.VectorStoreId, file.Value.Id, true);

var responses = new OpenAIResponseClient(model, Configuration["OpenAI:Key"]);
var responses = new OpenAIResponseClient(model, Configuration["OPENAI_API_KEY"]);

var chat = responses.AsIChatClient(
ResponseTool.CreateFileSearchTool([store.VectorStoreId]))
Expand Down
31 changes: 31 additions & 0 deletions src/AI/ClientOptions.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
using System.ClientModel.Primitives;
using System.Text.Json.Nodes;
using OpenAI;

namespace Devlooped.Extensions.AI;

/// <summary>
/// Shortcut factory methods for creating <see cref="ClientPipelineOptions"/> like
/// <see cref="OpenAIClientOptions"/> that provide convenient initialization options.
/// </summary>
public static class ClientOptions
{
/// <summary>
/// Creates an observable <see cref="OpenAIClientOptions"/> instance that can
/// be used to log requests and responses.
/// </summary>
/// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
/// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
public static OpenAIClientOptions Observable(Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
=> Observable<OpenAIClientOptions>(onRequest, onResponse);

/// <summary>
/// Creates an observable <see cref="ClientPipelineOptions"/>-derived instance
/// that can be used to log requests and responses.
/// </summary>
/// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
/// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
public static TOptions Observable<TOptions>(Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
where TOptions : ClientPipelineOptions, new()
=> new TOptions().Observe(onRequest, onResponse);
}
81 changes: 81 additions & 0 deletions src/AI/ClientPipelineExtensions.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
using System.ClientModel.Primitives;
using System.Text.Json;
using System.Text.Json.Nodes;

namespace Devlooped.Extensions.AI;

public static class ClientPipelineExtensions
{
    /// <summary>
    /// Adds a <see cref="PipelinePolicy"/> that observes request and response
    /// messages from the <see cref="ClientPipeline"/> and notifies the provided
    /// callbacks with the JSON representation of the HTTP messages.
    /// </summary>
    /// <typeparam name="TOptions">The options type to configure for HTTP logging.</typeparam>
    /// <param name="pipelineOptions">The options instance to configure.</param>
    /// <param name="onRequest">A callback to process the <see cref="JsonNode"/> that was sent.</param>
    /// <param name="onResponse">A callback to process the <see cref="JsonNode"/> that was received.</param>
    /// <remarks>
    /// This is the lowest-level logging after all chat pipeline processing has been done.
    /// If no <see cref="JsonNode"/> can be parsed from the request or response,
    /// the callbacks will not be invoked.
    /// </remarks>
    public static TOptions Observe<TOptions>(this TOptions pipelineOptions,
        Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default)
        where TOptions : ClientPipelineOptions
    {
        // BeforeTransport so the observed payloads reflect all prior pipeline processing.
        pipelineOptions.AddPolicy(new ObservePipelinePolicy(onRequest, onResponse), PipelinePosition.BeforeTransport);
        return pipelineOptions;
    }

    class ObservePipelinePolicy(Action<JsonNode>? onRequest = default, Action<JsonNode>? onResponse = default) : PipelinePolicy
    {
        public override void Process(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
        {
            // Buffer so the response content remains readable for observation below.
            message.BufferResponse = true;
            ProcessNext(message, pipeline, currentIndex);
            NotifyObservers(message);
        }

        public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList<PipelinePolicy> pipeline, int currentIndex)
        {
            // Buffer so the response content remains readable for observation below.
            message.BufferResponse = true;
            await ProcessNextAsync(message, pipeline, currentIndex);
            NotifyObservers(message);
        }

        // Parses request/response payloads as JSON and notifies the registered
        // callbacks. Non-JSON payloads are silently skipped by design.
        void NotifyObservers(PipelineMessage message)
        {
            if (onRequest != null && message.Request.Content != null)
            {
                using var memory = new MemoryStream();
                message.Request.Content.WriteTo(memory);
                memory.Position = 0;
                using var reader = new StreamReader(memory);
                var content = reader.ReadToEnd();
                try
                {
                    // The property pattern already guarantees a non-null node,
                    // so no null-forgiving operator is needed.
                    if (JsonNode.Parse(content) is { } node)
                        onRequest(node);
                }
                catch (JsonException)
                {
                    // We ignore invalid JSON
                }
            }

            if (onResponse != null && message.Response != null)
            {
                try
                {
                    if (JsonNode.Parse(message.Response.Content.ToString()) is { } node)
                        onResponse(node);
                }
                catch (JsonException)
                {
                    // We ignore invalid JSON
                }
            }
        }
    }
}
Loading