Commit 222a57c: update
LittleLittleCloud committed Mar 3, 2024
1 parent cbacdc7 · commit 222a57c
Showing 4 changed files with 22 additions and 226 deletions.
9 changes: 9 additions & 0 deletions dotnet/src/AutoGen.Core/Message/ImageMessage.cs
@@ -1,6 +1,8 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// ImageMessage.cs

using System;

namespace AutoGen.Core;

public class ImageMessage : IMessage
@@ -12,6 +14,13 @@ public ImageMessage(Role role, string url, string? from = null)
        this.Url = url;
    }

    public ImageMessage(Role role, Uri uri, string? from = null)
    {
        this.Role = role;
        this.From = from;
        this.Url = uri.ToString();
    }

    public Role Role { get; set; }

    public string Url { get; set; }
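A minimal usage sketch of the new Uri-based constructor; the image URL below is a placeholder. Both overloads end up storing a string Url, since the Uri overload calls uri.ToString() internally:

    using System;
    using AutoGen.Core;

    // Equivalent messages built via the existing string overload and the new Uri overload.
    var byString = new ImageMessage(Role.User, "https://example.com/chart.png", from: "user");
    var byUri = new ImageMessage(Role.User, new Uri("https://example.com/chart.png"), from: "user");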
4 changes: 2 additions & 2 deletions dotnet/src/AutoGen.OpenAI/Agent/GPTAgent.cs
@@ -57,7 +57,7 @@ public class GPTAgent : IStreamingAgent
            _ => throw new ArgumentException($"Unsupported config type {config.GetType()}"),
        };

        _innerAgent = new OpenAIChatAgent(openAIClient, name, systemMessage, modelName, temperature, maxTokens, functions);
        _innerAgent = new OpenAIChatAgent(openAIClient, name, modelName, systemMessage, temperature, maxTokens, functions);
        Name = name;
        this.functionMap = functionMap;
    }
@@ -76,7 +76,7 @@ public class GPTAgent : IStreamingAgent
        this.modelName = modelName;
        Name = name;
        this.functionMap = functionMap;
        _innerAgent = new OpenAIChatAgent(openAIClient, name, systemMessage, modelName, temperature, maxTokens, functions);
        _innerAgent = new OpenAIChatAgent(openAIClient, name, modelName, systemMessage, temperature, maxTokens, functions);
    }

    public string Name { get; }
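For reference, a hedged construction sketch matching the corrected argument order implied by the fixed calls above (client, name, modelName, systemMessage, temperature, maxTokens, functions). The API key, model name, and numeric values are placeholders, and functions is simply passed as null here; treat this as a sketch of the order fix, not a definitive signature:

    using Azure.AI.OpenAI;
    using AutoGen.OpenAI;

    // Placeholder client; in practice this is built from an OpenAI API key or an Azure endpoint.
    var openAIClient = new OpenAIClient("<api-key>");

    var chatAgent = new OpenAIChatAgent(
        openAIClient,
        "assistant",                       // name
        "gpt-3.5-turbo",                   // modelName: now the third argument
        "You are a helpful AI assistant",  // systemMessage: now the fourth argument
        0.7f,                              // temperature (placeholder)
        1024,                              // maxTokens (placeholder)
        null);                             // functions: none in this sketch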

This file was deleted.

16 changes: 11 additions & 5 deletions dotnet/test/AutoGen.Tests/SingleAgentTest.cs
@@ -67,25 +67,31 @@ public async Task GPTAgentVisionTestAsync()
                { nameof(GetHighestLabel), this.GetHighestLabelWrapper },
            });


        var imageUri = new Uri(@"https://raw.githubusercontent.com/microsoft/autogen/main/website/blog/2023-04-21-LLM-tuning-math/img/level2algebra.png");
        var oaiMessage = new ChatRequestUserMessage(
            new ChatMessageTextContentItem("which label has the highest inference cost"),
            new ChatMessageImageContentItem(new Uri(@"https://raw.githubusercontent.com/microsoft/autogen/main/website/blog/2023-04-21-LLM-tuning-math/img/level2algebra.png")));
            new ChatMessageImageContentItem(imageUri));
        var multiModalMessage = new MultiModalMessage(Role.User,
            [
                new TextMessage(Role.User, "which label has the highest inference cost", from: "user"),
                new ImageMessage(Role.User, @"https://raw.githubusercontent.com/microsoft/autogen/main/website/blog/2023-04-21-LLM-tuning-math/img/level2algebra.png", from: "user"),
                new ImageMessage(Role.User, imageUri, from: "user"),
            ],
            from: "user");

        foreach (var message in new IMessage[] { new MessageEnvelope<ChatRequestUserMessage>(oaiMessage), multiModalMessage })
        var imageMessage = new ImageMessage(Role.User, imageUri, from: "user");

        IMessage[] messages = [
            MessageEnvelope.Create(oaiMessage),
            multiModalMessage,
            imageMessage,
        ];
        foreach (var message in messages)
        {
            var response = await visionAgent.SendAsync(message);
            response.From.Should().Be(visionAgent.Name);

            var labelResponse = await gpt3Agent.SendAsync(response);
            labelResponse.From.Should().Be(gpt3Agent.Name);
            labelResponse.GetContent().Should().Be("[HIGHEST_LABEL] gpt-4 (n=5) green");
            labelResponse.GetToolCalls()!.First().FunctionName.Should().Be(nameof(GetHighestLabel));
        }
    }
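A condensed sketch of the flow the revised test exercises, using only calls that appear in the diff. Here visionAgent and gpt3Agent stand for the two agents the test constructs earlier (not shown in this hunk), and the check on GetToolCalls replaces the earlier brittle string comparison on GetContent:

    // Build an image-only message from a Uri (new ImageMessage overload), send it to the
    // vision-capable agent, forward the reply to the function-calling agent, and assert
    // which tool it invoked rather than matching the exact reply text.
    var imageUri = new Uri(@"https://raw.githubusercontent.com/microsoft/autogen/main/website/blog/2023-04-21-LLM-tuning-math/img/level2algebra.png");
    var imageMessage = new ImageMessage(Role.User, imageUri, from: "user");

    var response = await visionAgent.SendAsync(imageMessage);
    var labelResponse = await gpt3Agent.SendAsync(response);
    labelResponse.GetToolCalls()!.First().FunctionName.Should().Be(nameof(GetHighestLabel));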
