diff --git a/packages/langchain/lib/src/agents/agent.dart b/packages/langchain/lib/src/agents/agent.dart
index b1fb6409..2498f408 100644
--- a/packages/langchain/lib/src/agents/agent.dart
+++ b/packages/langchain/lib/src/agents/agent.dart
@@ -9,6 +9,9 @@ abstract class BaseActionAgent {
   /// {@macro base_action_agent}
   const BaseActionAgent();
 
+  /// The key for the scratchpad (intermediate steps) of the agent.
+  static const agentScratchpadInputKey = 'agent_scratchpad';
+
   /// Return key for the agent's output.
   static const agentReturnKey = 'output';
 
diff --git a/packages/langchain/lib/src/agents/executors.dart b/packages/langchain/lib/src/agents/executors.dart
index dd3e6e19..0d7cef03 100644
--- a/packages/langchain/lib/src/agents/executors.dart
+++ b/packages/langchain/lib/src/agents/executors.dart
@@ -17,6 +17,12 @@ import 'tools/invalid.dart';
 /// retrieves the output, and passes it back to the agent to determine the next
 /// action. This process continues until the agent determines it can directly
 /// respond to the user or completes its task.
+///
+/// If you add [memory] to the [AgentExecutor], it will save the
+/// [AgentExecutor]'s inputs and outputs, but not the agent's
+/// intermediate inputs and outputs. If you want the agent's intermediate
+/// inputs and outputs to be saved as well, add [memory] to the agent
+/// instead.
 /// {@endtemplate}
 class AgentExecutor extends BaseChain {
   AgentExecutor({
diff --git a/packages/langchain/lib/src/agents/tools/base.dart b/packages/langchain/lib/src/agents/tools/base.dart
index 7222da7b..2905651f 100644
--- a/packages/langchain/lib/src/agents/tools/base.dart
+++ b/packages/langchain/lib/src/agents/tools/base.dart
@@ -2,6 +2,7 @@ import 'dart:async';
 
 import 'package:meta/meta.dart';
 
+import '../../model_io/chat_models/models/models.dart';
 import 'models/models.dart';
 
 /// {@template base_tool}
@@ -104,6 +105,15 @@ abstract base class BaseTool {
     return run(toolInput);
   }
 
+  /// Converts the tool to a [ChatFunction].
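+  ///
+  /// A sketch of how a list of tools can be exposed to a chat model that
+  /// supports function calling (`tools` is assumed to be a `List<BaseTool>`):
+  ///
+  /// ```dart
+  /// final functions = tools
+  ///     .map((final t) => t.toChatFunction())
+  ///     .toList(growable: false);
+  /// ```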
+  ChatFunction toChatFunction() {
+    return ChatFunction(
+      name: name,
+      description: description,
+      parameters: inputJsonSchema,
+    );
+  }
+
   @override
   bool operator ==(covariant final BaseTool other) =>
       identical(this, other) || name == other.name;
diff --git a/packages/langchain/lib/src/memory/chat.dart b/packages/langchain/lib/src/memory/chat.dart
index 86f52ac6..aa3f0871 100644
--- a/packages/langchain/lib/src/memory/chat.dart
+++ b/packages/langchain/lib/src/memory/chat.dart
@@ -1,5 +1,6 @@
 import 'dart:async';
 
+import '../model_io/chat_models/models/models.dart';
 import '../utils/exception.dart';
 import 'base.dart';
 import 'models/models.dart';
@@ -46,11 +47,21 @@ abstract base class BaseChatMemory implements BaseMemory {
   }) async {
     // this is purposefully done in sequence so they're saved in order
     final (input, output) = _getInputOutputValues(inputValues, outputValues);
-    await chatHistory.addUserChatMessage(input);
-    await chatHistory.addAIChatMessage(output);
+
+    if (input is ChatMessage) {
+      await chatHistory.addChatMessage(input);
+    } else {
+      await chatHistory.addHumanChatMessage(input.toString());
+    }
+
+    if (output is ChatMessage) {
+      await chatHistory.addChatMessage(output);
+    } else {
+      await chatHistory.addAIChatMessage(output.toString());
+    }
   }
 
-  (String input, String output) _getInputOutputValues(
+  (dynamic input, dynamic output) _getInputOutputValues(
    final MemoryInputValues inputValues,
    final MemoryOutputValues outputValues,
   ) {
diff --git a/packages/langchain/lib/src/memory/stores/message/history.dart b/packages/langchain/lib/src/memory/stores/message/history.dart
index 59d6975f..64b02fd5 100644
--- a/packages/langchain/lib/src/memory/stores/message/history.dart
+++ b/packages/langchain/lib/src/memory/stores/message/history.dart
@@ -15,8 +15,8 @@ abstract base class BaseChatMessageHistory {
   /// Add [ChatMessage] to the history.
   Future<void> addChatMessage(final ChatMessage message);
 
-  /// Add a user message to the history.
-  Future<void> addUserChatMessage(final String message) {
+  /// Add a human message to the history.
+  Future<void> addHumanChatMessage(final String message) {
     return addChatMessage(ChatMessage.human(message));
   }
 
diff --git a/packages/langchain/lib/src/memory/utils.dart b/packages/langchain/lib/src/memory/utils.dart
index 2f65ae2a..8c2a9d59 100644
--- a/packages/langchain/lib/src/memory/utils.dart
+++ b/packages/langchain/lib/src/memory/utils.dart
@@ -1,3 +1,4 @@
+import '../agents/agent.dart';
 import '../utils/exception.dart';
 import 'models/models.dart';
 
@@ -10,13 +11,17 @@ String getPromptInputKey(
   final MemoryInputValues inputValues,
   final Set<String> memoryKeys,
 ) {
-  // "stop" is a special key that can be passed as input but is not used to
-  // format the prompt
-  final promptInputKeys =
-      inputValues.keys.toSet().difference({...memoryKeys, 'stop'});
+  // Reserved keys can be passed as input but are not used to format the prompt
+  final promptInputKeys = inputValues.keys.toSet().difference({
+    ...memoryKeys,
+    'stop',
+    BaseActionAgent.agentScratchpadInputKey,
+  });
   if (promptInputKeys.length != 1) {
     throw LangChainException(
-      message: 'One input key expected got $promptInputKeys',
+      message: 'One input key expected, got $promptInputKeys. '
+          'If you have multiple input keys in your prompt, you need to specify '
+          'the input key to use for the memory using the `inputKey` parameter.',
     );
   }
   return promptInputKeys.first;
diff --git a/packages/langchain/lib/src/model_io/chat_models/models/models.dart b/packages/langchain/lib/src/model_io/chat_models/models/models.dart
index c8036fba..5601f4d1 100644
--- a/packages/langchain/lib/src/model_io/chat_models/models/models.dart
+++ b/packages/langchain/lib/src/model_io/chat_models/models/models.dart
@@ -336,12 +336,12 @@ CustomChatMessage{
 /// Role of a chat message
 enum ChatMessageRole { system, human, ai, custom }
 
-/// {@template openai_function_model}
+/// {@template chat_function}
 /// The description of a function that can be called by the chat model.
 /// {@endtemplate}
 @immutable
 class ChatFunction {
-  /// {@macro openai_function_model}
+  /// {@macro chat_function}
   const ChatFunction({
     required this.name,
     this.description,
diff --git a/packages/langchain/test/memory/buffer_test.dart b/packages/langchain/test/memory/buffer_test.dart
index cf0cd63e..f7fe901c 100644
--- a/packages/langchain/test/memory/buffer_test.dart
+++ b/packages/langchain/test/memory/buffer_test.dart
@@ -1,3 +1,4 @@
+import 'package:langchain/src/agents/agent.dart';
 import 'package:langchain/src/memory/memory.dart';
 import 'package:langchain/src/model_io/chat_models/chat_models.dart';
 import 'package:test/test.dart';
@@ -7,7 +8,7 @@ void main() {
    test('Test buffer memory', () async {
      final memory = ConversationBufferMemory();
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': ''});
+      expect(result1, {BaseMemory.defaultMemoryKey: ''});
 
      await memory.saveContext(
        inputValues: {'foo': 'bar'},
@@ -15,13 +16,13 @@ void main() {
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': expectedString});
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedString});
    });
 
    test('Test buffer memory return messages', () async {
      final memory = ConversationBufferMemory(returnMessages: true);
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': []});
+      expect(result1, {BaseMemory.defaultMemoryKey: []});
 
      await memory.saveContext(
        inputValues: {'foo': 'bar'},
@@ -32,7 +33,24 @@ void main() {
        ChatMessage.ai('foo'),
      ];
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': expectedResult});
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedResult});
+    });
+
+    test('Test chat message as input and output', () async {
+      final memory = ConversationBufferMemory(returnMessages: true);
+      final result1 = await memory.loadMemoryVariables();
+      expect(result1, {BaseMemory.defaultMemoryKey: []});
+
+      await memory.saveContext(
+        inputValues: {'foo': ChatMessage.function(name: 'foo', content: 'bar')},
+        outputValues: {'bar': ChatMessage.ai('baz')},
+      );
+      final expectedResult = [
+        ChatMessage.function(name: 'foo', content: 'bar'),
+        ChatMessage.ai('baz'),
+      ];
+      final result2 = await memory.loadMemoryVariables();
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedResult});
    });
 
    test('Test buffer memory with pre-loaded history', () async {
@@ -45,7 +63,7 @@ void main() {
        chatHistory: ChatMessageHistory(messages: pastMessages),
      );
      final result = await memory.loadMemoryVariables();
-      expect(result, {'history': pastMessages});
+      expect(result, {BaseMemory.defaultMemoryKey: pastMessages});
    });
 
    test('Test clear memory', () async {
@@ -56,11 +74,69 @@ void main() {
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': expectedString});
+      expect(result1, {BaseMemory.defaultMemoryKey: expectedString});
 
      memory.clear();
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': ''});
+      expect(result2, {BaseMemory.defaultMemoryKey: ''});
+    });
+
+    test('Test reserved keys are ignored when selecting prompt input keys',
+        () async {
+      final memory = ConversationBufferMemory(returnMessages: true);
+      await memory.saveContext(
+        inputValues: {
+          'foo': 'bar',
+          'stop': 'stop',
+          BaseActionAgent.agentScratchpadInputKey: 'baz',
+        },
+        outputValues: {'bar': 'foo'},
+      );
+      final expectedResult = [
+        ChatMessage.human('bar'),
+        ChatMessage.ai('foo'),
+      ];
+      final result1 = await memory.loadMemoryVariables();
+      expect(result1, {BaseMemory.defaultMemoryKey: expectedResult});
+    });
+
+    test('Test multiple input values with inputKey specified', () async {
+      final memory = ConversationBufferMemory(
+        returnMessages: true,
+        inputKey: 'foo2',
+      );
+      await memory.saveContext(
+        inputValues: {
+          'foo1': 'bar1',
+          'foo2': 'bar2',
+          BaseActionAgent.agentScratchpadInputKey: 'baz',
+        },
+        outputValues: {'bar': 'foo'},
+      );
+      final expectedResult = [
+        ChatMessage.human('bar2'),
+        ChatMessage.ai('foo'),
+      ];
+      final result1 = await memory.loadMemoryVariables();
+      expect(result1, {BaseMemory.defaultMemoryKey: expectedResult});
+    });
+
+    test(
+        'Test error is thrown if inputKey not specified when using with '
+        'multiple input values', () async {
+      final memory = ConversationBufferMemory(returnMessages: true);
+
+      // expect throws exception if no input keys are selected
+      expect(
+        () async => memory.saveContext(
+          inputValues: {
+            'foo1': 'bar1',
+            'foo2': 'bar2',
+          },
+          outputValues: {'bar': 'foo'},
+        ),
+        throwsException,
+      );
    });
  });
}
diff --git a/packages/langchain/test/memory/buffer_window_test.dart b/packages/langchain/test/memory/buffer_window_test.dart
index 28f86ae2..6831b5af 100644
--- a/packages/langchain/test/memory/buffer_window_test.dart
+++ b/packages/langchain/test/memory/buffer_window_test.dart
@@ -15,13 +15,13 @@ void main() {
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': expectedString});
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedString});
    });
 
    test('Test buffer memory return messages', () async {
      final memory = ConversationBufferWindowMemory(k: 1, returnMessages: true);
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': []});
+      expect(result1, {BaseMemory.defaultMemoryKey: []});
 
      await memory.saveContext(
        inputValues: {'foo': 'bar'},
@@ -32,7 +32,7 @@ void main() {
        ChatMessage.ai('foo'),
      ];
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': expectedResult});
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedResult});
 
      await memory.saveContext(
        inputValues: {'foo': 'bar1'},
@@ -44,7 +44,7 @@ void main() {
        ChatMessage.ai('foo1'),
      ];
      final result3 = await memory.loadMemoryVariables();
-      expect(result3, {'history': expectedResult2});
+      expect(result3, {BaseMemory.defaultMemoryKey: expectedResult2});
    });
 
    test('Test buffer memory with pre-loaded history', () async {
@@ -57,7 +57,7 @@ void main() {
        chatHistory: ChatMessageHistory(messages: pastMessages),
      );
      final result = await memory.loadMemoryVariables();
-      expect(result, {'history': pastMessages});
+      expect(result, {BaseMemory.defaultMemoryKey: pastMessages});
    });
 
    test('Test clear memory', () async {
@@ -68,11 +68,11 @@ void main() {
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': expectedString});
+      expect(result1, {BaseMemory.defaultMemoryKey: expectedString});
 
      memory.clear();
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': ''});
+      expect(result2, {BaseMemory.defaultMemoryKey: ''});
    });
  });
}
diff --git a/packages/langchain/test/memory/stores/message/in_memory_test.dart b/packages/langchain/test/memory/stores/message/in_memory_test.dart
index 793438f6..1ad39f47 100644
--- a/packages/langchain/test/memory/stores/message/in_memory_test.dart
+++ b/packages/langchain/test/memory/stores/message/in_memory_test.dart
@@ -13,7 +13,7 @@ void main() {
 
    test('Test addUserMessage', () async {
      final history = ChatMessageHistory()
-        ..addUserChatMessage('This is a human msg');
+        ..addHumanChatMessage('This is a human msg');
      final messages = await history.getChatMessages();
      expect(messages.first, isA<HumanChatMessage>());
      expect(messages.first.content, 'This is a human msg');
diff --git a/packages/langchain/test/memory/token_buffer_test.dart b/packages/langchain/test/memory/token_buffer_test.dart
index 0a1728fb..5e54091e 100644
--- a/packages/langchain/test/memory/token_buffer_test.dart
+++ b/packages/langchain/test/memory/token_buffer_test.dart
@@ -18,7 +18,7 @@ void main() {
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': expectedString});
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedString});
    });
 
    test('Test buffer memory return messages', () async {
@@ -29,7 +29,7 @@ void main() {
        maxTokenLimit: 4,
      );
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': []});
+      expect(result1, {BaseMemory.defaultMemoryKey: []});
 
      await memory.saveContext(
        inputValues: {'foo': 'bar'},
@@ -40,7 +40,7 @@ void main() {
        ChatMessage.ai('foo'),
      ];
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': expectedResult});
+      expect(result2, {BaseMemory.defaultMemoryKey: expectedResult});
 
      await memory.saveContext(
        inputValues: {'foo': 'bar1'},
@@ -53,7 +53,7 @@ void main() {
        ChatMessage.ai('foo1'),
      ];
      final result3 = await memory.loadMemoryVariables();
-      expect(result3, {'history': expectedResult2});
+      expect(result3, {BaseMemory.defaultMemoryKey: expectedResult2});
    });
 
    test('Test buffer memory with pre-loaded history', () async {
@@ -69,7 +69,7 @@ void main() {
        chatHistory: ChatMessageHistory(messages: pastMessages),
      );
      final result = await memory.loadMemoryVariables();
-      expect(result, {'history': pastMessages});
+      expect(result, {BaseMemory.defaultMemoryKey: pastMessages});
    });
 
    test('Test clear memory', () async {
@@ -80,11 +80,11 @@ void main() {
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result1 = await memory.loadMemoryVariables();
-      expect(result1, {'history': expectedString});
+      expect(result1, {BaseMemory.defaultMemoryKey: expectedString});
 
      memory.clear();
      final result2 = await memory.loadMemoryVariables();
-      expect(result2, {'history': ''});
+      expect(result2, {BaseMemory.defaultMemoryKey: ''});
    });
  });
}
diff --git a/packages/langchain_openai/lib/src/agents/functions.dart b/packages/langchain_openai/lib/src/agents/functions.dart
index 62758bcf..6276007d 100644
--- a/packages/langchain_openai/lib/src/agents/functions.dart
+++ b/packages/langchain_openai/lib/src/agents/functions.dart
@@ -12,70 +12,95 @@ const _systemChatMessagePromptTemplate = SystemChatMessagePromptTemplate(
 
 /// {@template openai_functions_agent}
 /// An Agent driven by OpenAI's Functions-powered API.
+///
+/// Example:
+/// ```dart
+/// final llm = ChatOpenAI(
+///   apiKey: openaiApiKey,
+///   model: 'gpt-3.5-turbo-0613',
+///   temperature: 0,
+/// );
+/// final tools = [CalculatorTool()];
+/// final agent = OpenAIFunctionsAgent.fromLLMAndTools(llm: llm, tools: tools);
+/// final executor = AgentExecutor(agent: agent, tools: tools);
+/// final res = await executor.run('What is 40 raised to the 0.43 power?');
+/// ```
 /// {@endtemplate}
 class OpenAIFunctionsAgent extends BaseSingleActionAgent {
   /// {@macro openai_functions_agent}
   OpenAIFunctionsAgent({
-    required this.llm,
+    required this.llmChain,
     required this.tools,
-    required this.prompt,
-  }) : assert(
-          prompt.inputVariables.contains(_agentScratchpadInputKey),
-          '`$_agentScratchpadInputKey` should be one of the variables in the prompt, '
-          'got ${prompt.inputVariables}',
+  })  : assert(
+          llmChain.memory != null ||
+              llmChain.prompt.inputVariables
+                  .contains(BaseActionAgent.agentScratchpadInputKey),
+          '`${BaseActionAgent.agentScratchpadInputKey}` should be one of the '
+          'variables in the prompt, got ${llmChain.prompt.inputVariables}',
+        ),
+        assert(
+          llmChain.memory == null || llmChain.memory!.returnMessages,
+          'The memory must have `returnMessages` set to true',
        );
 
-  /// A model that supports using functions.
-  final BaseChatOpenAI llm;
+  /// Chain to use to call the LLM.
+  ///
+  /// If the chain does not have a memory, the prompt MUST include a variable
+  /// called [BaseActionAgent.agentScratchpadInputKey] where the agent can put
+  /// its intermediate work.
+  ///
+  /// If the chain has a memory, the agent will use the memory to store its
+  /// intermediate work.
+  ///
+  /// The memory must have [BaseChatMemory.returnMessages] set to true for
+  /// the agent to work properly.
+  final LLMChain llmChain;
 
   /// The tools this agent has access to.
   final List<BaseTool> tools;
 
-  /// The prompt for this agent, should support `agent_scratchpad` as one of
-  /// the variables.
-  final BasePromptTemplate prompt;
-
   /// The key for the input to the agent.
   static const agentInputKey = 'input';
 
-  /// The key for the scratchpad (intermediate steps) of the agent.
-  static const _agentScratchpadInputKey = 'agent_scratchpad';
-
   @override
   Set<String> get inputKeys => {agentInputKey};
 
-  List<ChatFunction> get functions {
-    return tools.map((final t) {
-      return ChatFunction(
-        name: t.name,
-        description: t.description,
-        parameters: t.inputJsonSchema,
-      );
-    }).toList(growable: false);
-  }
+  List<ChatFunction> get functions => llmChain.llmOptions?.functions ?? [];
 
   /// Construct an [OpenAIFunctionsAgent] from an [llm] and [tools].
   ///
   /// - [llm] - The model to use for the agent.
   /// - [tools] - The tools the agent has access to.
+  /// - [memory] - The memory to use for the agent.
   /// - [systemChatMessage] message to use as the system message that will be
-  ///   the first in the prompt. Default "You are a helpful AI assistant".
+  ///   the first in the prompt. Default: "You are a helpful AI assistant".
   /// - [extraPromptMessages] prompt messages that will be placed between the
-  ///   system message and the new human input.
+  ///   system message and the input from the agent.
   factory OpenAIFunctionsAgent.fromLLMAndTools({
     required final BaseChatOpenAI llm,
     required final List<BaseTool> tools,
+    final BaseChatMemory? memory,
     final SystemChatMessagePromptTemplate systemChatMessage =
        _systemChatMessagePromptTemplate,
     final List<BaseChatMessagePromptTemplate>? extraPromptMessages,
   }) {
     return OpenAIFunctionsAgent(
-      llm: llm,
-      tools: tools,
-      prompt: createPrompt(
-        systemChatMessage: systemChatMessage,
-        extraPromptMessages: extraPromptMessages,
+      llmChain: LLMChain(
+        llm: llm,
+        llmOptions: ChatOpenAIOptions(
+          functions: tools
+              .map((final t) => t.toChatFunction())
+              .toList(growable: false),
+        ),
+        prompt: createPrompt(
+          systemChatMessage: systemChatMessage,
+          extraPromptMessages: extraPromptMessages,
+          memory: memory,
+        ),
+        memory: memory,
       ),
+      tools: tools,
     );
   }
 
@@ -89,19 +114,45 @@ class OpenAIFunctionsAgent extends BaseSingleActionAgent {
     final List<AgentStep> intermediateSteps,
     final InputValues inputs,
   ) async {
-    final agentScratchpad = _constructScratchPad(intermediateSteps);
-    final fullInputs = {
+    final llmChainInputs = _constructLlmChainInputs(intermediateSteps, inputs);
+    final output = await llmChain.call(llmChainInputs);
+    final predictedMessage = output[LLMChain.defaultOutputKey] as ChatMessage;
+    return [_parseOutput(predictedMessage)];
+  }
+
+  Map<String, dynamic> _constructLlmChainInputs(
+    final List<AgentStep> intermediateSteps,
+    final InputValues inputs,
+  ) {
+    final ChatMessage agentInput;
+
+    // If there is a memory, we pass the last agent step as a function message.
+    // Otherwise, we pass the input as a human message.
+    if (llmChain.memory != null && intermediateSteps.isNotEmpty) {
+      final lastStep = intermediateSteps.last;
+      final functionMsg = ChatMessage.function(
+        name: lastStep.action.tool,
+        content: lastStep.observation,
+      );
+      agentInput = functionMsg;
+    } else {
+      agentInput = switch (inputs[agentInputKey]) {
+        final String inputStr => ChatMessage.human(inputStr),
+        final ChatMessage inputMsg => inputMsg,
+        _ => throw LangChainException(
+            message: 'Agent expected a String or ChatMessage as input,'
+                ' got ${inputs[agentInputKey]}',
+          ),
+      };
+    }
+
+    return {
       ...inputs,
-      _agentScratchpadInputKey: agentScratchpad,
+      agentInputKey: [agentInput],
+      if (llmChain.memory == null)
+        BaseActionAgent.agentScratchpadInputKey:
+            _constructScratchPad(intermediateSteps),
     };
-
-    final prompt = this.prompt.formatPrompt(fullInputs);
-    final messages = prompt.toChatMessages();
-    final predictedMessages = await llm.predictMessages(
-      messages,
-      options: ChatOpenAIOptions(functions: functions),
-    );
-    return [_parseOutput(predictedMessages)];
   }
 
   List<ChatMessage> _constructScratchPad(
@@ -153,16 +204,23 @@ class OpenAIFunctionsAgent extends BaseSingleActionAgent {
   /// the first in the prompt.
   /// - [extraPromptMessages] prompt messages that will be placed between the
   ///   system message and the new human input.
+  /// - [memory] optional memory to use for the agent.
   static BasePromptTemplate createPrompt({
     final SystemChatMessagePromptTemplate systemChatMessage =
        _systemChatMessagePromptTemplate,
     final List<BaseChatMessagePromptTemplate>? extraPromptMessages,
+    final BaseChatMemory? memory,
   }) {
     return ChatPromptTemplate.fromPromptMessages([
       systemChatMessage,
       ...?extraPromptMessages,
-      HumanChatMessagePromptTemplate.fromTemplate('{$agentInputKey}'),
-      const MessagesPlaceholder(variableName: _agentScratchpadInputKey),
+      if (memory == null)
+        const MessagesPlaceholder(
+          variableName: BaseActionAgent.agentScratchpadInputKey,
+        ),
+      for (final memoryKey in memory?.memoryKeys ?? {})
+        MessagesPlaceholder(variableName: memoryKey),
+      const MessagesPlaceholder(variableName: agentInputKey),
     ]);
   }
 }
diff --git a/packages/langchain_openai/test/agents/functions_test.dart b/packages/langchain_openai/test/agents/functions_test.dart
index 24a9d12e..4d8c645d 100644
--- a/packages/langchain_openai/test/agents/functions_test.dart
+++ b/packages/langchain_openai/test/agents/functions_test.dart
@@ -63,25 +63,16 @@ void main() {
      );
      final tools = [tool];
 
+      final memory = ConversationBufferMemory(returnMessages: returnMessages);
      final agent = OpenAIFunctionsAgent.fromLLMAndTools(
        llm: llm,
        tools: tools,
-        extraPromptMessages: [
-          if (returnMessages)
-            const MessagesPlaceholder(variableName: BaseMemory.defaultMemoryKey)
-          else
-            HumanChatMessagePromptTemplate.fromTemplate(
-              'Previous conversation history:\n{${BaseMemory.defaultMemoryKey}}',
-            ),
-        ],
+        memory: memory,
      );
 
-      final memory = ConversationBufferMemory(returnMessages: returnMessages);
-
      final executor = AgentExecutor(
        agent: agent,
        tools: tools,
-        memory: memory,
      );
 
      final res1 = await executor.run(
@@ -105,12 +96,15 @@ void main() {
      expect(res3, contains('Result 3'));
    }
 
-    test('Test OpenAIFunctionsAgent with string memory', () async {
-      await testMemory(returnMessages: false);
-    });
-
    test('Test OpenAIFunctionsAgent with messages memory', () async {
      await testMemory(returnMessages: true);
    });
+
+    test('Test OpenAIFunctionsAgent with string memory throws error', () async {
+      expect(
+        () async => testMemory(returnMessages: false),
+        throwsA(isA<AssertionError>()),
+      );
+    });
  });
}
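Taken together, these changes move memory from the `AgentExecutor` onto the agent's own `LLMChain`. A minimal end-to-end sketch of the new API, pieced together from the doc example and tests in this diff (the API key is a placeholder; `CalculatorTool` and the constructor arguments are the ones shown above):

```dart
import 'package:langchain/langchain.dart';
import 'package:langchain_openai/langchain_openai.dart';

Future<void> main() async {
  final llm = ChatOpenAI(
    apiKey: 'sk-...', // placeholder: substitute your own OpenAI API key
    model: 'gpt-3.5-turbo-0613',
    temperature: 0,
  );
  final tools = [CalculatorTool()];

  // The memory must be created with `returnMessages: true`; the agent's
  // constructor asserts this so intermediate steps can be stored as
  // function messages in the chat history.
  final memory = ConversationBufferMemory(returnMessages: true);

  // Memory is attached to the agent, not to the executor, so the agent's
  // intermediate inputs and outputs are saved too.
  final agent = OpenAIFunctionsAgent.fromLLMAndTools(
    llm: llm,
    tools: tools,
    memory: memory,
  );
  final executor = AgentExecutor(agent: agent, tools: tools);

  final res = await executor.run('What is 40 raised to the 0.43 power?');
  print(res);
}
```

With the memory on the agent's `LLMChain`, each tool observation is fed back to the model as a function message and saved alongside the conversation, which is what the updated `functions_test.dart` exercises.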