Skip to content

[Bug]: Cannot execute async function when using the nested_chats pattern #3083

Closed
@lfygh

Description

@lfygh

Describe the bug

import asyncio
import os
from typing import Annotated

import dotenv

import autogen
from autogen import ConversableAgent

# Load OPENAI_API_KEY (and any other settings) from a local .env file.
dotenv.load_dotenv()

# LLM configuration for the adder agent: a single gpt-3.5-turbo endpoint.
_adder_llm_config = {
    "config_list": [
        {"model": "gpt-3.5-turbo", "api_key": os.environ["OPENAI_API_KEY"]},
    ],
}

adder_agent = ConversableAgent(
    name="Adder_Agent",
    system_message="You add 1 to each number I give you and return me the new numbers, one number each line.",
    llm_config=_adder_llm_config,
    human_input_mode="NEVER",
)


async def multiplier(number: Annotated[int, "number"]):
    """Tool coroutine: print a trace marker, then return *number* doubled."""
    print("multiplier===============")
    doubled = number * 2
    return doubled

# Agent that proposes calls to the `multiplier` tool (the tool itself is
# executed by the agent registered as `executor` elsewhere in this script).
_multiplier_llm_config = {
    "config_list": [
        {"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]},
    ],
}

multiplier_agent = ConversableAgent(
    name="Multiplier_Agent",
    system_message="use function to calculate",
    llm_config=_multiplier_llm_config,
    human_input_mode="NEVER",
)

# Agent standing in for the user; it starts the top-level (async) chat.
_user_proxy_llm_config = {
    "config_list": [
        {"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]},
    ],
}

user_proxy = ConversableAgent(
    name="User_Proxy",
    system_message="You are a helpful assistant",
    llm_config=_user_proxy_llm_config,
    human_input_mode="NEVER",
)

# Assistant that answers the user and owns the nested chat with the
# multiplier agent.
_assistant_llm_config = {
    "config_list": [
        {"model": "gpt-3.5-turbo", "api_key": os.environ["OPENAI_API_KEY"]},
    ],
}

assistant_agent = ConversableAgent(
    name="Assistant_Agent",
    system_message="You are a helpful assistant",
    llm_config=_assistant_llm_config,
    human_input_mode="NEVER",
)

# When a message arrives from `user_proxy`, run a nested chat whose
# recipient is the multiplier agent.  (Per the traceback, this nested chat
# is driven synchronously via initiate_chats even under a_initiate_chat.)
assistant_agent.register_nested_chats(
    chat_queue=[{"recipient": multiplier_agent}],
    trigger=lambda sender: sender in (user_proxy,),
)

# Expose the async `multiplier` coroutine as an LLM tool: Multiplier_Agent
# proposes the call, Assistant_Agent executes it.  The tool being async is
# what surfaces the reported bug — the traceback shows the nested chat's
# synchronous path calling loop.run_until_complete(a_execute_function)
# while the outer asyncio loop is already running.
autogen.register_function(
    multiplier,
    caller=multiplier_agent,
    executor=assistant_agent,
    name="multiplier",
    description="multiply number by 2",
)


async def main():
    """Async entry point: start the chat that reproduces the bug."""
    result = await user_proxy.a_initiate_chat(
        recipient=assistant_agent,
        message="my number is 5",
    )
    print(result)


if __name__ == '__main__':
    asyncio.run(main())





### Screenshots and logs

File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete
    return future.result()
  File "/Users/lfyg/IdeaProjects/autogen-main/samples/netest_error.py", line 70, in main
    r = await user_proxy.a_initiate_chat(
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1085, in a_initiate_chat
    await self.a_send(msg2send, recipient, silent=silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 706, in a_send
    await recipient.a_receive(message, self, request_reply, silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 856, in a_receive
    reply = await self.a_generate_reply(sender=sender)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 2047, in a_generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 439, in wrapped_reply_func
    return reply_func_from_nested_chats(chat_queue, recipient, messages, sender, config)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 403, in _summary_from_nested_chats
    res = initiate_chats(chat_to_run)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/chat.py", line 202, in initiate_chats
    chat_res = sender.initiate_chat(**chat_info)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1019, in initiate_chat
    self.send(msg2send, recipient, silent=silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 656, in send
    recipient.receive(message, self, request_reply, silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 821, in receive
    self.send(reply, sender, silent=silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 656, in send
    recipient.receive(message, self, request_reply, silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 819, in receive
    reply = self.generate_reply(messages=self.chat_messages[sender], sender=sender)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1973, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1625, in generate_tool_calls_reply
    _, func_return = loop.run_until_complete(self.a_execute_function(function_call))
  File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/base_events.py", line 625, in run_until_complete
    self._check_running()
  File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/base_events.py", line 584, in _check_running
    raise RuntimeError('This event loop is already running')
RuntimeError: This event loop is already running
sys:1: RuntimeWarning: coroutine 'ConversableAgent.a_execute_function' was never awaited

Additional Information

No response

Metadata

Metadata

Assignees

No one assigned

    Labels

    0.2 (Issues which are related to the pre-0.4 codebase), needs-triage

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions