diff --git a/notebook/agentchat_websockets.ipynb b/notebook/agentchat_websockets.ipynb index bdf98d76d24..7e6e449675c 100644 --- a/notebook/agentchat_websockets.ipynb +++ b/notebook/agentchat_websockets.ipynb @@ -52,15 +52,7 @@ "execution_count": 1, "id": "dca301a4", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "gpt-4\n" - ] - } - ], + "outputs": [], "source": [ "from datetime import datetime\n", "from tempfile import TemporaryDirectory\n", @@ -68,17 +60,14 @@ "from websockets.sync.client import connect as ws_connect\n", "\n", "import autogen\n", - "from autogen.cache import Cache\n", - "from autogen.io.websockets import IOStream, IOWebsockets\n", + "from autogen.io.websockets import IOWebsockets\n", "\n", "config_list = autogen.config_list_from_json(\n", - " \"OAI_CONFIG_LIST\",\n", + " env_or_file=\"OAI_CONFIG_LIST\",\n", " filter_dict={\n", " \"model\": [\"gpt-4\", \"gpt-3.5-turbo\", \"gpt-3.5-turbo-16k\"],\n", " },\n", - ")\n", - "\n", - "print(config_list[0][\"model\"])" + ")" ] }, { @@ -120,20 +109,25 @@ "\n", " print(\" - on_connect(): Receiving message from client.\", flush=True)\n", "\n", + " # 1. Receive Initial Message\n", " initial_msg = iostream.input()\n", "\n", - " llm_config = {\n", - " \"config_list\": config_list,\n", - " \"stream\": True,\n", - " }\n", - "\n", + " # 2. Instantiate ConversableAgent\n", " agent = autogen.ConversableAgent(\n", " name=\"chatbot\",\n", - " system_message=\"Complete a task given to you and reply TERMINATE when the task is done. If asked about the weather, use tool weather_forecast(city) to get the weather forecast for a city.\",\n", - " llm_config=llm_config,\n", + " system_message=\"Complete a task given to you and reply TERMINATE when the task is done. 
If asked about the weather, use tool 'weather_forecast(city)' to get the weather forecast for a city.\",\n", + " llm_config={\n", + " \"config_list\": autogen.config_list_from_json(\n", + " env_or_file=\"OAI_CONFIG_LIST\",\n", + " filter_dict={\n", + " \"model\": [\"gpt-4\", \"gpt-3.5-turbo\", \"gpt-3.5-turbo-16k\"],\n", + " },\n", + " ),\n", + " \"stream\": True,\n", + " },\n", " )\n", "\n", - " # create a UserProxyAgent instance named \"user_proxy\"\n", + " # 3. Define UserProxyAgent\n", " user_proxy = autogen.UserProxyAgent(\n", " name=\"user_proxy\",\n", " system_message=\"A proxy for the user.\",\n", @@ -143,12 +137,15 @@ " code_execution_config=False,\n", " )\n", "\n", - " @user_proxy.register_for_execution()\n", - " @agent.register_for_llm(description=\"Weather forecats for a city\")\n", + " # 4. Define Agent-specific Functions\n", " def weather_forecast(city: str) -> str:\n", " return f\"The weather forecast for {city} at {datetime.now()} is sunny.\"\n", "\n", - " # we will use a temporary directory as the cache path root to ensure fresh completion each time\n", + " autogen.register_function(\n", + " weather_forecast, caller=agent, executor=user_proxy, description=\"Weather forecast for a city\"\n", + " )\n", + "\n", + " # 5. Initiate conversation\n", " print(\n", " f\" - on_connect(): Initiating chat with agent {agent} using message '{initial_msg}'\",\n", " flush=True,\n", @@ -166,15 +163,15 @@ "source": [ "Here's an explanation on how a typical `on_connect` function such as the one in the example above is defined:\n", "\n", - "1. **Receiving Initial Message**: Immediately after establishing a connection, receive an initial message from the client. This step is crucial for understanding the client's request or initiating the conversation flow.\n", - "\n", - "2. **Receiving Initial Message**: Immediately after establishing a connection, receive an initial message from the client. 
This step is crucial for understanding the client's request or initiating the conversation flow.\n",
+    "1. **Receive Initial Message**: Immediately after establishing a connection, receive an initial message from the client. This step is crucial for understanding the client's request or initiating the conversation flow.\n",
     "\n",
-    "3. **Configure the LLM**: Define the configuration for your large language model (LLM), specifying the list of configurations and the streaming capability. This configuration will be used to tailor the behavior of your conversational agent.\n",
+    "2. **Instantiate ConversableAgent**: Create an instance of ConversableAgent with a specific system message and the LLM configuration. If you need more than one agent, make sure they don't share the same `llm_config` as \n",
+    "adding a function to one of them will also attempt to add it to another.\n",
     "\n",
-    "4. **Instantiate ConversableAgent and UserProxyAgent**: Create an instance of ConversableAgent with a specific system message and the LLM configuration. Similarly, create a UserProxyAgent instance, defining its termination condition, human input mode, and other relevant parameters.\n",
+    "3. **Instantiate UserProxyAgent**: Similarly, create a UserProxyAgent instance, defining its termination condition, human input mode, and other relevant parameters. There is no need to define `llm_config` as the UserProxyAgent\n",
+    "does not use an LLM.\n",
     "\n",
-    "5. **Define Agent-specific Functions**: If your conversable agent requires executing specific tasks, such as fetching a weather forecast in the example below, define these functions within the on_connect scope. Decorate these functions accordingly to link them with your agents.\n",
+    "4. **Define Agent-specific Functions**: If your conversable agent requires executing specific tasks, such as fetching a weather forecast in the example above, define these functions within the on_connect scope. 
Decorate these functions accordingly to link them with your agents.\n", "\n", "5. **Initiate Conversation**: Finally, use the `initiate_chat` method of your `UserProxyAgent` to start the interaction with the conversable agent, passing the initial message and a cache mechanism for efficiency." ] @@ -210,11 +207,11 @@ "output_type": "stream", "text": [ " - test_setup() with websocket server running on ws://127.0.0.1:8765.\n", - " - on_connect(): Connected to client using IOWebsockets \n", + " - on_connect(): Connected to client using IOWebsockets \n", " - on_connect(): Receiving message from client.\n", " - Connected to server on ws://127.0.0.1:8765\n", " - Sending message to server.\n", - " - on_connect(): Initiating chat with agent using message 'Check out the weather in Paris and write a poem about it.'\n", + " - on_connect(): Initiating chat with agent using message 'Check out the weather in Paris and write a poem about it.'\n", "\u001b[33muser_proxy\u001b[0m (to chatbot):\n", "\n", "Check out the weather in Paris and write a poem about it.\n", @@ -225,7 +222,7 @@ "\u001b[33mchatbot\u001b[0m (to user_proxy):\n", "\n", "\n", - "\u001b[32m***** Suggested tool call (call_pKFE5KQZOQTe4gC8mQRv6bZX): weather_forecast *****\u001b[0m\n", + "\u001b[32m***** Suggested tool call (call_xFFWe52vwdpgZ8xTRV6adBdy): weather_forecast *****\u001b[0m\n", "Arguments: \n", "{\n", " \"city\": \"Paris\"\n", @@ -239,36 +236,46 @@ "\n", "\u001b[33muser_proxy\u001b[0m (to chatbot):\n", "\n", - "\u001b[32m***** Response from calling tool (call_pKFE5KQZOQTe4gC8mQRv6bZX) *****\u001b[0m\n", - "The weather forecast for Paris at 2024-03-31 20:17:22.413225 is sunny.\n", + "\u001b[32m***** Response from calling tool (call_xFFWe52vwdpgZ8xTRV6adBdy) *****\u001b[0m\n", + "The weather forecast for Paris at 2024-04-05 12:00:06.206125 is sunny.\n", "\u001b[32m**********************************************************************\u001b[0m\n", "\n", 
"--------------------------------------------------------------------------------\n", "\u001b[31m\n", ">>>>>>>> USING AUTO REPLY...\u001b[0m\n", - "\u001b[32m\u001b[32mIn Paris, beneath the golden sun, so bright,\n", - "Where cobblestones glisten with pure delight,\n", - "The weather is sunny, a beautiful sight,\n", - "Oh Paris, you're dazzling in the sunlight.\n", + "\u001b[32m\u001b[32mIn the heart of France, beneath the sun's warm glow,\n", + "Lies the city of Paris, where the Seine waters flow.\n", + "Bathed in sunlight, every street and spire,\n", + "Illuminated each detail, just like a docile fire.\n", + "\n", + "Once monochromatic cityscape, kissed by the sun's bright light,\n", + "Now a kaleidoscope of colors, from morning till the night.\n", + "This sun-swept city sparkles, under the azure dome,\n", + "Her inhabitants find comfort, for they call this city home.\n", "\n", - "The bistros beckon with fragrant delight,\n", - "In gardens, flowers bloom with all their might.\n", - "Paris, your charm shines so bright,\n", - "Under the blanket of the sunny daylight.\n", + "One can wander in her sunshine, on this perfect weather day,\n", + "And feel the warmth it brings, to chase your blues away.\n", + "For the weather in Paris, is more than just a forecast,\n", + "It is a stage setting for dwellers and tourists amassed.\n", "\n", "TERMINATE\u001b[0m\n", "\n", "\u001b[33mchatbot\u001b[0m (to user_proxy):\n", "\n", - "In Paris, beneath the golden sun, so bright,\n", - "Where cobblestones glisten with pure delight,\n", - "The weather is sunny, a beautiful sight,\n", - "Oh Paris, you're dazzling in the sunlight.\n", + "In the heart of France, beneath the sun's warm glow,\n", + "Lies the city of Paris, where the Seine waters flow.\n", + "Bathed in sunlight, every street and spire,\n", + "Illuminated each detail, just like a docile fire.\n", "\n", - "The bistros beckon with fragrant delight,\n", - "In gardens, flowers bloom with all their might.\n", - "Paris, your charm 
shines so bright,\n", - "Under the blanket of the sunny daylight.\n", + "Once monochromatic cityscape, kissed by the sun's bright light,\n", + "Now a kaleidoscope of colors, from morning till the night.\n", + "This sun-swept city sparkles, under the azure dome,\n", + "Her inhabitants find comfort, for they call this city home.\n", + "\n", + "One can wander in her sunshine, on this perfect weather day,\n", + "And feel the warmth it brings, to chase your blues away.\n", + "For the weather in Paris, is more than just a forecast,\n", + "It is a stage setting for dwellers and tourists amassed.\n", "\n", "TERMINATE\n", "\n", @@ -397,7 +404,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO: Started server process [264152]\n", + "INFO: Started server process [5227]\n", "INFO: Waiting for application startup.\n" ] }, @@ -420,10 +427,11 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO: 127.0.0.1:46378 - \"GET / HTTP/1.1\" 200 OK\n", - " - on_connect(): Connected to client using IOWebsockets \n", + "INFO: 127.0.0.1:42548 - \"GET / HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:42548 - \"GET /favicon.ico HTTP/1.1\" 404 Not Found\n", + " - on_connect(): Connected to client using IOWebsockets \n", " - on_connect(): Receiving message from client.\n", - " - on_connect(): Initiating chat with agent using message 'Check out the weather in Paris and write a poem about it.'\n" + " - on_connect(): Initiating chat with agent using message 'write a poem about lundon'\n" ] }, { @@ -433,7 +441,7 @@ "INFO: Shutting down\n", "INFO: Waiting for application shutdown.\n", "INFO: Application shutdown complete.\n", - "INFO: Finished server process [264152]\n" + "INFO: Finished server process [5227]\n" ] } ], @@ -491,7 +499,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "id": "708a98de", "metadata": {}, "outputs": [ @@ -500,23 +508,40 @@ "output_type": "stream", "text": [ "Websocket server started at ws://127.0.0.1:8080.\n", - "HTTP server 
started at http://localhost:8000\n" + "HTTP server started at http://localhost:8000\n", + " - on_connect(): Connected to client using IOWebsockets \n", + " - on_connect(): Receiving message from client.\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "127.0.0.1 - - [31/Mar/2024 20:20:07] \"GET / HTTP/1.1\" 200 -\n" + "127.0.0.1 - - [05/Apr/2024 12:01:51] \"GET / HTTP/1.1\" 200 -\n", + "127.0.0.1 - - [05/Apr/2024 12:01:51] \"GET / HTTP/1.1\" 200 -\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - " - on_connect(): Connected to client using IOWebsockets \n", - " - on_connect(): Receiving message from client.\n", - " - on_connect(): Initiating chat with agent using message 'Check out the weather in Paris and write a poem about it.'\n", + " - on_connect(): Initiating chat with agent using message 'write a poem about new york'\n", + " - on_connect(): Connected to client using IOWebsockets \n", + " - on_connect(): Receiving message from client.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "127.0.0.1 - - [05/Apr/2024 12:02:27] \"GET / HTTP/1.1\" 304 -\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " - on_connect(): Initiating chat with agent using message 'check the weather in london and write a poem about it'\n", " - HTTP server stopped.\n" ] } @@ -621,7 +646,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.10.14" } }, "nbformat": 4,