diff --git a/examples/tutorials/00_sync/000_hello_acp/dev.ipynb b/examples/tutorials/00_sync/000_hello_acp/dev.ipynb index 610d65b2..6cadb5a8 100644 --- a/examples/tutorials/00_sync/000_hello_acp/dev.ipynb +++ b/examples/tutorials/00_sync/000_hello_acp/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,8 +24,8 @@ }, { "cell_type": "code", - "execution_count": 4, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -50,22 +50,13 @@ }, { "cell_type": "code", - "execution_count": 5, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello! I've received your message. Here's a generic response, but in future tutorials we'll see how you can get me to intelligently respond to your message. This is what I heard you say: Hello what can you do?\n" - ] - } - ], + "outputs": [], "source": [ "# Test non streaming response\n", - "from typing import List, cast\n", - "from agentex.types import TaskMessage, TextContent\n", + "from agentex.types import TextContent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -75,52 +66,35 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_message(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": False\n", " }\n", ")\n", "\n", - "# # Extract and print just the text content from the response\n", - "# # The response is expected to be a dict with a \"result\" key containing a list of message dicts\n", - "if rpc_response and rpc_response.result:\n", + "if not rpc_response or not rpc_response.result:\n", + " raise ValueError(\"No result in response\")\n", "\n", - " # We know that the result of the message/send when stream is set to False will be a list of TaskMessage objects\n", - " task_message_list = cast(List[TaskMessage], rpc_response.result)\n", - " for task_message in rpc_response.result:\n", - " if isinstance(task_message, TaskMessage):\n", - " content = task_message.content\n", - " if isinstance(content, TextContent):\n", - " text = content.content\n", - " print(text)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in response.\")\n" + "# Extract and print just the text content from the response\n", + "for task_message in rpc_response.result:\n", + " content = task_message.content\n", + " if isinstance(content, TextContent):\n", + " text = content.content\n", + " print(text)\n" ] }, { "cell_type": "code", - "execution_count": 6, - "id": "79688331", + "execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello! I've received your message. 
Here's a generic response, but in future tutorials we'll see how you can get me to intelligently respond to your message. This is what I heard you say: Hello what can you do?\n" - ] - } - ], + "outputs": [], "source": [ "# Test streaming response\n", - "import json\n", - "from agentex.types import AgentRpcResponse\n", - "from agentex.types.agent_rpc_result import StreamTaskMessageDelta, StreamTaskMessageFull\n", + "from agentex.types.task_message_update import StreamTaskMessageDelta, StreamTaskMessageFull\n", "from agentex.types.text_delta import TextDelta\n", - "from agentex.types.task_message_update import TaskMessageUpdate\n", "\n", "\n", "# The result object of message/send will be a TaskMessageUpdate which is a union of the following types:\n", @@ -136,41 +110,29 @@ "# Whenn processing StreamTaskMessageDelta, if you are expecting more than TextDeltas, such as DataDelta, ToolRequestDelta, or ToolResponseDelta, you can process them as well\n", "# Whenn processing StreamTaskMessageFull, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "with client.agents.with_streaming_response.rpc_by_name(\n", + "for agent_rpc_response_chunk in client.agents.send_message_stream(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": True\n", " }\n", - ") as response:\n", - " for agent_rpc_response_str in response.iter_text():\n", - " chunk_rpc_response = AgentRpcResponse.model_validate(json.loads(agent_rpc_response_str))\n", - " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", - " task_message_update = cast(TaskMessageUpdate, chunk_rpc_response.result)\n", - "\n", - " # Print oly the text deltas as they arrive or any full messages\n", - " if isinstance(task_message_update, StreamTaskMessageDelta):\n", - " delta = task_message_update.delta\n", - " if isinstance(delta, TextDelta):\n", - " print(delta.text_delta, end=\"\", flush=True)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", - " elif isinstance(task_message_update, StreamTaskMessageFull):\n", - " content = task_message_update.content\n", - " if isinstance(content, TextContent):\n", - " print(content.content)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in full message.\")\n" + "):\n", + " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", + " task_message_update = agent_rpc_response_chunk.result\n", + " # Print oly the text deltas as they arrive or any full messages\n", + " if isinstance(task_message_update, StreamTaskMessageDelta):\n", + " delta = task_message_update.delta\n", + " if isinstance(delta, TextDelta):\n", + " print(delta.text_delta, end=\"\", flush=True)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", + " elif isinstance(task_message_update, StreamTaskMessageFull):\n", + " content = task_message_update.content\n", + " if isinstance(content, TextContent):\n", + " print(content.content)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in full message.\")\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "568673bf", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git 
a/examples/tutorials/00_sync/010_multiturn/dev.ipynb b/examples/tutorials/00_sync/010_multiturn/dev.ipynb index 46ab9fdd..c9f70cf3 100644 --- a/examples/tutorials/00_sync/010_multiturn/dev.ipynb +++ b/examples/tutorials/00_sync/010_multiturn/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,8 +24,8 @@ }, { "cell_type": "code", - "execution_count": 4, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -50,30 +50,13 @@ }, { "cell_type": "code", - "execution_count": 5, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello! I can assist you with a variety of tasks, including:\n", - "\n", - "1. Answering questions on a wide range of topics, including science, history, technology, and more.\n", - "2. Providing explanations or summaries of complex concepts.\n", - "3. Offering writing assistance, such as proofreading, editing, or generating ideas for essays and articles.\n", - "4. Helping with problem-solving in areas like math, coding, or logic puzzles.\n", - "5. Engaging in conversation to provide companionship or entertainment.\n", - "\n", - "If there's something specific you need help with, feel free to ask!\n" - ] - } - ], + "outputs": [], "source": [ "# Test non streaming response\n", - "from typing import List, cast\n", - "from agentex.types import TaskMessage, TextContent\n", + "from agentex.types import TextContent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -83,61 +66,35 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_message(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": False\n", " }\n", ")\n", "\n", - "# # Extract and print just the text content from the response\n", - "# # The response is expected to be a dict with a \"result\" key containing a list of message dicts\n", - "if rpc_response and rpc_response.result:\n", + "if not rpc_response or not rpc_response.result:\n", + " raise ValueError(\"No result in response\")\n", "\n", - " # We know that the result of the message/send when stream is set to False will be a list of TaskMessage objects\n", - " task_message_list = cast(List[TaskMessage], rpc_response.result)\n", - " for task_message in rpc_response.result:\n", - " if isinstance(task_message, TaskMessage):\n", - " content = task_message.content\n", - " if isinstance(content, TextContent):\n", - " text = content.content\n", - " print(text)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in response.\")\n" + "# Extract and print just the text content from the response\n", + "for task_message in rpc_response.result:\n", + " content = task_message.content\n", + " if 
isinstance(content, TextContent):\n", + " text = content.content\n", + " print(text)\n" ] }, { "cell_type": "code", - "execution_count": 6, - "id": "79688331", + "execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hello! I can assist you with a wide range of tasks, including:\n", - "\n", - "1. **Answering questions**: I can provide information on a variety of topics, including history, science, technology, and more.\n", - "2. **Learning and education**: I can help explain concepts, provide summaries, and assist with studying.\n", - "3. **Writing assistance**: I can help you draft, edit, or brainstorm ideas for essays, reports, and creative writing.\n", - "4. **Programming help**: I can assist with coding questions, debugging, and providing explanations of programming concepts.\n", - "5. **Language translation**: I can translate text between several languages.\n", - "6. **General advice**: I can offer suggestions on topics like time management, study techniques, and more.\n", - "\n", - "Feel free to ask me anything specific you need help with!\n" - ] - } - ], + "outputs": [], "source": [ "# Test streaming response\n", - "import json\n", - "from agentex.types import AgentRpcResponse\n", - "from agentex.types.agent_rpc_result import StreamTaskMessageDelta, StreamTaskMessageFull\n", + "from agentex.types.task_message_update import StreamTaskMessageDelta, StreamTaskMessageFull\n", "from agentex.types.text_delta import TextDelta\n", - "from agentex.types.task_message_update import TaskMessageUpdate\n", "\n", "\n", "# The result object of message/send will be a TaskMessageUpdate which is a union of the following types:\n", @@ -153,38 +110,34 @@ "# Whenn processing StreamTaskMessageDelta, if you are expecting more than TextDeltas, such as DataDelta, ToolRequestDelta, or ToolResponseDelta, you can process them as well\n", "# Whenn processing StreamTaskMessageFull, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "with client.agents.with_streaming_response.rpc_by_name(\n", + "for agent_rpc_response_chunk in client.agents.send_message_stream(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": True\n", " }\n", - ") as response:\n", - " for agent_rpc_response_str in response.iter_text():\n", - " chunk_rpc_response = AgentRpcResponse.model_validate(json.loads(agent_rpc_response_str))\n", - " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", - " task_message_update = cast(TaskMessageUpdate, chunk_rpc_response.result)\n", - "\n", - " # Print oly the text deltas as they arrive or any full messages\n", - " if isinstance(task_message_update, StreamTaskMessageDelta):\n", - " delta = task_message_update.delta\n", - " if isinstance(delta, TextDelta):\n", - " print(delta.text_delta, end=\"\", flush=True)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", - " elif isinstance(task_message_update, StreamTaskMessageFull):\n", - " content = task_message_update.content\n", - " if isinstance(content, TextContent):\n", - " print(content.content)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in full message.\")\n" + "):\n", + " # We know that the result of the 
message/send when stream is set to True will be a TaskMessageUpdate\n", + " task_message_update = agent_rpc_response_chunk.result\n", + " # Print oly the text deltas as they arrive or any full messages\n", + " if isinstance(task_message_update, StreamTaskMessageDelta):\n", + " delta = task_message_update.delta\n", + " if isinstance(delta, TextDelta):\n", + " print(delta.text_delta, end=\"\", flush=True)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", + " elif isinstance(task_message_update, StreamTaskMessageFull):\n", + " content = task_message_update.content\n", + " if isinstance(content, TextContent):\n", + " print(content.content)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in full message.\")\n" ] }, { "cell_type": "code", "execution_count": null, - "id": "42689ee4", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/00_sync/020_streaming/dev.ipynb b/examples/tutorials/00_sync/020_streaming/dev.ipynb index 63ee2d87..4cd5fb8d 100644 --- a/examples/tutorials/00_sync/020_streaming/dev.ipynb +++ b/examples/tutorials/00_sync/020_streaming/dev.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "36834357", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -15,7 +15,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d1c309d6", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -25,7 +25,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9f6e6ef0", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -51,13 +51,12 @@ { "cell_type": "code", "execution_count": null, - "id": "b03b0d37", + "id": "3", "metadata": {}, "outputs": [], "source": [ "# Test non streaming response\n", - "from typing import List, cast\n", - "from agentex.types import TaskMessage, TextContent\n", + "from agentex.types import TextContent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -67,44 +66,35 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_message(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": False\n", " }\n", ")\n", "\n", - "# # Extract and print just the text content from the response\n", - "# # The response is expected to be a dict with a \"result\" key containing a list of message dicts\n", - "if rpc_response and rpc_response.result:\n", + "if not rpc_response or not rpc_response.result:\n", + " raise ValueError(\"No result in response\")\n", "\n", - " # We know that the result of the message/send when stream is set to False will be a list of TaskMessage objects\n", - " task_message_list = cast(List[TaskMessage], rpc_response.result)\n", - " for task_message in rpc_response.result:\n", - " if isinstance(task_message, TaskMessage):\n", - " content = task_message.content\n", - " if isinstance(content, TextContent):\n", - " text = content.content\n", - " print(text)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in response.\")\n" + "# Extract and print just the text content from the 
response\n", + "for task_message in rpc_response.result:\n", + " content = task_message.content\n", + " if isinstance(content, TextContent):\n", + " text = content.content\n", + " print(text)\n" ] }, { "cell_type": "code", "execution_count": null, - "id": "79688331", + "id": "4", "metadata": {}, "outputs": [], "source": [ "# Test streaming response\n", - "import json\n", - "from agentex.types import AgentRpcResponse\n", - "from agentex.types.agent_rpc_result import StreamTaskMessageDelta, StreamTaskMessageFull\n", + "from agentex.types.task_message_update import StreamTaskMessageDelta, StreamTaskMessageFull\n", "from agentex.types.text_delta import TextDelta\n", - "from agentex.types.task_message_update import TaskMessageUpdate\n", "\n", "\n", "# The result object of message/send will be a TaskMessageUpdate which is a union of the following types:\n", @@ -120,32 +110,28 @@ "# Whenn processing StreamTaskMessageDelta, if you are expecting more than TextDeltas, such as DataDelta, ToolRequestDelta, or ToolResponseDelta, you can process them as well\n", "# Whenn processing StreamTaskMessageFull, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "with client.agents.with_streaming_response.rpc_by_name(\n", + "for agent_rpc_response_chunk in client.agents.send_message_stream(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": True\n", " }\n", - ") as response:\n", - " for agent_rpc_response_str in response.iter_text():\n", - " chunk_rpc_response = AgentRpcResponse.model_validate(json.loads(agent_rpc_response_str))\n", - " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", - " task_message_update = cast(TaskMessageUpdate, chunk_rpc_response.result)\n", - "\n", - " # Print oly the text deltas as they arrive or any full messages\n", - " if isinstance(task_message_update, StreamTaskMessageDelta):\n", - " delta = task_message_update.delta\n", - " if isinstance(delta, TextDelta):\n", - " print(delta.text_delta, end=\"\", flush=True)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", - " elif isinstance(task_message_update, StreamTaskMessageFull):\n", - " content = task_message_update.content\n", - " if isinstance(content, TextContent):\n", - " print(content.content)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in full message.\")\n" + "):\n", + " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", + " task_message_update = agent_rpc_response_chunk.result\n", + " # Print oly the text deltas as they arrive or any full messages\n", + " if isinstance(task_message_update, StreamTaskMessageDelta):\n", + " delta = task_message_update.delta\n", + " if isinstance(delta, TextDelta):\n", + " print(delta.text_delta, end=\"\", flush=True)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", + " elif isinstance(task_message_update, StreamTaskMessageFull):\n", + " content = task_message_update.content\n", + " if isinstance(content, TextContent):\n", + " print(content.content)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in full message.\")\n" ] } ], diff --git 
a/examples/tutorials/10_agentic/00_base/000_hello_acp/dev.ipynb b/examples/tutorials/10_agentic/00_base/000_hello_acp/dev.ipynb index b2d726c9..153a8040 100644 --- a/examples/tutorials/10_agentic/00_base/000_hello_acp/dev.ipynb +++ b/examples/tutorials/10_agentic/00_base/000_hello_acp/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,59 +24,34 @@ }, { "cell_type": "code", - "execution_count": 4, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='4a6ff681-8875-4a60-9e21-7308193dc327', created_at=datetime.datetime(2025, 7, 27, 1, 16, 2, 616, tzinfo=TzInfo(UTC)), name='110f7a13-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 1, 16, 2, 616, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='b235c5c6-b2f8-40b8-be60-a873c88f4d21', agent_id='93fa9758-abca-421e-a3ed-9f08a9881662', sequence_id=209, task_id='4a6ff681-8875-4a60-9e21-7308193dc327', content=TextContent(author='user', content='Hello what can you do?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 1, 16, 2, 491008, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,72 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", - "execution_count": 6, - "id": "a6927cc0", + 
"execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 01:16:02] ─────────────────────────╮\n", - "│ Hello what can you do? │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 01:16:02] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello what can you do? \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 01:16:02] ─────────────────────────╮\n", - "│ Hello! I've received your message. I can't respond right now, but in future │\n", - "│ tutorials we'll see how you can get me to intelligently respond to your │\n", - "│ message. │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 01:16:02] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Hello! I've received your message. I can't respond right now, but in future \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m tutorials we'll see how you can get me to intelligently respond to your \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m message. \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 5 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -167,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4864e354", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/dev.ipynb b/examples/tutorials/10_agentic/00_base/010_multiturn/dev.ipynb index 1ef9b85f..1694e293 100644 --- a/examples/tutorials/10_agentic/00_base/010_multiturn/dev.ipynb +++ b/examples/tutorials/10_agentic/00_base/010_multiturn/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 6, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 7, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,59 +24,34 @@ }, { "cell_type": "code", - "execution_count": 8, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='e5b17323-4b7b-4043-874a-33a64aec8ab1', created_at=datetime.datetime(2025, 7, 27, 1, 16, 34, 635866, tzinfo=TzInfo(UTC)), name='36c97177-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 1, 16, 34, 635866, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 9, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='a1835392-e2d7-43a2-a476-aeca90084653', agent_id='079830a3-8402-4b82-a4f6-bac497f87a10', sequence_id=211, task_id='e5b17323-4b7b-4043-874a-33a64aec8ab1', content=TextContent(author='user', content='Hello what can you do?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 1, 16, 35, 496828, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,100 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", - "execution_count": 10, - "id": "a6927cc0", + "execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 01:16:35] ─────────────────────────╮\n", - "│ Hello what can you do? │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 01:16:35] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello what can you do? \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 01:16:38] ─────────────────────────╮\n", - "│ Hello! I can assist you with a variety of tasks, including: │\n", - "│ │\n", - "│ 1 Answering Questions: I can provide information on a wide range of topics, │\n", - "│ including science, history, technology, and more. │\n", - "│ 2 Providing Explanations: I can explain complex concepts in simpler terms. │\n", - "│ 3 Writing Assistance: I can help with writing essays, articles, or creative │\n", - "│ writing, including brainstorming ideas and editing. │\n", - "│ 4 Language Help: I can assist with language learning, grammar questions, │\n", - "│ and translations. │\n", - "│ 5 Recommendations: I can offer suggestions for books, movies, or other │\n", - "│ resources based on your interests. │\n", - "│ 6 Problem Solving: I can help you work through problems, whether they’re │\n", - "│ academic or practical in nature. │\n", - "│ 7 General Advice: I can offer tips and guidance on various topics, such as │\n", - "│ study techniques or productivity. │\n", - "│ │\n", - "│ Feel free to ask me anything specific that you need help with! │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 01:16:38] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Hello! I can assist you with a variety of tasks, including: \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 1 \u001b[0m\u001b[1mAnswering Questions\u001b[0m: I can provide information on a wide range of topics, \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mincluding science, history, technology, and more. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 2 \u001b[0m\u001b[1mProviding Explanations\u001b[0m: I can explain complex concepts in simpler terms. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 3 \u001b[0m\u001b[1mWriting Assistance\u001b[0m: I can help with writing essays, articles, or creative \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mwriting, including brainstorming ideas and editing. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 4 \u001b[0m\u001b[1mLanguage Help\u001b[0m: I can assist with language learning, grammar questions, \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mand translations. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 5 \u001b[0m\u001b[1mRecommendations\u001b[0m: I can offer suggestions for books, movies, or other \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mresources based on your interests. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 6 \u001b[0m\u001b[1mProblem Solving\u001b[0m: I can help you work through problems, whether they’re \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0macademic or practical in nature. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 7 \u001b[0m\u001b[1mGeneral Advice\u001b[0m: I can offer tips and guidance on various topics, such as \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mstudy techniques or productivity. 
\u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m Feel free to ask me anything specific that you need help with! \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 5 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -195,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f8e6c0f4", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/dev.ipynb b/examples/tutorials/10_agentic/00_base/020_streaming/dev.ipynb index 530e2a27..f66be24d 100644 --- a/examples/tutorials/10_agentic/00_base/020_streaming/dev.ipynb +++ b/examples/tutorials/10_agentic/00_base/020_streaming/dev.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "36834357", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -15,7 +15,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d1c309d6", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -25,42 +25,33 @@ { "cell_type": "code", "execution_count": null, - "id": "9f6e6ef0", + "id": "2", "metadata": {}, "outputs": [], "source": [ "# (REQUIRED) Create a new task. For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", "execution_count": null, - "id": "b03b0d37", + "id": "3", "metadata": {}, "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -70,26 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", "execution_count": null, - "id": "a6927cc0", + "id": "4", "metadata": {}, "outputs": [], "source": [ + "# Subscribe to the async task messages 
produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -101,6 +92,14 @@ " timeout=5,\n", ")" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/dev.ipynb b/examples/tutorials/10_agentic/00_base/030_tracing/dev.ipynb index 56312e50..f667737b 100644 --- a/examples/tutorials/10_agentic/00_base/030_tracing/dev.ipynb +++ b/examples/tutorials/10_agentic/00_base/030_tracing/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 2, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,59 +24,34 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='e52ad928-8d6f-431f-a865-c632dfd925bd', created_at=datetime.datetime(2025, 7, 27, 1, 16, 54, 390130, tzinfo=TzInfo(UTC)), name='4c21818b-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 1, 16, 54, 390130, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 4, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='8777dbbf-5a48-40ee-9d95-5476223ac759', agent_id='d1771757-da6c-43a3-b6bd-63679456790e', sequence_id=212, task_id='e52ad928-8d6f-431f-a865-c632dfd925bd', content=TextContent(author='user', content='Hello what can you do?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 1, 16, 54, 847712, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,111 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " 
method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "a6927cc0", + "execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 01:16:54] ─────────────────────────╮\n", - "│ Hello what can you do? │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 01:16:54] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello what can you do? \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " \r" - ] - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 01:16:54] ─────────────────────────╮\n", - "│ Hello! I can assist you with a variety of tasks, including: │\n", - "│ │\n", - "│ 1 Answering Questions: I can provide information on a wide range of topics, │\n", - "│ including history, science, technology, literature, and general │\n", - "│ knowledge. │\n", - "│ 2 Providing Explanations: I can explain concepts, summarize information, or │\n", - "│ help clarify difficult topics. │\n", - "│ 3 Writing Assistance: I can help you draft emails, essays, articles, or │\n", - "│ creative writing pieces. I can also assist with proofreading and editing. │\n", - "│ 4 Learning and Study Aid: I can help with study tips, summarizing │\n", - "│ educational materials, and answering homework questions. │\n", - "│ 5 Recommendations: I can suggest books, movies, recipes, or activities │\n", - "│ based on your interests. │\n", - "│ 6 Technical Help: I can provide assistance with basic troubleshooting for │\n", - "│ software or digital tools. │\n", - "│ 7 Conversation: If you just want to chat or discuss a specific topic, I'm │\n", - "│ here for that too! │\n", - "│ │\n", - "│ Let me know how I can assist you today! │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 01:16:54] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Hello! I can assist you with a variety of tasks, including: \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 1 \u001b[0m\u001b[1mAnswering Questions\u001b[0m: I can provide information on a wide range of topics, \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mincluding history, science, technology, literature, and general \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mknowledge. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 2 \u001b[0m\u001b[1mProviding Explanations\u001b[0m: I can explain concepts, summarize information, or \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mhelp clarify difficult topics. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 3 \u001b[0m\u001b[1mWriting Assistance\u001b[0m: I can help you draft emails, essays, articles, or \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mcreative writing pieces. I can also assist with proofreading and editing. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 4 \u001b[0m\u001b[1mLearning and Study Aid\u001b[0m: I can help with study tips, summarizing \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0meducational materials, and answering homework questions. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 5 \u001b[0m\u001b[1mRecommendations\u001b[0m: I can suggest books, movies, recipes, or activities \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mbased on your interests. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 6 \u001b[0m\u001b[1mTechnical Help\u001b[0m: I can provide assistance with basic troubleshooting for \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0msoftware or digital tools. 
\u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m 7 \u001b[0m\u001b[1mConversation\u001b[0m: If you just want to chat or discuss a specific topic, I'm \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mhere for that too! \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m Let me know how I can assist you today! \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 5 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -206,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ad2405b9", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/dev.ipynb b/examples/tutorials/10_agentic/00_base/040_other_sdks/dev.ipynb index 05dc443b..abb1b9e7 100644 --- a/examples/tutorials/10_agentic/00_base/040_other_sdks/dev.ipynb +++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 2, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,59 +24,34 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='75c695e6-ba24-4a45-9843-2f6c70a16d79', created_at=datetime.datetime(2025, 7, 27, 1, 25, 7, 352235, tzinfo=TzInfo(UTC)), name='c1b4a675-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 1, 25, 7, 352235, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 4, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='de2e5231-53c7-488c-9ecb-9f2605dea9f6', agent_id='cd4b9256-144b-4aef-949b-f9000995013b', sequence_id=217, task_id='75c695e6-ba24-4a45-9843-2f6c70a16d79', content=TextContent(author='user', content='Hello tell me the latest news about AI and AI startups', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 1, 25, 7, 716885, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,371 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello tell me the latest news about AI and AI startups\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "a6927cc0", + "execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 01:25:07] ─────────────────────────╮\n", - "│ Hello tell me the latest news about AI and AI startups │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 01:25:07] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello tell me the latest news about AI and AI startups \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " \r" - ] - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 01:25:20] ─────────────────────────╮\n", - "│ 🔧 Tool Request: web_search │\n", - "│ │\n", - "│ Arguments: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"input\": \"latest news AI AI startups\", │\n", - "│ \"model\": \"gpt-4o\", │\n", - "│ \"type\": \"web_search_preview\", │\n", - "│ \"search_context_size\": \"high\" │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[33m╭─\u001b[0m\u001b[33m───────────────────────\u001b[0m\u001b[33m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[33m [07/27/2025 01:25:20] \u001b[0m\u001b[33m────────────────────────\u001b[0m\u001b[33m─╮\u001b[0m\n", - "\u001b[33m│\u001b[0m 🔧 \u001b[1mTool Request: web_search\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[1mArguments:\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"input\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"latest news AI AI startups\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"model\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"gpt-4o\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"web_search_preview\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"search_context_size\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"high\"\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - 
"text": [ - " \r" - ] - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 01:25:20] ─────────────────────────╮\n", - "│ ✅ Tool Response: web_search │\n", - "│ │\n", - "│ Response: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"type\": \"text\", │\n", - "│ \"text\": \"The artificial intelligence (AI) startup landscape has │\n", - "│ experienced significant developments recently, marked by substantial fundi │\n", - "│ rounds, strategic partnerships, and notable acquisitions.\\n\\n**Major Fundi │\n", - "│ Rounds:**\\n\\n- **Perplexity AI**: This AI search startup secured $100 │\n", - "│ million in its latest funding round, elevating its valuation to $18 billio │\n", - "│ The company's rapid growth underscores robust investor interest in the │\n", - "│ competitive AI search market. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Thinking Machines**: Founded by former OpenAI CTO Mira Murati, Thinking │\n", - "│ Machines raised $2 billion, achieving a valuation of $10 billion. The │\n", - "│ startup focuses on developing multimodal AI capable of processing various │\n", - "│ forms of data. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Anthropic**: Reports indicate that Anthropic is in discussions with │\n", - "│ investors for funding that could value the company at $100 billion, │\n", - "│ following a previous valuation of $61.5 billion after a $3.5 billion │\n", - "│ fundraise earlier this year. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n**Stra │\n", - "│ gic Investments and Partnerships:**\\n\\n- **Nvidia's Role**: Nvidia has │\n", - "│ solidified its position as a key player in the AI sector, participating in │\n", - "│ numerous funding rounds and supporting the growth of several AI startups. │\n", - "│ Notably, Nvidia invested in Reka AI, contributing to its unicorn status, a │\n", - "│ has been involved in funding rounds for companies like OpenAI, xAI, and │\n", - "│ Scale AI. │\n", - "│ ([elpais.com](https://elpais.com/economia/2025-07-26/nvidia-el-mayor-gigan │\n", - "│ -bursatil-de-la-historia-que-crea-unicornios-con-la-magia-de-la-ia.html?ut │\n", - "│ source=openai))\\n\\n- **Amazon and Anthropic**: Amazon increased its │\n", - "│ investment in AI startup Anthropic by an additional $4 billion, bringing i │\n", - "│ total investment to $8 billion. This partnership designates Amazon Web │\n", - "│ Services (AWS) as Anthropic's primary training partner, highlighting │\n", - "│ Amazon's commitment to advancing AI technologies. │\n", - "│ ([apnews.com](https://apnews.com/article/7a5764907e8cf0c23117be9c710e9f6a? │\n", - "│ m_source=openai))\\n\\n**Market Trends:**\\n\\n- **Surge in AI Startup │\n", - "│ Funding**: In the first half of 2025, U.S. startup funding surged by 75.6% │\n", - "│ reaching $162.8 billion, driven largely by the AI boom. AI-related │\n", - "│ investments constituted 64.1% of the total deal value during this period. 
│\n", - "│ ([reuters.com](https://www.reuters.com/business/us-ai-startups-see-funding │\n", - "│ urge-while-more-vc-funds-struggle-raise-data-shows-2025-07-15/?utm_source= │\n", - "│ enai))\\n\\n- **Global Investment Landscape**: AI startups accounted for 53% │\n", - "│ of all global venture capital investments in the first half of 2025, │\n", - "│ reflecting the significant momentum and investor confidence in the AI │\n", - "│ sector. │\n", - "│ ([axios.com](https://www.axios.com/2025/07/03/ai-startups-vc-investments?u │\n", - "│ _source=openai))\\n\\n**Notable Acquisitions:**\\n\\n- **OpenAI and Jony Ive's │\n", - "│ Startup**: OpenAI is set to acquire Jony Ive's AI devices startup, io, in │\n", - "│ $6.4 billion all-equity deal. This acquisition marks OpenAI's largest to │\n", - "│ date and signifies its expansion into AI hardware. │\n", - "│ ([economictimes.indiatimes.com](https://economictimes.indiatimes.com/topic │\n", - "│ i-startups?utm_source=openai))\\n\\nThese developments illustrate the dynami │\n", - "│ and rapidly evolving nature of the AI startup ecosystem, characterized by │\n", - "│ significant financial investments, strategic collaborations, and a strong │\n", - "│ focus on innovation.\\n\\n\\n## Recent Developments in AI Startups:\\n- [Nvidi │\n", - "│ el mayor gigante burs\\u00e1til de la historia que crea unicornios con la │\n", - "│ magia de la │\n", - "│ IA](https://elpais.com/economia/2025-07-26/nvidia-el-mayor-gigante-bursati │\n", - "│ de-la-historia-que-crea-unicornios-con-la-magia-de-la-ia.html?utm_source=o │\n", - "│ nai)\\n- [US AI startups see funding surge while more VC funds struggle to │\n", - "│ raise, data │\n", - "│ shows](https://www.reuters.com/business/us-ai-startups-see-funding-surge-w │\n", - "│ le-more-vc-funds-struggle-raise-data-shows-2025-07-15/?utm_source=openai)\\ │\n", - "│ [Amazon to invest an additional $4 billion in AI startup │\n", - "│ Anthropic](https://apnews.com/article/7a5764907e8cf0c23117be9c710e9f6a?utm │\n", - "│ ource=openai) \", │\n", - "│ \"annotations\": null, │\n", - "│ \"meta\": null │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[92m╭─\u001b[0m\u001b[92m───────────────────────\u001b[0m\u001b[92m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[92m [07/27/2025 01:25:20] \u001b[0m\u001b[92m────────────────────────\u001b[0m\u001b[92m─╮\u001b[0m\n", - "\u001b[92m│\u001b[0m ✅ \u001b[1mTool Response: web_search\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[1mResponse:\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"text\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m 
\u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"text\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"The artificial intelligence (AI) startup landscape has \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mexperienced significant developments recently, marked by substantial fundi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mrounds, strategic partnerships, and notable acquisitions.\\n\\n**Major Fundi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mRounds:**\\n\\n- **Perplexity AI**: This AI search startup secured $100 \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mmillion in its latest funding round, elevating its valuation to $18 billio\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mThe company's rapid growth underscores robust investor interest in the \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mcompetitive AI search market. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mweek-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m**Thinking Machines**: Founded by former OpenAI CTO Mira Murati, Thinking \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mMachines raised $2 billion, achieving a valuation of $10 billion. The \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mstartup focuses on developing multimodal AI capable of processing various \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mforms of data. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mweek-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m**Anthropic**: Reports indicate that Anthropic is in discussions with \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34minvestors for funding that could value the company at $100 billion, \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mfollowing a previous valuation of $61.5 billion after a $3.5 billion \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mfundraise earlier this year. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mweek-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n**Stra\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mgic Investments and Partnerships:**\\n\\n- **Nvidia's Role**: Nvidia has \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msolidified its position as a key player in the AI sector, participating in\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mnumerous funding rounds and supporting the growth of several AI startups. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mNotably, Nvidia invested in Reka AI, contributing to its unicorn status, a\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mhas been involved in funding rounds for companies like OpenAI, xAI, and \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mScale AI. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([elpais.com](https://elpais.com/economia/2025-07-26/nvidia-el-mayor-gigan\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m-bursatil-de-la-historia-que-crea-unicornios-con-la-magia-de-la-ia.html?ut\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msource=openai))\\n\\n- **Amazon and Anthropic**: Amazon increased its \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34minvestment in AI startup Anthropic by an additional $4 billion, bringing i\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mtotal investment to $8 billion. This partnership designates Amazon Web \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mServices (AWS) as Anthropic's primary training partner, highlighting \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mAmazon's commitment to advancing AI technologies. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([apnews.com](https://apnews.com/article/7a5764907e8cf0c23117be9c710e9f6a?\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mm_source=openai))\\n\\n**Market Trends:**\\n\\n- **Surge in AI Startup \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mFunding**: In the first half of 2025, U.S. startup funding surged by 75.6%\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mreaching $162.8 billion, driven largely by the AI boom. AI-related \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34minvestments constituted 64.1% of the total deal value during this period. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([reuters.com](https://www.reuters.com/business/us-ai-startups-see-funding\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34murge-while-more-vc-funds-struggle-raise-data-shows-2025-07-15/?utm_source=\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34menai))\\n\\n- **Global Investment Landscape**: AI startups accounted for 53%\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mof all global venture capital investments in the first half of 2025, \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mreflecting the significant momentum and investor confidence in the AI \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msector. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([axios.com](https://www.axios.com/2025/07/03/ai-startups-vc-investments?u\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m_source=openai))\\n\\n**Notable Acquisitions:**\\n\\n- **OpenAI and Jony Ive's\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mStartup**: OpenAI is set to acquire Jony Ive's AI devices startup, io, in \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m$6.4 billion all-equity deal. This acquisition marks OpenAI's largest to \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mdate and signifies its expansion into AI hardware. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([economictimes.indiatimes.com](https://economictimes.indiatimes.com/topic\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mi-startups?utm_source=openai))\\n\\nThese developments illustrate the dynami\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mand rapidly evolving nature of the AI startup ecosystem, characterized by \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msignificant financial investments, strategic collaborations, and a strong \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mfocus on innovation.\\n\\n\\n## Recent Developments in AI Startups:\\n- [Nvidi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mel mayor gigante burs\\u00e1til de la historia que crea unicornios con la \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mmagia de la \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mIA](https://elpais.com/economia/2025-07-26/nvidia-el-mayor-gigante-bursati\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mde-la-historia-que-crea-unicornios-con-la-magia-de-la-ia.html?utm_source=o\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mnai)\\n- [US AI startups see funding surge while more VC funds struggle to \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mraise, data \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mshows](https://www.reuters.com/business/us-ai-startups-see-funding-surge-w\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mle-more-vc-funds-struggle-raise-data-shows-2025-07-15/?utm_source=openai)\\\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m [Amazon to invest an additional $4 billion in AI startup \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mAnthropic](https://apnews.com/article/7a5764907e8cf0c23117be9c710e9f6a?utm\u001b[0m\u001b[48;2;39;40;34m 
\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mource=openai) \"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"annotations\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mnull\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"meta\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mnull\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " \r" - ] - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 01:25:21] ─────────────────────────╮\n", - "│ Here are some of the latest updates in the AI and AI startups ecosystem: │\n", - "│ │\n", - "│ Major Funding Rounds │\n", - "│ │\n", - "│ • Perplexity AI: The AI search startup raised $100 million, boosting its │\n", - "│ valuation to $18 billion, reflecting strong investor interest in AI │\n", - "│ search technologies. │\n", - "│ • Thinking Machines: Founded by former OpenAI CTO Mira Murati, it secured │\n", - "│ $2 billion, valuing the company at $10 billion. The startup is working on │\n", - "│ multimodal AI. │\n", - "│ • Anthropic: Currently in discussions for a funding round that could value │\n", - "│ it at $100 billion, following a $3.5 billion raise earlier this year. │\n", - "│ │\n", - "│ Strategic Investments and Partnerships │\n", - "│ │\n", - "│ • Nvidia: A key AI sector player, Nvidia has participated in various │\n", - "│ funding rounds, including those for OpenAI, xAI, and Scale AI, and │\n", - "│ invested in Reka AI, leading to its unicorn status. │\n", - "│ • Amazon and Anthropic: Amazon has increased its investment in AI startup │\n", - "│ Anthropic by $4 billion, totaling $8 billion. Amazon Web Services (AWS) │\n", - "│ is Anthropic’s primary training partner. │\n", - "│ │\n", - "│ Market Trends │\n", - "│ │\n", - "│ • U.S. Startup Funding: In the first half of 2025, AI-driven funding surged │\n", - "│ by 75.6% to $162.8 billion, with AI-related investments forming a │\n", - "│ significant portion. │\n", - "│ • Global VC Investments: AI startups made up 53% of all global venture │\n", - "│ capital investments in the first half of 2025. │\n", - "│ │\n", - "│ Notable Acquisitions │\n", - "│ │\n", - "│ • OpenAI: Set to acquire Jony Ive's AI devices startup, io, for $6.4 │\n", - "│ billion in an all-equity deal, marking a move into AI hardware. │\n", - "│ │\n", - "│ These updates reveal a vibrant and rapidly evolving AI startup landscape, │\n", - "│ characterized by significant financial investments, strategic │\n", - "│ collaborations, and innovative development. │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 01:25:21] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Here are some of the latest updates in the AI and AI startups ecosystem: \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mMajor Funding Rounds\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mPerplexity AI\u001b[0m: The AI search startup raised $100 million, boosting its \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mvaluation to $18 billion, reflecting strong investor interest in AI \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0msearch technologies. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mThinking Machines\u001b[0m: Founded by former OpenAI CTO Mira Murati, it secured \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0m$2 billion, valuing the company at $10 billion. The startup is working on \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mmultimodal AI. 
\u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mAnthropic\u001b[0m: Currently in discussions for a funding round that could value \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mit at $100 billion, following a $3.5 billion raise earlier this year. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mStrategic Investments and Partnerships\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mNvidia\u001b[0m: A key AI sector player, Nvidia has participated in various \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mfunding rounds, including those for OpenAI, xAI, and Scale AI, and \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0minvested in Reka AI, leading to its unicorn status. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mAmazon and Anthropic\u001b[0m: Amazon has increased its investment in AI startup \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mAnthropic by $4 billion, totaling $8 billion. Amazon Web Services (AWS) \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mis Anthropic’s primary training partner. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mMarket Trends\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mU.S. Startup Funding\u001b[0m: In the first half of 2025, AI-driven funding surged \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mby 75.6% to $162.8 billion, with AI-related investments forming a \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0msignificant portion. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mGlobal VC Investments\u001b[0m: AI startups made up 53% of all global venture \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mcapital investments in the first half of 2025. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mNotable Acquisitions\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mOpenAI\u001b[0m: Set to acquire Jony Ive's AI devices startup, io, for $6.4 \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mbillion in an all-equity deal, marking a move into AI hardware. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m These updates reveal a vibrant and rapidly evolving AI startup landscape, \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m characterized by significant financial investments, strategic \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m collaborations, and innovative development. 
\u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 20 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -466,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "593a0a47", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/dev.ipynb b/examples/tutorials/10_agentic/00_base/080_batch_events/dev.ipynb index e2551a5d..19a40f71 100644 --- a/examples/tutorials/10_agentic/00_base/080_batch_events/dev.ipynb +++ b/examples/tutorials/10_agentic/00_base/080_batch_events/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 2, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,62 +24,34 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='83aeec42-3e63-4652-aff5-506252e7cb67', created_at=datetime.datetime(2025, 7, 27, 5, 56, 44, 321516, tzinfo=TzInfo(UTC)), name='776014ce-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 5, 56, 44, 321516, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n",
-    "\n",
-    "from typing import cast\n",
     "import uuid\n",
     "\n",
-    "from agentex.types import Task\n",
-    "\n",
-    "TASK_ID = str(uuid.uuid4())[:8]\n",
-    "\n",
-    "rpc_response = client.agents.rpc_by_name(\n",
+    "rpc_response = client.agents.create_task(\n",
     "    agent_name=AGENT_NAME,\n",
-    "    method=\"task/create\",\n",
     "    params={\n",
-    "        \"name\": f\"{TASK_ID}-task\",\n",
+    "        \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n",
     "        \"params\": {}\n",
     "    }\n",
     ")\n",
     "\n",
-    "task = cast(Task, rpc_response.result)\n",
+    "task = rpc_response.result\n",
     "print(task)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
-   "id": "b03b0d37",
+   "execution_count": null,
+   "id": "3",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Event(id='f83319aa-6f63-4495-be1e-6ca52595e865', agent_id='406f9f42-9f3f-4bb2-869c-d6b36028e487', sequence_id=225, task_id='83aeec42-3e63-4652-aff5-506252e7cb67', content=TextContent(author='user', content='Hello, what can you do?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 0, 8, 557860, tzinfo=TzInfo(UTC)))\n",
-      "Event(id='be4b68b3-991a-4dfe-8be5-31f1cc032ef1', agent_id='406f9f42-9f3f-4bb2-869c-d6b36028e487', sequence_id=226, task_id='83aeec42-3e63-4652-aff5-506252e7cb67', content=TextContent(author='user', content='Can you tell me a joke?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 0, 8, 615897, tzinfo=TzInfo(UTC)))\n",
-      "Event(id='3001a88f-de80-44bf-b2ff-686428da043b', agent_id='406f9f42-9f3f-4bb2-869c-d6b36028e487', sequence_id=227, task_id='83aeec42-3e63-4652-aff5-506252e7cb67', content=TextContent(author='user', content='What is the capital of France?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 0, 8, 665333, tzinfo=TzInfo(UTC)))\n",
-      "Event(id='49ca99da-1c88-41f3-abea-91845cd927d5', agent_id='406f9f42-9f3f-4bb2-869c-d6b36028e487', sequence_id=228, task_id='83aeec42-3e63-4652-aff5-506252e7cb67', content=TextContent(author='user', content='Write a short story about a cat', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 0, 8, 704690, tzinfo=TzInfo(UTC)))\n",
-      "Event(id='da567964-5fa6-419d-a573-66e673f18669', agent_id='406f9f42-9f3f-4bb2-869c-d6b36028e487', sequence_id=229, task_id='83aeec42-3e63-4652-aff5-506252e7cb67', content=TextContent(author='user', content='Tell me how an LLM works', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 0, 8, 748329, tzinfo=TzInfo(UTC)))\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
-    "# Test non streaming response\n",
-    "from typing import cast\n",
+    "# Send an event to the agent\n",
     "from agentex.types import Event\n",
     "from agentex.types.agent_rpc_params import ParamsSendEventRequest\n",
     "\n",
@@ -117,13 +89,15 @@
     "events: list[Event] = []\n",
     "\n",
     "for event_message in concurrent_event_messages:\n",
-    "    rpc_response = client.agents.rpc_by_name(\n",
+    "    rpc_response = client.agents.send_event(\n",
     "        agent_name=AGENT_NAME,\n",
-    "        method=\"event/send\",\n",
-    "        params=event_message\n",
+    "        params={\n",
+    "            \"content\": event_message[\"content\"],\n",
+    "            \"task_id\": task.id,\n",
+    "        }\n",
     "    )\n",
     "\n",
-    "    event = cast(Event, rpc_response.result)\n",
+    "    event = rpc_response.result\n",
     "    events.append(event)\n",
     "\n",
     "for event in events:\n",
@@ -132,58 +106,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
-   "id": "a6927cc0",
+   "execution_count": null,
+   "id": "4",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "
╭──────────────────────── AGENT [07/27/2025 06:00:18] ─────────────────────────╮\n", - "│ Processed event IDs: ['f83319aa-6f63-4495-be1e-6ca52595e865', │\n", - "│ 'be4b68b3-991a-4dfe-8be5-31f1cc032ef1'] │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 06:00:18] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Processed event IDs: ['f83319aa-6f63-4495-be1e-6ca52595e865', \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m 'be4b68b3-991a-4dfe-8be5-31f1cc032ef1'] \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:00:33] ─────────────────────────╮\n", - "│ Processed event IDs: ['3001a88f-de80-44bf-b2ff-686428da043b', │\n", - "│ '49ca99da-1c88-41f3-abea-91845cd927d5', │\n", - "│ 'da567964-5fa6-419d-a573-66e673f18669'] │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 06:00:33] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Processed event IDs: ['3001a88f-de80-44bf-b2ff-686428da043b', \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m '49ca99da-1c88-41f3-abea-91845cd927d5', \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m 'da567964-5fa6-419d-a573-66e673f18669'] \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 20 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", @@ -200,7 +126,7 @@ { "cell_type": "code", "execution_count": null, - "id": "593a0a47", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/dev.ipynb b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/dev.ipynb index a7806201..f8a66a0f 100644 --- a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/dev.ipynb +++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,59 +24,34 @@ }, { "cell_type": "code", - "execution_count": 4, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='908547e1-5796-48c4-8b72-b02366025584', created_at=datetime.datetime(2025, 7, 27, 6, 2, 40, 117494, tzinfo=TzInfo(UTC)), name='4f1591db-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 6, 2, 40, 117494, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='84a84d4c-d3e7-4e58-801c-ac1bafcefcff', agent_id='1f6cd429-4e4a-4884-b449-72a1f2740393', sequence_id=230, task_id='908547e1-5796-48c4-8b72-b02366025584', content=TextContent(author='user', content='Hello what can you do?', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 2, 46, 253653, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,72 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", - "execution_count": 6, - "id": "a6927cc0", + "execution_count": null, + "id": "4", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 06:02:46] ─────────────────────────╮\n", - "│ Hello what can you do? │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 06:02:46] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello what can you do? \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:02:46] ─────────────────────────╮\n", - "│ Hello! I've received your message. I can't respond right now, but in future │\n", - "│ tutorials we'll see how you can get me to intelligently respond to your │\n", - "│ message. │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 06:02:46] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Hello! I've received your message. I can't respond right now, but in future \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m tutorials we'll see how you can get me to intelligently respond to your \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m message. \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 5 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -167,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b60701ae", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/dev.ipynb b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/dev.ipynb index 7721747f..abb1b9e7 100644 --- a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/dev.ipynb +++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,69 +14,44 @@ }, { "cell_type": "code", - "execution_count": 2, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ - "AGENT_NAME = \"at010-agent-chat\"" + "AGENT_NAME = \"ab040-other-sdks\"" ] }, { "cell_type": "code", - "execution_count": 3, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='4f06d705-24e8-4008-ad90-693072386cc5', created_at=datetime.datetime(2025, 7, 27, 6, 4, 37, 584907, tzinfo=TzInfo(UTC)), name='f282773f-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 6, 4, 37, 584907, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 4, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='ce9ba762-9db3-4779-906c-9acec68c1f9e', agent_id='04a102f4-984c-4deb-8402-b9d491074a4a', sequence_id=232, task_id='4f06d705-24e8-4008-ad90-693072386cc5', content=TextContent(author='user', content='Hello tell me the latest news about AI and AI startups', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 4, 38, 128727, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,336 +61,26 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello tell me the latest news about AI and AI startups\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", "execution_count": null, - "id": "a6927cc0", + "id": "4", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 06:04:38] ─────────────────────────╮\n", - "│ Hello tell me the latest news about AI and AI startups │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 06:04:38] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello tell me the latest news about AI and AI startups \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:04:47] ─────────────────────────╮\n", - "│ 🔧 Tool Request: web_search │\n", - "│ │\n", - "│ Arguments: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"input\": \"latest news about AI and AI startups\", │\n", - "│ \"model\": \"gpt-4o-mini\", │\n", - "│ \"type\": \"web_search_preview\" │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[33m╭─\u001b[0m\u001b[33m───────────────────────\u001b[0m\u001b[33m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[33m [07/27/2025 06:04:47] \u001b[0m\u001b[33m────────────────────────\u001b[0m\u001b[33m─╮\u001b[0m\n", - "\u001b[33m│\u001b[0m 🔧 \u001b[1mTool Request: web_search\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[1mArguments:\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"input\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"latest news about AI and AI startups\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"model\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"gpt-4o-mini\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"web_search_preview\"\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:04:47] ─────────────────────────╮\n", - "│ ✅ Tool Response: web_search │\n", - "│ │\n", - "│ Response: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"type\": \"text\", │\n", - "│ \"text\": \"Artificial intelligence (AI) continues to be a dynamic and │\n", - "│ rapidly evolving sector, with significant developments in both global │\n", - "│ initiatives and startup activities.\\n\\n**Global AI Initiatives**\\n\\nChina │\n", - "│ has proposed the establishment of a new international organization aimed a │\n", - "│ promoting global cooperation in AI. This initiative seeks to provide an │\n", - "│ alternative to U.S.-led efforts and foster inclusive development of AI │\n", - "│ technologies. Premier Li Qiang announced the proposal at the World │\n", - "│ Artificial Intelligence Conference in Shanghai, emphasizing equitable acce │\n", - "│ to AI for all nations, particularly those in the Global South. The plan │\n", - "│ includes hosting the organization's headquarters in Shanghai and involves │\n", - "│ collaboration among governments, industry leaders, and academics worldwide │\n", - "│ ([reuters.com](https://www.reuters.com/world/china/china-proposes-new-glob │\n", - "│ -ai-cooperation-organisation-2025-07-26/?utm_source=openai))\\n\\n**AI Start │\n", - "│ Funding and Developments**\\n\\nThe AI startup ecosystem is experiencing │\n", - "│ unprecedented growth, with substantial investments and valuations:\\n\\n- │\n", - "│ **Perplexity**: This AI search startup has achieved a valuation of $18 │\n", - "│ billion following a $100 million funding round, reflecting strong investor │\n", - "│ confidence in the competitive AI search market. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Thinking Machines**: Founded by former OpenAI CTO Mira Murati, Thinking │\n", - "│ Machines has raised $2 billion, valuing the startup at $10 billion. The │\n", - "│ company plans to unveil its first product soon, featuring a significant │\n", - "│ open-source component to support researchers and startups developing custo │\n", - "│ models. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Anthropic**: Reports indicate that Anthropic is being approached by │\n", - "│ investors whose funding offers could value the startup at $100 billion, │\n", - "│ highlighting the growing interest in AI safety and ethics. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Mistral AI**: The French AI startup, specializing in open-weight large │\n", - "│ language models, secured \\u20ac600 million in funding in June 2024, │\n", - "│ elevating its valuation to \\u20ac5.8 billion. This positions Mistral as a │\n", - "│ significant player in the global AI race, particularly outside the San │\n", - "│ Francisco Bay Area. │\n", - "│ ([en.wikipedia.org](https://en.wikipedia.org/wiki/Mistral_AI?utm_source=op │\n", - "│ ai))\\n\\n- **Applied Intuition**: In June 2025, Applied Intuition achieved │\n", - "│ $15 billion valuation after completing a $600 million Series F funding │\n", - "│ round. 
-                        ... deleted rich-console output panels elided: the web_search tool response summarizing AI and AI-startup news (Applied Intuition, AI's share of venture capital, China's AI cooperation proposal, Perplexity, Thinking Machines, Anthropic, Mistral AI) and the agent's final formatted news summary; the duplicate text/html and ANSI text/plain renders of the same panels are omitted ...
(\u001b]8;id=854031;https://www.axios.com/2025/07/03/ai-startups-vc-investments?utm_source=openai\u001b\\\u001b[4;34mSource\u001b[0m\u001b]8;;\u001b\\) \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m These highlights reveal the dynamic growth and transformative impact of AI \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m across sectors globally. \u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -431,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "557ce4b1", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/dev.ipynb b/examples/tutorials/10_agentic/10_temporal/020_state_machine/dev.ipynb index e6c2841c..e0057f48 100644 --- a/examples/tutorials/10_agentic/10_temporal/020_state_machine/dev.ipynb +++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/dev.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, - "id": "36834357", + "execution_count": null, + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,8 +14,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "d1c309d6", + "execution_count": null, + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -24,59 +24,34 @@ }, { "cell_type": "code", - "execution_count": 4, - "id": "9f6e6ef0", + "execution_count": null, + "id": "2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Task(id='09121bce-e4d6-452d-82be-7bb9de75a24e', created_at=datetime.datetime(2025, 7, 27, 6, 3, 27, 74360, tzinfo=TzInfo(UTC)), name='2191ef3a-task', status='RUNNING', status_reason='Task created, forwarding to ACP server', updated_at=datetime.datetime(2025, 7, 27, 6, 3, 27, 74360, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "b03b0d37", + "execution_count": null, + "id": "3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Event(id='b379c00e-b30a-4808-b0fe-4c6ffa6acd5b', agent_id='8e8d7d03-214f-4163-97b4-ce687ce9923f', sequence_id=231, task_id='09121bce-e4d6-452d-82be-7bb9de75a24e', content=TextContent(author='user', content='Hello tell me the latest news about AI and AI startups', attachments=None, format='plain', style='static', type='text'), created_at=datetime.datetime(2025, 7, 27, 6, 3, 27, 572883, tzinfo=TzInfo(UTC)))\n" - ] - } - ], + "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -86,430 +61,67 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello tell me the latest news about AI and AI startups\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", + "print(event)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "# Subscribe to the async task messages produced by the agent\n", + "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", + "\n", + "task_messages = subscribe_to_async_task_messages(\n", + " client=client,\n", + " task=task, \n", + " only_after_timestamp=event.created_at, \n", + " print_messages=True,\n", + " rich_print=True,\n", + " timeout=5,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "# Send a follow up event to the agent in response to the agent's follow up question\n", + "\n", + "rpc_response = client.agents.send_event(\n", + " agent_name=AGENT_NAME,\n", + " params={\n", + " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"I want to know what viral news came up and which startups failed, got acquired, or became very successful or popular in the last 3 months\"},\n", + " \"task_id\": task.id,\n", + " }\n", + ")\n", + "\n", + "event = rpc_response.result\n", "print(event)" ] }, { "cell_type": "code", - "execution_count": 7, - "id": "a6927cc0", + "execution_count": null, + 
"id": "6", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
╭───────────────────────── USER [07/27/2025 06:03:27] ─────────────────────────╮\n", - "│ Hello tell me the latest news about AI and AI startups │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[96m╭─\u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m \u001b[0m\u001b[1;96mUSER\u001b[0m\u001b[96m [07/27/2025 06:03:27] \u001b[0m\u001b[96m────────────────────────\u001b[0m\u001b[96m─╮\u001b[0m\n", - "\u001b[96m│\u001b[0m Hello tell me the latest news about AI and AI startups \u001b[96m│\u001b[0m\n", - "\u001b[96m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:03:31] ─────────────────────────╮\n", - "│ 🔧 Tool Request: web_search │\n", - "│ │\n", - "│ Arguments: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"input\": \"latest news about AI and AI startups\", │\n", - "│ \"user_location\": { │\n", - "│ \"type\": \"approximate\", │\n", - "│ \"city\": \"New York\", │\n", - "│ \"country\": \"USA\", │\n", - "│ \"region\": \"NY\", │\n", - "│ \"timezone\": \"America/New_York\" │\n", - "│ } │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[33m╭─\u001b[0m\u001b[33m───────────────────────\u001b[0m\u001b[33m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[33m [07/27/2025 06:03:31] \u001b[0m\u001b[33m────────────────────────\u001b[0m\u001b[33m─╮\u001b[0m\n", - "\u001b[33m│\u001b[0m 🔧 \u001b[1mTool Request: web_search\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[1mArguments:\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"input\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"latest news about AI and AI startups\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"user_location\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"approximate\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"city\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"New York\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"country\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"USA\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"region\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"NY\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"timezone\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"America/New_York\"\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:03:31] ─────────────────────────╮\n", - "│ ✅ Tool Response: web_search │\n", - "│ │\n", - "│ Response: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"type\": \"text\", │\n", - "│ \"text\": \"Error executing tool web_search: Error code: 400 - {'error': │\n", - "│ {'message': \\\"Invalid input USA: 'country' must be an ISO 3166-1 code │\n", - "│ (https://en.wikipedia.org/wiki/ISO_3166-1).\\\", 'type': │\n", - "│ 'invalid_request_error', 'param': 'tools', 'code': None}}\", │\n", - "│ \"annotations\": null, │\n", - "│ \"meta\": null │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[92m╭─\u001b[0m\u001b[92m───────────────────────\u001b[0m\u001b[92m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[92m [07/27/2025 06:03:31] \u001b[0m\u001b[92m────────────────────────\u001b[0m\u001b[92m─╮\u001b[0m\n", - "\u001b[92m│\u001b[0m ✅ \u001b[1mTool Response: web_search\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[1mResponse:\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"text\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"text\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"Error executing tool web_search: Error code: 400 - {'error': \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m{'message': \\\"Invalid input USA: 'country' must be an ISO 3166-1 code \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m(https://en.wikipedia.org/wiki/ISO_3166-1).\\\", 'type': \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m'invalid_request_error', 'param': 'tools', 'code': None}}\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"annotations\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m 
\u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mnull\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"meta\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mnull\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:03:39] ─────────────────────────╮\n", - "│ 🔧 Tool Request: web_search │\n", - "│ │\n", - "│ Arguments: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"input\": \"latest news about AI and AI startups\", │\n", - "│ \"user_location\": { │\n", - "│ \"type\": \"approximate\", │\n", - "│ \"city\": \"New York\", │\n", - "│ \"country\": \"US\", │\n", - "│ \"region\": \"NY\", │\n", - "│ \"timezone\": \"America/New_York\" │\n", - "│ } │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[33m╭─\u001b[0m\u001b[33m───────────────────────\u001b[0m\u001b[33m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[33m [07/27/2025 06:03:39] \u001b[0m\u001b[33m────────────────────────\u001b[0m\u001b[33m─╮\u001b[0m\n", - "\u001b[33m│\u001b[0m 🔧 \u001b[1mTool Request: web_search\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[1mArguments:\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"input\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"latest news about AI and AI startups\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"user_location\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"approximate\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"city\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"New York\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"country\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"US\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"region\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"NY\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"timezone\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"America/New_York\"\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[33m│\u001b[0m\n", - "\u001b[33m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:03:39] ─────────────────────────╮\n", - "│ ✅ Tool Response: web_search │\n", - "│ │\n", - "│ Response: │\n", - "│ │\n", - "│ │\n", - "│ { │\n", - "│ \"type\": \"text\", │\n", - "│ \"text\": \"Artificial intelligence (AI) continues to be a dynamic sector, │\n", - "│ with numerous startups making significant strides in technology developmen │\n", - "│ and securing substantial funding. Here's an overview of recent │\n", - "│ developments:\\n\\n**Major Funding Rounds:**\\n\\n- **Perplexity AI:** This AI │\n", - "│ search startup has achieved a valuation of $18 billion following a $100 │\n", - "│ million funding round, reflecting strong investor interest in the │\n", - "│ competitive AI search market. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Thinking Machines:** Founded by former OpenAI CTO Mira Murati, this │\n", - "│ startup raised $2 billion, valuing the company at $10 billion. │\n", - "│ ([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi │\n", - "│ week-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- │\n", - "│ **Harvey:** Specializing in AI-driven legal solutions, Harvey secured $300 │\n", - "│ million in a Series D funding round, bringing its valuation to $3 billion. │\n", - "│ ([en.wikipedia.org](https://en.wikipedia.org/wiki/Harvey_%28software%29?ut │\n", - "│ source=openai))\\n\\n**Notable Acquisitions:**\\n\\n- **Amazon's Acquisition o │\n", - "│ Bee:** Amazon announced plans to acquire Bee, a San Francisco-based startu │\n", - "│ developing AI-powered wearable technology. Bee's flagship product is a $50 │\n", - "│ AI-enabled wristband capable of transcribing conversations and generating │\n", - "│ summaries or to-do lists from the recordings. │\n", - "│ ([reuters.com](https://www.reuters.com/business/retail-consumer/amazon-buy │\n", - "│ tartup-focused-ai-wearables-2025-07-22/?utm_source=openai))\\n\\n**Industry │\n", - "│ Trends:**\\n\\n- **AI Dominates Venture Capital Funding:** In the first half │\n", - "│ of 2025, AI startups accounted for 53% of global venture capital │\n", - "│ investments, with 64% in the U.S., indicating a transformative period for │\n", - "│ tech investment. │\n", - "│ ([axios.com](https://www.axios.com/newsletters/axios-pro-rata-d4299627-1e8 │\n", - "│ 44f2-9308-212d8860b6aa?utm_source=openai))\\n\\n- **China's AI Cooperation │\n", - "│ Initiative:** China proposed the creation of a new international │\n", - "│ organization to promote global cooperation on AI, aiming to provide an │\n", - "│ alternative to U.S.-led initiatives and foster inclusive development of th │\n", - "│ technology. │\n", - "│ ([reuters.com](https://www.reuters.com/world/china/china-proposes-new-glob │\n", - "│ -ai-cooperation-organisation-2025-07-26/?utm_source=openai))\\n\\n**Emerging │\n", - "│ Startups:**\\n\\n- **Mistral AI:** Founded in 2023, Mistral AI specializes i │\n", - "│ open-weight large language models and has secured significant funding, │\n", - "│ including a \\u20ac600 million round in June 2024, elevating its valuation │\n", - "│ \\u20ac5.8 billion. 
│\n", - "│ ([en.wikipedia.org](https://en.wikipedia.org/wiki/Mistral_AI?utm_source=op │\n", - "│ ai))\\n\\n- **Neysa:** Established in 2023, Neysa provides a cloud platform │\n", - "│ for AI acceleration and high-performance computing infrastructure services │\n", - "│ raising a total of $50 million across two major funding rounds. │\n", - "│ ([en.wikipedia.org](https://en.wikipedia.org/wiki/Neysa?utm_source=openai) │\n", - "│ n\\n\\n## Recent Developments in AI Startups:\\n- [Amazon to buy startup │\n", - "│ focused on AI │\n", - "│ wearables](https://www.reuters.com/business/retail-consumer/amazon-buy-sta │\n", - "│ up-focused-ai-wearables-2025-07-22/?utm_source=openai), Published on │\n", - "│ Tuesday, July 22\\n- [Axios Pro Rata: AI eats │\n", - "│ VC](https://www.axios.com/newsletters/axios-pro-rata-d4299627-1e82-44f2-93 │\n", - "│ -212d8860b6aa?utm_source=openai), Published on Thursday, July 03\\n- [China │\n", - "│ proposes new global AI cooperation │\n", - "│ organisation](https://www.reuters.com/world/china/china-proposes-new-globa │\n", - "│ ai-cooperation-organisation-2025-07-26/?utm_source=openai), Published on │\n", - "│ Saturday, July 26 \", │\n", - "│ \"annotations\": null, │\n", - "│ \"meta\": null │\n", - "│ } │\n", - "│ │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[92m╭─\u001b[0m\u001b[92m───────────────────────\u001b[0m\u001b[92m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[92m [07/27/2025 06:03:39] \u001b[0m\u001b[92m────────────────────────\u001b[0m\u001b[92m─╮\u001b[0m\n", - "\u001b[92m│\u001b[0m ✅ \u001b[1mTool Response: web_search\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[1mResponse:\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m{\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"type\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"text\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"text\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\"Artificial intelligence (AI) continues to be a dynamic sector, \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mwith numerous startups making significant strides in technology developmen\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mand securing substantial funding. 
Here's an overview of recent \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mdevelopments:\\n\\n**Major Funding Rounds:**\\n\\n- **Perplexity AI:** This AI\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msearch startup has achieved a valuation of $18 billion following a $100 \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mmillion funding round, reflecting strong investor interest in the \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mcompetitive AI search market. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mweek-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m**Thinking Machines:** Founded by former OpenAI CTO Mira Murati, this \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mstartup raised $2 billion, valuing the company at $10 billion. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([pymnts.com](https://www.pymnts.com/news/artificial-intelligence/2025/thi\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mweek-in-ai-ai-startups-hit-fundraising-gold/?utm_source=openai))\\n\\n- \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m**Harvey:** Specializing in AI-driven legal solutions, Harvey secured $300\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mmillion in a Series D funding round, bringing its valuation to $3 billion.\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([en.wikipedia.org](https://en.wikipedia.org/wiki/Harvey_%28software%29?ut\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msource=openai))\\n\\n**Notable Acquisitions:**\\n\\n- **Amazon's Acquisition o\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mBee:** Amazon announced plans to acquire Bee, a San Francisco-based startu\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mdeveloping AI-powered wearable technology. Bee's flagship product is a $50\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mAI-enabled wristband capable of transcribing conversations and generating \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34msummaries or to-do lists from the recordings. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([reuters.com](https://www.reuters.com/business/retail-consumer/amazon-buy\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mtartup-focused-ai-wearables-2025-07-22/?utm_source=openai))\\n\\n**Industry \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mTrends:**\\n\\n- **AI Dominates Venture Capital Funding:** In the first half\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mof 2025, AI startups accounted for 53% of global venture capital \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34minvestments, with 64% in the U.S., indicating a transformative period for \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mtech investment. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([axios.com](https://www.axios.com/newsletters/axios-pro-rata-d4299627-1e8\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m44f2-9308-212d8860b6aa?utm_source=openai))\\n\\n- **China's AI Cooperation \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mInitiative:** China proposed the creation of a new international \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34morganization to promote global cooperation on AI, aiming to provide an \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34malternative to U.S.-led initiatives and foster inclusive development of th\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mtechnology. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([reuters.com](https://www.reuters.com/world/china/china-proposes-new-glob\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m-ai-cooperation-organisation-2025-07-26/?utm_source=openai))\\n\\n**Emerging\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mStartups:**\\n\\n- **Mistral AI:** Founded in 2023, Mistral AI specializes i\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mopen-weight large language models and has secured significant funding, \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mincluding a \\u20ac600 million round in June 2024, elevating its valuation \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m\\u20ac5.8 billion. \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([en.wikipedia.org](https://en.wikipedia.org/wiki/Mistral_AI?utm_source=op\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mai))\\n\\n- **Neysa:** Established in 2023, Neysa provides a cloud platform \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mfor AI acceleration and high-performance computing infrastructure services\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mraising a total of $50 million across two major funding rounds. 
\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m([en.wikipedia.org](https://en.wikipedia.org/wiki/Neysa?utm_source=openai)\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mn\\n\\n## Recent Developments in AI Startups:\\n- [Amazon to buy startup \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mfocused on AI \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mwearables](https://www.reuters.com/business/retail-consumer/amazon-buy-sta\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mup-focused-ai-wearables-2025-07-22/?utm_source=openai), Published on \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mTuesday, July 22\\n- [Axios Pro Rata: AI eats \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mVC](https://www.axios.com/newsletters/axios-pro-rata-d4299627-1e82-44f2-93\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34m-212d8860b6aa?utm_source=openai), Published on Thursday, July 03\\n- [China\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mproposes new global AI cooperation \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34morganisation](https://www.reuters.com/world/china/china-proposes-new-globa\u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mai-cooperation-organisation-2025-07-26/?utm_source=openai), Published on \u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;230;219;116;48;2;39;40;34mSaturday, July 26 \"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"annotations\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mnull\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m,\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m 
\u001b[0m\u001b[38;2;255;70;137;48;2;39;40;34m\"meta\"\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m:\u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m \u001b[0m\u001b[38;2;102;217;239;48;2;39;40;34mnull\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m\u001b[38;2;248;248;242;48;2;39;40;34m}\u001b[0m\u001b[48;2;39;40;34m \u001b[0m\u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m│\u001b[0m \u001b[48;2;39;40;34m \u001b[0m \u001b[92m│\u001b[0m\n", - "\u001b[92m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
╭──────────────────────── AGENT [07/27/2025 06:03:40] ─────────────────────────╮\n", - "│ Here's the latest news about AI and AI startups: │\n", - "│ │\n", - "│ Major Funding Rounds: │\n", - "│ │\n", - "│ • Perplexity AI: This AI search startup has secured a $100 million funding │\n", - "│ round, bringing its valuation to $18 billion. This reflects the strong │\n", - "│ interest in AI search technology. │\n", - "│ • Thinking Machines: Founded by former OpenAI CTO Mira Murati, this startup │\n", - "│ raised $2 billion, with a valuation reaching $10 billion. │\n", - "│ • Harvey: Specializing in AI-driven legal solutions, Harvey raised $300 │\n", - "│ million in a Series D round, resulting in a valuation of $3 billion. │\n", - "│ │\n", - "│ Notable Acquisitions: │\n", - "│ │\n", - "│ • Amazon's Acquisition of Bee: Amazon plans to acquire Bee, a San │\n", - "│ Francisco-based startup focused on AI-powered wearables. Bee’s flagship │\n", - "│ product is an AI-enabled wristband priced at $50 that can transcribe │\n", - "│ conversations and generate summaries or to-do lists. │\n", - "│ │\n", - "│ Industry Trends: │\n", - "│ │\n", - "│ • AI Venture Capital Dominance: In the first half of 2025, AI startups │\n", - "│ accounted for 53% of global venture capital investments, with a │\n", - "│ significant portion (64%) in the U.S. │\n", - "│ • China's AI Cooperation Initiative: China has proposed creating a new │\n", - "│ international organization to promote global AI cooperation, aiming to │\n", - "│ foster inclusive development and provide an alternative to U.S.-led │\n", - "│ initiatives. │\n", - "│ │\n", - "│ Emerging Startups: │\n", - "│ │\n", - "│ • Mistral AI: Specializing in open-weight large language models, Mistral AI │\n", - "│ has secured significant funding, including €600 million in June 2024, │\n", - "│ with a valuation now at €5.8 billion. │\n", - "│ • Neysa: Offering a cloud platform for AI acceleration, Neysa has raised a │\n", - "│ total of $50 million across two major funding rounds. │\n", - "│ │\n", - "│ For more details, you may visit their respective announcements on sites like │\n", - "│ Reuters, Wikipedia, and Axios. │\n", - "╰──────────────────────────────────────────────────────────────────────────────╯\n", - "\n" - ], - "text/plain": [ - "\u001b[32m╭─\u001b[0m\u001b[32m───────────────────────\u001b[0m\u001b[32m \u001b[0m\u001b[1;32mAGENT\u001b[0m\u001b[32m [07/27/2025 06:03:40] \u001b[0m\u001b[32m────────────────────────\u001b[0m\u001b[32m─╮\u001b[0m\n", - "\u001b[32m│\u001b[0m Here's the latest news about AI and AI startups: \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mMajor Funding Rounds:\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mPerplexity AI\u001b[0m: This AI search startup has secured a $100 million funding \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mround, bringing its valuation to $18 billion. This reflects the strong \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0minterest in AI search technology. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mThinking Machines\u001b[0m: Founded by former OpenAI CTO Mira Murati, this startup \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mraised $2 billion, with a valuation reaching $10 billion. 
\u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mHarvey\u001b[0m: Specializing in AI-driven legal solutions, Harvey raised $300 \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mmillion in a Series D round, resulting in a valuation of $3 billion. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mNotable Acquisitions:\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mAmazon's Acquisition of Bee\u001b[0m: Amazon plans to acquire Bee, a San \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mFrancisco-based startup focused on AI-powered wearables. Bee’s flagship \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mproduct is an AI-enabled wristband priced at $50 that can transcribe \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mconversations and generate summaries or to-do lists. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mIndustry Trends:\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mAI Venture Capital Dominance\u001b[0m: In the first half of 2025, AI startups \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0maccounted for 53% of global venture capital investments, with a \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0msignificant portion (64%) in the U.S. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mChina's AI Cooperation Initiative\u001b[0m: China has proposed creating a new \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0minternational organization to promote global AI cooperation, aiming to \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mfoster inclusive development and provide an alternative to U.S.-led \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0minitiatives. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1mEmerging Startups:\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mMistral AI\u001b[0m: Specializing in open-weight large language models, Mistral AI \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mhas secured significant funding, including €600 million in June 2024, \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mwith a valuation now at €5.8 billion. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m • \u001b[0m\u001b[1mNeysa\u001b[0m: Offering a cloud platform for AI acceleration, Neysa has raised a \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[1;33m \u001b[0mtotal of $50 million across two major funding rounds. \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m For more details, you may visit their respective announcements on sites like \u001b[32m│\u001b[0m\n", - "\u001b[32m│\u001b[0m Reuters, Wikipedia, and Axios. 
\u001b[32m│\u001b[0m\n", - "\u001b[32m╰──────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Streaming timed out after 20 seconds - returning collected messages\n" - ] - } - ], + "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -518,14 +130,14 @@ " only_after_timestamp=event.created_at, \n", " print_messages=True,\n", " rich_print=True,\n", - " timeout=20,\n", + " timeout=30, # Notice the longer timeout to give time for the agent to respond\n", ")" ] }, { "cell_type": "code", "execution_count": null, - "id": "ec2b599d", + "id": "7", "metadata": {}, "outputs": [], "source": [] diff --git a/pyproject.toml b/pyproject.toml index 4f5c36e2..49663a10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -124,6 +124,7 @@ build-backend = "hatchling.build" [dependency-groups] dev = [ "ipywidgets>=8.1.7", + "nbstripout>=0.8.1", "yaspin>=3.1.0", ] diff --git a/src/agentex/lib/cli/templates/default/dev.ipynb.j2 b/src/agentex/lib/cli/templates/default/dev.ipynb.j2 index c16f0a02..70242ebf 100644 --- a/src/agentex/lib/cli/templates/default/dev.ipynb.j2 +++ b/src/agentex/lib/cli/templates/default/dev.ipynb.j2 @@ -30,24 +30,17 @@ "outputs": [], "source": [ "# (REQUIRED) Create a new task. For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, @@ -58,9 +51,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -70,16 +61,15 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, @@ -90,6 +80,7 @@ "metadata": {}, "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -101,6 +92,14 @@ " timeout=5,\n", ")" ] + }, + { + "cell_type": 
"code", + "execution_count": null, + "id": "4864e354", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/src/agentex/lib/cli/templates/sync/dev.ipynb.j2 b/src/agentex/lib/cli/templates/sync/dev.ipynb.j2 index 490c9cb9..d8c10a65 100644 --- a/src/agentex/lib/cli/templates/sync/dev.ipynb.j2 +++ b/src/agentex/lib/cli/templates/sync/dev.ipynb.j2 @@ -56,8 +56,7 @@ "outputs": [], "source": [ "# Test non streaming response\n", - "from typing import List, cast\n", - "from agentex.types import TaskMessage, TextContent\n", + "from agentex.types import TextContent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -67,29 +66,23 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_message(\n", " agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": False\n", " }\n", ")\n", "\n", - "# # Extract and print just the text content from the response\n", - "# # The response is expected to be a dict with a \"result\" key containing a list of message dicts\n", - "if rpc_response and rpc_response.result:\n", + "if not rpc_response or not rpc_response.result:\n", + " raise ValueError(\"No result in response\")\n", "\n", - " # We know that the result of the message/send when stream is set to False will be a list of TaskMessage objects\n", - " task_message_list = cast(List[TaskMessage], rpc_response.result)\n", - " for task_message in rpc_response.result:\n", - " if isinstance(task_message, TaskMessage):\n", - " content = task_message.content\n", - " if isinstance(content, TextContent):\n", - " text = content.content\n", - " print(text)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in response.\")\n" + "# Extract and print just the text content from the response\n", + "for task_message in rpc_response.result:\n", + " content = task_message.content\n", + " if isinstance(content, TextContent):\n", + " text = content.content\n", + " print(text)\n" ] }, { @@ -100,11 +93,8 @@ "outputs": [], "source": [ "# Test streaming response\n", - "import json\n", - "from agentex.types import AgentRpcResponse\n", - "from agentex.types.agent_rpc_result import StreamTaskMessageDelta, StreamTaskMessageFull\n", + "from agentex.types.task_message_update import StreamTaskMessageDelta, StreamTaskMessageFull\n", "from agentex.types.text_delta import TextDelta\n", - "from agentex.types.task_message_update import TaskMessageUpdate\n", "\n", "\n", "# The result object of message/send will be a TaskMessageUpdate which is a union of the following types:\n", @@ -120,38 +110,34 @@ "# Whenn processing StreamTaskMessageDelta, if you are expecting more than TextDeltas, such as DataDelta, ToolRequestDelta, or ToolResponseDelta, you can process them as well\n", "# Whenn processing StreamTaskMessageFull, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "with client.agents.with_streaming_response.rpc_by_name(\n", + "for agent_rpc_response_chunk in client.agents.send_message_stream(\n", " 
agent_name=AGENT_NAME,\n", - " method=\"message/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"stream\": True\n", " }\n", - ") as response:\n", - " for agent_rpc_response_str in response.iter_text():\n", - " chunk_rpc_response = AgentRpcResponse.model_validate(json.loads(agent_rpc_response_str))\n", - " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", - " task_message_update = cast(TaskMessageUpdate, chunk_rpc_response.result)\n", - "\n", - " # Print oly the text deltas as they arrive or any full messages\n", - " if isinstance(task_message_update, StreamTaskMessageDelta):\n", - " delta = task_message_update.delta\n", - " if isinstance(delta, TextDelta):\n", - " print(delta.text_delta, end=\"\", flush=True)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", - " elif isinstance(task_message_update, StreamTaskMessageFull):\n", - " content = task_message_update.content\n", - " if isinstance(content, TextContent):\n", - " print(content.content)\n", - " else:\n", - " print(f\"Found non-text {type(task_message)} object in full message.\")\n" + "):\n", + " # We know that the result of the message/send when stream is set to True will be a TaskMessageUpdate\n", + " task_message_update = agent_rpc_response_chunk.result\n", + " # Print oly the text deltas as they arrive or any full messages\n", + " if isinstance(task_message_update, StreamTaskMessageDelta):\n", + " delta = task_message_update.delta\n", + " if isinstance(delta, TextDelta):\n", + " print(delta.text_delta, end=\"\", flush=True)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in streaming message.\")\n", + " elif isinstance(task_message_update, StreamTaskMessageFull):\n", + " content = task_message_update.content\n", + " if isinstance(content, TextContent):\n", + " print(content.content)\n", + " else:\n", + " print(f\"Found non-text {type(task_message)} object in full message.\")\n" ] }, { "cell_type": "code", "execution_count": null, - "id": "4ffb663c", + "id": "c5e7e042", "metadata": {}, "outputs": [], "source": [] diff --git a/src/agentex/lib/cli/templates/temporal/dev.ipynb.j2 b/src/agentex/lib/cli/templates/temporal/dev.ipynb.j2 index c16f0a02..70242ebf 100644 --- a/src/agentex/lib/cli/templates/temporal/dev.ipynb.j2 +++ b/src/agentex/lib/cli/templates/temporal/dev.ipynb.j2 @@ -30,24 +30,17 @@ "outputs": [], "source": [ "# (REQUIRED) Create a new task. 
For Agentic agents, you must create a task for messages to be associated with.\n", - "\n", - "from typing import cast\n", "import uuid\n", "\n", - "from agentex.types import Task\n", - "\n", - "TASK_ID = str(uuid.uuid4())[:8]\n", - "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.create_task(\n", " agent_name=AGENT_NAME,\n", - " method=\"task/create\",\n", " params={\n", - " \"name\": f\"{TASK_ID}-task\",\n", + " \"name\": f\"{str(uuid.uuid4())[:8]}-task\",\n", " \"params\": {}\n", " }\n", ")\n", "\n", - "task = cast(Task, rpc_response.result)\n", + "task = rpc_response.result\n", "print(task)" ] }, @@ -58,9 +51,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Test non streaming response\n", - "from typing import cast\n", - "from agentex.types import Event\n", + "# Send an event to the agent\n", "\n", "# The response is expected to be a list of TaskMessage objects, which is a union of the following types:\n", "# - TextContent: A message with just text content \n", @@ -70,16 +61,15 @@ "\n", "# When processing the message/send response, if you are expecting more than TextContent, such as DataContent, ToolRequestContent, or ToolResponseContent, you can process them as well\n", "\n", - "rpc_response = client.agents.rpc_by_name(\n", + "rpc_response = client.agents.send_event(\n", " agent_name=AGENT_NAME,\n", - " method=\"event/send\",\n", " params={\n", " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n", " \"task_id\": task.id,\n", " }\n", ")\n", "\n", - "event = cast(Event, rpc_response.result)\n", + "event = rpc_response.result\n", "print(event)" ] }, @@ -90,6 +80,7 @@ "metadata": {}, "outputs": [], "source": [ + "# Subscribe to the async task messages produced by the agent\n", "from agentex.lib.utils.dev_tools import subscribe_to_async_task_messages\n", "\n", "task_messages = subscribe_to_async_task_messages(\n", @@ -101,6 +92,14 @@ " timeout=5,\n", ")" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4864e354", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/src/agentex/resources/agents.py b/src/agentex/resources/agents.py index dccba983..0dfe6873 100644 --- a/src/agentex/resources/agents.py +++ b/src/agentex/resources/agents.py @@ -2,7 +2,8 @@ from __future__ import annotations -from typing import Union, Optional +import json +from typing import AsyncGenerator, Generator, Union, Optional from typing_extensions import Literal import httpx @@ -20,7 +21,7 @@ ) from ..types.agent import Agent from .._base_client import make_request_options -from ..types.agent_rpc_response import AgentRpcResponse +from ..types.agent_rpc_response import AgentRpcResponse, CancelTaskResponse, CreateTaskResponse, SendEventResponse, SendMessageResponse, SendMessageStreamResponse from ..types.agent_list_response import AgentListResponse __all__ = ["AgentsResource", "AsyncAgentsResource"] @@ -310,6 +311,260 @@ def rpc_by_name( ), cast_to=AgentRpcResponse, ) + + def create_task( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsCreateTaskRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> CreateTaskResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if agent_id is not None: + raw_agent_rpc_response = self.rpc( + agent_id=agent_id, + method="task/create", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = self.rpc_by_name( + agent_name=agent_name, + method="task/create", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return CreateTaskResponse.model_validate(raw_agent_rpc_response, from_attributes=True) + + def cancel_task( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsCancelTaskRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> CancelTaskResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if agent_id is not None: + raw_agent_rpc_response = self.rpc( + agent_id=agent_id, + method="task/cancel", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = self.rpc_by_name( + agent_name=agent_name, + method="task/cancel", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return CancelTaskResponse.model_validate(raw_agent_rpc_response, from_attributes=True) + + def send_message( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsSendMessageRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SendMessageResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if "stream" in params and params["stream"] == True: + raise ValueError("If stream is set to True, use send_message_stream() instead") + else: + if agent_id is not None: + raw_agent_rpc_response = self.rpc( + agent_id=agent_id, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = self.rpc_by_name( + agent_name=agent_name, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return SendMessageResponse.model_validate(raw_agent_rpc_response, from_attributes=True) + + def send_message_stream( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsSendMessageRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Generator[SendMessageStreamResponse, None, None]: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if "stream" in params and params["stream"] == False: + raise ValueError("If stream is set to False, use send_message() instead") + + params["stream"] = True + + if agent_id is not None: + raw_agent_rpc_response = self.with_streaming_response.rpc( + agent_id=agent_id, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = self.with_streaming_response.rpc_by_name( + agent_name=agent_name, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + with raw_agent_rpc_response as response: + for agent_rpc_response_str in response.iter_text(): + if agent_rpc_response_str.strip(): # Only process non-empty lines + try: + chunk_rpc_response = SendMessageStreamResponse.model_validate( + json.loads(agent_rpc_response_str), + from_attributes=True + ) + yield chunk_rpc_response + except json.JSONDecodeError: + # Skip invalid JSON lines + continue + + def send_event( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsSendEventRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following 
arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SendEventResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if agent_id is not None: + raw_agent_rpc_response = self.rpc( + agent_id=agent_id, + method="event/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = self.rpc_by_name( + agent_name=agent_name, + method="event/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return SendEventResponse.model_validate(raw_agent_rpc_response, from_attributes=True) class AsyncAgentsResource(AsyncAPIResource): @@ -596,7 +851,260 @@ async def rpc_by_name( ), cast_to=AgentRpcResponse, ) - + + async def create_task( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsCreateTaskRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> CreateTaskResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if agent_id is not None: + raw_agent_rpc_response = await self.rpc( + agent_id=agent_id, + method="task/create", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = await self.rpc_by_name( + agent_name=agent_name, + method="task/create", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return CreateTaskResponse.model_validate(raw_agent_rpc_response, from_attributes=True) + + async def cancel_task( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsCancelTaskRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> CancelTaskResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if agent_id is not None: + raw_agent_rpc_response = await self.rpc( + agent_id=agent_id, + method="task/cancel", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = await self.rpc_by_name( + agent_name=agent_name, + method="task/cancel", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return CancelTaskResponse.model_validate(raw_agent_rpc_response, from_attributes=True) + + async def send_message( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsSendMessageRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SendMessageResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if "stream" in params and params["stream"] == True: + raise ValueError("If stream is set to True, use send_message_stream() instead") + else: + if agent_id is not None: + raw_agent_rpc_response = await self.rpc( + agent_id=agent_id, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = await self.rpc_by_name( + agent_name=agent_name, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return SendMessageResponse.model_validate(raw_agent_rpc_response, from_attributes=True) + + async def send_message_stream( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsSendMessageRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncGenerator[SendMessageStreamResponse, None]: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if "stream" in params and params["stream"] == False: + raise ValueError("If stream is set to False, use send_message() instead") + + params["stream"] = True + + if agent_id is not None: + raw_agent_rpc_response = self.with_streaming_response.rpc( + agent_id=agent_id, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = self.with_streaming_response.rpc_by_name( + agent_name=agent_name, + method="message/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + async with raw_agent_rpc_response as response: + async for agent_rpc_response_str in response.iter_text(): + if agent_rpc_response_str.strip(): # Only process non-empty lines + try: + chunk_rpc_response = SendMessageStreamResponse.model_validate( + json.loads(agent_rpc_response_str), + from_attributes=True + ) + yield chunk_rpc_response + except json.JSONDecodeError: + # Skip invalid JSON lines + continue + + async def send_event( + self, + agent_id: str | None = None, + agent_name: str | None = None, + *, + params: agent_rpc_params.ParamsSendEventRequest, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SendEventResponse: + if agent_id is not None and agent_name is not None: + raise ValueError("Either agent_id or agent_name must be provided, but not both") + + if agent_id is not None: + raw_agent_rpc_response = await self.rpc( + agent_id=agent_id, + method="event/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + elif agent_name is not None: + raw_agent_rpc_response = await self.rpc_by_name( + agent_name=agent_name, + method="event/send", + params=params, + id=id, + jsonrpc=jsonrpc, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + else: + raise ValueError("Either agent_id or agent_name must be provided") + + return SendEventResponse.model_validate(raw_agent_rpc_response, from_attributes=True) class AgentsResourceWithRawResponse: def __init__(self, agents: AgentsResource) -> None: diff --git a/src/agentex/types/agent_rpc_response.py b/src/agentex/types/agent_rpc_response.py index e9995e80..d1b9af83 100644 --- a/src/agentex/types/agent_rpc_response.py +++ b/src/agentex/types/agent_rpc_response.py @@ -5,16 +5,44 @@ from .._models import BaseModel from .agent_rpc_result import AgentRpcResult +from .event import Event +from .task import Task +from .task_message import TaskMessage +from .task_message_update import TaskMessageUpdate __all__ = ["AgentRpcResponse"] -class AgentRpcResponse(BaseModel): +class BaseAgentRpcResponse(BaseModel): + id: Union[int, str, None] = None + error: Optional[object] = None + jsonrpc: Optional[Literal["2.0"]] = None + + +class AgentRpcResponse(BaseAgentRpcResponse): result: Optional[AgentRpcResult] = None """The result of the agent RPC request""" - id: Union[int, str, None] = None - error: Optional[object] = None +class CreateTaskResponse(BaseAgentRpcResponse): + result: Task + """The result of the task creation""" - jsonrpc: Optional[Literal["2.0"]] = None + +class CancelTaskResponse(BaseAgentRpcResponse): + result: Task + """The result of the task cancellation""" + + +class SendMessageResponse(BaseAgentRpcResponse): + result: list[TaskMessage] + """The result of the message sending""" + +class SendMessageStreamResponse(BaseAgentRpcResponse): + result: TaskMessageUpdate + """The result of the message sending""" + + +class SendEventResponse(BaseAgentRpcResponse): + result: Event + """The result of the event sending""" \ No newline at end of file diff --git a/uv.lock b/uv.lock index 0b060aa6..1d0d0402 100644 --- a/uv.lock +++ b/uv.lock @@ -4,7 +4,7 @@ requires-python = ">=3.12, <4" [[package]] name = "agentex-sdk" -version = "0.2.0" +version = "0.2.1" source = { editable = "." 
} dependencies = [ { name = "aiohttp" }, @@ -52,6 +52,7 @@ dev = [ [package.dev-dependencies] dev = [ { name = "ipywidgets" }, + { name = "nbstripout" }, { name = "yaspin" }, ] @@ -97,6 +98,7 @@ provides-extras = ["aiohttp", "dev"] [package.metadata.requires-dev] dev = [ { name = "ipywidgets", specifier = ">=8.1.7" }, + { name = "nbstripout", specifier = ">=0.8.1" }, { name = "yaspin", specifier = ">=3.1.0" }, ] @@ -400,6 +402,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/53/50/b1222562c6d270fea83e9c9075b8e8600b8479150a18e4516a6138b980d1/fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca", size = 95514 }, ] +[[package]] +name = "fastjsonschema" +version = "2.21.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/50/4b769ce1ac4071a1ef6d86b1a3fb56cdc3a37615e8c5519e1af96cdac366/fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4", size = 373939 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924 }, +] + [[package]] name = "filelock" version = "3.18.0" @@ -1053,6 +1064,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313 }, ] +[[package]] +name = "nbformat" +version = "5.10.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastjsonschema" }, + { name = "jsonschema" }, + { name = "jupyter-core" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454 }, +] + +[[package]] +name = "nbstripout" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nbformat" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/6e/05d7e0e35598bd0d423167295f978005912a2dcd137c88ebf36e34047dc7/nbstripout-0.8.1.tar.gz", hash = "sha256:eaac8b6b4e729e8dfe1e5df2c0f8ba44abc5a17a65448f0480141f80be230bb1", size = 26399 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/91/93b459c456b0e4389b2b3ddb3b82cd401d022691334a0f06e92c2046e780/nbstripout-0.8.1-py2.py3-none-any.whl", hash = "sha256:79a8c8da488d98c54c112fa87185045f0271a97d84f1d46918d6a3ee561b30e7", size = 16329 }, +] + [[package]] name = "nest-asyncio" version = "1.6.0"
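
A minimal usage sketch of the synchronous task helpers added to AgentsResource (create_task, send_event, cancel_task). The client and agent name are passed in as parameters rather than constructed here, and the cancel params shape ({"task_id": ...}) is an assumption, since agent_rpc_params.ParamsCancelTaskRequest is not shown in this diff.

import uuid


def demo_task_lifecycle(client, agent_name: str) -> None:
    # task/create: CreateTaskResponse.result is a typed Task
    task = client.agents.create_task(
        agent_name=agent_name,
        params={"name": f"{str(uuid.uuid4())[:8]}-task", "params": {}},
    ).result

    # event/send: SendEventResponse.result is a typed Event
    event = client.agents.send_event(
        agent_name=agent_name,
        params={
            "content": {"type": "text", "author": "user", "content": "Summarize the latest AI startup news."},
            "task_id": task.id,
        },
    ).result
    print(event)

    # task/cancel: the params shape below is assumed, not taken from this diff
    cancelled = client.agents.cancel_task(
        agent_name=agent_name,
        params={"task_id": task.id},
    ).result
    print(cancelled)

Each helper also accepts agent_id in place of agent_name; passing both, or neither, raises a ValueError.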
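
The async counterparts on AsyncAgentsResource mirror the sync helpers: send_message is awaited, while send_message_stream is an async generator. A sketch under the assumption that an already-configured async client object exposing .agents is passed in (its construction is outside this diff):

from agentex.types import TextContent
from agentex.types.task_message_update import StreamTaskMessageDelta, StreamTaskMessageFull
from agentex.types.text_delta import TextDelta


async def stream_reply(async_client, agent_name: str) -> None:
    # Non-streaming calls are simply awaited
    response = await async_client.agents.send_message(
        agent_name=agent_name,
        params={"content": {"type": "text", "author": "user", "content": "Hello."}, "stream": False},
    )
    print(len(response.result), "message(s) returned")

    # message/send with stream=True yields SendMessageStreamResponse chunks
    async for chunk in async_client.agents.send_message_stream(
        agent_name=agent_name,
        params={
            "content": {"type": "text", "author": "user", "content": "Stream a short greeting."},
            "stream": True,
        },
    ):
        update = chunk.result
        if isinstance(update, StreamTaskMessageDelta) and isinstance(update.delta, TextDelta):
            print(update.delta.text_delta, end="", flush=True)
        elif isinstance(update, StreamTaskMessageFull) and isinstance(update.content, TextContent):
            print(update.content.content)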
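
The new models in agent_rpc_response.py all subclass a shared BaseAgentRpcResponse, so every helper returns a typed result alongside the usual JSON-RPC id/error/jsonrpc fields and the notebooks no longer need typing.cast. A short sketch of the resulting call-site pattern, again assuming a configured client and agent name; the message text is arbitrary.

from agentex.types import TextContent


def print_reply(client, agent_name: str) -> None:
    response = client.agents.send_message(
        agent_name=agent_name,
        params={
            "content": {"type": "text", "author": "user", "content": "One line about yourself, please."},
            "stream": False,
        },
    )
    if response.error is not None:        # shared BaseAgentRpcResponse field
        raise RuntimeError(f"message/send failed: {response.error}")
    for task_message in response.result:  # SendMessageResponse.result is list[TaskMessage]
        if isinstance(task_message.content, TextContent):
            print(task_message.content.content)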