44 changes: 44 additions & 0 deletions openai_agents/handoffs/README.md
Reviewer comment: Left two comments at #226 that apply here as well

@@ -0,0 +1,44 @@
# Handoffs Examples

Agent handoff patterns with message filtering in Temporal workflows.

*Adapted from [OpenAI Agents SDK handoffs examples](https://github.com/openai/openai-agents-python/tree/main/examples/handoffs)*

Before running these examples, be sure to review the [prerequisites and background on the integration](../README.md).

## Running the Examples

First, start the worker:
```bash
uv run openai_agents/handoffs/run_worker.py
```

Then run the workflow:

### Message Filter Workflow
Demonstrates agent handoffs with message history filtering:
```bash
uv run openai_agents/handoffs/run_message_filter_workflow.py
```

## Workflow Pattern

The workflow demonstrates a 4-step conversation with message filtering:

1. **Introduction**: User greets first agent with name
2. **Tool Usage**: First agent generates random number using function tool
3. **Agent Switch**: Conversation moves to second agent for general questions
4. **Spanish Handoff**: Second agent detects Spanish and hands off to Spanish specialist

During the Spanish handoff, message filtering occurs:
- All tool-related messages are removed from the history
- The first two messages are dropped (to demonstrate selective context trimming)
- Filtered conversation continues with Spanish agent

The workflow returns both the final response and the complete message history so the effect of the filter can be inspected.
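
For orientation, the heart of the filter is a plain function that rewrites the `HandoffInputData` before the Spanish agent sees it (the complete version lives in `workflows/message_filter_workflow.py` below):

```python
from agents import HandoffInputData
from agents.extensions import handoff_filters


def spanish_handoff_message_filter(data: HandoffInputData) -> HandoffInputData:
    # Strip tool calls and tool results, then drop the first two history items
    data = handoff_filters.remove_all_tools(data)
    history = (
        tuple(data.input_history[2:])
        if isinstance(data.input_history, tuple)
        else data.input_history
    )
    return HandoffInputData(
        input_history=history,
        pre_handoff_items=tuple(data.pre_handoff_items),
        new_items=tuple(data.new_items),
    )


# Wired up on the agent that performs the handoff:
# handoffs=[handoff(spanish_agent, input_filter=spanish_handoff_message_filter)]
```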

## Omitted Examples

The following patterns from the [reference repository](https://github.com/openai/openai-agents-python/tree/main/examples/handoffs) are not included in this Temporal adaptation:

- **Message Filter Streaming**: Streaming capabilities are not yet available in the Temporal integration
38 changes: 38 additions & 0 deletions openai_agents/handoffs/run_message_filter_workflow.py
@@ -0,0 +1,38 @@
import asyncio
import json

from temporalio.client import Client
from temporalio.contrib.openai_agents import OpenAIAgentsPlugin

from openai_agents.handoffs.workflows.message_filter_workflow import (
    MessageFilterWorkflow,
)


async def main():
    # Create client connected to server at the given address
    client = await Client.connect(
        "localhost:7233",
        plugins=[
            OpenAIAgentsPlugin(),
        ],
    )

    # Execute a workflow
    result = await client.execute_workflow(
        MessageFilterWorkflow.run,
        "Sora",
        id="message-filter-workflow",
        task_queue="openai-agents-handoffs-task-queue",
    )

    print(f"Final output: {result.final_output}")
    print("\n===Final messages===\n")

    # Print the final message history to see the effect of the message filter
    for message in result.final_messages:
        print(json.dumps(message, indent=2))


if __name__ == "__main__":
    asyncio.run(main())
42 changes: 42 additions & 0 deletions openai_agents/handoffs/run_worker.py
@@ -0,0 +1,42 @@
from __future__ import annotations

import asyncio
from datetime import timedelta

from temporalio.client import Client
from temporalio.contrib.openai_agents import ModelActivityParameters, OpenAIAgentsPlugin
from temporalio.worker import Worker

from openai_agents.handoffs.workflows.message_filter_workflow import (
    MessageFilterWorkflow,
)


async def main():
    # Create client connected to server at the given address
    client = await Client.connect(
        "localhost:7233",
        plugins=[
            OpenAIAgentsPlugin(
                model_params=ModelActivityParameters(
                    start_to_close_timeout=timedelta(seconds=60)
                )
            ),
        ],
    )

    worker = Worker(
        client,
        task_queue="openai-agents-handoffs-task-queue",
        workflows=[
            MessageFilterWorkflow,
        ],
        activities=[
            # No custom activities needed for these workflows
        ],
    )
    await worker.run()


if __name__ == "__main__":
    asyncio.run(main())
112 changes: 112 additions & 0 deletions openai_agents/handoffs/workflows/message_filter_workflow.py
@@ -0,0 +1,112 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import List

from agents import Agent, HandoffInputData, Runner, function_tool, handoff
from agents.extensions import handoff_filters
from agents.items import TResponseInputItem
from temporalio import workflow


@dataclass
class MessageFilterResult:
    final_output: str
    final_messages: List[TResponseInputItem]


@function_tool
def random_number_tool(max: int) -> int:
    """Return a random integer between 0 and the given maximum."""
    # workflow.random() keeps the result deterministic across workflow replays
    return workflow.random().randint(0, max)


def spanish_handoff_message_filter(
    handoff_message_data: HandoffInputData,
) -> HandoffInputData:
    # First, we'll remove any tool-related messages from the message history
    handoff_message_data = handoff_filters.remove_all_tools(handoff_message_data)

    # Second, we'll also remove the first two items from the history, just for demonstration
    history = (
        tuple(handoff_message_data.input_history[2:])
        if isinstance(handoff_message_data.input_history, tuple)
        else handoff_message_data.input_history
    )

    return HandoffInputData(
        input_history=history,
        pre_handoff_items=tuple(handoff_message_data.pre_handoff_items),
        new_items=tuple(handoff_message_data.new_items),
    )


@workflow.defn
class MessageFilterWorkflow:
    @workflow.run
    async def run(self, user_name: str = "Sora") -> MessageFilterResult:
        first_agent = Agent(
            name="Assistant",
            instructions="Be extremely concise.",
            tools=[random_number_tool],
        )

        spanish_agent = Agent(
            name="Spanish Assistant",
            instructions="You only speak Spanish and are extremely concise.",
            handoff_description="A Spanish-speaking assistant.",
        )

        second_agent = Agent(
            name="Assistant",
            instructions=(
                "Be a helpful assistant. If the user speaks Spanish, handoff to the Spanish assistant."
            ),
            handoffs=[
                handoff(spanish_agent, input_filter=spanish_handoff_message_filter)
            ],
        )

        # 1. Send a regular message to the first agent
        result = await Runner.run(first_agent, input=f"Hi, my name is {user_name}.")

        # 2. Ask it to generate a number
        result = await Runner.run(
            first_agent,
            input=result.to_input_list()
            + [
                {
                    "content": "Can you generate a random number between 0 and 100?",
                    "role": "user",
                }
            ],
        )

        # 3. Call the second agent
        result = await Runner.run(
            second_agent,
            input=result.to_input_list()
            + [
                {
                    "content": "I live in New York City. What's the population of the city?",
                    "role": "user",
                }
            ],
        )

        # 4. Cause a handoff to occur
        result = await Runner.run(
            second_agent,
            input=result.to_input_list()
            + [
                {
                    "content": "Por favor habla en español. ¿Cuál es mi nombre y dónde vivo?",
                    "role": "user",
                }
            ],
        )

        # Return the final result and message history
        return MessageFilterResult(
            final_output=result.final_output, final_messages=result.to_input_list()
        )
39 changes: 39 additions & 0 deletions openai_agents/hosted_mcp/README.md
@@ -0,0 +1,39 @@
# Hosted MCP Examples

Integration with hosted MCP (Model Context Protocol) servers using OpenAI agents in Temporal workflows.

*Adapted from [OpenAI Agents SDK hosted_mcp examples](https://github.com/openai/openai-agents-python/tree/main/examples/hosted_mcp)*

Before running these examples, be sure to review the [prerequisites and background on the integration](../README.md).

## Running the Examples

First, start the worker (supports all MCP workflows):
```bash
uv run openai_agents/hosted_mcp/run_worker.py
```

Then run individual examples in separate terminals:

### Simple MCP Connection
Connect to a hosted MCP server without approval requirements (trusted servers):
```bash
uv run openai_agents/hosted_mcp/run_simple_mcp_workflow.py
```

### MCP with Approval Callbacks
Connect to a hosted MCP server with approval workflow for tool execution:
```bash
uv run openai_agents/hosted_mcp/run_approval_mcp_workflow.py
```

## MCP Server Configuration

Both examples default to the GitMCP server (`https://gitmcp.io/openai/codex`), which provides repository analysis capabilities. To use a different MCP server, change the `server_url` parameter in the workflow, as sketched below.
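
The workflow sources appear elsewhere in the PR; as a rough sketch of the configuration they wrap (following the upstream hosted_mcp examples — the `server_label` value here is an assumption), only the `server_url` entry needs to change to target another server:

```python
from agents import Agent, HostedMCPTool

# Sketch, not the exact workflow code: shows where server_url is configured
agent = Agent(
    name="Assistant",
    tools=[
        HostedMCPTool(
            tool_config={
                "type": "mcp",
                "server_label": "gitmcp",  # assumed label for the example server
                "server_url": "https://gitmcp.io/openai/codex",  # change this to use another server
                "require_approval": "never",  # trusted server: no approval step
            }
        )
    ],
)
```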

### Approval Workflow Notes

The approval example demonstrates the callback structure for tool approvals in a Temporal context. In this implementation:

- The approval callback automatically approves requests for demonstration purposes
- In production environments, approvals would typically be handled by communicating with a human user. Because the approval executes in the Temporal workflow, you can use signals or updates to communicate approval status.
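
A minimal sketch of the callback shape, modeled on the upstream hosted_mcp approval example (the automatic approval stands in for a signal- or update-driven human decision; names here are illustrative, not the exact workflow code):

```python
from agents import (
    Agent,
    HostedMCPTool,
    MCPToolApprovalFunctionResult,
    MCPToolApprovalRequest,
)


def approve_tool_call(request: MCPToolApprovalRequest) -> MCPToolApprovalFunctionResult:
    # Demo behaviour: approve every request. In production, surface
    # request.data.name to a human (e.g. via a workflow signal or update)
    # and return their decision instead.
    return {"approve": True}


agent = Agent(
    name="Assistant",
    tools=[
        HostedMCPTool(
            tool_config={
                "type": "mcp",
                "server_label": "gitmcp",  # assumed label
                "server_url": "https://gitmcp.io/openai/codex",
                "require_approval": "always",  # force the approval flow
            },
            on_approval_request=approve_tool_call,
        )
    ],
)
```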
30 changes: 30 additions & 0 deletions openai_agents/hosted_mcp/run_approval_mcp_workflow.py
@@ -0,0 +1,30 @@
import asyncio

from temporalio.client import Client
from temporalio.contrib.openai_agents import OpenAIAgentsPlugin

from openai_agents.hosted_mcp.workflows.approval_mcp_workflow import ApprovalMCPWorkflow


async def main():
    # Create client connected to server at the given address
    client = await Client.connect(
        "localhost:7233",
        plugins=[
            OpenAIAgentsPlugin(),
        ],
    )

    # Execute a workflow
    result = await client.execute_workflow(
        ApprovalMCPWorkflow.run,
        "Which language is this repo written in?",
        id="approval-mcp-workflow",
        task_queue="openai-agents-hosted-mcp-task-queue",
    )

    print(f"Result: {result}")


if __name__ == "__main__":
    asyncio.run(main())
30 changes: 30 additions & 0 deletions openai_agents/hosted_mcp/run_simple_mcp_workflow.py
@@ -0,0 +1,30 @@
import asyncio

from temporalio.client import Client
from temporalio.contrib.openai_agents import OpenAIAgentsPlugin

from openai_agents.hosted_mcp.workflows.simple_mcp_workflow import SimpleMCPWorkflow


async def main():
    # Create client connected to server at the given address
    client = await Client.connect(
        "localhost:7233",
        plugins=[
            OpenAIAgentsPlugin(),
        ],
    )

    # Execute a workflow
    result = await client.execute_workflow(
        SimpleMCPWorkflow.run,
        "Which language is this repo written in?",
        id="simple-mcp-workflow",
        task_queue="openai-agents-hosted-mcp-task-queue",
    )

    print(f"Result: {result}")


if __name__ == "__main__":
    asyncio.run(main())
42 changes: 42 additions & 0 deletions openai_agents/hosted_mcp/run_worker.py
@@ -0,0 +1,42 @@
from __future__ import annotations

import asyncio
from datetime import timedelta

from temporalio.client import Client
from temporalio.contrib.openai_agents import ModelActivityParameters, OpenAIAgentsPlugin
from temporalio.worker import Worker

from openai_agents.hosted_mcp.workflows.approval_mcp_workflow import ApprovalMCPWorkflow
from openai_agents.hosted_mcp.workflows.simple_mcp_workflow import SimpleMCPWorkflow


async def main():
    # Create client connected to server at the given address
    client = await Client.connect(
        "localhost:7233",
        plugins=[
            OpenAIAgentsPlugin(
                model_params=ModelActivityParameters(
                    start_to_close_timeout=timedelta(seconds=60)
                )
            ),
        ],
    )

    worker = Worker(
        client,
        task_queue="openai-agents-hosted-mcp-task-queue",
        workflows=[
            SimpleMCPWorkflow,
            ApprovalMCPWorkflow,
        ],
        activities=[
            # No custom activities needed for these workflows
        ],
    )
    await worker.run()


if __name__ == "__main__":
    asyncio.run(main())