diff --git a/docs/api/mcp.md b/docs/api/mcp.md index 0f5cbeddb6..bc9f4592dd 100644 --- a/docs/api/mcp.md +++ b/docs/api/mcp.md @@ -1 +1,3 @@ +# `pydantic_ai.mcp` + ::: pydantic_ai.mcp diff --git a/docs/mcp/client.md b/docs/mcp/client.md index 10e07141d2..a12a640159 100644 --- a/docs/mcp/client.md +++ b/docs/mcp/client.md @@ -18,29 +18,32 @@ pip/uv-add 'pydantic-ai-slim[mcp]' PydanticAI comes with two ways to connect to MCP servers: -- [`MCPServerSSE`][pydantic_ai.mcp.MCPServerSSE] which connects to an MCP server using the [HTTP SSE](https://modelcontextprotocol.io/docs/concepts/transports#server-sent-events-sse) transport -- [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] which runs the server as a subprocess and connects to it using the [stdio](https://modelcontextprotocol.io/docs/concepts/transports#standard-input%2Foutput-stdio) transport +- [`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] which connects to an MCP server using the [HTTP SSE](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) transport +- [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] which runs the server as a subprocess and connects to it using the [stdio](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) transport Examples of both are shown below; [mcp-run-python](run-python.md) is used as the MCP server in both examples. ### SSE Client -[`MCPServerSSE`][pydantic_ai.mcp.MCPServerSSE] connects over HTTP using the [HTTP + Server Sent Events transport](https://modelcontextprotocol.io/docs/concepts/transports#server-sent-events-sse) to a server. +[`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] connects over HTTP using the [HTTP + Server Sent Events transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) to a server. !!! 
note - [`MCPServerSSE`][pydantic_ai.mcp.MCPServerSSE] requires an MCP server to be running and accepting HTTP connections before calling [`agent.run_mcp_servers()`][pydantic_ai.Agent.run_mcp_servers]. Running the server is not managed by PydanticAI. + [`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] requires an MCP server to be running and accepting HTTP connections before calling [`agent.run_mcp_servers()`][pydantic_ai.Agent.run_mcp_servers]. Running the server is not managed by PydanticAI. + +The name "HTTP" is used since this implementation will be adapted in the future to use the new +[Streamable HTTP](https://github.com/modelcontextprotocol/specification/pull/206) currently in development. Before creating the SSE client, we need to run the server (docs [here](run-python.md)): -```bash {title="run_sse_server.py"} +```bash {title="terminal (run sse server)"} npx @pydantic/mcp-run-python sse ``` ```python {title="mcp_sse_client.py" py="3.10"} from pydantic_ai import Agent -from pydantic_ai.mcp import MCPServerSSE +from pydantic_ai.mcp import MCPServerHTTP -server = MCPServerSSE(url='http://localhost:3001/sse') # (1)! +server = MCPServerHTTP(url='http://localhost:3001/sse') # (1)! agent = Agent('openai:gpt-4o', mcp_servers=[server]) # (2)! @@ -81,7 +84,7 @@ Will display as follows: ### MCP "stdio" Server -The other transport offered by MCP is the [stdio transport](https://modelcontextprotocol.io/docs/concepts/transports#standard-input%2Foutput-stdio) where the server is run as a subprocess and communicates with the client over `stdin` and `stdout`. In this case, you'd use the [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] class. +The other transport offered by MCP is the [stdio transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) where the server is run as a subprocess and communicates with the client over `stdin` and `stdout`. In this case, you'd use the [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] class. !!! 
note When using [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] servers, the [`agent.run_mcp_servers()`][pydantic_ai.Agent.run_mcp_servers] context manager is responsible for starting and stopping the server. diff --git a/docs/mcp/run-python.md b/docs/mcp/run-python.md index 3ed64bee72..f5f5956034 100644 --- a/docs/mcp/run-python.md +++ b/docs/mcp/run-python.md @@ -20,8 +20,8 @@ npx @pydantic/mcp-run-python [stdio|sse] Where: -* `stdio`: Runs the server with [stdin/stdout transport](https://modelcontextprotocol.io/docs/concepts/transports#standard-input%2Foutput-stdio) (for subprocess usage) -* `sse`: Runs the server with [HTTP Server-Sent Events transport](https://modelcontextprotocol.io/docs/concepts/transports#server-sent-events-sse) (for remote connections) +* `stdio`: Runs the server with [stdin/stdout transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) (for subprocess usage) +* `sse`: Runs the server with [HTTP Server-Sent Events transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) (for remote connections) Usage of `@pydantic/mcp-run-python` with PydanticAI is described in the [client](client.md#mcp-stdio-server) documentation. @@ -75,6 +75,9 @@ async def main(): """ ``` +If an exception occurs, `status` will be `install-error` or `run-error` and `return_value` will be replaced +by `error` which will include the traceback and exception message. + ## Dependencies Dependencies are installed when code is run. diff --git a/examples/pydantic_ai_examples/mcp_server.py b/examples/pydantic_ai_examples/mcp_server.py deleted file mode 100644 index 56ada0b900..0000000000 --- a/examples/pydantic_ai_examples/mcp_server.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Simple MCP Server that can be used to test the MCP protocol. - -Run with: - - uv run -m pydantic_ai_examples.mcp_server --transport - -TRANSPORT can be either `sse` or `stdio`. 
-""" - -import argparse - -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP('PydanticAI MCP Server', port=8005) - - -@mcp.tool() -async def celsius_to_fahrenheit(celsius: float) -> float: - """Convert Celsius to Fahrenheit. - - Args: - celsius: Temperature in Celsius - - Returns: - Temperature in Fahrenheit - """ - return (celsius * 9 / 5) + 32 - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument( - '--transport', type=str, default='stdio', choices=('sse', 'stdio') - ) - args = parser.parse_args() - - mcp.run(transport=args.transport) diff --git a/mcp-run-python/README.md b/mcp-run-python/README.md index d8a6874d6c..a9b4a94221 100644 --- a/mcp-run-python/README.md +++ b/mcp-run-python/README.md @@ -13,5 +13,5 @@ npx @pydantic/mcp-run-python [stdio|sse] where: -- `stdio` runs the server with the [Stdio MCP transport](https://modelcontextprotocol.io/docs/concepts/transports#standard-input%2Foutput-stdio) — suitable for running the process as a subprocess locally -- and `sse` runs the server with the [SSE MCP transport](https://modelcontextprotocol.io/docs/concepts/transports#server-sent-events-sse) — running the server as an HTTP server to connect locally or remotely +- `stdio` runs the server with the [Stdio MCP transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) — suitable for running the process as a subprocess locally +- and `sse` runs the server with the [SSE MCP transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) — running the server as an HTTP server to connect locally or remotely diff --git a/mkdocs.yml b/mkdocs.yml index cebf66f5c4..8c05cf7f33 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -55,6 +55,7 @@ nav: - api/exceptions.md - api/settings.md - api/usage.md + - api/mcp.md - api/format_as_xml.md - api/models/base.md - api/models/openai.md @@ -69,7 +70,6 @@ nav: - api/models/function.md - api/models/fallback.md - 
api/providers.md - - api/mcp.md - api/pydantic_graph/graph.md - api/pydantic_graph/nodes.md - api/pydantic_graph/persistence.md diff --git a/pydantic_ai_slim/pydantic_ai/mcp.py b/pydantic_ai_slim/pydantic_ai/mcp.py index d28d0999f8..8f2e85199b 100644 --- a/pydantic_ai_slim/pydantic_ai/mcp.py +++ b/pydantic_ai_slim/pydantic_ai/mcp.py @@ -1,8 +1,3 @@ -"""This module implements the MCP server interface between the agent and the LLM. - -See for more information. -""" - from __future__ import annotations from abc import ABC, abstractmethod @@ -29,13 +24,13 @@ "you can use the `mcp` optional group — `pip install 'pydantic-ai-slim[mcp]'`" ) from _import_error -__all__ = ('MCPServer', 'MCPServerStdio', 'MCPServerSSE') +__all__ = 'MCPServer', 'MCPServerStdio', 'MCPServerHTTP' class MCPServer(ABC): - """Base class for MCP servers that can be used to run a command or connect to an SSE server. + """Base class for attaching agents to MCP servers. - See for more information. + See for more information. """ is_running: bool = False @@ -105,19 +100,30 @@ async def __aexit__( @dataclass class MCPServerStdio(MCPServer): - """An MCP server that runs a subprocess. + """Runs an MCP server in a subprocess and communicates with it over stdin/stdout. This class implements the stdio transport from the MCP specification. - See for more information. + See for more information. + + !!! note + Using this class as an async context manager will start the server as a subprocess when entering the context, + and stop it when exiting the context. Example: ```python {py="3.10"} from pydantic_ai import Agent from pydantic_ai.mcp import MCPServerStdio - server = MCPServerStdio('python', ['-m', 'pydantic_ai_examples.mcp_server']) + server = MCPServerStdio('npx', ['-y', '@pydantic/mcp-run-python', 'stdio']) # (1)! agent = Agent('openai:gpt-4o', mcp_servers=[server]) + + async def main(): + async with agent.run_mcp_servers(): # (2)! + ... ``` + + 1. 
See [MCP Run Python](../mcp/run-python.md) for more information. + 2. This will start the server as a subprocess and connect to it. """ command: str @@ -127,7 +133,11 @@ class MCPServerStdio(MCPServer): """The arguments to pass to the command.""" env: dict[str, str] | None = None - """The environment variables the CLI server will have access to.""" + """The environment variables the CLI server will have access to. + + By default the subprocess will not inherit any environment variables from the parent process. + If you want to inherit the environment variables from the parent process, use `env=os.environ`. + """ @asynccontextmanager async def client_streams( @@ -141,15 +151,42 @@ async def client_streams( @dataclass -class MCPServerSSE(MCPServer): - """An MCP server that connects to a remote server. +class MCPServerHTTP(MCPServer): + """An MCP server that connects over streamable HTTP connections. This class implements the SSE transport from the MCP specification. - See for more information. + See for more information. + + The name "HTTP" is used since this implementation will be adapted in the future to use the new + [Streamable HTTP](https://github.com/modelcontextprotocol/specification/pull/206) currently in development. + + !!! note + Using this class as an async context manager will create a new pool of HTTP connections to connect + to a server which should already be running. + + Example: + ```python {py="3.10"} + from pydantic_ai import Agent + from pydantic_ai.mcp import MCPServerHTTP + + server = MCPServerHTTP('http://localhost:3001/sse') # (1)! + agent = Agent('openai:gpt-4o', mcp_servers=[server]) + + async def main(): + async with agent.run_mcp_servers(): # (2)! + ... + ``` + + 1. E.g. you might be connecting to a server run with `npx @pydantic/mcp-run-python sse`, + see [MCP Run Python](../mcp/run-python.md) for more information. + 2. This will connect to a server running on `localhost:3001`. 
""" url: str - """The URL of the remote server.""" + """The URL of the SSE endpoint on the MCP server. + + For example for a server running locally, this might be `http://localhost:3001/sse`. + """ @asynccontextmanager async def client_streams( diff --git a/tests/test_examples.py b/tests/test_examples.py index 24b7d10168..97c823028d 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -79,7 +79,7 @@ def test_docs_examples( # noqa: C901 mocker.patch('rich.prompt.Prompt.ask', side_effect=rich_prompt_ask) if sys.version_info >= (3, 10): - mocker.patch('pydantic_ai.mcp.MCPServerSSE', return_value=MockMCPServer()) + mocker.patch('pydantic_ai.mcp.MCPServerHTTP', return_value=MockMCPServer()) mocker.patch('mcp.server.fastmcp.FastMCP') env.set('OPENAI_API_KEY', 'testing') diff --git a/tests/test_mcp.py b/tests/test_mcp.py index 37056648a7..8f83153fa7 100644 --- a/tests/test_mcp.py +++ b/tests/test_mcp.py @@ -13,7 +13,7 @@ with try_import() as imports_successful: from mcp.types import CallToolResult, TextContent - from pydantic_ai.mcp import MCPServerSSE, MCPServerStdio + from pydantic_ai.mcp import MCPServerHTTP, MCPServerStdio from pydantic_ai.models.openai import OpenAIModel from pydantic_ai.providers.openai import OpenAIProvider @@ -39,7 +39,7 @@ async def test_stdio_server(): def test_sse_server(): - sse_server = MCPServerSSE(url='http://localhost:8000/sse') + sse_server = MCPServerHTTP(url='http://localhost:8000/sse') assert sse_server.url == 'http://localhost:8000/sse'