diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 8eee8ee..ae7a1d5 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -2,6 +2,9 @@ name: Publish (PyPI + MCP Registry) on: workflow_dispatch: + push: + tags: + - 'v*' concurrency: group: publish-${{ github.ref }} @@ -11,6 +14,7 @@ jobs: pypi: name: Publish to PyPI runs-on: ubuntu-latest + timeout-minutes: 15 permissions: contents: read id-token: write # REQUIRED for PyPI Trusted Publishing (OIDC) @@ -18,25 +22,46 @@ jobs: - name: Checkout uses: actions/checkout@v4 + - name: Validate version matches tag + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') + run: | + set -euo pipefail + + TAG_VERSION="${GITHUB_REF#refs/tags/v}" + PYPROJECT_VERSION=$(grep -oP '^version = "\K[^"]+' pyproject.toml) + SERVER_VERSION=$(grep -oP '"version":\s*"\K[^"]+' server.json | head -1) + SERVER_PKG_VERSION=$(grep -oP '"version":\s*"\K[^"]+' server.json | tail -1) + + ERRORS=0 + [ "$PYPROJECT_VERSION" != "$TAG_VERSION" ] && echo "pyproject.toml: $PYPROJECT_VERSION != $TAG_VERSION" && ERRORS=1 + [ "$SERVER_VERSION" != "$TAG_VERSION" ] && echo "server.json: $SERVER_VERSION != $TAG_VERSION" && ERRORS=1 + [ "$SERVER_PKG_VERSION" != "$TAG_VERSION" ] && echo "server.json package: $SERVER_PKG_VERSION != $TAG_VERSION" && ERRORS=1 + + if [ $ERRORS -eq 1 ]; then + echo "Please update all version fields to $TAG_VERSION before creating the tag." + exit 1 + fi + + echo "All versions match: $TAG_VERSION" + - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.10" - name: Install uv run: pipx install uv - # --- Optional checks (keep for now; can be moved to PR CI later) --- - - name: Sync deps + - name: Sync dependencies run: uv sync - - name: Ruff lint + - name: Run ruff lint run: uvx ruff check . - - name: Ruff format check + - name: Run ruff format check run: uvx ruff format --check . - - name: Run tests (if present) + - name: Run tests run: | if [ -d tests ]; then uv run pytest -q @@ -44,21 +69,18 @@ jobs: echo "No tests/ directory; skipping." 
fi - # --- Build artifacts for PyPI --- - - name: Build sdist & wheel + - name: Build package run: uv build - # --- Publish using Trusted Publishing (no tokens) --- - name: Publish to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: packages-dir: dist - # For a dress rehearsal, you can set: - # repository-url: https://test.pypi.org/legacy/ registry: name: Publish to MCP Registry runs-on: ubuntu-latest + timeout-minutes: 10 needs: pypi permissions: contents: read @@ -67,23 +89,22 @@ jobs: - name: Checkout uses: actions/checkout@v4 - # Small delay to allow PyPI metadata to propagate - - name: Wait briefly + - name: Wait for PyPI metadata propagation run: sleep 20 - name: Install MCP Publisher run: | - set -e + set -euo pipefail OS=$(uname -s | tr '[:upper:]' '[:lower:]') ARCH=$(uname -m | sed 's/x86_64/amd64/;s/aarch64/arm64/') echo "Get the latest release version" LATEST_VERSION=$(curl -s https://api.github.com/repos/modelcontextprotocol/registry/releases/latest | jq -r '.tag_name') echo "Installing MCP Publisher version: $LATEST_VERSION" - curl -L "https://github.com/modelcontextprotocol/registry/releases/download/${LATEST_VERSION}/mcp-publisher_${LATEST_VERSION#v}_${OS}_${ARCH}.tar.gz" \ + curl -L "https://github.com/modelcontextprotocol/registry/releases/latest/download/mcp-publisher_${OS}_${ARCH}.tar.gz" \ | tar xz mcp-publisher - - name: Login to MCP Registry (OIDC) + - name: Login to MCP Registry run: ./mcp-publisher login github-oidc - - name: Publish server.json to MCP Registry + - name: Publish to MCP Registry run: ./mcp-publisher publish diff --git a/.github/workflows/publish_mcp.yml b/.github/workflows/publish_mcp.yml index 0a398e3..df202c9 100644 --- a/.github/workflows/publish_mcp.yml +++ b/.github/workflows/publish_mcp.yml @@ -2,46 +2,36 @@ name: Publish MCP Registry on: workflow_dispatch: - workflow_run: - workflows: ["Publish PyPI"] - types: [completed] concurrency: - group: publish-${{ github.ref }} + group: publish-mcp-${{ github.ref }} cancel-in-progress: true jobs: - on-success: + mcp-registry: name: Publish to MCP Registry runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} + timeout-minutes: 10 permissions: contents: read - id-token: write + id-token: write # REQUIRED for MCP Registry GitHub OIDC login steps: - name: Checkout uses: actions/checkout@v4 - name: Install MCP Publisher run: | - set -e + set -euo pipefail OS=$(uname -s | tr '[:upper:]' '[:lower:]') ARCH=$(uname -m | sed 's/x86_64/amd64/;s/aarch64/arm64/') + echo "Get the latest release version" LATEST_VERSION=$(curl -s https://api.github.com/repos/modelcontextprotocol/registry/releases/latest | jq -r '.tag_name') - curl -L "https://github.com/modelcontextprotocol/registry/releases/download/${LATEST_VERSION}/mcp-publisher_${LATEST_VERSION#v}_${OS}_${ARCH}.tar.gz" | tar xz mcp-publisher + echo "Installing MCP Publisher version: $LATEST_VERSION" + curl -L "https://github.com/modelcontextprotocol/registry/releases/latest/download/mcp-publisher_${OS}_${ARCH}.tar.gz" \ + | tar xz mcp-publisher - name: Login to MCP Registry run: ./mcp-publisher login github-oidc - name: Publish to MCP Registry - run: | - echo "PyPI workflow succeeded - Publishing to MCP Registry" - ./mcp-publisher publish - - on-failure: - name: Handle PyPI Failure - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'failure' }} - steps: - - name: Log PyPI Failure - run: echo "PyPI workflow failed - MCP Registry publication skipped" + run: ./mcp-publisher publish \ No newline at end of 
file diff --git a/.github/workflows/publish_pypi.yml b/.github/workflows/publish_pypi.yml index a821dc2..0563fb6 100644 --- a/.github/workflows/publish_pypi.yml +++ b/.github/workflows/publish_pypi.yml @@ -2,9 +2,6 @@ name: Publish PyPI on: workflow_dispatch: - push: - tags: - - "v*" concurrency: group: publish-pypi-${{ github.ref }} @@ -14,9 +11,10 @@ jobs: pypi: name: Publish PyPI runs-on: ubuntu-latest + timeout-minutes: 15 permissions: contents: read - id-token: write # REQUIRED for PyPI Trusted Publishing (OIDC) + id-token: write steps: - name: Checkout uses: actions/checkout@v4 @@ -24,22 +22,21 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.10" - name: Install uv run: pipx install uv - # --- Optional checks (keep for now; can be moved to PR CI later) --- - - name: Sync deps + - name: Sync dependencies run: uv sync - - name: Ruff lint + - name: Run ruff lint run: uvx ruff check . - - name: Ruff format check + - name: Run ruff format check run: uvx ruff format --check . - - name: Run tests (if present) + - name: Run tests run: | if [ -d tests ]; then uv run pytest -q @@ -47,14 +44,10 @@ jobs: echo "No tests/ directory; skipping." fi - # --- Build artifacts for PyPI --- - - name: Build sdist & wheel + - name: Build package run: uv build - # --- Publish using Trusted Publishing (no tokens) --- - name: Publish to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - packages-dir: dist - # For a dress rehearsal, you can set: - # repository-url: https://test.pypi.org/legacy/ \ No newline at end of file + packages-dir: dist \ No newline at end of file diff --git a/README.md b/README.md index 42c75c5..e31ec8d 100644 --- a/README.md +++ b/README.md @@ -82,12 +82,9 @@ The MCP server is version-agnostic (ROS1 or ROS2) and works with any MCP-compati ### Installation -Follow the [installation guide](docs/installation.md) for step-by-step instructions: -1. Clone the repository -2. Install `uv` and `rosbridge` -3. Install Claude Desktop (or any MCP-enabled client) -4. Configure your client to connect to the ROS MCP Server -5. Start `rosbridge` on the target robot +Follow the [installation guide](docs/installation.md) for step-by-step instructions to install, run, and troubleshoot the ROS-MCP server. + +For developers, we also have instructions for [installation from source](docs/installation-from-source.md) --- diff --git a/docs/installation-alternatives.md b/docs/installation-alternatives.md new file mode 100644 index 0000000..9d7900e --- /dev/null +++ b/docs/installation-alternatives.md @@ -0,0 +1,131 @@ +# Alternate Installation and Configuration Options + +This document covers alternative methods for installing the ROS-MCP server, configuring it with different transport options, and using it with different LLM clients. + +--- + +## Alternative Installation Options + +### Option A: Install using pip +For users who prefer traditional pip installation: + +```bash +pip install ros-mcp +``` +> **⚠️ Important**: This package requires pip version 23.0 or higher. Check your pip version with `pip --version` and upgrade if needed: +> **⚠️ Important**: This package requires python version 3.10 or higher. Check your python version with `python3 --version` and upgrade if needed: +```bash +python3 -m pip install --upgrade pip +``` + +### Option B: Install from Source +For developers or advanced users who need to modify the source code, see [Installation from Source](installation-from-source.md). 
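Whichever of these options you use, a quick smoke test confirms that the package and its `ros-mcp` entry point ended up on your PATH. This is an illustrative check, not part of the original steps; the version requirements are the ones stated above:

```bash
python3 --version   # needs 3.10 or higher
pip --version       # needs 23.0 or higher
ros-mcp --help      # should print the server's command-line options
```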
+ +### Option C: Install from Source using pip +For developers who want to install from source but still use pip: + +```bash +# Clone the repository +git clone https://github.com/robotmcp/ros-mcp-server.git +cd ros-mcp-server + +# Install from source using pip +pip install . +``` + +> **⚠️ Important**: This package requires pip version 23.0 or higher. Check your pip version with `pip --version` and upgrade if needed: +> **⚠️ Important**: This package requires python version 3.10 or higher. Check your python version with `python3 --version` and upgrade if needed: +```bash +python3 -m pip install --upgrade pip +``` + +--- + +## Alternate Configuration - HTTP Transport + +The default configurations set up the MCP server using the STDIO transport layer, which launches the server as a plugin automatically on launching Claude. + +It is also possible to configure the MCP server using the HTTP transport layer, which configures Claude to connect to the MCP server when it is launched as a standalone application. + +For HTTP transport, the configuration is the same across all platforms. First start the MCP server manually: + +**Linux/macOS/Windows(WSL):** +```bash +cd //ros-mcp-server +# Using command line arguments (recommended) +ros-mcp --transport streamable-http --host 127.0.0.1 --port 9000 + +# Or using environment variables (legacy) +export MCP_TRANSPORT=streamable-http +export MCP_HOST=127.0.0.1 +export MCP_PORT=9000 +uv run server.py +``` + +Then configure Claude Desktop to connect to the HTTP server (same for all platforms): + +```json +{ + "mcpServers": { + "ros-mcp-server-http": { + "name": "ROS-MCP Server (http)", + "transport": "http", + "url": "http://127.0.0.1:9000/mcp" + } + } +} +``` + +--- + +## Comparison between default (STDIO) and HTTP Transport + +#### STDIO Transport (Default) +- **Best for**: Local development, single-user setups +- **Pros**: Simple setup, no network configuration needed +- **Cons**: MCP server and LLM/MCP client need to be running on the local machine. +- **Use case**: Running MCP server directly with your LLM client + +#### HTTP/Streamable-HTTP Transport +- **Best for**: Remote access, multiple clients, production deployments +- **Pros**: Network accessible, multiple clients can connect +- **Cons**: Requires network configuration, MCP server needs to be run independently. +- **Use case**: Remote robots, team environments, web-based clients + +--- + +## Alternate Clients + +### Cursor IDE +For detailed Cursor setup instructions, see our [Cursor Tutorial](../examples/7_cursor/README.md). + +### ChatGPT +For detailed ChatGPT setup instructions, see our [ChatGPT Tutorial](../examples/6_chatgpt/README.md). + +### Google Gemini +For detailed Gemini setup instructions, see our [Gemini Tutorial](../examples/2_gemini/README.md). + +### Custom MCP Client +You can also use the MCP server directly in your Python code. +
+Here is a Python example of how to integrate it programmatically: + +```python +import asyncio + +from mcp import ClientSession, StdioServerParameters +from mcp.client.stdio import stdio_client + +async def main(): +    server_params = StdioServerParameters( +        command="uv", +        args=["--directory", "/path/to/ros-mcp-server", "run", "server.py"] +    ) + +    async with stdio_client(server_params) as (read, write): +        async with ClientSession(read, write) as session: +            # Complete the MCP handshake before calling any tools +            await session.initialize() +            # Use the MCP server +            result = await session.call_tool("get_topics", {}) +            print(result) + +asyncio.run(main()) +``` + +
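The example above uses the stdio transport and spawns the server itself. If you instead run the server with the streamable-http transport described earlier, a client connects over HTTP. Below is a minimal sketch, assuming a recent `mcp` Python SDK that ships `streamablehttp_client` and a server already listening on the URL configured above:

```python
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client


async def main():
    # Connect to a ros-mcp server started with --transport streamable-http
    async with streamablehttp_client("http://127.0.0.1:9000/mcp") as (read, write, _):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool("get_topics", {})
            print(result)


asyncio.run(main())
```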
+ diff --git a/docs/installation.md b/docs/installation.md index 269b4df..c575d0b 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -3,113 +3,58 @@ > ⚠️ **Prerequisite**: You need either ROS installed locally on your machine OR access over the network to a robot/computer with ROS installed. This MCP server connects to ROS systems on a robot, so a running ROS environment is required. Installation includes the following steps: -- Install the MCP server using pip -- Install and configure the Language Model Client +- On the Host Machine (Where the LLM will run) + - Install the ROS-MCP server - Install any language model client (We demonstrate with Claude Desktop) - - Configure the client to run the MCP server and connect automatically on launch. -- Install and launch Rosbridge + - Configure the client to run the MCP server automatically on launch. +- On the Robot (Where ROS will be running) + - Install and launch rosbridge Below are detailed instructions for each of these steps. --- -# 1. Install the MCP server (On the host machine where the LLM will be running) -Install using pipx (recommended for isolated installation): +# On The Host Machine (Where Your LLM Will Run) -```bash -# Install pipx if you don't have it -pip install pipx +The ROS MCP server is capable of connecting any LLM client that supports the MCP protocol, and can also do so via multiple transport protocols (stdio, streamable http, etc.) -# Install ros-mcp using pipx -pipx install ros-mcp -``` -
-Why pipx? +For the primary installation guide, we demonstrate using **Claude Desktop** and the default stdio transport layer. We use uvx for installation and management of dependencies. -**Benefits of pipx:** -- Isolated installation in its own virtual environment -- Won't conflict with other Python packages -- Easy to uninstall: `pipx uninstall ros-mcp` -- Automatic PATH management +We also have examples for other LLM clients (Cursor, Gemini, and ChatGPT) and other transport protocols (streamable HTTP). -
-
-Alternative Installation Options +For alternate clients, transport protocols, and installation methods, see [Alternate Installation and Configuration Options](installation-alternatives.md#alternative-installation-options). -### Option A: Install using pip -For users who prefer traditional pip installation: +--- +Expand the OS below for installation instructions -```bash -pip install ros-mcp -``` -> **⚠️ Important**: This package requires pip version 23.0 or higher. Check your pip version with `pip --version` and upgrade if needed: -```bash -python3 -m pip install --upgrade pip -``` +
+Linux (Ubuntu) -### Option B: Install from Source -For developers or advanced users who need to modify the source code, see [Installation from Source](installation-from-source.md). +## 1. Install the MCP server -### Option C: Install from Source using pipx -For developers who want to install from source but still use pipx for isolation: +### 1.1 Install uv ```bash -# Clone the repository -git clone https://github.com/robotmcp/ros-mcp-server.git -cd ros-mcp-server - -# Install from source using pipx -pipx install . +curl -LsSf https://astral.sh/uv/install.sh | sh ``` -> **Note**: This also works with regular pip: `pip install .` +Look up [documentation from uv](https://docs.astral.sh/uv/getting-started/installation/) for more information or in the case of any errors. + +### 1.2 Test run ROS-MCP using uvx -> **⚠️ Important**: This package requires pip version 23.0 or higher. Check your pip version with `pip --version` and upgrade if needed: ```bash -python3 -m pip install --upgrade pip +# Test that the ROS-MCP server can be accessed in the venv +uvx ros-mcp --help ``` -
- ---- - -# 2. Install and configure a Language Model Client - -Any LLM client that supports MCP can be used. We use **Claude Desktop** for testing and development. - - - -## 2.1. Download Claude Desktop -
-Linux (Ubuntu) +## 2. Install and configure a Language Model Client +### 2.1 Download - Follow the installation instructions from the community-supported [claude-desktop-debian](https://github.com/aaddrick/claude-desktop-debian) -
- -
-MacOS - -- Download from [claude.ai](https://claude.ai/download) - -
- -
-Windows (Using WSL) - -This will have Claude running on Windows and the MCP server running on WSL. We assume that you have cloned the repository and installed UV on your [WSL](https://apps.microsoft.com/detail/9pn20msr04dw?hl=en-US&gl=US) - -- Download from [claude.ai](https://claude.ai/download) - -
- - -## 2.2. Configure Claude Desktop to launch the MCP server -
-Linux (Ubuntu) - +### 2.2 Configure - Locate and edit the `claude_desktop_config.json` file: - (If the file does not exist, create it) ```bash @@ -125,19 +70,65 @@ This will have Claude running on Windows and the MCP server running on WSL. We a "command": "bash", "args": [ "-lc", - "ros-mcp --transport=stdio" + "uvx ros-mcp --transport=stdio" ] } } } ``` +### 2.3 Test the connection +- Launch Claude Desktop and check connection status. +- The ros-mcp-server should be visible in your list of tools. + +
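If the server does not show up, a common culprit is a syntax error in `claude_desktop_config.json`. A quick way to validate it before digging further (a simple check assuming `python3` is installed; adjust the path if yours differs from step 2.2):

```bash
# Exits non-zero and points at the offending line if the JSON is malformed
python3 -m json.tool ~/.config/Claude/claude_desktop_config.json
```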

+ +

+ +
+ Troubleshooting + +- If the `ros-mcp-server` doesn't appear even after correctly configuring `claude_desktop_config.json`, try completely shutting down Claude Desktop using the commands below and then restarting it. This could be a Claude Desktop caching issue. +```bash +# Completely terminate Claude Desktop processes +pkill -f claude-desktop +# Or alternatively +killall claude-desktop + +# Restart Claude Desktop +claude-desktop +``` +
+
MacOS +## 1. Install the MCP server + +### 1.1 Install uv + +```bash +curl -LsSf https://astral.sh/uv/install.sh | sh +``` + +Look up [documentation from uv](https://docs.astral.sh/uv/getting-started/installation/) for more information or in the case of any errors. + +### 1.2 Test run ROS-MCP using uvx + +```bash +# Test that the ROS-MCP server can be accessed in the venv +uvx ros-mcp --help +``` + +## 2. Install and configure a Language Model Client + +### 2.1 Download +- Download from [claude.ai](https://claude.ai/download) + +### 2.2 Configure - Locate and edit the `claude_desktop_config.json` file: - (If the file does not exist, create it) ```bash @@ -153,19 +144,67 @@ This will have Claude running on Windows and the MCP server running on WSL. We a "command": "zsh", "args": [ "-lc", - "ros-mcp --transport=stdio" + "uvx ros-mcp --transport=stdio" ] } } } ``` +### 2.3 Test the connection +- Launch Claude Desktop and check connection status. +- The ros-mcp-server should be visible in your list of tools. + +

+ +

+ +
+ Troubleshooting + +- If the `ros-mcp-server` doesn't appear even after correctly configuring `claude_desktop_config.json`, try completely shutting down Claude Desktop using the commands below and then restarting it. This could be a Claude Desktop caching issue. +```bash +# Completely terminate Claude Desktop processes +pkill -f claude-desktop +# Or alternatively +killall claude-desktop + +# Restart Claude Desktop +claude-desktop +``` +
+
Windows (Using WSL) +## 1. Install the MCP server + +### 1.1 Install uv + +```bash +curl -LsSf https://astral.sh/uv/install.sh | sh +``` + +Look up [documentation from uv](https://docs.astral.sh/uv/getting-started/installation/) for more information or in the case of any errors. + +### 1.2 Test run ROS-MCP using uvx + +```bash +# Test that the ROS-MCP server can be accessed in the venv +uvx ros-mcp --help +``` + +## 2. Install and configure a Language Model Client + +### 2.1 Download +- Download from [claude.ai](https://claude.ai/download) + +This will have Claude running on Windows and the MCP server running on WSL. We assume that you have installed uv on your [WSL](https://apps.microsoft.com/detail/9pn20msr04dw?hl=en-US&gl=US) + +### 2.2 Configure - Locate and edit the `claude_desktop_config.json` file: - (If the file does not exist, create it) ```bash @@ -185,74 +224,89 @@ This will have Claude running on Windows and the MCP server running on WSL. We a "Ubuntu-22.04", "bash", "-lc", - "ros-mcp --transport=stdio" + "uvx ros-mcp --transport=stdio" ] } } } ``` + +### 2.3 Test the connection +- Launch Claude Desktop and check connection status. +- The ros-mcp-server should be visible in your list of tools. + +
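Because Claude on Windows starts the server through `wsl.exe`, you can dry-run essentially the same command from PowerShell to confirm that the WSL distribution name, `uvx`, and the package are all reachable. This is a sanity check mirroring the config above; swap in your own distribution name if it differs:

```powershell
# Should print the ros-mcp help text; if this fails, Claude will fail the same way
wsl.exe -d Ubuntu-22.04 bash -lc "uvx ros-mcp --help"
```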

+ +

+ +
+ Troubleshooting + +- If the `ros-mcp-server` doesn't appear even after correctly configuring `claude_desktop_config.json`, try completely shutting down Claude Desktop using the commands below and then restarting it. This could be a Claude Desktop caching issue. +```bash +# Completely terminate Claude Desktop processes +pkill -f claude-desktop +# Or alternatively +killall claude-desktop + +# Restart Claude Desktop +claude-desktop +``` +
---- +
- Alternate Configuration - HTTP Transport +Windows (Using PowerShell) -The above configurations sets up the MCP server using the default STDIO transport layer, which launches the server as a plugin automatically on launching Claude. +## 1. Install the MCP server -It is also possible to configure the MCP server using the http transport layer, which configures Claude to connect to the MCP server when it is launched as a standalone application. +### 1.1 Install uv -For HTTP transport, the configuration is the same across all platforms. First start the MCP server manually: +```powershell +# Use the following command in Windows PowerShell +powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex" +``` -**Linux/macOS/Windows(WSL):** -```bash -cd //ros-mcp-server -# Using command line arguments (recommended) -ros-mcp --transport streamable-http --host 127.0.0.1 --port 9000 +Look up [documentation from uv](https://docs.astral.sh/uv/getting-started/installation/) for more information or in the case of any errors. -# Or using environment variables (legacy) -export MCP_TRANSPORT=streamable-http -export MCP_HOST=127.0.0.1 -export MCP_PORT=9000 -uv run server.py +### 1.2 Test run ROS-MCP using uvx + +```powershell +# Test that the ROS-MCP server can be accessed in the venv +uvx ros-mcp --help +``` + +## 2. Install and configure a Language Model Client + +### 2.1 Download +- Download from [claude.ai](https://claude.ai/download) + +This will have Claude and the MCP server running within Windows. + +### 2.2 Configure +- Locate and edit the `claude_desktop_config.json` file: +- (If the file does not exist, create it) +```bash +%APPDATA%\Claude\claude_desktop_config.json ``` -Then configure Claude Desktop to connect to the HTTP server (same for all platforms): +- Add the following to the `"mcpServers"` section of the JSON file: ```json { "mcpServers": { - "ros-mcp-server-http": { - "name": "ROS-MCP Server (http)", - "transport": "http", - "url": "http://127.0.0.1:9000/mcp" + "ros-mcp-server": { + "command": "uvx", + "args": ["ros-mcp", "--transport=stdio"] } } } ``` -
- -
- Comparison between default (STDIO) and HTTP Transport - -#### STDIO Transport (Default) -- **Best for**: Local development, single-user setups -- **Pros**: Simple setup, no network configuration needed -- **Cons**: MCP server and LLM/MCP client need to be running on the local machine. -- **Use case**: Running MCP server directly with your LLM client - -#### HTTP/Streamable-HTTP Transport -- **Best for**: Remote access, multiple clients, production deployments -- **Pros**: Network accessible, multiple clients can connect -- **Cons**: Requires network configuration, MCP server needs to be run independently. -- **Use case**: Remote robots, team environments, web-based clients - -
- - -## 2.3. Test the connection +### 2.3 Test the connection - Launch Claude Desktop and check connection status. - The ros-mcp-server should be visible in your list of tools. @@ -276,14 +330,16 @@ claude-desktop
+ --- -# 3. Install and run rosbridge (On the target robot where ROS will be running) +# On The Target Robot (Where ROS Will Be Running)
ROS 1 -## 3.1. Install `rosbridge_server` +## 3. Install and run rosbridge +### 3.1. Install `rosbridge_server` This package is required for MCP to interface with ROS or ROS 2 via WebSocket. It needs to be installed on the same machine that is running ROS. @@ -300,7 +356,7 @@ sudo apt install ros-${ROS_DISTRO}-rosbridge-server ```
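The package name above is built from `$ROS_DISTRO`, so the apt command only works in a sourced ROS environment. A quick check before installing (illustrative; the distro name here is just an example):

```bash
# Should print your ROS distribution, e.g. "noetic"
echo "$ROS_DISTRO"
# If it prints nothing, source your ROS setup file first, e.g.:
source /opt/ros/noetic/setup.bash
```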
-## 3.2. Launch rosbridge in your ROS environment: +### 3.2. Launch rosbridge in your ROS environment: ```bash @@ -313,8 +369,8 @@ roslaunch rosbridge_server rosbridge_websocket.launch
ROS 2 - -## 3.1. Install `rosbridge_server` +## 3. Install and run rosbridge +### 3.1. Install `rosbridge_server` This package is required for MCP to interface with ROS or ROS 2 via WebSocket. It needs to be installed on the same machine that is running ROS. @@ -331,8 +387,7 @@ sudo apt install ros-${ROS_DISTRO}-rosbridge-server ```
- -## 3.2. Launch rosbridge in your ROS environment: +### 3.2. Launch rosbridge in your ROS environment: ```bash @@ -346,10 +401,14 @@ ros2 launch rosbridge_server rosbridge_websocket_launch.xml --- -# 4. You're ready to go! -You can test out your server with any robot that you have running. Just tell your AI to connect to the robot using its target IP address. (Default is localhost, so you don't need to tell it to connect if the MCP server is installed on the same machine as your ROS) +# You're ready to go! + +You can test out your server with any robot that you have running. Just tell your AI to connect to the robot using its target IP address. (Default is localhost, so you don't need to tell it to connect if the MCP server is installed on the same machine as your ROS.) + +✅ **Tip:** If you don't currently have any robots running, turtlesim is considered the 'hello world' robot for ROS to experiment with. It does not have any simulation dependencies such as Gazebo or IsaacSim. -✅ **Tip:** If you don't currently have any robots running, turtlesim is considered the 'hello world' robot for ROS to experiment with. It does not have any simulation dependencies such as Gazebo or IsaacSim. +
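Before asking the LLM to connect, you can also confirm that rosbridge is reachable on its default port (9090). Below is a minimal sketch using the third-party `websockets` package and the rosbridge protocol's `call_service` op; it assumes rosbridge and its bundled rosapi node are running:

```python
import asyncio
import json

import websockets  # pip install websockets


async def main():
    async with websockets.connect("ws://localhost:9090") as ws:
        # Ask rosapi (launched alongside rosbridge) for the current topic list
        await ws.send(json.dumps({"op": "call_service", "service": "/rosapi/topics"}))
        print(json.loads(await ws.recv()))


asyncio.run(main())
```

If this prints a topic list, the MCP server will be able to reach the robot as well.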
+Testing with turtlesim For a complete step-by-step tutorial on using turtlesim with the MCP server and for more information on ROS and turtlesim, see our [Turtlesim Tutorial](../examples/1_turtlesim/README.md). @@ -364,6 +423,7 @@ rosrun turtlesim turtlesim_node ros2 run turtlesim turtlesim_node ``` +
Example Commands @@ -390,54 +450,12 @@ What topics and services do you see on the robot?
---- - -# 5. Alternate Clients (ChatGPT, Gemini, Cursor) -
-Examples and setup instructions for other LLM Hosts and Clients - -## 5.1. Cursor IDE -For detailed Cursor setup instructions, see our [Cursor Tutorial](../examples/7_cursor/README.md). -## 5.2. ChatGPT -For detailed ChatGPT setup instructions, see our [ChatGPT Tutorial](../examples/6_chatgpt/README.md). -## 5.3. Google Gemini -For detailed Gemini setup instructions, see our [Gemini Tutorial](../examples/2_gemini/README.md). +## Troubleshooting -## 5.4. Custom MCP Client -You can also use the MCP server directly in your Python code.
-Here is a python example of how to integrate it programmatically - -```python -from mcp import ClientSession, StdioServerParameters -from mcp.client.stdio import stdio_client - -async def main(): - server_params = StdioServerParameters( - command="uv", - args=["--directory", "/path/to/ros-mcp-server", "run", "server.py"] - ) - - async with stdio_client(server_params) as (read, write): - async with ClientSession(read, write) as session: - # Use the MCP server - result = await session.call_tool("get_topics", {}) - print(result) -``` - -
- -
- - ---- - -# 6. Troubleshooting - -
-6.1. Common Issues +Common Issues Here are some frequently encountered issues and their solutions: @@ -447,14 +465,12 @@ Here are some frequently encountered issues and their solutions: **Symptoms**: The ros-mcp-server doesn't appear in your LLM client's tool list. **Solutions**: -1. **Check file paths**: Ensure all paths in your configuration are absolute and correct -2. **Restart client**: Completely shut down and restart your LLM client -3. **Check logs**: Look for error messages in your LLM client's logs -4. **Test manually**: Try running the MCP server manually to check for errors: +1. **Restart client**: Completely shut down and restart your LLM client +2. **Check logs**: Look for error messages in your LLM client's logs +3. **Test manually**: Try running the MCP server manually to check for errors: ```bash -cd //ros-mcp-server -uv run server.py +uvx ros-mcp ```
@@ -485,16 +501,12 @@ curl -I http://localhost:9090 **Solutions**: 1. **Check WSL distribution**: Ensure you're using the correct WSL distribution name -2. **Verify uv path**: Check that the uv path in WSL is correct: +2. **Test manually**: Try running the MCP server manually in WSL: ```bash -# In WSL -which uv +uvx ros-mcp ``` -3. **Test WSL connectivity**: Ensure Windows can reach WSL services -4. **Check WSL networking**: For HTTP transport, use `0.0.0.0` instead of `127.0.0.1` -
@@ -506,10 +518,10 @@ which uv 1. **Check command line arguments**: Ensure the correct transport, host, and port are specified: ```bash # Check available options - python server.py --help + uvx ros-mcp --help # Example with custom settings - python server.py --transport http --host 0.0.0.0 --port 8080 + uvx ros-mcp --transport http --host 0.0.0.0 --port 8080 ``` 2. **Check environment variables** (legacy): Ensure MCP_TRANSPORT, MCP_HOST, and MCP_PORT are set correctly @@ -534,7 +546,7 @@ curl http://localhost:9000
If you're still having issues: -1. **Check the logs**: Look for error messages in your LLM client and MCP server logs +1. **Check the logs**: Look for error messages in your LLM client and MCP server logs. Running the logs through an LLM (ChatGPT or Claude) can greatly help debugging! 2. **Test with turtlesim**: Try the [turtlesim tutorial](../examples/1_turtlesim/README.md) to verify basic functionality 3. **Open an issue**: Create an issue on the [GitHub repository](https://github.com/robotmcp/ros-mcp-server/issues) with: - Your operating system @@ -550,7 +562,7 @@ curl http://localhost:9000
-6.2. Debug Commands +Debug Commands Test ROS connectivity ```bash @@ -565,7 +577,7 @@ curl -I http://localhost:9090 Test MCP server manually ```bash -ros-mcp --transport=stdio +uvx ros-mcp --transport=stdio ``` Check running processes diff --git a/pyproject.toml b/pyproject.toml index ae6a34f..c990680 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "ros-mcp" -version = "2.2.0" +version = "2.2.1" description = "Connect AI Language Models with Robots on ROS using MCP" readme = "README.md" requires-python = ">=3.10" diff --git a/server.json b/server.json index bc4afcd..4c82911 100644 --- a/server.json +++ b/server.json @@ -1,18 +1,18 @@ { - "$schema": "https://static.modelcontextprotocol.io/schemas/2025-09-29/server.schema.json", + "$schema": "https://static.modelcontextprotocol.io/schemas/2025-10-17/server.schema.json", "name": "io.github.robotmcp/ros-mcp-server", "description": "Connect AI models like Claude & ChatGPT with ROS robots using MCP", "repository": { "url": "https://github.com/robotmcp/ros-mcp-server", "source": "github" }, - "version": "2.2.0", + "version": "2.2.1", "packages": [ { "registryType": "pypi", "registryBaseUrl": "https://pypi.org", "identifier": "ros-mcp", - "version": "2.2.0", + "version": "2.2.1", "transport": { "type": "stdio", "command": "ros-mcp", diff --git a/uv.lock b/uv.lock index 434fae4..db1eefd 100644 --- a/uv.lock +++ b/uv.lock @@ -1526,7 +1526,7 @@ wheels = [ [[package]] name = "ros-mcp" -version = "2.2.0" +version = "2.2.1" source = { editable = "." } dependencies = [ { name = "fastmcp" },