diff --git a/.github/workflows/extensions.yml b/.github/workflows/extensions.yml index 295a7e84..c5f23fa6 100644 --- a/.github/workflows/extensions.yml +++ b/.github/workflows/extensions.yml @@ -55,6 +55,8 @@ jobs: usage-metrics-dashboard: extensions/usage-metrics-dashboard/** voila-example: extensions/voila-example/** stock-report: extensions/stock-report/** + simple-mcp-server: extensions/simple-mcp-server/** + simple-shiny-chat-with-mcp: extensions/simple-shiny-chat-with-mcp/** # Runs for each extension that has changed from `simple-extension-changes` # Lints and packages in preparation for tests and and release. diff --git a/extensions.json b/extensions.json index 7ee9c5e6..525f609a 100644 --- a/extensions.json +++ b/extensions.json @@ -11,7 +11,13 @@ "description": "Pre-built content to illustrate the types of content publishable on Posit Connect." } ], - "tags": [], + "tags": [ + "python", + "shiny", + "fastapi", + "mcp", + "llm" + ], "requiredFeatures": [ "API Publishing", "OAuth Integrations", @@ -1038,4 +1044,4 @@ "category": "example" } ] -} \ No newline at end of file +} diff --git a/extensions/simple-mcp-server/.gitignore b/extensions/simple-mcp-server/.gitignore new file mode 100644 index 00000000..cfe5a687 --- /dev/null +++ b/extensions/simple-mcp-server/.gitignore @@ -0,0 +1,3 @@ +.venv +.env +rsconnect-python diff --git a/extensions/simple-mcp-server/.python-version b/extensions/simple-mcp-server/.python-version new file mode 100644 index 00000000..2c073331 --- /dev/null +++ b/extensions/simple-mcp-server/.python-version @@ -0,0 +1 @@ +3.11 diff --git a/extensions/simple-mcp-server/CONTRIBUTING.md b/extensions/simple-mcp-server/CONTRIBUTING.md new file mode 100644 index 00000000..86e3854b --- /dev/null +++ b/extensions/simple-mcp-server/CONTRIBUTING.md @@ -0,0 +1,59 @@ +# Contributing to the Simple MCP Server Extension + +## Local Development + +For local testing and development: + +```bash +# Install dependencies +pip install -r requirements.txt + 
+# Run the server locally +python main.py +``` + +The server will start on `http://127.0.0.1:8001` with the MCP endpoint at `/mcp`. + +## Tool Development + +### Adding New Tools + +To add new MCP tools, use the `@mcp.tool()` decorator: + +```python +@mcp.tool() +def your_new_tool(parameter: str) -> str: + """ + Description of what your tool does. + + Args: + parameter: Description of the parameter + + Returns: + Description of the return value + """ + # Your tool implementation + return "result" +``` + +### Error Handling + +Use `ToolError` for proper error handling: + +```python +from mcp.server.fastmcp.exceptions import ToolError + +@mcp.tool() +def example_tool(input_value: str) -> str: + if not input_value: + raise ToolError("Input value cannot be empty") + return f"Processed: {input_value}" +``` + +## Authentication + +The server supports Connect API key authentication for tools that interact with Connect services. API keys should be passed in the `x-mcp-authorization` header in the format: + +``` +x-mcp-authorization: Key YOUR_API_KEY +``` diff --git a/extensions/simple-mcp-server/README.md b/extensions/simple-mcp-server/README.md new file mode 100644 index 00000000..711eceaf --- /dev/null +++ b/extensions/simple-mcp-server/README.md @@ -0,0 +1,123 @@ +# Simple MCP Server + +A FastAPI-based Model Context Protocol (MCP) server that demonstrates how to deploy MCP tools on Posit Connect. This extension showcases Connect's ability to host MCP servers that can be consumed by AI assistants and other MCP clients. + +## Overview + +This extension demonstrates Connect's capability to host Model Context Protocol servers, enabling LLMs to access and execute tools remotely. The Simple MCP Server provides a collection of data analysis tools and Connect integration capabilities, making it an ideal companion for AI-powered applications like the [Simple Shiny Chat](../simple-shiny-chat-with-mcp/README.md) extension. 
+ +![MCP Server Landing Page](./images/demo.png) + +## Features + +- **FastAPI-Based MCP Server**: Built on FastAPI with streamable HTTP transport for efficient MCP communication +- **Data Analysis Tools**: Includes tools for dataset operations and summary statistics +- **Connect Integration**: Provides tools that interact with Connect's API +- **Interactive Documentation**: Clean web interface that displays available tools and their parameters +- **Copy-to-Clipboard Endpoint**: Easy sharing of MCP server URLs +- **Automatic Tool Discovery**: MCP clients can dynamically discover and use available tools + +## Available Tools + +### Dataset Operations +- **`list_known_datasets`**: Lists all available datasets in the server +- **`calculate_summary_statistics`**: Generates comprehensive summary statistics for specified datasets + +### Connect Integration +- **`connect_whoami`**: Calls the Connect `/me` endpoint using API key authentication + +### Sample Datasets +- **Iris Dataset**: Classic machine learning dataset from scikit-learn +- **Sample Data**: Simple demonstration dataset with mixed data types + +## Prerequisites + +### Connect Requirements + +1. **Minimum Connect Version**: 2025.04.0 or later +2. **API Publishing**: Must be enabled on your Connect server +3. **Python 3.10+**: Required for the MCP SDK + +## Deployment + +### 1. Deploy the Extension +Deploy this extension to your Connect server. If you are deploying through the Connect Gallery, see the [Gallery documentation](https://docs.posit.co/connect/user/publishing-connect-gallery/). + +### 2. Access the Server +Once deployed, the extension provides: +- **Web Interface**: Visit the content URL to see available tools and copy the MCP endpoint +- **MCP Endpoint**: Located at `{direct-content-url}/mcp` for MCP client connections + +Please note that it is recommended to set the minimum number of instances/processes for this application to >= 1 in the content settings. 
This will ensure that the MCP server is always available for clients to connect. See the [content process configuration documentation](https://docs.posit.co/connect/user/content-settings/index.html#process-configurations). + +### 3. Use with MCP Clients +The server can be consumed by any MCP-compatible client, including: +- [Simple Shiny Chat](../simple-shiny-chat-with-mcp/README.md) extension +- Local MCP clients +- AI development environments that support MCP + +## Usage Examples + +### With Simple Shiny Chat Extension + +1. Deploy both the Simple MCP Server and Simple Shiny Chat extensions +2. In the chat application, add the MCP server URL from this extension +3. Ask the AI assistant to: + - "What datasets are available?" + - "Calculate summary statistics for the iris dataset" + - "Show me information about my Connect user account" + +### With Other MCP Clients + +Connect to the MCP endpoint at `{your-connect-server}/content/{content-guid}/mcp` and use the available tools programmatically. + +If you are not using the Simple Shiny Chat extension to connect to this MCP server, you will need to ensure that you can specify your Connect API key in both the `x-mcp-authorization` header and the `authorization` header for Connect API calls. Some MCP clients may not support that directly today (June 2025). 
+ +## Architecture + +The application consists of several key components: + +- **FastMCP Framework**: Handles MCP protocol implementation and tool registration +- **FastAPI Application**: Provides HTTP transport and web interface +- **Tool Implementations**: Individual functions that implement business logic +- **Template Engine**: Jinja2 templates for the documentation interface +- **Dataset Storage**: In-memory storage for demonstration datasets + +## Troubleshooting + +### Deployment Issues +- Ensure your Connect server supports API publishing +- Verify Python 3.10+ is available in your Connect environment +- Check that all dependencies are properly installed + +### MCP Client Connection Issues +- Verify the MCP endpoint URL is correct (`{content-url}/mcp`) +- Ensure the server is accessible from the client +- Check Connect content permissions + +### Tool Execution Errors +- Review tool parameter requirements and types +- Verify API key format for Connect integration tools +- Check Connect logs for detailed error messages + +## Integration with Simple Shiny Chat + +This MCP server is designed to work seamlessly with the [Simple Shiny Chat](../simple-shiny-chat-with-mcp/README.md) extension: + +1. Deploy both extensions to your Connect server +2. Configure the chat application with appropriate LLM credentials +3. Register this MCP server in the chat interface +4. 
Start conversing with AI assistants that can use these tools + +## Related Resources + +- [Model Context Protocol Documentation](https://modelcontextprotocol.io/) +- [FastMCP Framework](https://github.com/jlowin/fastmcp) +- [MCP Framework](https://github.com/modelcontextprotocol/python-sdk) +- [FastAPI Documentation](https://fastapi.tiangolo.com/) +- [Simple Shiny Chat Extension](../simple-shiny-chat-with-mcp/README.md) +- [Posit Connect Extension Gallery Guide](https://docs.posit.co/connect/admin/connect-gallery/index.html) + +## Support + +For issues specific to this extension, please check the [Connect Extensions repository](https://github.com/posit-dev/connect-extensions). diff --git a/extensions/simple-mcp-server/images/demo.png b/extensions/simple-mcp-server/images/demo.png new file mode 100644 index 00000000..96f878a6 Binary files /dev/null and b/extensions/simple-mcp-server/images/demo.png differ diff --git a/extensions/simple-mcp-server/index.html.jinja b/extensions/simple-mcp-server/index.html.jinja new file mode 100644 index 00000000..7472e719 --- /dev/null +++ b/extensions/simple-mcp-server/index.html.jinja @@ -0,0 +1,164 @@ + + + + + + {{ server_name }} + + + +
+

{{ server_name }}

+

This server provides data-related tools via the Model Context Protocol (MCP). + Please note that in order for this endpoint to be reliable, it is recommended to set the minimum + number of instances/processes to 1 in the content settings.

+ +

The MCP endpoint is available at:

+
+ + + +
+ +

Available MCP Tools:

+ {% if tools %} + + {% else %} +

No tools are currently available.

+ {% endif %} +
+ + + + diff --git a/extensions/simple-mcp-server/main.py b/extensions/simple-mcp-server/main.py new file mode 100644 index 00000000..a7cee81a --- /dev/null +++ b/extensions/simple-mcp-server/main.py @@ -0,0 +1,152 @@ +import contextlib +import json +import pandas as pd +from sklearn.datasets import load_iris +from fastapi import FastAPI, Request +from fastapi.templating import Jinja2Templates +from fastmcp import FastMCP, Context +from fastmcp.exceptions import ToolError +from posit.connect.client import Client as ConnectClient +import urllib + +# --- FastMCP Server Initialization --- +mcp = FastMCP( + name="Simple MCP Server", + instructions="MCP server for dataset operations and Connect 'whoami' via FastAPI.", +) + +# --- Datasets --- +# Simple in-memory datasets for demonstration +_datasets_store = { + "iris": lambda: pd.DataFrame(data=load_iris(as_frame=True).frame), + "sample_data": lambda: pd.DataFrame( + {"A": [1, 2, 3, 4, 5], "B": [5, 4, 3, 2, 1], "C": ["x", "y", "x", "z", "y"]} + ), +} + + +# --- MCP Tool Implementations --- +@mcp.tool() +def list_known_datasets() -> str: + """Lists available dataset names.""" + return str(list(_datasets_store.keys())) + + +@mcp.tool() +def calculate_summary_statistics(dataset_name: str) -> str: + """ + Calculates summary statistics for a specified dataset. + Returns the summary as a string or an error. + """ + if dataset_name not in _datasets_store: + raise ToolError(f"Dataset '{dataset_name}' not found.") + try: + df = _datasets_store[dataset_name]() + summary = df.describe(include="all").to_string() + return summary + except Exception as e: + raise ToolError(f"Error processing dataset '{dataset_name}': {str(e)}") + + +@mcp.tool() +async def connect_whoami(context: Context) -> str: + """ + Calls the Posit Connect /me endpoint using an API key from the Authorization header. + The Authorization header should be in the format: 'Key YOUR_API_KEY'. 
+ """ + + # context.request is a starlette.requests.Request + http_request = context.request_context.request + if http_request is None: + raise ToolError( + "Request context not available. This tool requires an HTTP-based transport." + ) + + auth_header = http_request.headers.get("x-mcp-authorization") + + if not auth_header: + raise ToolError("Authorization header is missing.") + + parts = auth_header.split() + if len(parts) != 2 or parts[0].lower() != "key": + raise ToolError( + "Invalid Authorization header format. Expected 'Key YOUR_API_KEY'." + ) + + api_key = parts[1] + + try: + connect_client = ConnectClient(api_key=api_key) + return json.dumps(connect_client.me) + except Exception as e: + raise ToolError(f"Error calling Connect API: {str(e)}") + + +def get_tools_info(): + tools = [] + for tool_name, tool_def in mcp._tool_manager._tools.items(): + parameters = {} + for prop_name, prop in tool_def.parameters["properties"].items(): + parameters[prop_name] = { + "name": prop["title"], + "type": prop["type"], + "required": False, + } + + if "required" in tool_def.parameters: + for required_prop_name in tool_def.parameters["required"]: + if required_prop_name in parameters: + parameters[required_prop_name]["required"] = True + + tools.append( + { + "name": tool_name, + "description": tool_def.description or "No description available.", + "parameters": parameters, + } + ) + return tools + + +@contextlib.asynccontextmanager +async def lifespan(app: FastAPI): + async with contextlib.AsyncExitStack() as stack: + await stack.enter_async_context(mcp.session_manager.run()) + yield + + +mcp_app = mcp.http_app(path="/mcp") +app = FastAPI(title="Simple MCP Server with FastAPI", lifespan=mcp_app.lifespan) +templates = Jinja2Templates(directory=".") + + +@app.get("/") +async def get_index_page(request: Request): + """Serves the HTML index page using a Jinja2 template.""" + tools = get_tools_info() + endpoint = urllib.parse.urljoin(request.url._url, "mcp") + return 
templates.TemplateResponse( + "index.html.jinja", + { + "request": request, + "server_name": mcp.name, + "endpoint": endpoint, + "tools": tools, + }, + ) + + +app.mount("/", mcp_app) + + +# --- Uvicorn Runner (for local development) --- +if __name__ == "__main__": + import uvicorn + + print("Starting FastAPI server with MCP...") + print(f"MCP Server Name: {mcp.name}") + print("Registered MCP Tools:") + for tool_name in mcp._tool_manager._tools: + print(f" - {tool_name}") + + uvicorn.run(app, host="127.0.0.1", port=8001) diff --git a/extensions/simple-mcp-server/manifest.json b/extensions/simple-mcp-server/manifest.json new file mode 100644 index 00000000..50037d79 --- /dev/null +++ b/extensions/simple-mcp-server/manifest.json @@ -0,0 +1,45 @@ +{ + "version": 1, + "locale": "en_US.UTF-8", + "metadata": { + "appmode": "python-fastapi", + "entrypoint": "main" + }, + "python": { + "version": "3.12.7", + "package_manager": { + "name": "pip", + "version": "24.2", + "package_file": "requirements.txt" + } + }, + "environment": { + "python": { + "requires": ">=3.10" + } + }, + "extension": { + "name": "simple-mcp-server", + "title": "Simple MCP Server", + "description": "A simple example of a Connect extension that serves tools via the MCP protocol.", + "homepage": "https://github.com/posit-dev/connect-extensions/tree/main/extensions/simple-mcp-server", + "category": "example", + "tags": ["python", "fastapi", "mcp"], + "requiredFeatures": [ + "API Publishing" + ], + "minimumConnectVersion": "2025.04.0", + "version": "0.0.1" + }, + "files": { + "requirements.txt": { + "checksum": "3f6b39234649b12256ccab08952dc062" + }, + "main.py": { + "checksum": "d41d8cd98f00b204e9800998ecf8427e" + }, + "index.html.jinja": { + "checksum": "eb078517ea04264c8578e0e55afb7685" + } + } +} diff --git a/extensions/simple-mcp-server/pyproject.toml b/extensions/simple-mcp-server/pyproject.toml new file mode 100644 index 00000000..e50f31e8 --- /dev/null +++ 
b/extensions/simple-mcp-server/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "simple-mcp-server" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.10" +dependencies = [ + "fastapi==0.115.12", + "fastmcp>=2.9.0", + "jinja2==3.1.6", + "pandas==2.3.0", + "posit-sdk==0.10.0", + "scikit-learn==1.7.0", +] diff --git a/extensions/simple-mcp-server/requirements.txt b/extensions/simple-mcp-server/requirements.txt new file mode 100644 index 00000000..8020fe8c --- /dev/null +++ b/extensions/simple-mcp-server/requirements.txt @@ -0,0 +1,56 @@ +# This file was autogenerated by uv via the following command: +# uv export -o requirements.txt --no-hashes +annotated-types==0.7.0 +anyio==4.9.0 +authlib==1.6.0 +certifi==2025.4.26 +cffi==1.17.1 ; platform_python_implementation != 'PyPy' +charset-normalizer==3.4.2 +click==8.2.1 +colorama==0.4.6 ; platform_system == 'Windows' +cryptography==45.0.4 +exceptiongroup==1.3.0 +fastapi==0.115.12 +fastmcp==2.9.0 +h11==0.16.0 +httpcore==1.0.9 +httpx==0.28.1 +httpx-sse==0.4.1 +idna==3.10 +jinja2==3.1.6 +joblib==1.5.1 +markdown-it-py==3.0.0 +markupsafe==3.0.2 +mcp==1.9.4 +mdurl==0.1.2 +numpy==2.2.6 ; python_full_version < '3.11' +numpy==2.3.0 ; python_full_version >= '3.11' +openapi-pydantic==0.5.1 +packaging==25.0 +pandas==2.3.0 +posit-sdk==0.10.0 +pycparser==2.22 ; platform_python_implementation != 'PyPy' +pydantic==2.11.5 +pydantic-core==2.33.2 +pydantic-settings==2.10.1 +pygments==2.19.2 +python-dateutil==2.9.0.post0 +python-dotenv==1.1.1 +python-multipart==0.0.20 +pytz==2025.2 +requests==2.32.4 +rich==14.0.0 +scikit-learn==1.7.0 +scipy==1.15.3 +shellingham==1.5.4 +six==1.17.0 +sniffio==1.3.1 +sse-starlette==2.3.6 +starlette==0.46.2 +threadpoolctl==3.6.0 +typer==0.16.0 +typing-extensions==4.14.0 +typing-inspection==0.4.1 +tzdata==2025.2 +urllib3==2.4.0 +uvicorn==0.34.3 ; sys_platform != 'emscripten' diff --git a/extensions/simple-shiny-chat-with-mcp/.gitignore 
b/extensions/simple-shiny-chat-with-mcp/.gitignore new file mode 100644 index 00000000..cfe5a687 --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/.gitignore @@ -0,0 +1,3 @@ +.venv +.env +rsconnect-python diff --git a/extensions/simple-shiny-chat-with-mcp/.python-version b/extensions/simple-shiny-chat-with-mcp/.python-version new file mode 100644 index 00000000..2c073331 --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/.python-version @@ -0,0 +1 @@ +3.11 diff --git a/extensions/simple-shiny-chat-with-mcp/README.md b/extensions/simple-shiny-chat-with-mcp/README.md new file mode 100644 index 00000000..ff30d486 --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/README.md @@ -0,0 +1,140 @@ +# Simple Shiny Chat + +A Shiny chat application that demonstrates how to deploy a Shiny chat application, as well as how to utilize the Model Context Protocol (MCP) to enable LLMs to interact with remote tools and services hosted on Posit Connect and beyond. + +## Overview + +This extension showcases Connect's ability to take full advantage of the Model Context Protocol, a new standard that enables Large Language Models to run tools hosted in separate processes or servers. The Simple Shiny Chat extension is designed to be paired with MCP servers deployed on Connect, creating a powerful ecosystem where AI assistants can dynamically access and execute tools. 
+ +![Demo Screenshot](./images/demo.png) + +## Features + +- **Interactive Chat Interface**: Clean, modern chat UI built with Shiny for Python +- **Dynamic MCP Server Registration**: Add and remove MCP servers on-the-fly through the sidebar +- **Multi-Provider LLM Support**: Compatible with OpenAI, Anthropic/BedrockAnthropic, Google, and other providers via [chatlas](https://posit-dev.github.io/chatlas/) +- **Tool Discovery**: Automatically discovers and displays available tools from registered MCP servers +- **Secure Authentication**: Uses Connect's OAuth integrations and visitor API keys for secure server communication +- **Real-time Streaming**: Supports streaming responses for better user experience + +## Prerequisites + +### Required Environment Variables + +Before deploying this extension, you must configure the following environment variables: + +#### LLM Provider Configuration +- `CHATLAS_CHAT_PROVIDER`: The LLM provider to use (e.g., "openai", "anthropic", "google") +- `CHATLAS_CHAT_ARGS`: JSON string with provider-specific arguments (e.g., `{"model": "gpt-4o"}`) + +For more details on supported providers and their arguments, see the [Chatlas documentation](https://posit-dev.github.io/chatlas/reference/ChatAuto.html). + +#### API Keys +Set the appropriate API key for your chosen provider: +- `OPENAI_API_KEY`: For OpenAI models +- `ANTHROPIC_API_KEY`: For Anthropic models +- `GOOGLE_API_KEY`: For Google models + +### Connect Requirements + +1. **Minimum Connect Version**: 2025.04.0 or later +2. **Minimum Python Version**: 3.10 or later +3. **OAuth Integrations**: Must be enabled on your Connect server +4. **Connect Visitor API Key**: This extension requires access to the Connect API on behalf of the visiting user to list their available content. In the "Access" pane of the content settings, add a "Connect Visitor API Key" integration. 
+ +## Setup Examples + +### OpenAI Configuration +```bash +CHATLAS_CHAT_PROVIDER="openai" +CHATLAS_CHAT_ARGS='{"model": "gpt-4o"}' +OPENAI_API_KEY="sk-..." +``` + +### Anthropic Configuration +```bash +CHATLAS_CHAT_PROVIDER="anthropic" +CHATLAS_CHAT_ARGS='{"model": "claude-3-5-sonnet-20241022"}' +ANTHROPIC_API_KEY="sk-ant-..." +``` + +### Google Configuration +```bash +CHATLAS_CHAT_PROVIDER="google" +CHATLAS_CHAT_ARGS='{"model": "gemini-1.5-pro"}' +GOOGLE_API_KEY="AI..." +``` + +### Anthropic on AWS Bedrock + +If the Connect server is running on an EC2 instance with an IAM role that grants access to Bedrock, no environment variables are needed. The application will automatically detect and use AWS credentials. It defaults to the `us.anthropic.claude-sonnet-4-20250514-v1:0` model. Otherwise, you can set the following environment variables with your AWS credentials: + +- `CHATLAS_CHAT_PROVIDER`: `bedrock-anthropic` +- `CHATLAS_CHAT_ARGS`: `{"model": "us.anthropic.claude-sonnet-4-20250514-v1:0", "aws_access_key": "...", "aws_secret_key": "...", "aws_session_token": "..."}` (if not using IAM roles) + +## Usage + +### 1. Deploy the Extension +Deploy this extension to your Connect server with the required environment variables configured. If you are deploying through the Connect gallery, see the documentation detailed [here](https://docs.posit.co/connect/user/publishing-connect-gallery/). + +### 2. Configure Access +In the Connect dashboard: +1. Navigate to the content access panel +2. Add a "Connect Visitor API Key" integration +3. This enables the chat application to authenticate with MCP servers + +### 3. Register MCP Servers +1. In the chat application sidebar, enter the URL of an MCP server deployed on Connect +2. Click "Add Server" to register it +3. Available tools from the server will be displayed as badges +4. The LLM can now use these tools in conversation + +### 4. 
Start Chatting +Ask the AI assistant to help you with tasks that can be accomplished using the registered MCP tools. The assistant will: +- Show you what tools are available +- Ask for confirmation before executing actions that create, update, or delete data +- Present execution plans for complex multi-step operations +- Display raw tool outputs without modification + +## Example MCP Servers + +This extension is designed to work with Streamable HTTP MCP servers like this [example](../simple-mcp-server/README.md). + +## Architecture + +The application consists of several key components: + +- **Chat Interface**: Built with Shiny's chat UI components for modern, responsive messaging +- **MCP Client**: Handles registration and communication with MCP servers +- **Authentication Layer**: Manages Connect visitor API keys for secure server access +- **Server Registry**: Dynamic management of registered MCP servers and their tools + +## Troubleshooting + +### Setup Issues +If you see the setup screen instead of the chat interface: +1. Verify all required environment variables are set +2. Ensure the Connect Visitor API Key integration is properly configured +3. 
Check that your Connect version meets the minimum requirements + +### MCP Server Connection Issues +- Verify the MCP server URL is correct and accessible +- Ensure the server is properly deployed on Connect +- Check that authentication headers are correctly configured + +### Chat Response Issues +- Verify your LLM API key is valid and has sufficient credits/quota +- Check the `CHATLAS_CHAT_ARGS` configuration matches your provider's requirements +- Review Connect logs for any authentication or network errors + +## Related Resources + +- [Model Context Protocol Documentation](https://modelcontextprotocol.io/) +- [chatlas Documentation](https://posit-dev.github.io/chatlas/) +- [Shiny for Python Chat Components](https://shiny.posit.co/py/components/display-messages/chat/) +- [Posit Connect Extension Gallery Guide](https://docs.posit.co/connect/admin/connect-gallery/index.html) + +## Support + +For issues specific to this extension, please check the [Connect Extensions repository](https://github.com/posit-dev/connect-extensions). 
+ diff --git a/extensions/simple-shiny-chat-with-mcp/app.py b/extensions/simple-shiny-chat-with-mcp/app.py new file mode 100644 index 00000000..e63ade56 --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/app.py @@ -0,0 +1,402 @@ +from datetime import datetime +import os +import traceback +import uuid + +import chatlas +import faicons +import uvicorn +from posit.connect import Client +from posit.connect.errors import ClientError +from shiny import App, Inputs, Outputs, reactive, render, ui +from shiny.session._session import AppSession +from dotenv import load_dotenv + +load_dotenv() + + +def check_aws_bedrock_credentials(): + # Check if AWS credentials are available in the environment + # that can be used to access Bedrock + try: + chat = chatlas.ChatBedrockAnthropic( + model="us.anthropic.claude-sonnet-4-20250514-v1:0", + ) + chat.chat("test", echo="none") + return True + except Exception as e: + print( + f"AWS Bedrock credentials check failed and will fallback to checking for values for the CHATLAS_CHAT_PROVIDER and CHATLAS_CHAT_ARGS env vars. 
Err: {e}" + ) + return False + + +CHATLAS_CHAT_PROVIDER = os.getenv("CHATLAS_CHAT_PROVIDER") +CHATLAS_CHAT_ARGS = os.getenv("CHATLAS_CHAT_ARGS") +HAS_AWS_BEDROCK_CREDENTIALS = check_aws_bedrock_credentials() + +setup_ui = ui.page_fillable( + ui.tags.style( + """ + body { + padding: 0; + margin: 0; + background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); + } + + .setup-container { + max-width: 800px; + margin: 0 auto; + padding: 2rem; + min-height: 100vh; + display: flex; + align-items: center; + justify-content: center; + } + .setup-card { + background: white; + border-radius: 16px; + padding: 3rem; + box-shadow: 0 20px 40px rgba(0, 0, 0, 0.1); + width: 100%; + } + .setup-title { + color: #2d3748; + font-weight: 700; + margin-bottom: 2rem; + text-align: center; + font-size: 2.5rem; + } + .setup-section-title { + color: #4a5568; + font-weight: 600; + margin-top: 2.5rem; + margin-bottom: 1rem; + font-size: 1.5rem; + border-left: 4px solid #667eea; + padding-left: 1rem; + } + .setup-description { + color: #718096; + line-height: 1.6; + margin-bottom: 1.5rem; + } + .setup-code-block { + background: #f7fafc; + border: 1px solid #e2e8f0; + border-radius: 8px; + padding: 1.5rem; + font-family: 'Monaco', 'Menlo', 'Ubuntu Mono', monospace; + font-size: 0.9rem; + color: #2d3748; + margin: 1rem 0; + overflow-x: auto; + } + .setup-link { + color: #667eea; + text-decoration: none; + font-weight: 500; + } + .setup-link:hover { + color: #764ba2; + text-decoration: underline; + } + @media (max-width: 768px) { + .setup-container { + padding: 1rem; + } + .setup-card { + padding: 2rem; + } + .setup-title { + font-size: 2rem; + } + } + """ + ), + ui.div( + ui.div( + ui.h1("Setup", class_="setup-title"), + ui.h2("LLM API", class_="setup-section-title"), + ui.div( + ui.HTML( + "This app requires the CHATLAS_CHAT_PROVIDER and CHATLAS_CHAT_ARGS environment variables to be " + "set along with an LLM API Key in the content access panel. 
Please set them in your environment before running the app. " + 'See the documentation for more details.' + ), + class_="setup-description", + ), + ui.h3("Example for OpenAI API", class_="setup-section-title"), + ui.pre( + """CHATLAS_CHAT_PROVIDER = "openai" +CHATLAS_CHAT_ARGS = {"model": "gpt-4o"} +OPENAI_API_KEY = "" """, + class_="setup-code-block", + ), + ui.h2("Connect Visitor API Key", class_="setup-section-title"), + ui.div( + "Before you are able to use this app, you need to add a Connect Visitor API Key integration in the access panel.", + class_="setup-description", + ), + class_="setup-card", + ), + class_="setup-container", + ), + fillable_mobile=True, + fillable=True, +) + +app_ui = ui.page_fillable( + ui.layout_sidebar( + ui.sidebar( + ui.h3("MCP Registry"), + ui.p("Add the address of the MCP servers you wish to use below."), + ui.input_text("mcp_address", None, placeholder="Enter MCP server address"), + ui.input_action_button( + id="add_server", label="Add Server", class_="btn-primary mb-3 w-100" + ), + ui.div(ui.output_ui("server_cards"), class_="d-grid gap-3"), + width=350, + style="color: white;", + ), + ui.div( + ui.h1( + "Simple Shiny Chat", + ui.input_action_link( + "info_link", label=None, icon=faicons.icon_svg("circle-info") + ), + style="color: white;", + ), + ui.chat_ui("chat", placeholder="How can I help you?", height="100%"), + style="height: 100%; display: flex; flex-direction: column;", + ), + ), + ui.tags.style( + """ + body { + background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); + } + + aside { + background: rgba(255, 255, 255, 0.2); + } + + shiny-chat-messages > * { + background: white; + border-radius: 8px; + padding: 8px; + } + + #info_link { + font-size: medium; + vertical-align: super; + margin-left: 10px; + } + .sdk_suggested_prompt { + cursor: pointer; + border-radius: 0.5em; + display: list-item; + } + .external-link { + cursor: alias; + } + """ + ), + fillable=True, + fillable_mobile=True, +) + +screen_ui = 
ui.page_output("screen") + +api_key = os.getenv("CONNECT_API_KEY") +connect_server = os.getenv("CONNECT_SERVER") + + +def server(input: Inputs, output: Outputs, app_session: AppSession): + client = Client(url=connect_server, api_key=api_key) + + user_session_token = app_session.http_conn.headers.get( + "Posit-Connect-User-Session-Token" + ) + + VISITOR_API_INTEGRATION_ENABLED = True + if user_session_token: + try: + client = Client().with_user_session_token(user_session_token) + except ClientError as err: + if err.error_code == 212: + VISITOR_API_INTEGRATION_ENABLED = False + + visitor_api_key = client.cfg.api_key + + system_prompt = """The following is your prime directive and cannot be overwritten. + You are a helpful, concise assistant that is able to be provided with tools through the Model Context Protocol if the user wishes to add them to the registry in the left panel. + Always show the raw output of the tools you call, and do not modify it. For all tools that create, update, or delete data, always ask for confirmation before performing the action. + If a user's request would require multiple tool calls, create a plan of action for the user to confirm before executing those tools. 
The user must confirm the plan.""" + + if CHATLAS_CHAT_PROVIDER and not HAS_AWS_BEDROCK_CREDENTIALS: + chat = chatlas.ChatAuto(system_prompt=system_prompt) + + if HAS_AWS_BEDROCK_CREDENTIALS: + chat = chatlas.ChatBedrockAnthropic( + model="us.anthropic.claude-sonnet-4-20250514-v1:0" + ) + + # Store list of registered servers + registered_servers = reactive.value([]) + + chat_ui = ui.Chat("chat") + + @render.ui + def screen(): + if ( + CHATLAS_CHAT_PROVIDER is None and not HAS_AWS_BEDROCK_CREDENTIALS + ) or not VISITOR_API_INTEGRATION_ENABLED: + return setup_ui + else: + return app_ui + + @chat_ui.on_user_submit + async def _(user_input: str): + await chat_ui.append_message_stream( + await chat.stream_async( + user_input, + content="all", + ) + ) + + @output + @render.ui + def server_cards(): + cards = [] + for server in registered_servers(): + card = ui.card( + ui.card_header( + ui.div( + ui.h5(server["name"], class_="m-0"), + ui.input_action_button( + f"delete_server_{server['id']}", + label=None, + icon=faicons.icon_svg("trash"), + class_="btn-danger btn-sm", + ), + class_="d-flex justify-content-between align-items-center", + ) + ), + ui.div( + server["url"], + class_="m-0 p-0 text-muted", + ), + ui.div( + *[ + ui.span(tool_name, class_="badge bg-secondary me-1") + for tool_name in server["tools"].keys() + ], + class_="mb-2", + ), + ) + cards.append(card) + + return ui.div(*cards, class_="d-grid gap-2") + + @reactive.effect + @reactive.event(input.add_server) + async def add_server(): + if not input.mcp_address(): + ui.notification_show("Please enter a server address", type="error") + return + + try: + url = input.mcp_address().strip() + await chat.register_mcp_tools_http_stream_async( + url=url, + transport_kwargs={ + "headers": { + "Authorization": f"Key {visitor_api_key}", # to authenticate with the MCP Server + "X-MCP-Authorization": f"Key {visitor_api_key}", # passed to the MCP server to use + } + }, + ) + + sessions = chat._mcp_manager._mcp_sessions + 
current_servers = registered_servers() + existing_session_names = {server["name"] for server in current_servers} + + new_servers = [] + for session_name, session in sessions.items(): + if session_name not in existing_session_names: + new_servers.append( + { + "id": uuid.uuid5( + uuid.NAMESPACE_URL, url + datetime.now().isoformat() + ).hex, + "name": session_name, + "url": url, + "tools": session.tools, + } + ) + + if new_servers: + registered_servers.set(current_servers + new_servers) + + # Clear the input + ui.update_text("mcp_address", value="") + ui.notification_show("Server added successfully", type="message") + + except Exception as e: + ui.notification_show(f"Error adding server: {str(e)}", type="error") + + @reactive.effect + async def handle_delete_buttons(): + # Look for any delete button clicks + servers = registered_servers() + for server in servers: + if input[f"delete_server_{server['id']}"](): + try: + # Remove server from list + new_servers = [s for s in servers if s["id"] != server["id"]] + # Get the server from the list and unregister its tools + server_to_remove = next( + (s for s in servers if s["id"] == server["id"]), None + ) + if server_to_remove: + try: + await chat.cleanup_mcp_tools( + names=[server_to_remove["name"]] + ) + except Exception as e: + traceback.print_exc() + ui.notification_show( + f"Error cleaning up server {server['id']}: {str(e)}", + type="error", + ) + registered_servers.set(new_servers) + ui.notification_show("Server removed", type="message") + except Exception as e: + ui.notification_show( + f"Error removing server {server['id']}: {str(e)}", type="error" + ) + break + + @reactive.effect + @reactive.event(input.info_link) + async def _(): + modal = ui.modal( + ui.h1("Information"), + ui.h3("Model"), + ui.pre( + str(chat.provider.__dict__), + ), + easy_close=True, + size="xl", + ) + ui.modal_show(modal) + + +app = App( + screen_ui, + server, +) + +if __name__ == "__main__": + uvicorn.run(app, host="0.0.0.0", port=8000) diff 
--git a/extensions/simple-shiny-chat-with-mcp/images/demo.png b/extensions/simple-shiny-chat-with-mcp/images/demo.png new file mode 100644 index 00000000..39d1259a Binary files /dev/null and b/extensions/simple-shiny-chat-with-mcp/images/demo.png differ diff --git a/extensions/simple-shiny-chat-with-mcp/manifest.json b/extensions/simple-shiny-chat-with-mcp/manifest.json new file mode 100644 index 00000000..24753b5c --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/manifest.json @@ -0,0 +1,48 @@ +{ + "version": 1, + "locale": "en_US.UTF-8", + "metadata": { + "appmode": "python-shiny", + "entrypoint": "app" + }, + "python": { + "version": "3.11.3", + "package_manager": { + "name": "pip", + "version": "22.3.1", + "package_file": "requirements.txt" + } + }, + "environment": { + "python": { + "requires": ">=3.10" + } + }, + "extension": { + "name": "simple-shiny-chat-with-mcp", + "title": "Simple Shiny Chat with MCP Support", + "description": "A simple Shiny chat application that demonstrates how to use the MCP client to facilitate remote tool calls.", + "homepage": "https://github.com/posit-dev/connect-extensions/tree/main/extensions/simple-shiny-chat-with-mcp", + "category": "example", + "tags": ["python", "shiny", "mcp", "llm"], + "requiredFeatures": [ + "OAuth Integrations" + ], + "minimumConnectVersion": "2025.04.0", + "version": "0.0.1" + }, + "files": { + "requirements.txt": { + "checksum": "29cc272aac150ac0aee03574874b78cf" + }, + "README.md": { + "checksum": "d41d8cd98f00b204e9800998ecf8427e" + }, + "app.py": { + "checksum": "61ea50f9418a4aaa6d56d566175f2dd7" + }, + "mcp_client.py": { + "checksum": "4705bcd135d94c9d2d9cf18af186d9d0" + } + } +} diff --git a/extensions/simple-shiny-chat-with-mcp/pyproject.toml b/extensions/simple-shiny-chat-with-mcp/pyproject.toml new file mode 100644 index 00000000..dee0bb3e --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/pyproject.toml @@ -0,0 +1,22 @@ +[project] +name = "simple-shiny-chat-with-mcp" +version = 
"0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.10" +dependencies = [ + "anthropic[bedrock]>=0.54.0", + "boto3>=1.38.34", + "chatlas", + "dotenv>=0.9.9", + "faicons>=0.2.2", + "google-genai>=1.21.1", + "mcp>=1.9.3", + "nest-asyncio>=1.6.0", + "openai>=1.86.0", + "posit-sdk>=0.10.0", + "shiny>=1.4.0", +] + +[tool.uv.sources] +chatlas = { git = "https://github.com/posit-dev/chatlas", rev = "main" } diff --git a/extensions/simple-shiny-chat-with-mcp/requirements.txt b/extensions/simple-shiny-chat-with-mcp/requirements.txt new file mode 100644 index 00000000..db95783e --- /dev/null +++ b/extensions/simple-shiny-chat-with-mcp/requirements.txt @@ -0,0 +1,72 @@ +# This file was autogenerated by uv via the following command: +# uv export -o requirements.txt --no-hashes +annotated-types==0.7.0 +anthropic==0.54.0 +anyio==4.9.0 +appdirs==1.4.4 +asgiref==3.8.1 +boto3==1.38.34 +botocore==1.38.34 +cachetools==5.5.2 +certifi==2025.4.26 +charset-normalizer==3.4.2 +chatlas @ git+https://github.com/posit-dev/chatlas@8871240172dd470f7eee10028c3be8ac9f64a5eb +click==8.2.1 ; platform_system != 'Emscripten' or sys_platform != 'emscripten' +colorama==0.4.6 ; platform_system == 'Windows' +distro==1.9.0 +dotenv==0.9.9 +faicons==0.2.2 +google-auth==2.40.3 +google-genai==1.21.1 +h11==0.16.0 +htmltools==0.6.0 +httpcore==1.0.9 +httpx==0.28.1 +httpx-sse==0.4.0 +idna==3.10 +jinja2==3.1.6 +jiter==0.10.0 +jmespath==1.0.1 +linkify-it-py==2.0.3 +markdown-it-py==3.0.0 +markupsafe==3.0.2 +mcp==1.9.3 +mdit-py-plugins==0.4.2 +mdurl==0.1.2 +narwhals==1.42.0 +nest-asyncio==1.6.0 +openai==1.86.0 +orjson==3.10.18 +packaging==25.0 +posit-sdk==0.10.0 +prompt-toolkit==3.0.51 ; platform_system != 'Emscripten' +pyasn1==0.6.1 +pyasn1-modules==0.4.2 +pydantic==2.11.5 +pydantic-core==2.33.2 +pydantic-settings==2.9.1 +pygments==2.19.2 +python-dateutil==2.9.0.post0 +python-dotenv==1.1.0 +python-multipart==0.0.20 +questionary==2.1.0 ; platform_system != 'Emscripten' 
+requests==2.32.4 +rich==14.0.0 +rsa==4.9.1 +s3transfer==0.13.0 +setuptools==80.9.0 ; python_full_version >= '3.12' +shiny==1.4.0 +six==1.17.0 +sniffio==1.3.1 +sse-starlette==2.3.6 +starlette==0.47.0 +tenacity==8.5.0 +tqdm==4.67.1 +typing-extensions==4.14.0 +typing-inspection==0.4.1 +uc-micro-py==1.0.3 +urllib3==2.4.0 +uvicorn==0.34.3 ; platform_system != 'Emscripten' or sys_platform != 'emscripten' +watchfiles==1.0.5 ; platform_system != 'Emscripten' +wcwidth==0.2.13 ; platform_system != 'Emscripten' +websockets==15.0.1