diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..8646659 --- /dev/null +++ b/.env.example @@ -0,0 +1,27 @@ +# CodeRunner Configuration Example +# Copy this file to .env and modify as needed + +# Jupyter Settings +CODERUNNER_JUPYTER_HOST=127.0.0.1 +CODERUNNER_JUPYTER_PORT=8888 +CODERUNNER_JUPYTER_WS_URL=ws://127.0.0.1:8888 + +# Directory Settings +CODERUNNER_SHARED_DIR=/app/uploads + +# Execution Settings +CODERUNNER_EXECUTION_TIMEOUT=300.0 +CODERUNNER_WEBSOCKET_TIMEOUT=1.0 +CODERUNNER_MAX_WAIT_JUPYTER=30 + +# FastMCP Settings +CODERUNNER_FASTMCP_HOST=0.0.0.0 +CODERUNNER_FASTMCP_PORT=8222 + +# Logging Settings +CODERUNNER_LOG_LEVEL=INFO +CODERUNNER_LOG_FORMAT=%(asctime)s - %(levelname)s - %(message)s + +# Resource Settings (optional) +# CODERUNNER_MAX_KERNEL_MEMORY=1G +# CODERUNNER_MAX_KERNEL_CPU=1.0 \ No newline at end of file diff --git a/.gitignore b/.gitignore index 5d6c19d..b41c799 100644 --- a/.gitignore +++ b/.gitignore @@ -63,3 +63,4 @@ jupyter_runtime/ # Node modules (if any) node_modules/ +.aider* diff --git a/Dockerfile b/Dockerfile index 0393710..5d95fad 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,74 +1,66 @@ -# Use the specified standard Python 3.13.3 base image (Debian-based) -FROM python:3.13.3 +# Multi-stage build for optimized image size +FROM python:3.13.3 as builder # Set environment variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 ENV DEBIAN_FRONTEND=noninteractive -# Set working directory -WORKDIR /app - -# Install system dependencies INCLUDING systemd +# Install build dependencies RUN apt-get update && apt-get install -y --no-install-recommends \ - systemd \ - sudo \ - curl \ - iproute2 \ - ffmpeg \ - bash \ build-essential \ - procps \ - openssh-client \ - openssh-server \ - jq \ - kmod \ - && apt-get clean && rm -rf /var/lib/apt/lists/* - + && rm -rf /var/lib/apt/lists/* # Upgrade pip RUN python -m pip install --no-cache-dir --upgrade pip -# 
Copy requirements file -COPY ./requirements.txt /app/requirements.txt +# Copy requirements and install Python dependencies +COPY requirements.txt /tmp/requirements.txt +RUN pip install --no-cache-dir --user -r /tmp/requirements.txt -# Install Python dependencies -RUN pip install --no-cache-dir -r requirements.txt +# Runtime stage +FROM python:3.13.3-slim +# Set environment variables +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +ENV DEBIAN_FRONTEND=noninteractive +ENV PATH="/root/.local/bin:$PATH" -# Install the bash kernel spec for Jupyter (not working with uv) -RUN python -m bash_kernel.install +# Install runtime dependencies and tini for proper signal handling +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + jq \ + tini \ + && rm -rf /var/lib/apt/lists/* +# Set working directory +WORKDIR /app -# Copy the application code (server.py) -COPY ./server.py /app/server.py +# Copy Python packages from builder stage +COPY --from=builder /root/.local /root/.local -# Create application/jupyter directories -RUN mkdir -p /app/uploads /app/jupyter_runtime +# Install the bash kernel spec for Jupyter +RUN python -m bash_kernel.install -# # Generate SSH host keys -# RUN ssh-keygen -A +# Copy application code +COPY server.py config.py jupyter_client.py /app/ -# Clean systemd machine-id -RUN rm -f /etc/machine-id && touch /etc/machine-id +# Create application directories +RUN mkdir -p /app/uploads /app/jupyter_runtime -# --- Set environment variables for the application --- -ENV FASTMCP_HOST="0.0.0.0" -ENV FASTMCP_PORT="8222" +# Copy the entrypoint script +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh +# Set environment variables for the application +ENV CODERUNNER_FASTMCP_HOST="0.0.0.0" +ENV CODERUNNER_FASTMCP_PORT="8222" +ENV CODERUNNER_JUPYTER_HOST="0.0.0.0" +ENV CODERUNNER_JUPYTER_PORT="8888" # Expose the FastAPI port -EXPOSE 8222 - -# Start the FastAPI application -# CMD ["uvicorn", "main:app", "--host", "0.0.0.0", 
"--port", "8002", "--workers", "1", "--no-access-log"] - - -# Copy the entrypoint script into the image -COPY entrypoint.sh /entrypoint.sh - -# Make the entrypoint script executable -RUN chmod +x /entrypoint.sh +EXPOSE 8222 8888 -# Use the entrypoint script -ENTRYPOINT ["/entrypoint.sh"] +# Use tini for proper signal handling +ENTRYPOINT ["tini", "--", "/entrypoint.sh"] diff --git a/README.md b/README.md index f54e2aa..ea99d63 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ CodeRunner is an MCP (Model Context Protocol) server that executes AI-generated code in a sandboxed environment on your Mac using Apple's native [containers](https://github.com/apple/container). -**Key use case:** Process your local files (videos, images, documents, data) with remote LLMs like Claude or ChatGPT without uploading your files to the cloud. The LLM generates code that runs locally on your machine to analyze, transform, or process your files. +**Key use case:** Process your local files (videos, images, documents, data) with remote LLMs like Claude or ChatGPT without uploading your files to the cloud. The LLM generates Python code or bash scripts that run locally on your machine to analyze, transform, or process your files. 
## What CodeRunner Enables @@ -21,6 +21,7 @@ CodeRunner is an MCP (Model Context Protocol) server that executes AI-generated | Copy/paste scripts to run elsewhere | Code runs immediately, shows output/files | | LLM analyzes text descriptions of files | LLM directly processes your actual files | | Manage Python environments and packages | Pre-configured environment ready to use | +| Limited to one programming language | Supports both Python and Bash execution | ## Quick Start @@ -140,8 +141,9 @@ From [@apple/container](https://github.com/apple/container/blob/main/docs/techni ## Architecture CodeRunner consists of: -- **Sandbox Container:** Isolated execution environment with Jupyter kernel +- **Sandbox Container:** Isolated execution environment with Python and Bash Jupyter kernels - **MCP Server:** Handles communication between AI models and the sandbox +- **Multi-Kernel Support:** Automatically routes Python and Bash code to appropriate kernels ## Examples @@ -149,6 +151,175 @@ The `examples/` directory contains: - `openai-agents` - Example OpenAI agents integration - `claude-desktop` - Example Claude Desktop integration +## Development + +### Running Locally + +1. **Install dependencies:** + ```bash + pip install -r requirements.txt + ``` + +2. **Set up configuration:** + ```bash + cp .env.example .env + # Edit .env with your preferred settings + ``` + +3. **Run tests:** + ```bash + python -m pytest tests/ -v + ``` + +4. **Run the server:** + ```bash + python server.py + ``` + +### Available MCP Tools + +CodeRunner provides the following MCP tools for AI models: + +1. **`execute_python_code`** - Execute Python code in a persistent Jupyter kernel + ``` + execute_python_code(command="print('Hello, World!')") + ``` + +2. **`execute_bash_code`** - Execute Bash commands in a persistent Jupyter bash kernel + ``` + execute_bash_code(command="ls -la && echo 'Directory listing complete'") + ``` + +3. 
**`get_kernel_status`** - Check the status of available kernels + ``` + get_kernel_status() + ``` + +### Usage Examples + +**Python Code Execution:** +```python +# Data analysis +execute_python_code(""" +import pandas as pd +import matplotlib.pyplot as plt + +# Create sample data +data = {'x': [1, 2, 3, 4, 5], 'y': [2, 4, 6, 8, 10]} +df = pd.DataFrame(data) +print(df.describe()) +""") +``` + +**Bash Script Execution:** +```bash +# File operations +execute_bash_code(""" +# Create directory structure +mkdir -p /tmp/test_dir +cd /tmp/test_dir + +# Create files +echo "Hello World" > hello.txt +echo "Goodbye World" > goodbye.txt + +# List files with details +ls -la +""") +``` + +**Combined Usage:** +```python +# Use bash to prepare data, then Python to analyze +execute_bash_code("curl -o data.csv https://example.com/data.csv") +execute_python_code(""" +import pandas as pd +df = pd.read_csv('data.csv') +print(df.head()) +""") +``` + +### Configuration + +CodeRunner can be configured via environment variables with the `CODERUNNER_` prefix for consistency across all components (Python application, Docker container, and entrypoint script). See `.env.example` for available options: + +- `CODERUNNER_JUPYTER_HOST`: Jupyter server host (default: 127.0.0.1) +- `CODERUNNER_JUPYTER_PORT`: Jupyter server port (default: 8888) +- `CODERUNNER_FASTMCP_HOST`: FastMCP server host (default: 0.0.0.0) +- `CODERUNNER_FASTMCP_PORT`: FastMCP server port (default: 8222) +- `CODERUNNER_EXECUTION_TIMEOUT`: Code execution timeout in seconds (default: 300) +- `CODERUNNER_LOG_LEVEL`: Logging level (default: INFO) + +### Testing + +Run the test suite: +```bash +# Run all tests +python -m pytest tests/ + +# Run specific test files +python -m pytest tests/test_config.py -v + +# Run tests with coverage (if installed) +python -m pytest tests/ --cov=. --cov-report=html +``` + +## Recent Changes (Refactor Branch) + +### Major Refactoring Improvements + +1. 
**Modularized Architecture** + - Extracted Jupyter client logic into `jupyter_client.py` + - Created centralized configuration system in `config.py` + - Improved separation of concerns + +2. **Enhanced Configuration Management** + - Environment variable support with `CODERUNNER_` prefix + - Centralized configuration with sensible defaults + - Better local development support + +3. **Improved Error Handling** + - Custom exception classes for better error categorization + - More robust WebSocket connection handling + - Comprehensive logging and error reporting + +4. **Container Optimizations** + - Multi-stage Docker build for smaller images + - Proper signal handling with `tini` + - Better entrypoint script with error handling + - Unified configuration with `CODERUNNER_` prefix across all components + +5. **Multi-Kernel Support** + - Added Bash kernel support alongside Python + - New `execute_bash_code` MCP tool for shell commands + - Kernel status monitoring with `get_kernel_status` tool + +6. **Testing Framework** + - Comprehensive test suite with pytest + - Unit tests for configuration and Jupyter client + - Mock-based testing for isolated components + +7. **Code Quality Improvements** + - Pinned dependency versions for reproducible builds + - Cleaner, more maintainable code structure + - Better documentation and type hints + +### File Structure +``` +coderunner/ +├── config.py # Configuration management +├── jupyter_client.py # Jupyter WebSocket client +├── server.py # Main FastMCP server +├── requirements.txt # Pinned dependencies +├── Dockerfile # Optimized multi-stage build +├── entrypoint.sh # Improved container entrypoint +├── .env.example # Configuration template +├── pytest.ini # Test configuration +└── tests/ # Test suite + ├── test_config.py + └── test_jupyter_client.py +``` + ## Contributing We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines. 
diff --git a/config.py b/config.py new file mode 100644 index 0000000..b2605cc --- /dev/null +++ b/config.py @@ -0,0 +1,64 @@ +import os +import pathlib +from typing import Optional + + +class Config: + """Configuration settings for CodeRunner""" + + def __init__(self, **kwargs): + # Jupyter settings + self.jupyter_port = int(os.getenv("CODERUNNER_JUPYTER_PORT", kwargs.get("jupyter_port", 8888))) + self.jupyter_host = os.getenv("CODERUNNER_JUPYTER_HOST", kwargs.get("jupyter_host", "127.0.0.1")) + self.jupyter_ws_url = os.getenv("CODERUNNER_JUPYTER_WS_URL", kwargs.get("jupyter_ws_url", f"ws://{self.jupyter_host}:{self.jupyter_port}")) + + # Directory settings + default_shared_dir = "./uploads" if not os.path.exists("/app") else "/app/uploads" + shared_dir_path = os.getenv("CODERUNNER_SHARED_DIR", kwargs.get("shared_dir", default_shared_dir)) + self.shared_dir = pathlib.Path(shared_dir_path) + self.kernel_id_file = os.getenv("CODERUNNER_KERNEL_ID_FILE", kwargs.get("kernel_id_file", None)) + + # Execution settings + self.execution_timeout = float(os.getenv("CODERUNNER_EXECUTION_TIMEOUT", kwargs.get("execution_timeout", 300.0))) + self.websocket_timeout = float(os.getenv("CODERUNNER_WEBSOCKET_TIMEOUT", kwargs.get("websocket_timeout", 1.0))) + self.max_wait_jupyter = int(os.getenv("CODERUNNER_MAX_WAIT_JUPYTER", kwargs.get("max_wait_jupyter", 30))) + + # FastMCP settings + self.fastmcp_host = os.getenv("CODERUNNER_FASTMCP_HOST", kwargs.get("fastmcp_host", "0.0.0.0")) + self.fastmcp_port = int(os.getenv("CODERUNNER_FASTMCP_PORT", kwargs.get("fastmcp_port", 8222))) + + # Logging settings + self.log_level = os.getenv("CODERUNNER_LOG_LEVEL", kwargs.get("log_level", "INFO")) + self.log_format = os.getenv("CODERUNNER_LOG_FORMAT", kwargs.get("log_format", "%(asctime)s - %(levelname)s - %(message)s")) + + # Resource settings + self.max_kernel_memory = os.getenv("CODERUNNER_MAX_KERNEL_MEMORY", kwargs.get("max_kernel_memory", None)) + self.max_kernel_cpu = 
os.getenv("CODERUNNER_MAX_KERNEL_CPU", kwargs.get("max_kernel_cpu", None)) + if self.max_kernel_cpu is not None: + self.max_kernel_cpu = float(self.max_kernel_cpu) + + # Ensure shared directory exists + self.shared_dir.mkdir(parents=True, exist_ok=True) + + # Set kernel ID file path if not provided + if self.kernel_id_file is None: + self.kernel_id_file = str(self.shared_dir / "python_kernel_id.txt") + + @property + def bash_kernel_id_file(self) -> str: + """Get the path for the bash kernel ID file""" + return os.path.join(os.path.dirname(self.kernel_id_file), "bash_kernel_id.txt") + + @property + def jupyter_ws_base_url(self) -> str: + """Get the base WebSocket URL for Jupyter""" + return f"ws://{self.jupyter_host}:{self.jupyter_port}" + + @property + def jupyter_api_base_url(self) -> str: + """Get the base API URL for Jupyter""" + return f"http://{self.jupyter_host}:{self.jupyter_port}" + + +# Global config instance +config = Config() \ No newline at end of file diff --git a/entrypoint.sh b/entrypoint.sh index da0ee6e..9728d3e 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -1,29 +1,50 @@ #!/bin/bash +set -euo pipefail + +# Trap handlers for cleanup +cleanup() { + echo "Shutting down services..." + pkill -f "jupyter server" || true + pkill -f "uvicorn" || true + exit 0 +} +trap cleanup SIGTERM SIGINT + +# Configuration +JUPYTER_PORT=${CODERUNNER_JUPYTER_PORT:-8888} +JUPYTER_HOST=${CODERUNNER_JUPYTER_HOST:-0.0.0.0} +MAX_WAIT=${CODERUNNER_MAX_WAIT_JUPYTER:-30} +SHARED_DIR=${CODERUNNER_SHARED_DIR:-/app/uploads} +FASTMCP_PORT=${CODERUNNER_FASTMCP_PORT:-8222} +FASTMCP_HOST=${CODERUNNER_FASTMCP_HOST:-0.0.0.0} + +echo "Starting Jupyter server on ${JUPYTER_HOST}:${JUPYTER_PORT}..." 
+ # Start Jupyter server jupyter server \ - --ip=0.0.0.0 \ - --port=8888 \ + --ip="${JUPYTER_HOST}" \ + --port="${JUPYTER_PORT}" \ --no-browser \ --IdentityProvider.token='' \ --ServerApp.disable_check_xsrf=True \ - --ServerApp.notebook_dir='/app/uploads' \ + --ServerApp.notebook_dir="${SHARED_DIR}" \ --ServerApp.allow_origin='*' \ --ServerApp.allow_credentials=True \ --ServerApp.allow_remote_access=True \ --ServerApp.log_level='INFO' \ --ServerApp.allow_root=True & -echo "Waiting for Jupyter Server to become available..." - -max_wait=30 +JUPYTER_PID=$! -# This while loop is a great pattern. The logic here is correct. -while ! curl -s --fail http://localhost:8888/api/status > /dev/null; do - count=$((count + 1)) # More readable spacing +echo "Waiting for Jupyter Server to become available..." - # This 'if' statement will now work because $max_wait has a value - if [ "$count" -gt "$max_wait" ]; then - echo "Error: Jupyter Server did not start within ${max_wait} seconds." +count=0 +while ! curl -s --fail "http://localhost:${JUPYTER_PORT}/api/status" > /dev/null; do + count=$((count + 1)) + + if [ "$count" -gt "$MAX_WAIT" ]; then + echo "Error: Jupyter Server did not start within ${MAX_WAIT} seconds." + kill $JUPYTER_PID 2>/dev/null || true exit 1 fi @@ -34,16 +55,47 @@ done echo echo "Jupyter Server is ready!" +# Start Python3 kernel session and store the kernel ID +echo "Starting Python3 kernel..." +python_response=$(curl -s -X POST "http://localhost:${JUPYTER_PORT}/api/kernels" \ + -H "Content-Type: application/json" \ + -d '{"name":"python3"}') + +if [ $? 
-ne 0 ]; then + echo "Error: Failed to start Python3 kernel" + kill $JUPYTER_PID 2>/dev/null || true + exit 1 +fi + +python_kernel_id=$(echo "$python_response" | jq -r '.id') +if [ "$python_kernel_id" == "null" ] || [ -z "$python_kernel_id" ]; then + echo "Error: Failed to get Python kernel ID from response: $python_response" + kill $JUPYTER_PID 2>/dev/null || true + exit 1 +fi -# Start a Python3 kernel session and store the kernel ID -response=$(curl -s -X POST "http://localhost:8888/api/kernels" -H "Content-Type: application/json" -d '{"name":"python3"}') -kernel_id=$(echo $response | jq -r '.id') -echo "Python3 kernel started with ID: $kernel_id" +echo "Python3 kernel started with ID: $python_kernel_id" +echo "$python_kernel_id" > "${SHARED_DIR}/python_kernel_id.txt" -# Write the kernel ID to a file for later use -echo $kernel_id > /app/uploads/python_kernel_id.txt +# Start Bash kernel session and store the kernel ID +echo "Starting Bash kernel..." +bash_response=$(curl -s -X POST "http://localhost:${JUPYTER_PORT}/api/kernels" \ + -H "Content-Type: application/json" \ + -d '{"name":"bash"}') + +if [ $? -ne 0 ]; then + echo "Warning: Failed to start Bash kernel (continuing without it)" +else + bash_kernel_id=$(echo "$bash_response" | jq -r '.id') + if [ "$bash_kernel_id" == "null" ] || [ -z "$bash_kernel_id" ]; then + echo "Warning: Failed to get Bash kernel ID from response: $bash_response" + else + echo "Bash kernel started with ID: $bash_kernel_id" + echo "$bash_kernel_id" > "${SHARED_DIR}/bash_kernel_id.txt" + fi +fi -# exec python mcp_main.py +echo "Starting FastAPI application on ${FASTMCP_HOST}:${FASTMCP_PORT}..." 
# Start FastAPI application -exec uvicorn server:app --host 0.0.0.0 --port 8222 --workers 1 --no-access-log \ No newline at end of file +exec uvicorn server:app --host "$FASTMCP_HOST" --port "$FASTMCP_PORT" --workers 1 --no-access-log \ No newline at end of file diff --git a/jupyter_client.py b/jupyter_client.py new file mode 100644 index 0000000..60f0a65 --- /dev/null +++ b/jupyter_client.py @@ -0,0 +1,214 @@ +import asyncio +import json +import logging +import os +import time +import uuid +from typing import Dict, Any, Optional, Tuple + +import websockets +from websockets.exceptions import ConnectionClosed + +from config import config + +logger = logging.getLogger(__name__) + + +class JupyterExecutionError(Exception): + """Exception raised when Jupyter code execution fails""" + pass + + +class JupyterConnectionError(Exception): + """Exception raised when connection to Jupyter fails""" + pass + + +class JupyterClient: + """Client for executing code in Jupyter kernels via WebSocket""" + + def __init__(self): + self.kernels: Dict[str, Optional[str]] = { + "python": None, + "bash": None + } + self._load_kernel_ids() + + def _load_kernel_ids(self) -> None: + """Load kernel IDs from files""" + kernel_files = { + "python": config.kernel_id_file, + "bash": config.bash_kernel_id_file + } + + for kernel_type, kernel_file in kernel_files.items(): + if not os.path.exists(kernel_file): + logger.info(f"{kernel_type.title()} kernel ID file not found at: {kernel_file}") + continue + + try: + with open(kernel_file, 'r') as file: + kernel_id = file.read().strip() + if kernel_id: + self.kernels[kernel_type] = kernel_id + logger.info(f"Loaded {kernel_type} kernel ID: {kernel_id}") + else: + logger.warning(f"{kernel_type.title()} kernel ID file is empty") + except Exception as e: + logger.error(f"Error reading {kernel_type} kernel ID file: {e}") + + @property + def kernel_id(self) -> Optional[str]: + """Backward compatibility property for Python kernel""" + return 
self.kernels.get("python") + + def _create_execute_request(self, code: str) -> Tuple[str, str]: + """ + Create a Jupyter execute_request message. + Returns a tuple: (msg_id, json_payload_string) + """ + msg_id = uuid.uuid4().hex + session_id = uuid.uuid4().hex + + request = { + "header": { + "msg_id": msg_id, + "username": "mcp_client", + "session": session_id, + "msg_type": "execute_request", + "version": "5.3", + }, + "parent_header": {}, + "metadata": {}, + "content": { + "code": code, + "silent": False, + "store_history": False, + "user_expressions": {}, + "allow_stdin": False, + "stop_on_error": True, + }, + "buffers": [], + } + return msg_id, json.dumps(request) + + async def execute_code(self, code: str, kernel_type: str = "python") -> str: + """ + Execute code in the specified Jupyter kernel and return the output. + + Args: + code: The code to execute + kernel_type: Type of kernel to use ("python" or "bash") + + Returns: + The execution output as a string + + Raises: + JupyterConnectionError: If unable to connect to Jupyter + JupyterExecutionError: If code execution fails + """ + if kernel_type not in self.kernels: + raise JupyterConnectionError(f"Unsupported kernel type: {kernel_type}") + + kernel_id = self.kernels[kernel_type] + if not kernel_id: + raise JupyterConnectionError(f"{kernel_type.title()} kernel is not running. 
The kernel ID is not available.") + + jupyter_ws_url = f"{config.jupyter_ws_base_url}/api/kernels/{kernel_id}/channels" + output_lines = [] + sent_msg_id = None + + try: + async with websockets.connect(jupyter_ws_url) as websocket: + # Send execution request + sent_msg_id, jupyter_request_json = self._create_execute_request(code) + await websocket.send(jupyter_request_json) + logger.info(f"Sent execute_request (msg_id: {sent_msg_id})") + + # Process responses + execution_complete = False + start_time = time.time() + + while not execution_complete and (time.time() - start_time) < config.execution_timeout: + try: + message_str = await asyncio.wait_for( + websocket.recv(), + timeout=config.websocket_timeout + ) + except asyncio.TimeoutError: + continue + + try: + message_data = json.loads(message_str) + except json.JSONDecodeError as e: + logger.error(f"Invalid JSON received: {e}") + continue + + parent_msg_id = message_data.get("parent_header", {}).get("msg_id") + + # Ignore messages not related to our request + if parent_msg_id != sent_msg_id: + continue + + execution_complete = self._process_message(message_data, output_lines) + + if not execution_complete: + raise JupyterExecutionError( + f"Execution timed out after {config.execution_timeout} seconds" + ) + + return "".join(output_lines) if output_lines else "[Execution successful with no output]" + + except ConnectionClosed as e: + logger.error(f"WebSocket connection closed: {e}") + raise JupyterConnectionError(f"Could not connect to Jupyter kernel: {e}") + except Exception as e: + logger.error(f"Unexpected error during execution: {e}", exc_info=True) + raise JupyterExecutionError(f"Internal error during execution: {str(e)}") + + def _process_message(self, message_data: Dict[str, Any], output_lines: list) -> bool: + """ + Process a single message from Jupyter. 
+ + Args: + message_data: The parsed message data + output_lines: List to append output to + + Returns: + True if execution is complete, False otherwise + + Raises: + JupyterExecutionError: If the message indicates an error + """ + msg_type = message_data.get("header", {}).get("msg_type") + content = message_data.get("content", {}) + + if msg_type == "stream": + output_lines.append(content.get("text", "")) + elif msg_type in ["execute_result", "display_data"]: + output_lines.append(content.get("data", {}).get("text/plain", "")) + elif msg_type == "error": + error_traceback = "\n".join(content.get("traceback", [])) + logger.error(f"Jupyter execution error: {error_traceback}") + raise JupyterExecutionError(f"Execution Error:\n{error_traceback}") + elif msg_type == "status" and content.get("execution_state") == "idle": + # Execution is complete + return True + + return False + + def reload_kernel_ids(self) -> None: + """Reload kernel IDs from files (useful if kernels were restarted)""" + self._load_kernel_ids() + + def reload_kernel_id(self) -> None: + """Backward compatibility method""" + self.reload_kernel_ids() + + def is_kernel_available(self, kernel_type: str = "python") -> bool: + """Check if kernel ID is available for the specified kernel type""" + return self.kernels.get(kernel_type) is not None + + def get_available_kernels(self) -> Dict[str, bool]: + """Get availability status of all kernels""" + return {kernel_type: kernel_id is not None for kernel_type, kernel_id in self.kernels.items()} \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..014f4e2 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,14 @@ +[tool:pytest] +testpaths = tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +addopts = + -v + --tb=short + --strict-markers + --disable-warnings +markers = + slow: marks tests as slow (deselect with '-m "not slow"') + integration: marks tests as integration tests +asyncio_mode = auto 
\ No newline at end of file diff --git a/requirements.txt b/requirements.txt index eedc041..62e0331 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,36 +1,34 @@ - # Core Jupyter Server -jupyter-server +jupyter-server==2.14.2 # Bash Kernel for Jupyter -bash_kernel - +bash_kernel==0.7.2 # Web Framework -fastapi - +fastapi==0.115.6 # ASGI Server (with standard extras like websockets, httptools) -uvicorn[standard] - -# WebSocket client library (explicitly needed by main.py) -websockets - -# Async HTTP client (used by main.py to talk to Jupyter API) -httpx +uvicorn[standard]==0.32.1 -# For FastAPI file uploads -python-multipart +# WebSocket client library +websockets==13.1 -# For async file operations in FastAPI uploads -aiofiles +# For async file operations +aiofiles==24.1.0 -openai -requests==2.32.4 +# MCP framework +mcp[cli]==1.1.0 +fastmcp==0.6.0 -mcp[cli] +# Development and testing dependencies +pytest==8.3.4 +pytest-asyncio==0.25.0 +pytest-mock==3.14.0 -fastmcp +# OpenAI integration (optional) +openai==1.59.3 +openai-agents==0.0.18 -openai-agents +# System requests +requests==2.32.4 \ No newline at end of file diff --git a/server.py b/server.py index b1d7e0c..8e1eb08 100644 --- a/server.py +++ b/server.py @@ -1,68 +1,25 @@ # --- IMPORTS --- -import asyncio -import base64 -import binascii -import json import logging -import os -import pathlib -import time -import uuid -import aiofiles -import websockets from mcp.server.fastmcp import FastMCP +from config import config +from jupyter_client import JupyterClient, JupyterConnectionError, JupyterExecutionError + # --- CONFIGURATION & SETUP --- logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" + level=getattr(logging, config.log_level.upper()), + format=config.log_format ) logger = logging.getLogger(__name__) # Initialize the MCP server with a descriptive name for the toolset mcp = FastMCP("CodeRunner") - -# Jupyter connection settings -JUPYTER_WS_URL = 
"ws://127.0.0.1:8888" - -# Directory configuration (ensure this matches your Jupyter/Docker setup) -# This directory must be accessible by both this script and the Jupyter kernel. -SHARED_DIR = pathlib.Path("/app/uploads") -SHARED_DIR.mkdir(exist_ok=True) -KERNEL_ID_FILE_PATH = SHARED_DIR / "python_kernel_id.txt" +# Initialize Jupyter client +jupyter_client = JupyterClient() -# --- HELPER FUNCTION --- -def create_jupyter_request(code: str) -> tuple[str, str]: - """ - Creates a Jupyter execute_request message. - Returns a tuple: (msg_id, json_payload_string) - """ - msg_id = uuid.uuid4().hex - session_id = uuid.uuid4().hex - - request = { - "header": { - "msg_id": msg_id, - "username": "mcp_client", - "session": session_id, - "msg_type": "execute_request", - "version": "5.3", - }, - "parent_header": {}, - "metadata": {}, - "content": { - "code": code, - "silent": False, - "store_history": False, - "user_expressions": {}, - "allow_stdin": False, - "stop_on_error": True, - }, - "buffers": [], - } - return msg_id, json.dumps(request) # --- MCP TOOLS --- @@ -76,73 +33,64 @@ async def execute_python_code(command: str) -> str: Args: command: The Python code to execute as a single string. """ - # 1. Get Kernel ID - if not os.path.exists(KERNEL_ID_FILE_PATH): - logger.error(f"Kernel ID file not found at: {KERNEL_ID_FILE_PATH}") - return "Error: Kernel is not running. The kernel ID file was not found." 
- - with open(KERNEL_ID_FILE_PATH, 'r') as file: - kernel_id = file.read().strip() + try: + result = await jupyter_client.execute_code(command, kernel_type="python") + return result + except JupyterConnectionError as e: + logger.error(f"Jupyter connection error: {e}") + return f"Error: {str(e)}" + except JupyterExecutionError as e: + logger.error(f"Jupyter execution error: {e}") + return f"Error: {str(e)}" + except Exception as e: + logger.error(f"Unexpected error: {e}", exc_info=True) + return f"Error: An internal server error occurred: {str(e)}" - if not kernel_id: - return "Error: Kernel ID file is empty. Cannot connect to the kernel." - # 2. Connect and Execute via WebSocket - jupyter_ws_url = f"{JUPYTER_WS_URL}/api/kernels/{kernel_id}/channels" - output_lines = [] - sent_msg_id = None +@mcp.tool() +async def execute_bash_code(command: str) -> str: + """ + Executes a string of Bash shell commands in a persistent Jupyter bash kernel and returns the output. + This is suitable for file operations, system commands, and shell scripting. + Args: + command: The Bash shell commands to execute as a single string. 
+ """ try: - async with websockets.connect(jupyter_ws_url) as jupyter_ws: - sent_msg_id, jupyter_request_json = create_jupyter_request(command) - await jupyter_ws.send(jupyter_request_json) - logger.info(f"Sent execute_request (msg_id: {sent_msg_id})") - - execution_complete = False - loop_timeout = 300.0 # Total time to wait for a result - start_time = time.time() - - while not execution_complete and (time.time() - start_time) < loop_timeout: - try: - # Wait for a message with a short timeout to keep the loop responsive - message_str = await asyncio.wait_for(jupyter_ws.recv(), timeout=1.0) - except asyncio.TimeoutError: - continue - - message_data = json.loads(message_str) - parent_msg_id = message_data.get("parent_header", {}).get("msg_id") - - # Ignore messages not related to our request - if parent_msg_id != sent_msg_id: - continue - - msg_type = message_data.get("header", {}).get("msg_type") - content = message_data.get("content", {}) - - if msg_type == "stream": - output_lines.append(content.get("text", "")) - elif msg_type == "execute_result" or msg_type == "display_data": - output_lines.append(content.get("data", {}).get("text/plain", "")) - elif msg_type == "error": - error_traceback = "\n".join(content.get("traceback", [])) - logger.error(f"Execution error for msg_id {sent_msg_id}:\n{error_traceback}") - return f"Execution Error:\n{error_traceback}" - elif msg_type == "status" and content.get("execution_state") == "idle": - # The kernel is idle, meaning our execution is finished. - execution_complete = True - - if not execution_complete: - logger.error(f"Execution timed out for msg_id: {sent_msg_id}") - return f"Error: Execution timed out after {loop_timeout} seconds." - - return "".join(output_lines) if output_lines else "[Execution successful with no output]" - - except websockets.exceptions.ConnectionClosed as e: - logger.error(f"WebSocket connection failed: {e}") - return f"Error: Could not connect to the Jupyter kernel. It may be offline. 
Details: {e}" + result = await jupyter_client.execute_code(command, kernel_type="bash") + return result + except JupyterConnectionError as e: + logger.error(f"Jupyter connection error: {e}") + return f"Error: {str(e)}" + except JupyterExecutionError as e: + logger.error(f"Jupyter execution error: {e}") + return f"Error: {str(e)}" except Exception as e: - logger.error(f"An unexpected error occurred during execution: {e}", exc_info=True) + logger.error(f"Unexpected error: {e}", exc_info=True) return f"Error: An internal server error occurred: {str(e)}" +@mcp.tool() +async def get_kernel_status() -> str: + """ + Returns the status of available Jupyter kernels. + + Returns: + A string describing which kernels are available and their status. + """ + try: + kernel_status = jupyter_client.get_available_kernels() + status_lines = [] + + for kernel_type, is_available in kernel_status.items(): + status = "✓ Available" if is_available else "✗ Not available" + kernel_id = jupyter_client.kernels.get(kernel_type, "None") + status_lines.append(f"{kernel_type.title()} kernel: {status} (ID: {kernel_id})") + + return "\n".join(status_lines) + except Exception as e: + logger.error(f"Error checking kernel status: {e}", exc_info=True) + return f"Error: Could not check kernel status: {str(e)}" + + app = mcp.sse_app() diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..8654399 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Tests for CodeRunner \ No newline at end of file diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..0e804ee --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,108 @@ +import os +import pathlib +import tempfile +from unittest.mock import patch + +import pytest + +from config import Config + + +def test_config_defaults(): + """Test that config has expected default values""" + config = Config() + + assert config.jupyter_ws_url == "ws://127.0.0.1:8888" + assert config.jupyter_port == 8888 
def test_config_defaults():
    """Test that config has expected default values.

    The ambient process environment is stripped of CODERUNNER_* variables
    first: previously a developer's shell (or CI) exporting any of those
    variables would silently change the values under test and make the
    assertions flaky.
    """
    clean_env = {k: v for k, v in os.environ.items() if not k.startswith("CODERUNNER_")}
    with patch.dict(os.environ, clean_env, clear=True):
        config = Config()

    assert config.jupyter_ws_url == "ws://127.0.0.1:8888"
    assert config.jupyter_port == 8888
    assert config.jupyter_host == "127.0.0.1"
    # Default shared_dir is "./uploads" for local development or "/app/uploads" in container
    assert config.shared_dir in [pathlib.Path("./uploads"), pathlib.Path("/app/uploads")]
    assert config.execution_timeout == 300.0
    assert config.websocket_timeout == 1.0
    assert config.max_wait_jupyter == 30
    assert config.fastmcp_host == "0.0.0.0"
    assert config.fastmcp_port == 8222
    assert config.log_level == "INFO"


def test_config_from_env():
    """Test that config can be loaded from environment variables."""
    env_vars = {
        "CODERUNNER_JUPYTER_PORT": "9999",
        "CODERUNNER_JUPYTER_HOST": "192.168.1.1",
        "CODERUNNER_EXECUTION_TIMEOUT": "600.0",
        "CODERUNNER_FASTMCP_PORT": "8333",
        "CODERUNNER_LOG_LEVEL": "DEBUG",
    }

    with patch.dict(os.environ, env_vars):
        config = Config()

        assert config.jupyter_port == 9999
        assert config.jupyter_host == "192.168.1.1"
        assert config.execution_timeout == 600.0
        assert config.fastmcp_port == 8333
        assert config.log_level == "DEBUG"


def test_config_shared_dir_creation():
    """Test that shared directory is created if it doesn't exist."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        shared_dir = pathlib.Path(tmp_dir) / "test_uploads"

        # Precondition: directory must not exist before Config() runs.
        assert not shared_dir.exists()

        config = Config(shared_dir=shared_dir)

        # Config.__init__ is expected to create the directory.
        assert shared_dir.exists()
        assert shared_dir.is_dir()


def test_config_jupyter_urls():
    """Test that the derived Jupyter URL properties work correctly."""
    config = Config(jupyter_host="localhost", jupyter_port=8888)

    assert config.jupyter_ws_base_url == "ws://localhost:8888"
    assert config.jupyter_api_base_url == "http://localhost:8888"
def test_config_kernel_id_file_path():
    """Kernel-id file paths are derived from the shared directory."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        uploads = pathlib.Path(tmp_dir) / "uploads"
        config = Config(shared_dir=uploads)

        assert config.kernel_id_file == str(uploads / "python_kernel_id.txt")
        assert config.bash_kernel_id_file == str(uploads / "bash_kernel_id.txt")


def test_config_bash_kernel_id_file_with_custom_path():
    """The bash kernel-id path follows a custom python kernel-id location."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        custom_dir = os.path.join(tmp_dir, "custom")
        os.makedirs(custom_dir, exist_ok=True)
        custom_python_path = os.path.join(custom_dir, "my_python_kernel.txt")

        config = Config(kernel_id_file=custom_python_path)

        # Bash id file lives next to the custom python id file.
        assert config.bash_kernel_id_file == os.path.join(custom_dir, "bash_kernel_id.txt")


def test_config_resource_settings():
    """Resource env settings are parsed, with CPU converted to float."""
    with patch.dict(os.environ, {
        "CODERUNNER_MAX_KERNEL_MEMORY": "2G",
        "CODERUNNER_MAX_KERNEL_CPU": "1.5",
    }):
        config = Config()

        assert config.max_kernel_memory == "2G"
        assert config.max_kernel_cpu == 1.5
        assert isinstance(config.max_kernel_cpu, float)
@pytest.fixture
def mock_config():
    """Provide an isolated Config whose shared dir lives in a temp folder."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        uploads = pathlib.Path(tmp_dir) / "uploads"
        uploads.mkdir()

        cfg = Config(
            shared_dir=uploads,
            kernel_id_file=str(uploads / "python_kernel_id.txt"),
            execution_timeout=10.0,
            websocket_timeout=0.1
        )

        # Point the module-level config used by JupyterClient at our temp config.
        with patch('jupyter_client.config', cfg):
            yield cfg


@pytest.fixture
def jupyter_client_with_kernel(mock_config):
    """A JupyterClient whose python kernel-id file is pre-populated."""
    pathlib.Path(mock_config.kernel_id_file).write_text("test-kernel-123")
    return JupyterClient()


class TestJupyterClient:

    def test_init_no_kernel_files(self, mock_config):
        """Without kernel-id files, no kernel is considered available."""
        client = JupyterClient()
        assert client.kernel_id is None
        assert not client.is_kernel_available("python")
        assert not client.is_kernel_available("bash")

    def test_init_empty_kernel_file(self, mock_config):
        """An empty kernel-id file counts as no kernel."""
        pathlib.Path(mock_config.kernel_id_file).write_text("")
        client = JupyterClient()
        assert client.kernel_id is None
        assert not client.is_kernel_available("python")

    def test_init_with_kernel_file(self, jupyter_client_with_kernel):
        """A populated kernel-id file makes the python kernel available."""
        assert jupyter_client_with_kernel.kernel_id == "test-kernel-123"
        assert jupyter_client_with_kernel.is_kernel_available("python")

    def test_init_with_bash_kernel(self, mock_config):
        """Both python and bash kernels are picked up from their id files."""
        pathlib.Path(mock_config.kernel_id_file).write_text("python-kernel-123")
        pathlib.Path(mock_config.bash_kernel_id_file).write_text("bash-kernel-456")

        client = JupyterClient()
        assert client.kernels["python"] == "python-kernel-123"
        assert client.kernels["bash"] == "bash-kernel-456"
        assert client.is_kernel_available("python")
        assert client.is_kernel_available("bash")
client.is_kernel_available("bash") + + def test_get_available_kernels(self, mock_config): + """Test getting available kernels status""" + # Create only python kernel file + with open(mock_config.kernel_id_file, 'w') as f: + f.write("python-kernel-123") + + client = JupyterClient() + available = client.get_available_kernels() + + assert available["python"] is True + assert available["bash"] is False + + def test_create_execute_request(self, jupyter_client_with_kernel): + """Test creation of Jupyter execute request""" + code = "print('hello world')" + msg_id, request_json = jupyter_client_with_kernel._create_execute_request(code) + + assert msg_id is not None + assert len(msg_id) > 0 + + request = json.loads(request_json) + assert request["header"]["msg_type"] == "execute_request" + assert request["header"]["msg_id"] == msg_id + assert request["content"]["code"] == code + assert request["content"]["stop_on_error"] is True + + @pytest.mark.asyncio + async def test_execute_code_no_kernel(self, mock_config): + """Test execute_code when no kernel is available""" + client = JupyterClient() + + with pytest.raises(JupyterConnectionError, match="Python kernel is not running"): + await client.execute_code("print('test')", kernel_type="python") + + with pytest.raises(JupyterConnectionError, match="Bash kernel is not running"): + await client.execute_code("echo 'test'", kernel_type="bash") + + @pytest.mark.asyncio + async def test_execute_code_unsupported_kernel(self, jupyter_client_with_kernel): + """Test execute_code with unsupported kernel type""" + with pytest.raises(JupyterConnectionError, match="Unsupported kernel type"): + await jupyter_client_with_kernel.execute_code("print('test')", kernel_type="unsupported") + + @pytest.mark.asyncio + async def test_execute_code_success(self, jupyter_client_with_kernel): + """Test successful code execution""" + code = "print('hello world')" + expected_output = "hello world\n" + + # Mock WebSocket connection + mock_websocket = 
AsyncMock() + mock_websocket.send = AsyncMock() + mock_websocket.recv = AsyncMock() + + # Mock the message sequence + execute_request_msg = None + def capture_request(msg): + nonlocal execute_request_msg + execute_request_msg = json.loads(msg) + + mock_websocket.send.side_effect = capture_request + + # Mock response messages + async def mock_recv(): + if execute_request_msg is None: + raise asyncio.TimeoutError() + + msg_id = execute_request_msg["header"]["msg_id"] + + # First return stream output + stream_msg = { + "header": {"msg_type": "stream"}, + "parent_header": {"msg_id": msg_id}, + "content": {"text": expected_output} + } + + # Then return status idle + status_msg = { + "header": {"msg_type": "status"}, + "parent_header": {"msg_id": msg_id}, + "content": {"execution_state": "idle"} + } + + # Return messages in sequence + if not hasattr(mock_recv, 'call_count'): + mock_recv.call_count = 0 + + mock_recv.call_count += 1 + if mock_recv.call_count == 1: + return json.dumps(stream_msg) + else: + return json.dumps(status_msg) + + mock_websocket.recv.side_effect = mock_recv + + # Mock websockets.connect + with patch('websockets.connect') as mock_connect: + mock_connect.return_value.__aenter__.return_value = mock_websocket + + result = await jupyter_client_with_kernel.execute_code(code) + + assert result == expected_output + mock_websocket.send.assert_called_once() + + @pytest.mark.asyncio + async def test_execute_code_error(self, jupyter_client_with_kernel): + """Test code execution with error""" + code = "raise ValueError('test error')" + + # Mock WebSocket connection + mock_websocket = AsyncMock() + mock_websocket.send = AsyncMock() + + # Mock error response + async def mock_recv(): + msg_id = "test-msg-id" + error_msg = { + "header": {"msg_type": "error"}, + "parent_header": {"msg_id": msg_id}, + "content": {"traceback": ["ValueError: test error"]} + } + return json.dumps(error_msg) + + mock_websocket.recv.side_effect = mock_recv + + # Mock the request creation 
to return predictable msg_id + with patch.object(jupyter_client_with_kernel, '_create_execute_request') as mock_create: + mock_create.return_value = ("test-msg-id", '{"test": "request"}') + + with patch('websockets.connect') as mock_connect: + mock_connect.return_value.__aenter__.return_value = mock_websocket + + with pytest.raises(JupyterExecutionError, match="ValueError: test error"): + await jupyter_client_with_kernel.execute_code(code) + + @pytest.mark.asyncio + async def test_execute_code_connection_error(self, jupyter_client_with_kernel): + """Test code execution with connection error""" + code = "print('test')" + + # Mock connection failure + with patch('websockets.connect') as mock_connect: + mock_connect.side_effect = websockets.exceptions.ConnectionClosed(None, None) + + with pytest.raises(JupyterConnectionError, match="Could not connect to Jupyter kernel"): + await jupyter_client_with_kernel.execute_code(code) + + @pytest.mark.asyncio + async def test_execute_bash_code_success(self, mock_config): + """Test successful bash code execution""" + # Create bash kernel file + bash_kernel_file = mock_config.bash_kernel_id_file + with open(bash_kernel_file, 'w') as f: + f.write("bash-kernel-789") + + client = JupyterClient() + code = "echo 'hello bash'" + expected_output = "hello bash\n" + + # Mock WebSocket connection + mock_websocket = AsyncMock() + mock_websocket.send = AsyncMock() + mock_websocket.recv = AsyncMock() + + # Mock the message sequence + execute_request_msg = None + def capture_request(msg): + nonlocal execute_request_msg + execute_request_msg = json.loads(msg) + + mock_websocket.send.side_effect = capture_request + + # Mock response messages + async def mock_recv(): + if execute_request_msg is None: + raise asyncio.TimeoutError() + + msg_id = execute_request_msg["header"]["msg_id"] + + # First return stream output + stream_msg = { + "header": {"msg_type": "stream"}, + "parent_header": {"msg_id": msg_id}, + "content": {"text": expected_output} + } 
+ + # Then return status idle + status_msg = { + "header": {"msg_type": "status"}, + "parent_header": {"msg_id": msg_id}, + "content": {"execution_state": "idle"} + } + + # Return messages in sequence + if not hasattr(mock_recv, 'call_count'): + mock_recv.call_count = 0 + + mock_recv.call_count += 1 + if mock_recv.call_count == 1: + return json.dumps(stream_msg) + else: + return json.dumps(status_msg) + + mock_websocket.recv.side_effect = mock_recv + + # Mock websockets.connect + with patch('websockets.connect') as mock_connect: + mock_connect.return_value.__aenter__.return_value = mock_websocket + + result = await client.execute_code(code, kernel_type="bash") + + assert result == expected_output + mock_websocket.send.assert_called_once() + + def test_reload_kernel_ids(self, mock_config): + """Test reloading kernel IDs from files""" + client = JupyterClient() + assert client.kernel_id is None + assert not client.is_kernel_available("bash") + + # Write kernel IDs to files + with open(mock_config.kernel_id_file, 'w') as f: + f.write("new-python-kernel-456") + + bash_kernel_file = mock_config.bash_kernel_id_file + with open(bash_kernel_file, 'w') as f: + f.write("new-bash-kernel-789") + + client.reload_kernel_ids() + assert client.kernel_id == "new-python-kernel-456" + assert client.kernels["bash"] == "new-bash-kernel-789" + assert client.is_kernel_available("python") + assert client.is_kernel_available("bash") + + def test_reload_kernel_id_backward_compatibility(self, mock_config): + """Test backward compatibility method""" + client = JupyterClient() + assert client.kernel_id is None + + # Write kernel ID to file + with open(mock_config.kernel_id_file, 'w') as f: + f.write("new-kernel-456") + + client.reload_kernel_id() # Should call reload_kernel_ids() + assert client.kernel_id == "new-kernel-456" + assert client.is_kernel_available("python") + + def test_load_kernel_ids_file_read_error(self, mock_config): + """Test handling of file read errors during kernel ID 
loading""" + # Create a kernel file + with open(mock_config.kernel_id_file, 'w') as f: + f.write("test-kernel-123") + + # Mock file reading to raise an exception + with patch('builtins.open', side_effect=PermissionError("Access denied")): + client = JupyterClient() + + # Should handle the exception gracefully + assert client.kernels["python"] is None + assert client.kernels["bash"] is None + + @pytest.mark.asyncio + async def test_execute_code_timeout(self, jupyter_client_with_kernel): + """Test code execution timeout""" + code = "import time; time.sleep(10)" + + # Mock WebSocket connection that never sends completion + mock_websocket = AsyncMock() + mock_websocket.send = AsyncMock() + + # Mock recv to always timeout + async def mock_recv(): + raise asyncio.TimeoutError() + + mock_websocket.recv.side_effect = mock_recv + + # Mock the request creation + with patch.object(jupyter_client_with_kernel, '_create_execute_request') as mock_create: + mock_create.return_value = ("test-msg-id", '{"test": "request"}') + + with patch('websockets.connect') as mock_connect: + mock_connect.return_value.__aenter__.return_value = mock_websocket + + # Mock config to have short timeout + with patch('jupyter_client.config') as mock_config: + mock_config.execution_timeout = 0.1 # Very short timeout + mock_config.websocket_timeout = 0.01 + mock_config.jupyter_ws_base_url = "ws://localhost:8888" + + with pytest.raises(JupyterExecutionError, match="timed out"): + await jupyter_client_with_kernel.execute_code(code) \ No newline at end of file