diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000000..3e78a453bd --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,27 @@ +ARG PYTHON_VERSION=3.12 +FROM mcr.microsoft.com/devcontainers/python:${PYTHON_VERSION}-trixie + +# Copy uv and uvx (latest version) +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +# Set non-interactive frontend for apt (note: this ENV persists into the runtime environment, not just the build) +ENV DEBIAN_FRONTEND=noninteractive + +# DOCKER_BUILDKIT only affects docker builds run *inside* the dev container; this image's own cache mounts require BuildKit enabled on the host +ENV DOCKER_BUILDKIT=1 + +# Install system dependencies with cache mount for faster rebuilds (latest versions) +RUN --mount=type=cache,target=/var/cache/apt \ --mount=type=cache,target=/var/lib/apt/lists \ apt-get update \ && apt-get install -y --no-install-recommends \ cmake \ ninja-build \ libclang-dev \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* + +# The vscode user already exists in the base image with UID 1000. +USER vscode + +WORKDIR /workspaces/pydantic-ai diff --git a/.devcontainer/README.md b/.devcontainer/README.md new file mode 100644 index 0000000000..db8b046b70 --- /dev/null +++ b/.devcontainer/README.md @@ -0,0 +1,36 @@ +## Requirements + +Please ensure that [Docker](https://docs.docker.com/desktop/) is installed and running on the host machine. To configure your IDE and MCP servers, mount your own `.vscode/` or `.cursor/` folders by uncommenting the `mounts` section in `devcontainer.json`. To set your API keys, please copy a valid `.env` file based on `.env.example` into the container by uncommenting the `initializeCommand` in `devcontainer.json`. + +## Overview + +The dev container is built using a hybrid approach: the `Dockerfile` provides low-level customization, while `devcontainer.json` is used for fine-tuning and IDE integration. Minor changes to `devcontainer.json` do not require a full rebuild of the entire image, which speeds up the development workflow. + +1. 
The `Dockerfile` is based on Microsoft's Debian-style [Trixie image for Python 3.12](https://hub.docker.com/r/microsoft/devcontainers-python). It configures a non-root `vscode` user, sets `/workspaces/pydantic-ai` as the working directory, and installs essential system dependencies along with `uv`. + +2. The `devcontainer.json` is based on Microsoft's [dev container template](https://github.com/devcontainers/templates/tree/main/src/python) for Python 3. It installs additional development tools via `features`, sets important environment variables, and runs `make install` via the `postCreateCommand`. The container does not enforce any specific IDE configuration; developers are encouraged to mount their own `.vscode/` or `.cursor/` folders externally. To set your API keys, please uncomment the `initializeCommand` in `devcontainer.json` and copy a valid `.env` file into the container. Note that the Ollama instance runs on the host machine for performance reasons. Please ensure that [Ollama](https://ollama.com/download) is installed and running on the host. + +## Building and testing the container locally + +You can build and test the container locally using the `devcontainer` CLI tool. This process works independently of any specific IDE, such as VS Code or Cursor. + +```bash +# brew install devcontainer +# npm install -g @devcontainers/cli + +devcontainer read-configuration --workspace-folder . # Validates devcontainer.json configuration +devcontainer build --workspace-folder . # Builds the dev container +devcontainer up --workspace-folder . # Starts the dev container and runs postCreateCommand — the complete startup test. +``` + +## Building and testing the container in the CI pipeline + +The container build and startup processes are tested in the CI pipeline defined in `.github/workflows/ci.yml`. The availability of major development tools and the successful execution of `make lint`, `make typecheck`, and `make test` are verified. 
+ +## Known Issue in Cursor IDE + +Occasionally, the dev container may fail to start properly in the Cursor IDE. A [suggested workaround](https://forum.cursor.com/t/dev-containers-support/1510/13) is: + +1. Start the container using VS Code. +2. In Cursor, attach to the already running container. +3. Inside the container, navigate to `/workspaces/pydantic-ai`. diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..2091b98ce7 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,116 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python +{ + "name": "Pydantic AI", + "build": { + "dockerfile": "Dockerfile" + }, + // Features to add to the dev container. More info: https://containers.dev/features. + "features": { + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/lukewiwa/features/shellcheck:0": {}, + "ghcr.io/devcontainers/features/common-utils:2": { + "installLsof": true, + "installBuildTools": true, // This option installs build-essential + "upgradePackages": true, + "installZsh": false, + "installOhMyZsh": false + }, + "ghcr.io/devcontainers-community/features/deno:1": { // Required for https://github.com/pydantic/mcp-run-python + "version": "latest" + } + }, + // Port forwarding for local services + "forwardPorts": [ + 8000, + 8080, + 11434, + 54320 + ], + "portsAttributes": { + "8000": { + "label": "MkDocs Server", + "onAutoForward": "notify" + }, + "8080": { + "label": "FastAPI/Example Apps", + "onAutoForward": "notify" + }, + "11434": { + "label": "Ollama", + "onAutoForward": "silent" + }, + "54320": { + "label": "PostgreSQL", + "onAutoForward": "silent" + } + }, + // Use 'containerEnv' to set environment variables in the container. Visible system-wide. For example when `docker exec` is used. 
+ "containerEnv": { + "OLLAMA_HOST": "http://host.docker.internal:11434", // Connect to Ollama instance running on host machine. + "COLUMNS": "150", + "CI": "false", + "LOGFIRE_INSPECT_ARGUMENTS": "1", + "CMAKE_ARGS": "-DGGML_NATIVE=OFF", // Switch off native architecture optimization! Building llama-cpp-python on Apple Silicon fails otherwise. + "UV_FROZEN": "0", + "UV_LINK_MODE": "copy", + "UV_COMPILE_BYTECODE": "1", // uv will compile .py files to .pyc files. Speeds up startup time. + "PYTHONUNBUFFERED": "1" // Ensure stdout and stderr are unbuffered. + }, + // Use 'remoteEnv' to set environment variables for the remote workspace. Visible to VS Code background process only. + "remoteEnv": {}, + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": [ + "bash", + "-c", + "uv tool install pre-commit && make install" + ], + //"postStartCommand": "", + // Configure tool-specific properties. + // Please keep minimal. For further customizations, use .vscode and .cursor in the "mounts" section. + "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": ".venv/bin/python" + }, + "extensions": [ + // Python + "ms-python.python", + "charliermarsh.ruff", + // yaml + toml + "redhat.vscode-yaml", + "tamasfe.even-better-toml" + ] + }, + "cursor": { + "settings": { + "python.defaultInterpreterPath": ".venv/bin/python" + }, + "extensions": [ + // Python + "anysphere.pyright", + "charliermarsh.ruff", + // yaml + toml + "redhat.vscode-yaml", + "tamasfe.even-better-toml" + ] + } + } + // Uncomment to copy .env file into the container. + // "initializeCommand": "cp ../my-env/.env ./.env", + // Uncomment to mount .vscode and .cursor directories from other repositories or directories. 
+ // "mounts": [ + // { + // "source": "${localWorkspaceFolder}/../my-vscode-config/.vscode", + // "target": "${containerWorkspaceFolder}/.vscode", + // "type": "bind" + // }, + // { + // "source": "${localWorkspaceFolder}/../my-cursor-config/.cursor", + // "target": "${containerWorkspaceFolder}/.cursor", + // "type": "bind" + // } + // ] + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" +} diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000..0cb2a9e5d3 --- /dev/null +++ b/.env.example @@ -0,0 +1,158 @@ +# Copy this file to .env and fill in your actual values + +# ============================================================================ +# MODEL PROVIDER API KEYS +# ============================================================================ + +# OpenAI (Required for: OpenAI models, OpenAI-compatible providers) +# Get your key at: https://platform.openai.com/api-keys +OPENAI_API_KEY= + +# Anthropic (Required for: Claude models) +# Get your key at: https://console.anthropic.com/settings/keys +ANTHROPIC_API_KEY= + +# Google Generative AI (Required for: Gemini models via Google AI Studio) +# Get your key at: https://aistudio.google.com/apikey +GEMINI_API_KEY= + +# Google Cloud (Required for: Gemini models via Vertex AI) +# Service account JSON content (not a file path) +# Get it from: https://console.cloud.google.com/iam-admin/serviceaccounts +GOOGLE_SERVICE_ACCOUNT_CONTENT= + +# Groq (Required for: Groq models) +# Get your key at: https://console.groq.com/keys +GROQ_API_KEY= + +# Mistral AI (Required for: Mistral models) +# Get your key at: https://console.mistral.ai/api-keys +MISTRAL_API_KEY= + +# Cohere (Required for: Cohere models) +# Get your key at: https://dashboard.cohere.com/api-keys +CO_API_KEY= + +# AWS Bedrock (Required for: AWS Bedrock models) +# Configure via AWS CLI or set these: +# AWS_ACCESS_KEY_ID= +# AWS_SECRET_ACCESS_KEY= +# AWS_REGION=us-east-1 + +# 
============================================================================ +# ADDITIONAL MODEL PROVIDERS (OpenAI-compatible) +# ============================================================================ + +# DeepSeek (OpenAI-compatible) +# Get your key at: https://platform.deepseek.com/api_keys +DEEPSEEK_API_KEY= + +# xAI Grok (OpenAI-compatible) +# Get your key at: https://console.x.ai/ +GROK_API_KEY= + +# OpenRouter (Aggregates multiple providers) +# Get your key at: https://openrouter.ai/settings/keys +OPENROUTER_API_KEY= + +# Vercel AI Gateway +# Configure at: https://vercel.com/docs/ai-gateway +VERCEL_AI_GATEWAY_API_KEY= + +# Fireworks AI (OpenAI-compatible) +# Get your key at: https://fireworks.ai/api-keys +FIREWORKS_API_KEY= + +# Together AI (OpenAI-compatible) +# Get your key at: https://api.together.ai/settings/api-keys +TOGETHER_API_KEY= + +# Cerebras (OpenAI-compatible) +# Get your key at: https://cloud.cerebras.ai/ +CEREBRAS_API_KEY= + +# Nebius AI (OpenAI-compatible) +# Get your key at: https://studio.nebius.ai/ +NEBIUS_API_KEY= + +# OVHcloud AI Endpoints (OpenAI-compatible) +# Get your key at: https://endpoints.ai.cloud.ovh.net/ +OVHCLOUD_API_KEY= + +# MoonshotAI (OpenAI-compatible) +# Get your key at: https://platform.moonshot.cn/ +MOONSHOTAI_API_KEY= + +# Heroku Inference (OpenAI-compatible) +# Get your key at: https://www.heroku.com/ai +HEROKU_INFERENCE_KEY= + +# ============================================================================ +# LOCAL MODEL PROVIDERS +# ============================================================================ + +# Ollama (Optional - for local models) +# If running Ollama locally or via docker-compose, set the base URL +# Default when using docker-compose ollama service: +# OLLAMA_BASE_URL=http://localhost:11434/v1/ +# OLLAMA_API_KEY=placeholder # Not needed for local, but some tools require it + +# ============================================================================ +# OBSERVABILITY & MONITORING +# 
============================================================================ + +# Logfire (Optional - for structured logging and tracing) +# Get your token at: https://logfire.pydantic.dev/ +# LOGFIRE_TOKEN= +# LOGFIRE_SERVICE_NAME=pydantic-ai-dev + +# ============================================================================ +# SEARCH PROVIDERS (for tool integrations) +# ============================================================================ + +# Brave Search (Optional - for web search tools) +# Get your key at: https://brave.com/search/api/ +# BRAVE_API_KEY= + +# Tavily Search (Optional - for web search tools) +# Get your key at: https://tavily.com/ +# TAVILY_API_KEY= + +# ============================================================================ +# MODEL CONTEXT PROTOCOL (MCP) +# ============================================================================ + +# GitHub Personal Access Token (Optional - for MCP GitHub server) +# Create at: https://github.com/settings/tokens +# Needs: repo, read:org scopes +# GITHUB_PERSONAL_ACCESS_TOKEN= + +# ============================================================================ +# DATABASE CONNECTIONS (for examples) +# ============================================================================ + +# PostgreSQL (Optional - for SQL/RAG examples) +# Default when using docker-compose postgres service: +# DATABASE_URL=postgresql://postgres:postgres@localhost:54320/postgres + +# PostgreSQL with pgvector (Optional - for RAG examples) +# Default when using docker-compose pgvector service: +# PGVECTOR_DATABASE_URL=postgresql://postgres:postgres@localhost:54321/postgres + +# ============================================================================ +# TESTING FLAGS +# ============================================================================ + +# Enable live API testing (Optional - USE WITH CAUTION - incurs API costs!) 
+# Set to exact value below to enable live tests that hit real APIs +# PYDANTIC_AI_LIVE_TEST_DANGEROUS=CHARGE-ME! + +# ============================================================================ +# NOTES +# ============================================================================ +# +# - Most API keys are OPTIONAL - only set the ones you plan to use +# - For testing, use test models or Ollama to avoid API costs +# - Never commit this file with real API keys +# - Add .env to .gitignore (already done in this project) +# - See README.md for detailed setup instructions per provider diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b1467c2ac9..e6843dc339 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -315,6 +315,39 @@ jobs: path: htmlcov include-hidden-files: true + devcontainer: + runs-on: ubuntu-latest + steps: + - name: Free Disk Space on GitHub Actions runner + # Runners come with many preinstalled tools that we don't need for our build. + # This action removes them to free up disk space for the following heavy devcontainer build. 
+ uses: jlumbroso/free-disk-space@v1.3.1 + with: + tool-cache: false + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + + - uses: actions/checkout@v4 + + - name: Build and run dev container task + uses: devcontainers/ci@v0.3 + with: + runCmd: | + bash -c ' + set -o errexit -o pipefail -o nounset + uv --version + uvx --version + pre-commit --version + make lint + make typecheck + make test + echo "Dev container built and run successfully" + ' + # https://github.com/marketplace/actions/alls-green#why used for branch protection checks check: if: always() @@ -327,6 +360,7 @@ jobs: - test-lowest-versions - test-examples - coverage + - devcontainer runs-on: ubuntu-latest steps: diff --git a/.gitignore b/.gitignore index c1b21315b8..aca8999b31 100644 --- a/.gitignore +++ b/.gitignore @@ -22,4 +22,3 @@ node_modules/ .mcp.json .claude/ /.cursor/ -/.devcontainer/ diff --git a/pyproject.toml b/pyproject.toml index 1b28c22490..800c75579c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,7 +118,7 @@ docs = [ "mkdocs>=1.6.1", "mkdocs-glightbox>=0.4.0", "mkdocs-llmstxt>=0.2.0", - 'mkdocs-redirects>=1.2.2', + "mkdocs-redirects>=1.2.2", "mkdocs-material[imaging]>=9.5.45", "mkdocstrings-python>=1.12.2", "griffe-warnings-deprecated>=1.1.0", diff --git a/tests/models/test_outlines.py b/tests/models/test_outlines.py index 4ecc3668ba..d960e31b23 100644 --- a/tests/models/test_outlines.py +++ b/tests/models/test_outlines.py @@ -57,7 +57,7 @@ import vllm # We try to load the vllm model to ensure it is available - try: # pragma: no lax cover + try: # pragma: lax no cover vllm.LLM('microsoft/Phi-3-mini-4k-instruct') except RuntimeError as e: # pragma: lax no cover if 'Found no NVIDIA driver' in str(e) or 'Device string must not be empty' in str(e):