diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 94783bf..ab639cd 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -36,7 +36,7 @@ jobs: run: exit 0 # Skip unnecessary test runs for dependabot and merge queues. Artifically flag as successful, as this is a required check for branch protection. - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Initialize CodeQL uses: github/codeql-action/init@v3 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml deleted file mode 100644 index 96a410f..0000000 --- a/.github/workflows/docs.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Build Documentation - -on: - push: - branches: - - main - -permissions: - contents: read - pages: write - id-token: write - -concurrency: - group: "documentation" - cancel-in-progress: true - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup Pages - uses: actions/configure-pages@v5 - - - name: Configure Python - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Configure dependencies - run: | - pip install --user --upgrade pip - pip install --user pipx - pipx ensurepath - pipx install sphinx==5.3.0 - pipx inject sphinx pyjwt cryptography sphinx-mdinclude sphinx-rtd-theme sphinx-autodoc-typehints - - - name: Build documentation - run: | - sphinx-build ./docs/source ./docs/build --keep-going -n -a -b html - - - name: Upload artifact - uses: actions/upload-pages-artifact@v3 - with: - path: "./docs/build" - - deploy: - needs: build - runs-on: ubuntu-latest - environment: - name: "github-pages" - url: ${{ steps.deployment.outputs.page_url }} - - steps: - - id: deployment - name: Deploy to GitHub Pages - uses: actions/deploy-pages@v4 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 19c0283..6c425e9 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -32,7 
+32,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 fetch-tags: true diff --git a/.github/workflows/rl-scanner.yml b/.github/workflows/rl-scanner.yml index a10b241..f921e4c 100644 --- a/.github/workflows/rl-scanner.yml +++ b/.github/workflows/rl-scanner.yml @@ -31,7 +31,7 @@ jobs: scan-status: ${{ steps.rl-scan-conclusion.outcome }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 fetch-tags: true diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml deleted file mode 100644 index 7913b13..0000000 --- a/.github/workflows/semgrep.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Semgrep - -on: - merge_group: - pull_request: - types: - - opened - - synchronize - push: - branches: - - main - schedule: - - cron: "30 0 1,15 * *" - -permissions: - contents: read - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} - -jobs: - run: - name: Check for Vulnerabilities - runs-on: ubuntu-latest - - container: - image: returntocorp/semgrep - - steps: - - if: github.actor == 'dependabot[bot]' || github.event_name == 'merge_group' - run: exit 0 # Skip unnecessary test runs for dependabot and merge queues. Artifically flag as successful, as this is a required check for branch protection. - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - - run: semgrep ci - env: - SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index 090b428..12024b9 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -31,7 +31,7 @@ jobs: - if: github.actor == 'dependabot[bot]' || github.event_name == 'merge_group' run: exit 0 # Skip unnecessary test runs for dependabot and merge queues. 
Artifically flag as successful, as this is a required check for branch protection. - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha || github.ref }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 22fefd4..500d257 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -18,68 +18,53 @@ concurrency: cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} jobs: - run: - name: Run + test: runs-on: ubuntu-latest - - env: - BUBBLEWRAP_ARGUMENTS: | - --unshare-all \ - --clearenv \ - --ro-bind / / \ - --bind ${{ github.workspace }} ${{ github.workspace }} \ - --tmpfs $HOME \ - --tmpfs /tmp \ - --tmpfs /var \ - --dev /dev \ - --proc /proc \ - --die-with-parent \ - --new-session \ - strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] - + python-version: [3.9, "3.10", "3.11", "3.12"] + steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - - name: Configure Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ matrix.python-version }}" - - - name: Configure dependencies - run: | - sudo apt install bubblewrap - pip install --user --upgrade pip - pip install --user pipx - pip install --user setuptools - pipx ensurepath - pipx install poetry - poetry config virtualenvs.in-project true - poetry install --with dev - poetry self add "poetry-dynamic-versioning[plugin]" - - - name: Run tests - run: | - poetry run pytest + - name: Checkout code + uses: actions/checkout@v5 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: latest + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Load cached venv + id: cached-poetry-dependencies 
+ uses: actions/cache@v3 + with: + path: ./.venv + key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }} + + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + - name: Install package + run: poetry install --no-interaction + + - name: Run tests with pytest + run: | + poetry run pytest -v --cov=src --cov-report=term-missing --cov-report=xml - # - name: Run lint - # run: | - # pipx install black==23.3.0 - # pipx install flake8==5.0.4 - # pipx install isort==5.11.5 - # pipx install pyupgrade==3.3.2 - # black . --check - # flake8 . --count --show-source --statistics - # isort . --diff --profile black - # pyupgrade . --py37-plus --keep-runtime-typing + - name: Run ruff linting + run: | + poetry run ruff check . - - if: ${{ matrix.python-version == '3.10' }} - name: Upload coverage - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # pin@5.4.3 - with: - token: ${{ secrets.CODECOV_TOKEN }} \ No newline at end of file + # - if: ${{ matrix.python-version == '3.10' }} + # name: Upload coverage + # uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # pin@5.5.0 + # with: + # token: ${{ secrets.CODECOV_TOKEN }} \ No newline at end of file diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000..b500d05 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,16 @@ +line-length = 100 +target-version = "py39" +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "UP", # pyupgrade + "S", # bandit (security) +] +ignore = ["E501", "B904"] # Line too long (handled by black), Exception handling without from + +[per-file-ignores] +"tests/*" = ["S101", "S105", "S106"] # Allow assert and ignore hardcoded password warnings in test files diff --git a/EXAMPLES.md b/EXAMPLES.md new file mode 100644 index 
0000000..db6e8f6 --- /dev/null +++ b/EXAMPLES.md @@ -0,0 +1,160 @@ +# Auth0 API Python Examples + +This document provides examples for using the `auth0-api-python` package to validate Auth0 tokens in your API. + +## Bearer Authentication + +Bearer authentication is the standard OAuth 2.0 token authentication method. + +### Using verify_access_token + +```python +import asyncio +from auth0_api_python import ApiClient, ApiClientOptions + +async def validate_bearer_token(headers): + api_client = ApiClient(ApiClientOptions( + domain="your-tenant.auth0.com", + audience="https://api.example.com" + )) + + try: + # Extract the token from the Authorization header + auth_header = headers.get("authorization", "") + if not auth_header.startswith("Bearer "): + return {"error": "Missing or invalid authorization header"}, 401 + + token = auth_header.split(" ")[1] + + # Verify the access token + claims = await api_client.verify_access_token(token) + return {"success": True, "user": claims["sub"]} + except Exception as e: + return {"error": str(e)}, getattr(e, "get_status_code", lambda: 401)() + +# Example usage +headers = {"authorization": "Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9..."} +result = asyncio.run(validate_bearer_token(headers)) +``` + +### Using verify_request + +```python +import asyncio +from auth0_api_python import ApiClient, ApiClientOptions +from auth0_api_python.errors import BaseAuthError + +async def validate_request(headers): + api_client = ApiClient(ApiClientOptions( + domain="your-tenant.auth0.com", + audience="https://api.example.com" + )) + + try: + # Verify the request with Bearer token + claims = await api_client.verify_request( + headers=headers + ) + return {"success": True, "user": claims["sub"]} + except BaseAuthError as e: + return {"error": str(e)}, e.get_status_code(), e.get_headers() + +# Example usage +headers = {"authorization": "Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9..."} +result = asyncio.run(validate_request(headers)) +``` + + +## 
DPoP Authentication + +[DPoP](https://www.rfc-editor.org/rfc/rfc9449.html) (Demonstrating Proof of Possession) is an application-level mechanism for sender-constraining OAuth 2.0 access and refresh tokens by proving that the client application is in possession of a certain private key. + +This guide covers the DPoP implementation in `auth0-api-python` with complete examples for both operational modes. + +For more information about DPoP specification, see [RFC 9449](https://tools.ietf.org/html/rfc9449). + +## Configuration Modes + +### 1. Allowed Mode (Default) +```python +from auth0_api_python import ApiClient, ApiClientOptions + +api_client = ApiClient(ApiClientOptions( + domain="your-tenant.auth0.com", + audience="https://api.example.com", + dpop_enabled=True, # Default: enables DPoP support + dpop_required=False # Default: allows both Bearer and DPoP +)) +``` + +### 2. Required Mode +```python +api_client = ApiClient(ApiClientOptions( + domain="your-tenant.auth0.com", + audience="https://api.example.com", + dpop_required=True # Enforces DPoP-only authentication +)) +``` + +## Getting Started + +### Basic Usage with verify_request() + +The `verify_request()` method automatically detects the authentication scheme: + +```python +import asyncio +from auth0_api_python import ApiClient, ApiClientOptions + +async def handle_api_request(headers, http_method, http_url): + api_client = ApiClient(ApiClientOptions( + domain="your-tenant.auth0.com", + audience="https://api.example.com" + )) + + try: + # Automatically handles both Bearer and DPoP schemes + claims = await api_client.verify_request( + headers=headers, + http_method=http_method, + http_url=http_url + ) + return {"success": True, "user": claims["sub"]} + except Exception as e: + return {"error": str(e)}, e.get_status_code() + +# Example usage +headers = { + "authorization": "DPoP eyJ0eXAiOiJKV1Q...", + "dpop": "eyJ0eXAiOiJkcG9wK2p3dC..." 
+} +result = asyncio.run(handle_api_request(headers, "GET", "https://api.example.com/data")) +``` + +### Direct DPoP Proof Verification + +For more control, use `verify_dpop_proof()` directly: + +```python +async def verify_dpop_token(access_token, dpop_proof, http_method, http_url): + api_client = ApiClient(ApiClientOptions( + domain="your-tenant.auth0.com", + audience="https://api.example.com" + )) + + # First verify the access token + token_claims = await api_client.verify_access_token(access_token) + + # Then verify the DPoP proof + proof_claims = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method=http_method, + http_url=http_url + ) + + return { + "token_claims": token_claims, + "proof_claims": proof_claims + } +``` \ No newline at end of file diff --git a/README.md b/README.md index 68f351b..ecfec2e 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,24 @@ It’s intended as a foundation for building more framework-specific integration πŸ“š [Documentation](#documentation) - πŸš€ [Getting Started](#getting-started) - πŸ’¬ [Feedback](#feedback) +## Features & Authentication Schemes + +This SDK provides comprehensive support for securing APIs with Auth0-issued access tokens: + +### **Authentication Schemes** +- **Bearer Token Authentication** - Traditional OAuth 2.0 Bearer tokens (RS256) +- **DPoP Authentication** - Enhanced security with Demonstrating Proof-of-Possession (ES256) +- **Mixed Mode Support** - Seamlessly handles both Bearer and DPoP in the same API + +### **Core Features** +- **Unified Entry Point**: `verify_request()` - automatically detects and validates Bearer or DPoP schemes +- **OIDC Discovery** - Automatic fetching of Auth0 metadata and JWKS +- **JWT Validation** - Complete RS256 signature verification with claim validation +- **DPoP Proof Verification** - Full RFC 9449 compliance with ES256 signature validation +- **Flexible Configuration** - Support for both "Allowed" and "Required" DPoP modes +- 
**Comprehensive Error Handling** - Detailed errors with proper HTTP status codes and WWW-Authenticate headers +- **Framework Agnostic** - Works with FastAPI, Django, Flask, or any Python web framework + ## Documentation - [Docs Site](https://auth0.com/docs) - explore our docs site and learn more about Auth0. @@ -80,6 +98,61 @@ decoded_and_verified_token = await api_client.verify_access_token( If the token lacks `my_custom_claim` or fails any standard check (issuer mismatch, expired token, invalid signature), the method raises a `VerifyAccessTokenError`. +### 4. DPoP Authentication + +> [!NOTE] +> This feature is currently available in [Early Access](https://auth0.com/docs/troubleshoot/product-lifecycle/product-release-stages#early-access). Please reach out to Auth0 support to get it enabled for your tenant. + +This library supports **DPoP (Demonstrating Proof-of-Possession)** for enhanced security, allowing clients to prove possession of private keys bound to access tokens. + +#### Allowed Mode (Default) + +Accepts both Bearer and DPoP tokens - ideal for gradual migration: + +```python +api_client = ApiClient(ApiClientOptions( + domain="", + audience="", + dpop_enabled=True, # Default - enables DPoP support + dpop_required=False # Default - allows both Bearer and DPoP +)) + +# Use verify_request() for automatic scheme detection +result = await api_client.verify_request( + headers={ + "authorization": "DPoP eyJ0eXAiOiJKV1Q...", # DPoP scheme + "dpop": "eyJ0eXAiOiJkcG9wK2p3dC...", # DPoP proof + }, + http_method="GET", + http_url="https://api.example.com/resource" +) +``` + +#### Required Mode + +Enforces DPoP-only authentication, rejecting Bearer tokens: + +```python +api_client = ApiClient(ApiClientOptions( + domain="", + audience="", + dpop_required=True # Rejects Bearer tokens +)) +``` + +#### Configuration Options + +```python +api_client = ApiClient(ApiClientOptions( + domain="", + audience="", + dpop_enabled=True, # Enable/disable DPoP support + 
dpop_required=False, # Require DPoP (reject Bearer) + dpop_iat_leeway=30, # Clock skew tolerance (seconds) + dpop_iat_offset=300, # Maximum proof age (seconds) +)) +``` + ## Feedback ### Contributing diff --git a/poetry.lock b/poetry.lock index e68f15f..79189bb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,15 +1,79 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. + +[[package]] +name = "ada-url" +version = "1.26.0" +description = "URL parser and manipulator based on the WHAT WG URL standard" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "ada_url-1.26.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c2d1578f592be814d40f0a56031809b40500f61cb240966d0ec25ba152b55eb0"}, + {file = "ada_url-1.26.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:321d581274a60f227609be9b6c0863eced4a31b5bf8219d72bf305710d58116d"}, + {file = "ada_url-1.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:071ca476ed5e35651cd39986faea45f100b338147d69218b8170d491d6345baf"}, + {file = "ada_url-1.26.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9bc2cb70aa714f4093d8406bef1c1ae8c998818dda4e512645e6fc802959fdf1"}, + {file = "ada_url-1.26.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f50ec59bd673941b4e9563e152d7917eda5859b834f2e63093dafecf9896d396"}, + {file = "ada_url-1.26.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9d27819cac073dcf0909f3d884198d107d7149ccd21f8c084aed5a6eb2d4e579"}, + {file = "ada_url-1.26.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ea2ffc0f8976d05217324844a0f18bb90c8ec6ac08c31646a4a4a6396e8af906"}, + {file = "ada_url-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:3eb5ce4b81d1f8344d032c69af1804bc1475ba4db3d5b586e6f1dae0884fcbcf"}, + {file = 
"ada_url-1.26.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:6eec591ed6c13b323501e2ce1f29f0dc731affb11036140119382baa08f17f3b"}, + {file = "ada_url-1.26.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:3f8298b60ddd76f2b225b4e5b16b5def61c157c1cdd856c2093b3fcaa3e98441"}, + {file = "ada_url-1.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba6d518b4bafec467c8d879a1620f0aef400307cb5ae0f96772139f66c611d56"}, + {file = "ada_url-1.26.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dfbd6fb73f182ec1488ce24716d64af5fbcc8af90a511a571ca408d8f91d36ad"}, + {file = "ada_url-1.26.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de7b083b700cb71490e9a716ea42c9fee3b4f973aeccde08c6e6066f1184f59a"}, + {file = "ada_url-1.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:272fa1ac5ec60bd1a5399c824e63bbd3084ab1410cb89c5497cc1b3e93513cf2"}, + {file = "ada_url-1.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c244dabc7d88861efee7e822525b89fdcc8fec7d17f89ca0368a90eb401c76d"}, + {file = "ada_url-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:db36fa791b80e2f1034c91a41ab489d3b78aead79f52173b67619bd830d3ff83"}, + {file = "ada_url-1.26.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5ad5fca18df30b93aa4196bc236aef37dfb4e8b1ade93deea14c03b9c2d87486"}, + {file = "ada_url-1.26.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:3a52d5e157738519ab504913972e3abf4e800a45574e9b431c4ee88589f213d5"}, + {file = "ada_url-1.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b56a0a685c7440aa7f49ff4827bca55c03ba4c54e7b9744a867d195eca2564b7"}, + {file = "ada_url-1.26.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f060bfdd2774c8313f0353325aeadf9afdd940f4c0833628d3fa4b7b09fe7949"}, + {file = "ada_url-1.26.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e23086b6f65d21a988457cad4cc63235796b1f213a66d7173d05206690fabb69"}, + 
{file = "ada_url-1.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f3d5b942b19a81236e1aae94bc7315fbeaceeefa2775c2f40ab9196009151da0"}, + {file = "ada_url-1.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4606887898806cc4cbf19a285a1dab131e2409dc293a534c8505ef15eeac7fb7"}, + {file = "ada_url-1.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:09b9a6e47d6084ac64957a947bfadab4fc1117b157cd0463091c46434bb11d01"}, + {file = "ada_url-1.26.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:023c8f520ba3a2a7c389f1205d4b2a9384bd06c8cf8b48ae58c43cdb4cfa1881"}, + {file = "ada_url-1.26.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:6e8db691157ada513c5e877fd66f0cea54ef473fcf7e6bad429608f2c32d5d63"}, + {file = "ada_url-1.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c158ae6850b1ac66c1dbd54a7b5dac5a2a953ca33db0cd6bcef1c97b1b5536de"}, + {file = "ada_url-1.26.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc723bf495730c22ec2890b8e5d4bbe591b73e97af6e8a862e0ca44ac4197660"}, + {file = "ada_url-1.26.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd375e12247a1d6ca190a67bc88463b60c013361d3f99e2347f8a7af2f548a1c"}, + {file = "ada_url-1.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:da783beac508b487d1c09a2afa35e3e14e39f164dd2c4a2d91db16ac63cfe65d"}, + {file = "ada_url-1.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:01e9e18dad01adc4703ffda5600c5ae0e5da547124e4ae0a74b0d30cdcf952b1"}, + {file = "ada_url-1.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:bbc9c955a37c15984495d487a9e1b5ff8aa681101cd3f087faab30fab03d53c8"}, + {file = "ada_url-1.26.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:e2a9fa9293f0137b04c1804fa357e906401977111cc8f7da2aa0e4971d152455"}, + {file = "ada_url-1.26.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:8678a5303a6d21d4c7639e8cb7d236f21a43615705ec841c2e0201a9c295de09"}, + {file = 
"ada_url-1.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf74c390d3ba9d264521d60e6c4a211aae9dbc5fd324c80bec8bfd5eea228347"}, + {file = "ada_url-1.26.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a7db1d9356dfaf3d69e8fe052ea2f301044b2ec111f050a16ea49ea53645f1"}, + {file = "ada_url-1.26.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2558db19ac40b1cd4d936b57724442a3340e4cd7b9ef55fa9b793fef525a12d7"}, + {file = "ada_url-1.26.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28542b0b958b75f5ce6cf3121f43eeb3884d29b6e873d5aad70c9a4807938178"}, + {file = "ada_url-1.26.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d60751f85392c2e1b610fb1799b6ec496e64b6c7072bac307c7109813da5745"}, + {file = "ada_url-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:3851518c53c8b5b2c2fb75a3571987f5669d2f37f7fe81e28e6080d42079f6e2"}, + {file = "ada_url-1.26.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:09ea872cc1d064123586ca3c0f934daf6d2bf0ed92dfdbebf166268ec1952595"}, + {file = "ada_url-1.26.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0b2df12d04f5a57d17175182fb631cc1c21b21d6a1174fc1dee26c9978cec39b"}, + {file = "ada_url-1.26.0-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc6d6ce54384cae19c599b4464c391ad5208f244c885cf150957aeec81102bb7"}, + {file = "ada_url-1.26.0-pp310-pypy310_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6f15abd4204760419683b7457ffbc4a71c86383b10273454db4773ad3e763c"}, + {file = "ada_url-1.26.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9c5018f739e2ddb092cd0f2a2a8ee0125bbf101ac93b1cbc762988b4515b1672"}, + {file = "ada_url-1.26.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:476b5ff71d89ce07ddc8d059c404f754582ad66946f6eb4ebb8a3162c917bc79"}, + {file = "ada_url-1.26.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:8d9e4fdc053711d42bb1ca3a1d1b201fa4628b6fdc8c65bdf158e9ec3ad1be0a"}, + {file = "ada_url-1.26.0-pp39-pypy39_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80bee51c57e53b878c1855b4c97c4037d5d1d35f83ade0f3664e82f2e9259ca3"}, + {file = "ada_url-1.26.0-pp39-pypy39_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e666ea81c54d8c705fa6262ef502fa483d6ca48727c6340f488f98d1d4716147"}, + {file = "ada_url-1.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d4e1a6d4d60d3603233b4dd6c3e461d25768d7127c346fc6dcd83920a619500e"}, + {file = "ada_url-1.26.0.tar.gz", hash = "sha256:87988926d78a68bc08de0595362163fa3d3126bf9e0223aaf9d98272de2625f4"}, +] + +[package.dependencies] +cffi = "*" [[package]] name = "anyio" -version = "4.9.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" +version = "4.10.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, - {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, + {file = "anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1"}, + {file = "anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6"}, ] [package.dependencies] @@ -19,52 +83,33 @@ sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore 
(>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] name = "authlib" -version = "1.5.2" +version = "1.6.3" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "authlib-1.5.2-py2.py3-none-any.whl", hash = "sha256:8804dd4402ac5e4a0435ac49e0b6e19e395357cfa632a3f624dcb4f6df13b4b1"}, - {file = "authlib-1.5.2.tar.gz", hash = "sha256:fe85ec7e50c5f86f1e2603518bb3b4f632985eb4a355e52256530790e326c512"}, + {file = "authlib-1.6.3-py2.py3-none-any.whl", hash = "sha256:7ea0f082edd95a03b7b72edac65ec7f8f68d703017d7e37573aee4fc603f2a48"}, + {file = "authlib-1.6.3.tar.gz", hash = "sha256:9f7a982cc395de719e4c2215c5707e7ea690ecf84f1ab126f28c053f4219e610"}, ] [package.dependencies] cryptography = "*" -[[package]] -name = "backports-tarfile" -version = "1.2.0" -description = "Backport of CPython tarfile module" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and python_version < \"3.12\"" -files = [ - {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, - {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] - [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.8.3" description = "Python package for providing Mozilla's CA Bundle." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" groups = ["main", "dev"] files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, + {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, ] [[package]] @@ -73,7 +118,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -143,111 +188,97 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {main = "platform_python_implementation != \"PyPy\"", dev = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and sys_platform == \"linux\" and platform_python_implementation != \"PyPy\""} [package.dependencies] pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.4.1" +version = "3.4.3" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main"] files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, + {file = 
"charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f"}, + {file = "charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64"}, + {file = 
"charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849"}, + {file = "charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884"}, + {file = 
"charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37"}, + {file = "charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8"}, + {file = 
"charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce"}, + {file = "charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db"}, + {file = 
"charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce"}, + {file = "charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-win32.whl", hash = "sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557"}, + {file = "charset_normalizer-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99"}, + {file = 
"charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432"}, + {file = "charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca"}, + {file = "charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a"}, + {file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"}, ] [[package]] @@ -265,82 +296,107 @@ files = [ [[package]] name = "coverage" -version = "7.8.0" +version = "7.10.5" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, - {file = 
"coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, - {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, - {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, - {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, - {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, - {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, - {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, - {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, - {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, - {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, - {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, - {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, - {file = 
"coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, - {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, - {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, - {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, - {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, - {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, - {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, - {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, - {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, - {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, - {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, - {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, - {file = 
"coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, - {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, - {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, - {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, - {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, - {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, - {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, - {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, - {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, - {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, - {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, - {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, - {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, - {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, - {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, - {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, - {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, - {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, - {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, - {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, - {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, - {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, - {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, - {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, - {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, - {file = 
"coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, - {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, - {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, - {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, - {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, - {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, - {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, - {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, - {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, - {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, - {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, - {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, - {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, + {file = 
"coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801"}, + {file = "coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a"}, + {file = "coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754"}, + {file = "coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33"}, + {file = "coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f"}, + {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9"}, + {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3"}, + {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879"}, + {file = "coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8"}, + {file = "coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff"}, + {file = "coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2"}, + {file = "coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f"}, + {file = "coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab"}, + {file = "coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c"}, + {file = "coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1"}, + {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78"}, + {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df"}, + {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6"}, + {file = "coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf"}, + {file = "coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50"}, + {file = "coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82"}, + {file = "coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9"}, + {file = "coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b"}, + {file = "coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c"}, + {file = "coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a"}, + {file = 
"coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6"}, + {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a"}, + {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a"}, + {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34"}, + {file = "coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf"}, + {file = "coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f"}, + {file = "coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8"}, + {file = "coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c"}, + {file = "coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44"}, + {file = "coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc"}, + {file = "coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869"}, + {file = "coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f"}, + {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5"}, + {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c"}, + {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2"}, + {file = "coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4"}, + {file = "coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b"}, + {file = "coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84"}, + {file = "coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7"}, + {file = "coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b"}, + {file = "coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae"}, + {file = "coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760"}, + {file = "coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235"}, + {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5"}, + {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db"}, + {file = 
"coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e"}, + {file = "coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee"}, + {file = "coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14"}, + {file = "coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff"}, + {file = "coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031"}, + {file = "coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3"}, + {file = "coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031"}, + {file = "coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2"}, + {file = "coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762"}, + {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae"}, + {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262"}, + {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99"}, + {file = "coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde"}, + 
{file = "coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13"}, + {file = "coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9"}, + {file = "coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508"}, + {file = "coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357"}, + {file = "coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b"}, + {file = "coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4"}, + {file = "coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba"}, + {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842"}, + {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874"}, + {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732"}, + {file = "coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df"}, + {file = "coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f"}, + {file = "coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = 
"sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2"}, + {file = "coverage-7.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:62835c1b00c4a4ace24c1a88561a5a59b612fbb83a525d1c70ff5720c97c0610"}, + {file = "coverage-7.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5255b3bbcc1d32a4069d6403820ac8e6dbcc1d68cb28a60a1ebf17e47028e898"}, + {file = "coverage-7.10.5-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3876385722e335d6e991c430302c24251ef9c2a9701b2b390f5473199b1b8ebf"}, + {file = "coverage-7.10.5-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8048ce4b149c93447a55d279078c8ae98b08a6951a3c4d2d7e87f4efc7bfe100"}, + {file = "coverage-7.10.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4028e7558e268dd8bcf4d9484aad393cafa654c24b4885f6f9474bf53183a82a"}, + {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03f47dc870eec0367fcdd603ca6a01517d2504e83dc18dbfafae37faec66129a"}, + {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2d488d7d42b6ded7ea0704884f89dcabd2619505457de8fc9a6011c62106f6e5"}, + {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3dcf2ead47fa8be14224ee817dfc1df98043af568fe120a22f81c0eb3c34ad2"}, + {file = "coverage-7.10.5-cp39-cp39-win32.whl", hash = "sha256:02650a11324b80057b8c9c29487020073d5e98a498f1857f37e3f9b6ea1b2426"}, + {file = "coverage-7.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:b45264dd450a10f9e03237b41a9a24e85cbb1e278e5a32adb1a303f58f0017f3"}, + {file = "coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a"}, + {file = "coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and 
extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -348,7 +404,7 @@ version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, @@ -378,7 +434,6 @@ files = [ {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] -markers = {dev = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and sys_platform == \"linux\""} [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} @@ -393,61 +448,52 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "docutils" -version = "0.21.2" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, - {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, -] - [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.3.0" description = 
"Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "dev"] markers = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.9" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.13,<0.15" +h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] @@ -474,32 +520,12 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "id" -version = "1.5.0" -description = "A tool for generating OIDC identities" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "id-1.5.0-py3-none-any.whl", hash = "sha256:f1434e1cef91f2cbb8a4ec64663d5a23b9ed43ef44c4c957d02583d61714c658"}, - {file = "id-1.5.0.tar.gz", hash = "sha256:292cb8a49eacbbdbce97244f47a97b4c62540169c976552e497fd57df0734c1d"}, -] - -[package.dependencies] -requests = "*" - -[package.extras] -dev = ["build", "bump (>=1.3.2)", "id[lint,test]"] -lint = ["bandit", "interrogate", "mypy", "ruff (<0.8.2)", "types-requests"] -test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"] - [[package]] name = "idna" version = "3.10" @@ -515,31 +541,6 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "importlib-metadata" -version = "8.6.1" -description = 
"Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and python_version < \"3.12\" or python_version < \"3.10\"" -files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.1.0" @@ -552,229 +553,33 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] -[[package]] -name = "jaraco-classes" -version = "3.4.0" -description = "Utility functions for Python class constructs" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" -files = [ - {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, - {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, -] - -[package.dependencies] -more-itertools = "*" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jaraco-context" -version = "6.0.1" -description = "Useful decorators and context managers" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" -files = [ - {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, - {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, -] - -[package.dependencies] -"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jaraco-functools" -version = "4.1.0" -description = "Functools like those found in stdlib" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" -files = [ - {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, - {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, -] - -[package.dependencies] -more-itertools = "*" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] -type = ["pytest-mypy"] - -[[package]] -name = "jeepney" 
-version = "0.9.0" -description = "Low-level, pure Python DBus protocol wrapper." -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and sys_platform == \"linux\"" -files = [ - {file = "jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683"}, - {file = "jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732"}, -] - -[package.extras] -test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] -trio = ["trio"] - -[[package]] -name = "keyring" -version = "25.6.0" -description = "Store and access your passwords safely." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" -files = [ - {file = "keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd"}, - {file = "keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66"}, -] - -[package.dependencies] -importlib_metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} -"jaraco.classes" = "*" -"jaraco.context" = "*" -"jaraco.functools" = "*" -jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} -pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} -SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -completion = ["shtab (>=1.1.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] -type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] - 
-[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "more-itertools" -version = "10.6.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" -files = [ - {file = "more-itertools-10.6.0.tar.gz", hash = "sha256:2cd7fad1009c31cc9fb6a035108509e6547547a7a738374f10bd49a09eb3ee3b"}, - {file = "more_itertools-10.6.0-py3-none-any.whl", hash = 
"sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89"}, -] - -[[package]] -name = "nh3" -version = "0.2.21" -description = "Python binding to Ammonia HTML sanitizer Rust crate" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "nh3-0.2.21-cp313-cp313t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:fcff321bd60c6c5c9cb4ddf2554e22772bb41ebd93ad88171bbbb6f271255286"}, - {file = "nh3-0.2.21-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eedcd7d08b0eae28ba47f43fd33a653b4cdb271d64f1aeda47001618348fde"}, - {file = "nh3-0.2.21-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d426d7be1a2f3d896950fe263332ed1662f6c78525b4520c8e9861f8d7f0d243"}, - {file = "nh3-0.2.21-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9d67709bc0d7d1f5797b21db26e7a8b3d15d21c9c5f58ccfe48b5328483b685b"}, - {file = "nh3-0.2.21-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:55823c5ea1f6b267a4fad5de39bc0524d49a47783e1fe094bcf9c537a37df251"}, - {file = "nh3-0.2.21-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:818f2b6df3763e058efa9e69677b5a92f9bc0acff3295af5ed013da544250d5b"}, - {file = "nh3-0.2.21-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:b3b5c58161e08549904ac4abd450dacd94ff648916f7c376ae4b2c0652b98ff9"}, - {file = "nh3-0.2.21-cp313-cp313t-win32.whl", hash = "sha256:637d4a10c834e1b7d9548592c7aad760611415fcd5bd346f77fd8a064309ae6d"}, - {file = "nh3-0.2.21-cp313-cp313t-win_amd64.whl", hash = "sha256:713d16686596e556b65e7f8c58328c2df63f1a7abe1277d87625dcbbc012ef82"}, - {file = "nh3-0.2.21-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a772dec5b7b7325780922dd904709f0f5f3a79fbf756de5291c01370f6df0967"}, - {file = "nh3-0.2.21-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d002b648592bf3033adfd875a48f09b8ecc000abd7f6a8769ed86b6ccc70c759"}, - {file = 
"nh3-0.2.21-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2a5174551f95f2836f2ad6a8074560f261cf9740a48437d6151fd2d4d7d617ab"}, - {file = "nh3-0.2.21-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b8d55ea1fc7ae3633d758a92aafa3505cd3cc5a6e40470c9164d54dff6f96d42"}, - {file = "nh3-0.2.21-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ae319f17cd8960d0612f0f0ddff5a90700fa71926ca800e9028e7851ce44a6f"}, - {file = "nh3-0.2.21-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63ca02ac6f27fc80f9894409eb61de2cb20ef0a23740c7e29f9ec827139fa578"}, - {file = "nh3-0.2.21-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5f77e62aed5c4acad635239ac1290404c7e940c81abe561fd2af011ff59f585"}, - {file = "nh3-0.2.21-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:087ffadfdcd497658c3adc797258ce0f06be8a537786a7217649fc1c0c60c293"}, - {file = "nh3-0.2.21-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ac7006c3abd097790e611fe4646ecb19a8d7f2184b882f6093293b8d9b887431"}, - {file = "nh3-0.2.21-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:6141caabe00bbddc869665b35fc56a478eb774a8c1dfd6fba9fe1dfdf29e6efa"}, - {file = "nh3-0.2.21-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:20979783526641c81d2f5bfa6ca5ccca3d1e4472474b162c6256745fbfe31cd1"}, - {file = "nh3-0.2.21-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a7ea28cd49293749d67e4fcf326c554c83ec912cd09cd94aa7ec3ab1921c8283"}, - {file = "nh3-0.2.21-cp38-abi3-win32.whl", hash = "sha256:6c9c30b8b0d291a7c5ab0967ab200598ba33208f754f2f4920e9343bdd88f79a"}, - {file = "nh3-0.2.21-cp38-abi3-win_amd64.whl", hash = "sha256:bb0014948f04d7976aabae43fcd4cb7f551f9f8ce785a4c9ef66e6c2590f8629"}, - {file = "nh3-0.2.21.tar.gz", hash = "sha256:4990e7ee6a55490dbf00d61a6f476c9a3258e31e711e13713b2ea7d6616f670e"}, -] - [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core 
utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pycparser" @@ -782,23 +587,22 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {main = "platform_python_implementation != \"PyPy\"", dev = "(platform_machine != \"ppc64le\" 
and platform_machine != \"s390x\") and sys_platform == \"linux\" and platform_python_implementation != \"PyPy\""} [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [package.extras] @@ -806,26 +610,27 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.3.5" +version = "8.4.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, - {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, + {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" 
+packaging = ">=20" pluggy = ">=1.5,<2" +pygments = ">=2.7.2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -886,14 +691,14 @@ testing = ["pytest-asyncio (==0.24.*)", "pytest-cov (==6.*)"] [[package]] name = "pytest-mock" -version = "3.14.0" +version = "3.14.1" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, + {file = "pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0"}, + {file = "pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e"}, ] [package.dependencies] @@ -902,54 +707,21 @@ pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] -[[package]] -name = "pywin32-ctypes" -version = "0.2.3" -description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -markers = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and sys_platform == \"win32\"" -files = [ - {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, - {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, -] - -[[package]] -name = "readme-renderer" -version = 
"44.0" -description = "readme_renderer is a library for rendering readme descriptions for Warehouse" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151"}, - {file = "readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1"}, -] - -[package.dependencies] -docutils = ">=0.21.2" -nh3 = ">=0.2.14" -Pygments = ">=2.5.1" - -[package.extras] -md = ["cmarkgfm (>=0.8.0)"] - [[package]] name = "requests" -version = "2.32.3" +version = "2.32.5" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -958,72 +730,32 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = 
"sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "rfc3986" -version = "2.0.0" -description = "Validating URI References per RFC 3986" +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, - {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, -] - -[package.extras] -idna2008 = ["idna"] - -[[package]] -name = "rich" -version = "14.0.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.8.0" -groups = ["dev"] -files = [ - {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, - {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "secretstorage" -version = "3.3.3" -description = "Python bindings to FreeDesktop.org Secret Service API" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -markers = "(platform_machine != \"ppc64le\" and platform_machine != \"s390x\") and sys_platform == \"linux\"" -files = [ - {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, - {file = "SecretStorage-3.3.3.tar.gz", hash = 
"sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = 
"ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] -[package.dependencies] -cryptography = ">=2.0" -jeepney = ">=0.6" - [[package]] name = "sniffio" version = "1.3.1" @@ -1079,86 +811,38 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -[[package]] -name = "twine" -version = "6.1.0" -description = "Collection of utilities for publishing packages on PyPI" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "twine-6.1.0-py3-none-any.whl", hash = "sha256:a47f973caf122930bf0fbbf17f80b83bc1602c9ce393c7845f289a3001dc5384"}, - {file = "twine-6.1.0.tar.gz", hash = "sha256:be324f6272eff91d07ee93f251edf232fc647935dd585ac003539b42404a8dbd"}, -] - -[package.dependencies] -id = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -keyring = {version = ">=15.1", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""} -packaging = ">=24.0" -readme-renderer = ">=35.0" -requests = ">=2.20" -requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" -rfc3986 = ">=1.4.0" -rich = ">=12.0.0" -urllib3 = ">=1.26.0" - -[package.extras] -keyring = ["keyring (>=15.1)"] - [[package]] name = "typing-extensions" -version = "4.13.1" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.15.0" +description = "Backported and Experimental 
Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] markers = "python_version < \"3.13\"" files = [ - {file = "typing_extensions-4.13.1-py3-none-any.whl", hash = "sha256:4b6cf02909eb5495cfbc3f6e8fd49217e6cc7944e145cdda8caa3734777f9e69"}, - {file = "typing_extensions-4.13.1.tar.gz", hash = "sha256:98795af00fb9640edec5b8e31fc647597b4691f099ad75f469a2616be1a76dff"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] name = "urllib3" -version = "2.3.0" +version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "(platform_machine != \"ppc64le\" 
and platform_machine != \"s390x\") and python_version < \"3.12\" or python_version < \"3.10\"" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "f520b72141154b1ab70c231fd79737388402228a6a98305dcb0d3c78cd069cdf" +content-hash = "8148934753f46458ec2c8fc058999422b3e985796d1279aa3eb1ee8238c5e46d" diff --git a/pyproject.toml b/pyproject.toml index b6d6fe0..66ea53d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ python = "^3.9" authlib = "^1.0" # For JWT/OIDC features requests = "^2.31.0" # If you use requests for HTTP calls (e.g., discovery) httpx = "^0.28.1" +ada-url = "^1.25.0" [tool.poetry.group.dev.dependencies] pytest = "^8.0" @@ -22,7 +23,7 @@ pytest-cov = "^4.0" pytest-asyncio = "^0.20.3" pytest-mock = "^3.14.0" pytest-httpx = "^0.35.0" -twine = "^6.1.0" +ruff = "^0.1.0" [tool.pytest.ini_options] addopts = "--cov=src --cov-report=term-missing:skip-covered --cov-report=xml" diff --git a/requirements.txt b/requirements.txt index c3356da..6b0293b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,45 +1,12 @@ -anyio==4.9.0 -Authlib==1.5.2 -backports.tarfile==1.2.0 -certifi==2025.1.31 -cffi==1.17.1 -charset-normalizer==3.4.1 -coverage==7.8.0 -cryptography==44.0.1 -docutils==0.21.2 -exceptiongroup==1.2.2 -h11==0.14.0 
-httpcore==1.0.7 -httpx==0.28.1 -id==1.5.0 -idna==3.10 -importlib_metadata==8.6.1 -iniconfig==2.1.0 -jaraco.classes==3.4.0 -jaraco.context==6.0.1 -jaraco.functools==4.1.0 -keyring==25.6.0 -markdown-it-py==3.0.0 -mdurl==0.1.2 -more-itertools==10.6.0 -nh3==0.2.21 -packaging==24.2 -pluggy==1.5.0 -pycparser==2.22 -Pygments==2.19.1 -pytest==8.3.5 -pytest-asyncio==0.20.3 -pytest-cov==4.1.0 -pytest-httpx==0.35.0 -pytest-mock==3.14.0 -readme_renderer==44.0 -requests==2.32.3 -requests-toolbelt==1.0.0 -rfc3986==2.0.0 -rich==14.0.0 -sniffio==1.3.1 -tomli==2.2.1 -twine==6.1.0 -typing_extensions==4.13.1 -urllib3==2.4.0 -zipp==3.21.0 +# Core runtime dependencies +authlib>=1.6.3 +httpx>=0.28.1 +ada-url>=1.26.0 + +# Development and testing dependencies +pytest>=8.0 +pytest-cov>=4.0 +pytest-asyncio>=0.20.3 +pytest-mock>=3.14.1 +pytest-httpx>=0.35.0 + diff --git a/src/auth0_api_python/__init__.py b/src/auth0_api_python/__init__.py index a9b98fd..f487dd8 100644 --- a/src/auth0_api_python/__init__.py +++ b/src/auth0_api_python/__init__.py @@ -11,4 +11,4 @@ __all__ = [ "ApiClient", "ApiClientOptions" -] \ No newline at end of file +] diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py index b38409e..0eb7a22 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -1,11 +1,26 @@ import time -from typing import Optional, List, Dict, Any +from typing import Any, Optional -from authlib.jose import JsonWebToken, JsonWebKey +from authlib.jose import JsonWebKey, JsonWebToken from .config import ApiClientOptions -from .errors import MissingRequiredArgumentError, VerifyAccessTokenError -from .utils import fetch_oidc_metadata, fetch_jwks, get_unverified_header +from .errors import ( + BaseAuthError, + InvalidAuthSchemeError, + InvalidDpopProofError, + MissingAuthorizationError, + MissingRequiredArgumentError, + VerifyAccessTokenError, +) +from .utils import ( + calculate_jwk_thumbprint, + fetch_jwks, + fetch_oidc_metadata, + 
get_unverified_header, + normalize_url_for_htu, + sha256_base64url, +) + class ApiClient: """ @@ -14,46 +29,205 @@ class ApiClient: """ def __init__(self, options: ApiClientOptions): - if not options.domain: raise MissingRequiredArgumentError("domain") if not options.audience: raise MissingRequiredArgumentError("audience") self.options = options - self._metadata: Optional[Dict[str, Any]] = None - self._jwks_data: Optional[Dict[str, Any]] = None + self._metadata: Optional[dict[str, Any]] = None + self._jwks_data: Optional[dict[str, Any]] = None self._jwt = JsonWebToken(["RS256"]) - async def _discover(self) -> Dict[str, Any]: - """Lazy-load OIDC discovery metadata.""" - if self._metadata is None: - self._metadata = await fetch_oidc_metadata( - domain=self.options.domain, - custom_fetch=self.options.custom_fetch - ) - return self._metadata + self._dpop_algorithms = ["ES256"] + self._dpop_jwt = JsonWebToken(self._dpop_algorithms) - async def _load_jwks(self) -> Dict[str, Any]: - """Fetches and caches JWKS data from the OIDC metadata.""" - if self._jwks_data is None: - metadata = await self._discover() - jwks_uri = metadata["jwks_uri"] - self._jwks_data = await fetch_jwks( - jwks_uri=jwks_uri, - custom_fetch=self.options.custom_fetch + def is_dpop_required(self) -> bool: + """Check if DPoP authentication is required.""" + return getattr(self.options, "dpop_required", False) + + + async def verify_request( + self, + headers: dict[str, str], + http_method: Optional[str] = None, + http_url: Optional[str] = None + ) -> dict[str, Any]: + """ + Dispatch based on Authorization scheme: + β€’ If scheme is 'DPoP', verifies both access token and DPoP proof + β€’ If scheme is 'Bearer', verifies only the access token + + Args: + headers: HTTP headers dict containing (header keys should be lowercase): + - "authorization": The Authorization header value (required) + - "dpop": The DPoP proof header value (required for DPoP) + http_method: The HTTP method (required for DPoP) + 
http_url: The HTTP URL (required for DPoP) + + Returns: + The decoded access token claims + + Raises: + MissingRequiredArgumentError: If required args are missing + InvalidAuthSchemeError: If an unsupported scheme is provided + InvalidDpopProofError: If DPoP verification fails + VerifyAccessTokenError: If access token verification fails + """ + authorization_header = headers.get("authorization", "") + dpop_proof = headers.get("dpop") + + if not authorization_header: + if self.is_dpop_required(): + raise self._prepare_error( + InvalidAuthSchemeError("") + ) + else : + raise self._prepare_error(MissingAuthorizationError()) + + + parts = authorization_header.split(" ") + if len(parts) != 2: + if len(parts) < 2: + raise self._prepare_error(MissingAuthorizationError()) + elif len(parts) > 2: + raise self._prepare_error( + InvalidAuthSchemeError("") + ) + + scheme, token = parts + + scheme = scheme.strip().lower() + + if self.is_dpop_required() and scheme != "dpop": + raise self._prepare_error( + InvalidAuthSchemeError(""), + auth_scheme=scheme ) - return self._jwks_data + if not token.strip(): + raise self._prepare_error(MissingAuthorizationError()) + + + if scheme == "dpop": + if not self.options.dpop_enabled: + raise self._prepare_error(MissingAuthorizationError()) + + if not dpop_proof: + if self.is_dpop_required(): + raise self._prepare_error( + InvalidAuthSchemeError(""), + auth_scheme=scheme + ) + else: + raise self._prepare_error( + InvalidAuthSchemeError(""), + auth_scheme=scheme + ) + + if "," in dpop_proof: + raise self._prepare_error( + InvalidDpopProofError("Multiple DPoP proofs are not allowed"), + auth_scheme=scheme + ) + + try: + dpop_header = get_unverified_header(dpop_proof) + except Exception: + raise self._prepare_error(InvalidDpopProofError("Failed to verify DPoP proof"), auth_scheme=scheme) + + if not http_method or not http_url: + missing_params = [] + if not http_method: + missing_params.append("http_method") + if not http_url: + 
missing_params.append("http_url") + + raise self._prepare_error( + MissingRequiredArgumentError(f"DPoP authentication requires {' and '.join(missing_params)}"), + auth_scheme=scheme + ) + + try: + access_token_claims = await self.verify_access_token(token) + except VerifyAccessTokenError as e: + raise self._prepare_error(e, auth_scheme=scheme) + + cnf_claim = access_token_claims.get("cnf") + + if not cnf_claim: + raise self._prepare_error( + VerifyAccessTokenError("JWT Access Token has no jkt confirmation claim"), + auth_scheme=scheme + ) + + if not isinstance(cnf_claim, dict): + raise self._prepare_error( + VerifyAccessTokenError("JWT Access Token has invalid confirmation claim format"), + auth_scheme=scheme + ) + try: + await self.verify_dpop_proof( + access_token=token, + proof=dpop_proof, + http_method=http_method, + http_url=http_url + ) + except InvalidDpopProofError as e: + raise self._prepare_error(e, auth_scheme=scheme) + + # DPoP binding verification + jwk_dict = dpop_header["jwk"] + actual_jkt = calculate_jwk_thumbprint(jwk_dict) + expected_jkt = cnf_claim.get("jkt") + + if not expected_jkt: + raise self._prepare_error( + VerifyAccessTokenError("Access token 'cnf' claim missing 'jkt'"), + auth_scheme=scheme + ) + + if expected_jkt != actual_jkt: + raise self._prepare_error( + VerifyAccessTokenError("JWT Access Token confirmation mismatch"), + auth_scheme=scheme + ) + + return access_token_claims + + if scheme == "bearer": + try: + claims = await self.verify_access_token(token) + if claims.get("cnf") and isinstance(claims["cnf"], dict) and claims["cnf"].get("jkt"): + if self.options.dpop_enabled: + raise self._prepare_error( + VerifyAccessTokenError( + "DPoP-bound token requires the DPoP authentication scheme, not Bearer" + ), + auth_scheme=scheme + ) + if dpop_proof: + if self.options.dpop_enabled: + raise self._prepare_error( + InvalidAuthSchemeError( + "DPoP proof requires DPoP authentication scheme, not Bearer" + ), + auth_scheme=scheme + ) + return 
claims + except VerifyAccessTokenError as e: + raise self._prepare_error(e, auth_scheme=scheme) + + raise self._prepare_error(MissingAuthorizationError()) async def verify_access_token( self, access_token: str, - required_claims: Optional[List[str]] = None - ) -> Dict[str, Any]: + required_claims: Optional[list[str]] = None + ) -> dict[str, Any]: """ Asynchronously verifies the provided JWT access token. - + - Fetches OIDC metadata and JWKS if not already cached. - Decodes and validates signature (RS256) with the correct key. - Checks standard claims: 'iss', 'aud', 'exp', 'iat' @@ -71,9 +245,8 @@ async def verify_access_token( required_claims = required_claims or [] - try: - header = await get_unverified_header(access_token) + header = get_unverified_header(access_token) kid = header["kid"] except Exception as e: raise VerifyAccessTokenError(f"Failed to parse token header: {str(e)}") from e @@ -100,10 +273,9 @@ async def verify_access_token( metadata = await self._discover() issuer = metadata["issuer"] - if claims.get("iss") != issuer: raise VerifyAccessTokenError("Issuer mismatch") - + expected_aud = self.options.audience actual_aud = claims.get("aud") @@ -120,9 +292,261 @@ async def verify_access_token( if "iat" not in claims: raise VerifyAccessTokenError("Missing 'iat' claim in token") - #Additional required_claims + # Additional required_claims for rc in required_claims: if rc not in claims: raise VerifyAccessTokenError(f"Missing required claim: {rc}") - return claims \ No newline at end of file + return claims + + async def verify_dpop_proof( + self, + access_token: str, + proof: str, + http_method: str, + http_url: str + ) -> dict[str, Any]: + """ + 1. Single well-formed compact JWS + 2. typ="dpop+jwt", alg∈allowed, algβ‰ none + 3. jwk header present & public only + 4. Signature verifies with jwk + 5. Validates all required claims + Raises InvalidDpopProofError on any failure. 
+ """ + if not proof: + raise MissingRequiredArgumentError("dpop_proof") + if not access_token: + raise MissingRequiredArgumentError("access_token") + if not http_method or not http_url: + raise MissingRequiredArgumentError("http_method/http_url") + + header = get_unverified_header(proof) + + if header.get("typ") != "dpop+jwt": + raise InvalidDpopProofError("Unexpected JWT 'typ' header parameter value") + + alg = header.get("alg") + if alg not in self._dpop_algorithms: + raise InvalidDpopProofError("Unsupported algorithm in DPoP proof") + + jwk_dict = header.get("jwk") + if not jwk_dict or not isinstance(jwk_dict, dict): + raise InvalidDpopProofError("Missing or invalid jwk in header") + + if "d" in jwk_dict: + raise InvalidDpopProofError("Private key material found in jwk header") + + if jwk_dict.get("kty") != "EC": + raise InvalidDpopProofError("Only EC keys are supported for DPoP") + + if jwk_dict.get("crv") != "P-256": + raise InvalidDpopProofError("Only P-256 curve is supported") + + public_key = JsonWebKey.import_key(jwk_dict) + try: + claims = self._dpop_jwt.decode(proof, public_key) + except Exception as e: + raise InvalidDpopProofError(f"JWT signature verification failed: {e}") + + # Checks all required claims are present + self._validate_claims_presence(claims, ["iat", "ath", "htm", "htu", "jti"]) + + jti = claims["jti"] + + if not isinstance(jti, str): + raise InvalidDpopProofError("jti claim must be a string") + + if not jti.strip(): + raise InvalidDpopProofError("jti claim must not be empty") + + + now = int(time.time()) + iat = claims["iat"] + offset = getattr(self.options, "dpop_iat_offset", 300) # default 5 minutes + leeway = getattr(self.options, "dpop_iat_leeway", 30) # default 30 seconds + + if not isinstance(iat, (int, float)): + raise InvalidDpopProofError("Invalid iat claim (must be integer or float)") + + if iat < now - offset: + raise InvalidDpopProofError("DPoP Proof iat is too old") + elif iat > now + leeway: + raise 
InvalidDpopProofError("DPoP Proof iat is from the future") + + if claims["htm"].lower() != http_method.lower(): + raise InvalidDpopProofError("DPoP Proof htm mismatch") + + try: + normalized_htu = normalize_url_for_htu(claims["htu"]) + normalized_http_url = normalize_url_for_htu(http_url) + if normalized_htu != normalized_http_url: + raise InvalidDpopProofError("DPoP Proof htu mismatch") + except ValueError: + raise InvalidDpopProofError("DPoP Proof htu mismatch") + + if claims["ath"] != sha256_base64url(access_token): + raise InvalidDpopProofError("DPoP Proof ath mismatch") + + return claims + + # ===== Private Methods ===== + + async def _discover(self) -> dict[str, Any]: + """Lazy-load OIDC discovery metadata.""" + if self._metadata is None: + self._metadata = await fetch_oidc_metadata( + domain=self.options.domain, + custom_fetch=self.options.custom_fetch + ) + return self._metadata + + async def _load_jwks(self) -> dict[str, Any]: + """Fetches and caches JWKS data from the OIDC metadata.""" + if self._jwks_data is None: + metadata = await self._discover() + jwks_uri = metadata["jwks_uri"] + self._jwks_data = await fetch_jwks( + jwks_uri=jwks_uri, + custom_fetch=self.options.custom_fetch + ) + return self._jwks_data + + def _validate_claims_presence( + self, + claims: dict[str, Any], + required_claims: list[str] + ) -> None: + """ + Validates that all required claims are present in the claims dict. 
+ + Args: + claims: The claims dictionary to validate + required_claims: List of claim names that must be present + + Raises: + InvalidDpopProofError: If any required claim is missing + """ + missing_claims = [] + + for claim in required_claims: + if claim not in claims: + missing_claims.append(claim) + + if missing_claims: + if len(missing_claims) == 1: + error_message = f"Missing required claim: {missing_claims[0]}" + else: + error_message = f"Missing required claims: {', '.join(missing_claims)}" + + raise InvalidDpopProofError(error_message) + + def _prepare_error(self, error: BaseAuthError, auth_scheme: Optional[str] = None) -> BaseAuthError: + """ + Prepare an error with WWW-Authenticate headers based on error type and context. + + Args: + error: The error to prepare + auth_scheme: The authentication scheme that was used ("bearer" or "dpop") + """ + error_code = error.get_error_code() + error_description = error.get_error_description() + + www_auth_headers = self._build_www_authenticate( + error_code=error_code, + error_description=error_description, + auth_scheme=auth_scheme + ) + + headers = {} + www_auth_values = [] + for header_name, header_value in www_auth_headers: + if header_name == "WWW-Authenticate": + www_auth_values.append(header_value) + + if www_auth_values: + headers["WWW-Authenticate"] = ", ".join(www_auth_values) + + error._headers = headers + + return error + + def _build_www_authenticate( + self, + *, + error_code: Optional[str] = None, + error_description: Optional[str] = None, + auth_scheme: Optional[str] = None + ) -> list[tuple[str, str]]: + """ + Returns one or two ('WWW-Authenticate', ...) tuples based on context. + If dpop_required mode β†’ single DPoP challenge (with optional error params). + Otherwise β†’ Bearer and/or DPoP challenges based on auth_scheme and error. 
+ + Args: + error_code: Error code (e.g., "invalid_token", "invalid_request") + error_description: Error description if any + auth_scheme: The authentication scheme that was used ("bearer" or "dpop") + """ + # Check if we should omit error parameters (invalid_request with empty description) + should_omit_error = (error_code == "invalid_request" and error_description == "") + + # If DPoP is disabled, only return Bearer challenges + if not self.options.dpop_enabled: + if error_code and error_code != "unauthorized" and not should_omit_error: + bearer_parts = [] + bearer_parts.append(f'error="{error_code}"') + if error_description: + bearer_parts.append(f'error_description="{error_description}"') + return [("WWW-Authenticate", "Bearer " + ", ".join(bearer_parts))] + return [("WWW-Authenticate", 'Bearer realm="api"')] + + algs = " ".join(self._dpop_algorithms) + dpop_required = self.is_dpop_required() + + # No error details or should omit error cases + if error_code == "unauthorized" or not error_code or should_omit_error: + if dpop_required: + return [("WWW-Authenticate", f'DPoP algs="{algs}"')] + return [("WWW-Authenticate", f'Bearer realm="api", DPoP algs="{algs}"')] + + if dpop_required: + # DPoP-required mode: Single DPoP challenge with error + dpop_parts = [] + if error_code and not should_omit_error: + dpop_parts.append(f'error="{error_code}"') + if error_description: + dpop_parts.append(f'error_description="{error_description}"') + dpop_parts.append(f'algs="{algs}"') + dpop_header = "DPoP " + ", ".join(dpop_parts) + return [("WWW-Authenticate", dpop_header)] + + # DPoP-allowed mode: For DPoP errors, always include both challenges + if auth_scheme == "dpop" and error_code and not should_omit_error: + bearer_header = 'Bearer realm="api"' + dpop_parts = [] + dpop_parts.append(f'error="{error_code}"') + if error_description: + dpop_parts.append(f'error_description="{error_description}"') + dpop_parts.append(f'algs="{algs}"') + dpop_header = "DPoP " + ", 
".join(dpop_parts) + return [ + ("WWW-Authenticate", bearer_header), + ("WWW-Authenticate", dpop_header), + ] + + # If auth_scheme is "bearer", include error on Bearer challenge + if auth_scheme == "bearer" and error_code and not should_omit_error: + bearer_parts = [] + bearer_parts.append(f'error="{error_code}"') + if error_description: + bearer_parts.append(f'error_description="{error_description}"') + bearer_header = "Bearer " + ", ".join(bearer_parts) + dpop_header = f'DPoP algs="{algs}"' + return [("WWW-Authenticate", f'{bearer_header}, {dpop_header}')] + + # Default: no error or should omit error context + return [ + ("WWW-Authenticate", 'Bearer realm="api"'), + ("WWW-Authenticate", f'DPoP algs="{algs}"'), + ] diff --git a/src/auth0_api_python/config.py b/src/auth0_api_python/config.py index de2f4f8..0cd555a 100644 --- a/src/auth0_api_python/config.py +++ b/src/auth0_api_python/config.py @@ -2,7 +2,8 @@ Configuration classes and utilities for auth0-api-python. """ -from typing import Optional, Callable +from typing import Callable, Optional + class ApiClientOptions: """ @@ -12,13 +13,25 @@ class ApiClientOptions: domain: The Auth0 domain, e.g., "my-tenant.us.auth0.com". audience: The expected 'aud' claim in the token. custom_fetch: Optional callable that can replace the default HTTP fetch logic. + dpop_enabled: Whether DPoP is enabled (default: True for backward compatibility). + dpop_required: Whether DPoP is required (default: False, allows both Bearer and DPoP). + dpop_iat_leeway: Leeway in seconds for DPoP proof iat claim (default: 30). + dpop_iat_offset: Maximum age in seconds for DPoP proof iat claim (default: 300). 
""" def __init__( self, domain: str, audience: str, - custom_fetch: Optional[Callable[..., object]] = None + custom_fetch: Optional[Callable[..., object]] = None, + dpop_enabled: bool = True, + dpop_required: bool = False, + dpop_iat_leeway: int = 30, + dpop_iat_offset: int = 300, ): self.domain = domain self.audience = audience self.custom_fetch = custom_fetch + self.dpop_enabled = dpop_enabled + self.dpop_required = dpop_required + self.dpop_iat_leeway = dpop_iat_leeway + self.dpop_iat_offset = dpop_iat_offset diff --git a/src/auth0_api_python/errors.py b/src/auth0_api_python/errors.py index e450059..e696c15 100644 --- a/src/auth0_api_python/errors.py +++ b/src/auth0_api_python/errors.py @@ -1,21 +1,96 @@ """ -Custom exceptions for auth0-api-python SDK +Custom exceptions for auth0-api-python SDK with HTTP response metadata """ -class MissingRequiredArgumentError(Exception): + +class BaseAuthError(Exception): + """Base class for all auth errors with HTTP response metadata.""" + + def __init__(self, message: str): + super().__init__(message) + self.message = message + self.name = self.__class__.__name__ + self._headers = {} # Will be set by ApiClient._prepare_error + + def get_status_code(self) -> int: + """Return the HTTP status code for this error.""" + raise NotImplementedError("Subclasses must implement get_status_code()") + + def get_error_code(self) -> str: + """Return the OAuth/DPoP error code.""" + raise NotImplementedError("Subclasses must implement get_error_code()") + + def get_error_description(self) -> str: + """Return the error description.""" + return self.message + + def get_headers(self) -> dict[str, str]: + """Return HTTP headers (including WWW-Authenticate if set).""" + return self._headers + + +class MissingRequiredArgumentError(BaseAuthError): """Error raised when a required argument is missing.""" - code = "missing_required_argument_error" - def __init__(self, argument: str): - super().__init__(f"The argument '{argument}' is required but was 
not provided.") + def __init__(self, argument: str, message: str = None): + if message: + super().__init__(message) + else: + super().__init__(f"The argument '{argument}' is required but was not provided.") self.argument = argument - self.name = self.__class__.__name__ + def get_status_code(self) -> int: + return 400 -class VerifyAccessTokenError(Exception): + def get_error_code(self) -> str: + return "invalid_request" + + +class VerifyAccessTokenError(BaseAuthError): """Error raised when verifying the access token fails.""" - code = "verify_access_token_error" + + def get_status_code(self) -> int: + return 401 + + def get_error_code(self) -> str: + return "invalid_token" + + +class InvalidAuthSchemeError(BaseAuthError): + """Error raised when the provided authentication scheme is unsupported.""" def __init__(self, message: str): super().__init__(message) - self.name = self.__class__.__name__ + if ":" in message and "'" in message: + self.scheme = message.split("'")[1] + else: + self.scheme = None + + def get_status_code(self) -> int: + return 400 + + def get_error_code(self) -> str: + return "invalid_request" + + +class InvalidDpopProofError(BaseAuthError): + """Error raised when validating a DPoP proof fails.""" + + def get_status_code(self) -> int: + return 400 + + def get_error_code(self) -> str: + return "invalid_dpop_proof" + + +class MissingAuthorizationError(BaseAuthError): + """Authorization header is missing, empty, or malformed.""" + + def __init__(self): + super().__init__("") + + def get_status_code(self) -> int: + return 400 + + def get_error_code(self) -> str: + return "invalid_request" diff --git a/src/auth0_api_python/token_utils.py b/src/auth0_api_python/token_utils.py index 8f75b98..c234681 100644 --- a/src/auth0_api_python/token_utils.py +++ b/src/auth0_api_python/token_utils.py @@ -1,7 +1,10 @@ import time -from typing import Optional, Dict, Any, Union +import uuid +from typing import Any, Optional, Union + from authlib.jose import JsonWebKey, 
jwt +from .utils import calculate_jwk_thumbprint, normalize_url_for_htu, sha256_base64url # A private RSA JWK for test usage. @@ -28,7 +31,7 @@ async def generate_token( issuer: Union[str, bool, None] = None, iat: bool = True, exp: bool = True, - claims: Optional[Dict[str, Any]] = None, + claims: Optional[dict[str, Any]] = None, expiration_time: int = 3600, ) -> str: """ @@ -81,4 +84,138 @@ async def generate_token( header = {"alg": "RS256", "kid": PRIVATE_JWK["kid"]} token = jwt.encode(header, token_claims, key) - return token + # Ensure we return a string, not bytes + return token.decode('utf-8') if isinstance(token, bytes) else token + + +# A private EC P-256 private key for DPoP proof generation (test only) +PRIVATE_EC_JWK = { + "kty": "EC", + "crv": "P-256", + "x": "MKBCTNIcKUSDii11ySs3526iDZ8AiTo7Tu6KPAqv7D4", + "y": "4Etl6SRW2YiLUrN5vfvVHuhp7x8PxltmWWlbbM4IFyM", + "d": "870MB6gfuTJ4HtUnUvYMyJpr5eUZNP4Bk43bVdj3eAE" +} + + +async def generate_dpop_proof( + access_token: str, + http_method: str, + http_url: str, + jti: Optional[str] = None, + iat: bool = True, + claims: Optional[dict[str, Any]] = None, + header_overrides: Optional[dict[str, Any]] = None, + iat_time: Optional[int] = None, + include_jti: bool = True +) -> str: + """ + Generates a real ES256-signed DPoP proof JWT using the EC private key above. + + Args: + access_token: The access token to create proof for (used for ath claim). + http_method: The HTTP method (e.g., "GET", "POST") for htm claim. + http_url: The HTTP URL for htu claim. + jti: The unique identifier for the proof. If omitted, generates random UUID. + iat: Whether to set the 'iat' (issued at) claim. If False, skip it. + claims: Additional custom claims to merge into the proof. + header_overrides: Override header parameters (e.g., for testing invalid headers). + iat_time: Fixed time for iat claim (for testing). If None, uses current time. + include_jti: Whether to include the 'jti' claim. If False, jti is completely omitted. 
+ + Returns: + An ES256-signed DPoP proof JWT string. + + Example usage: + proof = await generate_dpop_proof( + access_token="eyJ...", + http_method="GET", + http_url="https://api.example.com/resource", + iat=False, # Skip iat for testing + claims={"custom": "claim"} + ) + """ + + + proof_claims = dict(claims or {}) + + if iat: + proof_claims["iat"] = iat_time if iat_time is not None else int(time.time()) + + if include_jti: + if jti is not None: + proof_claims["jti"] = jti + else: + proof_claims["jti"] = str(uuid.uuid4()) + + proof_claims["htm"] = http_method + proof_claims["htu"] = normalize_url_for_htu(http_url) + proof_claims["ath"] = sha256_base64url(access_token) + + + public_jwk = {k: v for k, v in PRIVATE_EC_JWK.items() if k != "d"} + + + header = { + "alg": "ES256", + "typ": "dpop+jwt", + "jwk": public_jwk + } + + + if header_overrides: + header.update(header_overrides) + + key = JsonWebKey.import_key(PRIVATE_EC_JWK) + token = jwt.encode(header, proof_claims, key) + # Ensure we return a string, not bytes + return token.decode('utf-8') if isinstance(token, bytes) else token + + +async def generate_token_with_cnf( + domain: str, + user_id: str, + audience: str, + jkt_thumbprint: Optional[str] = None, + **kwargs +) -> str: + """ + Generates an access token with cnf (confirmation) claim for DPoP binding. + Extends the existing generate_token() function with DPoP support. + + Args: + domain: The Auth0 domain (used if issuer is not False). + user_id: The 'sub' claim in the token. + audience: The 'aud' claim in the token. + jkt_thumbprint: JWK thumbprint to include in cnf claim. If None, calculates from PRIVATE_EC_JWK. + **kwargs: Additional arguments passed to generate_token(). + + Returns: + A RS256-signed JWT string with cnf claim. 
+ + Example usage: + token = await generate_token_with_cnf( + domain="auth0.local", + user_id="user123", + audience="my-api", + jkt_thumbprint="custom_thumbprint" + ) + """ + + + if jkt_thumbprint is None: + jkt_thumbprint = calculate_jwk_thumbprint(PRIVATE_EC_JWK) + + + existing_claims = kwargs.get('claims', {}) + cnf_claims = dict(existing_claims) + cnf_claims["cnf"] = {"jkt": jkt_thumbprint} + kwargs['claims'] = cnf_claims + + + return await generate_token( + domain=domain, + user_id=user_id, + audience=audience, + **kwargs + ) diff --git a/src/auth0_api_python/utils.py b/src/auth0_api_python/utils.py index 2d66ecb..4ab8051 100644 --- a/src/auth0_api_python/utils.py +++ b/src/auth0_api_python/utils.py @@ -1,17 +1,22 @@ """ -Utility functions for OIDC discovery and JWKS fetching (asynchronously) +Utility functions for OIDC discovery and JWKS fetching (asynchronously) using httpx or a custom fetch approach. """ -import httpx import base64 +import hashlib import json -from typing import Any, Dict, Optional, Callable, Union +import re +from typing import Any, Callable, Optional, Union + +import httpx +from ada_url import URL + async def fetch_oidc_metadata( - domain: str, + domain: str, custom_fetch: Optional[Callable[..., Any]] = None -) -> Dict[str, Any]: +) -> dict[str, Any]: """ Asynchronously fetch the OIDC config from https://{domain}/.well-known/openid-configuration. Returns a dict with keys like issuer, jwks_uri, authorization_endpoint, etc. @@ -29,14 +34,14 @@ async def fetch_oidc_metadata( async def fetch_jwks( - jwks_uri: str, + jwks_uri: str, custom_fetch: Optional[Callable[..., Any]] = None -) -> Dict[str, Any]: +) -> dict[str, Any]: """ Asynchronously fetch the JSON Web Key Set from jwks_uri. Returns the raw JWKS JSON, e.g. {'keys': [...]} - If custom_fetch is provided, it must be an async callable + If custom_fetch is provided, it must be an async callable that fetches data from the jwks_uri. 
""" if custom_fetch: @@ -47,22 +52,22 @@ async def fetch_jwks( resp = await client.get(jwks_uri) resp.raise_for_status() return resp.json() - -async def get_unverified_header(token: Union[str, bytes]) -> dict: + +def get_unverified_header(token: Union[str, bytes]) -> dict: """ Parse the first segment (header) of a JWT without verifying signature. Ensures correct Base64 padding before decode to avoid garbage bytes. """ if isinstance(token, bytes): token = token.decode("utf-8") - try: - header_b64, _, _ = token.split(".", 2) - except ValueError: - raise ValueError("Not enough segments in token") - - header_b64 = remove_bytes_prefix(header_b64) + parts = token.split(".") + if len(parts) != 3: + raise ValueError(f"Invalid token format: expected 3 segments, got {len(parts)}") + + header_b64 = parts[0] + header_b64 = remove_bytes_prefix(header_b64) header_b64 = fix_base64_padding(header_b64) header_data = base64.urlsafe_b64decode(header_b64) @@ -72,7 +77,7 @@ async def get_unverified_header(token: Union[str, bytes]) -> dict: def fix_base64_padding(segment: str) -> str: """ - If `segment`'s length is not a multiple of 4, add '=' padding + If `segment`'s length is not a multiple of 4, add '=' padding so that base64.urlsafe_b64decode won't produce nonsense bytes. No extra '=' added if length is already a multiple of 4. """ @@ -85,4 +90,68 @@ def remove_bytes_prefix(s: str) -> str: """If the string looks like b'eyJh...', remove the leading b' and trailing '.""" if s.startswith("b'"): return s[2:] # cut off the leading b' - return s \ No newline at end of file + return s + +def normalize_url_for_htu(raw_url: str) -> str: + """ + Normalize URL for DPoP htu comparison . 
+ + Args: + raw_url: The raw URL string to normalize + Returns: + The normalized URL string + Raises: + ValueError: If the URL is invalid or cannot be parsed + """ + + try: + url_obj = URL(raw_url) + + normalized_url = url_obj.origin + url_obj.pathname + + normalized_url = re.sub( + r'%([0-9a-fA-F]{2})', + lambda m: f'%{m.group(1).upper()}', + normalized_url + ) + + return normalized_url + except Exception as e: + raise ValueError(f"Invalid URL format: {raw_url}") from e + +def sha256_base64url(input_str: Union[str, bytes]) -> str: + """ + Compute SHA-256 digest of the input string and return a + Base64URL-encoded string *without* padding. + """ + if isinstance(input_str, str): + digest = hashlib.sha256(input_str.encode("utf-8")).digest() + else: + digest = hashlib.sha256(input_str).digest() + b64 = base64.urlsafe_b64encode(digest).decode("utf-8") + return b64.rstrip("=") + +def calculate_jwk_thumbprint(jwk: dict[str, str]) -> str: + """ + Compute the RFC 7638 JWK thumbprint for a public JWK. 
+ + - For EC keys, includes only: crv, kty, x, y + - Serializes with no whitespace, keys sorted lexicographically + - Hashes with SHA-256 and returns base64url-encoded string without padding + """ + kty = jwk.get("kty") + + if kty == "EC": + if not all(k in jwk for k in ["crv", "x", "y"]): + raise ValueError("EC key missing required parameters") + members = ("crv", "kty", "x", "y") + else: + raise ValueError(f"{kty}(Key Type) Parameter missing or unsupported ") + + ordered = {k: jwk[k] for k in members if k in jwk} + + thumbprint_json = json.dumps(ordered, separators=(",", ":"), sort_keys=True) + + digest = hashlib.sha256(thumbprint_json.encode("utf-8")).digest() + + return base64.urlsafe_b64encode(digest).decode("utf-8").rstrip("=") diff --git a/tests/test_api_client.py b/tests/test_api_client.py index 8cc3bce..afa3c99 100644 --- a/tests/test_api_client.py +++ b/tests/test_api_client.py @@ -1,12 +1,29 @@ +import base64 +import json +import time + import pytest +from auth0_api_python.api_client import ApiClient +from auth0_api_python.config import ApiClientOptions +from auth0_api_python.errors import ( + InvalidAuthSchemeError, + InvalidDpopProofError, + MissingAuthorizationError, + MissingRequiredArgumentError, + VerifyAccessTokenError, +) +from auth0_api_python.token_utils import ( + PRIVATE_EC_JWK, + PRIVATE_JWK, + generate_dpop_proof, + generate_token, + generate_token_with_cnf, + sha256_base64url, +) from pytest_httpx import HTTPXMock -from unittest.mock import AsyncMock, patch - -from src.auth0_api_python.api_client import ApiClient -from src.auth0_api_python.config import ApiClientOptions -from src.auth0_api_python.errors import MissingRequiredArgumentError, VerifyAccessTokenError -from src.auth0_api_python.token_utils import generate_token +# Create public RSA JWK by selecting only public key components +PUBLIC_RSA_JWK = {k: PRIVATE_JWK[k] for k in ["kty", "n", "e", "alg", "use", "kid"] if k in PRIVATE_JWK} @pytest.mark.asyncio async def 
test_init_missing_args(): @@ -15,7 +32,7 @@ async def test_init_missing_args(): """ with pytest.raises(MissingRequiredArgumentError): _ = ApiClient(ApiClientOptions(domain="", audience="some_audience")) - + with pytest.raises(MissingRequiredArgumentError): _ = ApiClient(ApiClientOptions(domain="example.us.auth0.com", audience="")) @@ -23,7 +40,7 @@ async def test_init_missing_args(): @pytest.mark.asyncio async def test_verify_access_token_successfully(httpx_mock: HTTPXMock): """ - Test that a valid RS256 token with correct issuer, audience, iat, and exp + Test that a valid RS256 token with correct issuer, audience, iat, and exp is verified successfully by ApiClient. """ httpx_mock.add_response( @@ -388,3 +405,1187 @@ async def test_verify_access_token_fail_no_audience_config(): error_str = str(err.value).lower() assert "audience" in error_str and ("required" in error_str or "not provided" in error_str) + +@pytest.mark.asyncio +async def test_verify_access_token_fail_malformed_token(): + """Test that a malformed token fails verification.""" + + api_client = ApiClient(ApiClientOptions(domain="auth0.local", audience="my-audience")) + + with pytest.raises(VerifyAccessTokenError) as e: + await api_client.verify_access_token("header.payload") + assert "failed to parse token" in str(e.value).lower() + + with pytest.raises(VerifyAccessTokenError) as e: + await api_client.verify_access_token("header.pay!load.signature") + assert "failed to parse token" in str(e.value).lower() + + + +# DPOP PROOF VERIFICATION TESTS + +# --- Core Success Tests --- + +@pytest.mark.asyncio +async def test_verify_dpop_proof_successfully(): + """ + Test that a valid DPoP proof is verified successfully by ApiClient. 
+ """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # Verify the DPoP proof + claims = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert claims["jti"] # Verify it has the required jti claim + assert claims["htm"] == "GET" + assert claims["htu"] == "https://api.example.com/resource" + assert isinstance(claims["iat"], int) + expected_ath = sha256_base64url(access_token) + assert claims["ath"] == expected_ath + + +# --- Header Validation Tests --- + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_access_token(): + """ + Test that verify_dpop_proof fails when access_token is missing. + """ + dpop_proof = await generate_dpop_proof( + access_token="test_token", + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(MissingRequiredArgumentError) as err: + await api_client.verify_dpop_proof( + access_token="", # Empty access token + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "access_token" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_dpop_proof(): + """ + Test that verify_dpop_proof fails when dpop_proof is missing. 
+ """ + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(MissingRequiredArgumentError) as err: + await api_client.verify_dpop_proof( + access_token="test_token", + proof="", # Empty proof + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "dpop_proof" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_http_method_url(): + """ + Test that verify_dpop_proof fails when http_method or http_url is missing. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(MissingRequiredArgumentError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="", # Empty method + http_url="https://api.example.com/resource" + ) + + assert "http_method" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_http_url(): + """ + Test that verify_dpop_proof fails when http_url is missing. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(MissingRequiredArgumentError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="" # Empty url + ) + + assert "http_url" in str(err.value).lower() + + +# --- Claim Validation Tests --- + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_typ(): + """ + Test that a DPoP proof missing 'typ' header fails verification. 
+ """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + header_overrides={"typ": None} # Remove typ header + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "unexpected jwt 'typ'" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_invalid_typ(): + """ + Test that a DPoP proof with invalid 'typ' header fails verification. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + header_overrides={"typ": "jwt"} # Wrong typ value + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "unexpected jwt 'typ'" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_invalid_alg(): + """ + Test that a DPoP proof with unsupported algorithm fails verification. 
+ """ + access_token = "test_token" + + valid_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + parts = valid_proof.split('.') + header = json.loads(base64.urlsafe_b64decode(parts[0] + '==').decode('utf-8')) + header['alg'] = 'RS256' # Invalid algorithm for DPoP (should be ES256) + + modified_header = base64.urlsafe_b64encode( + json.dumps(header, separators=(',', ':')).encode('utf-8') + ).decode('utf-8').rstrip('=') + + invalid_proof = f"{modified_header}.{parts[1]}.{parts[2]}" + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=invalid_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "unsupported alg" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_jwk(): + """ + Test that a DPoP proof missing 'jwk' header fails verification. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + header_overrides={"jwk": None} # Remove jwk header + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "missing or invalid jwk" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_invalid_jwk_format(): + """ + Test that a DPoP proof with invalid 'jwk' format fails verification. 
+ """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + header_overrides={"jwk": "invalid_jwk"} # Invalid jwk format + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "missing or invalid jwk" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_private_key_in_jwk(): + """ + Test that a DPoP proof with private key material in jwk fails verification. + """ + + access_token = "test_token" + # Include private key material (the 'd' parameter) + invalid_jwk = dict(PRIVATE_EC_JWK) # This includes the 'd' parameter + + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + header_overrides={"jwk": invalid_jwk} # JWK with private key material + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "private key" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_with_missing_jwk_parameters(): + """Test verify_dpop_proof with missing JWK parameters.""" + access_token = "test_token" + + incomplete_jwk = {"kty": "RSA"} + + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + header_overrides={"jwk": incomplete_jwk} + ) + + api_client = ApiClient(ApiClientOptions(domain="auth0.local", 
audience="my-audience")) + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert "only ec keys are supported" in str(err.value).lower() + +# --- IAT (Issued At Time) Validation Tests --- + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_no_iat(): + """ + Test that a DPoP proof missing 'iat' claim fails verification. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + iat=False # Skip iat claim + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "missing required claim" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_invalid_iat_in_future(): + """ + Test IAT validation with a timestamp in the future. 
+ """ + access_token = "test_token" + # Use a future timestamp (more than leeway allows) + future_time = int(time.time()) + 3600 # 1 hour in the future + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + iat_time=future_time # Invalid future timestamp + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "iat is from the future" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_iat_exact_boundary_conditions(): + """ + Test IAT timing validation at exact boundary conditions. + """ + access_token = "test_token" + + # Test with timestamp exactly at the leeway boundary (should pass) + current_time = int(time.time()) + boundary_time = current_time + 30 # Exactly at default leeway limit + + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + iat_time=boundary_time + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # Should succeed as it's within leeway + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert result is not None + +@pytest.mark.asyncio +async def test_verify_dpop_proof_iat_in_past(): + """ + Test IAT validation with timestamp in the past. 
+ """ + access_token = "test_token" + # Use a timestamp too far in the past + past_time = int(time.time()) - 3600 # 1 hour ago + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + iat_time=past_time + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "iat is too old" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_iat_within_leeway(): + """ + Test that IAT timestamps within acceptable leeway pass validation. + """ + access_token = "test_token" + current_time = int(time.time()) + + # Test within acceptable skew (should pass) + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + iat_time=current_time - 30 # 30 seconds ago, should be acceptable + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # This should succeed due to clock skew tolerance + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert result is not None + +# --- JTI (JWT ID) Validation Tests --- + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_empty_jti(): + """ + Test that a DPoP proof with empty 'jti' claim fails verification. 
+ """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + jti="" # Empty jti claim + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "jti claim must not be empty" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_custom_jti_value(): + """ + Test for a custom JTI value. + """ + access_token = "test_token" + + custom_jti = "unique-jti-12345" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + jti=custom_jti # Use jti parameter instead of claims + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # First verification should succeed + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert result is not None + assert result["jti"] == custom_jti + +@pytest.mark.asyncio +async def test_verify_dpop_proof_with_missing_jti(): + """Test verify_dpop_proof with missing jti claim.""" + access_token = "test_token" + + # Generate DPoP proof WITHOUT jti claim from the start + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource", + include_jti=False # Completely omit jti claim + ) + + api_client = ApiClient(ApiClientOptions(domain="auth0.local", audience="my-audience")) + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + 
http_method="GET", + http_url="https://api.example.com/resource" + ) + assert "missing required claim: jti" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_htm_mismatch(): + """ + Test that a DPoP proof with mismatched 'htm' claim fails verification. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="POST", # Generate proof for POST + http_url="https://api.example.com/resource", + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", # But verify with GET + http_url="https://api.example.com/resource" + ) + + assert "htm mismatch" in str(err.value).lower() + +# --- HTU (HTTP URI) Validation Tests --- + +@pytest.mark.asyncio +async def test_verify_dpop_proof_fail_htu_mismatch(): + """ + Test that a DPoP proof with mismatched 'htu' claim fails verification. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/wrong-resource", # Generate proof for wrong URL + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" # But verify with correct URL + ) + + assert "htu mismatch" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_htu_url_normalization_case_sensitivity(): + """ + Test HTU URL normalization handles case sensitivity correctly. 
+ """ + access_token = "test_token" + + # Test with different case in domain (should be normalized and pass) + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://API.EXAMPLE.COM/resource" # Uppercase domain + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # This should succeed due to URL normalization + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" # Lowercase domain + ) + assert result is not None + + +@pytest.mark.asyncio +async def test_verify_dpop_proof_htu_trailing_slash_mismatch(): + """ + Test that HTU URLs with trailing slash differences cause verification failure. + """ + access_token = "test_token" + # Generate proof with trailing slash + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource/" + ) + api_client = ApiClient(ApiClientOptions(domain="auth0.local", audience="my-audience")) + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert "htu mismatch" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_htu_query_parameters(): + """ + Test HTU URL validation with query parameters - normalized behavior. + Query parameters are stripped during normalization, so different params should succeed. 
+ """ + access_token = "test_token" + + # Test with query parameters (should be normalized) + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource?param1=value1" # With query params + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # This should succeed due to URL normalization + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource?param2=value2" # Different query params + ) + assert result is not None + + +@pytest.mark.asyncio +async def test_verify_dpop_proof_htu_port_numbers(): + """ + Test HTU URL validation with explicit port numbers - normalized behavior. + Default ports (443 for HTTPS, 80 for HTTP) are stripped during normalization. + """ + access_token = "test_token" + + # Test with explicit default port (should be normalized) + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com:443/resource" # Explicit HTTPS port + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # This should succeed due to URL normalization + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" # Implicit HTTPS port + ) + assert result is not None + +@pytest.mark.asyncio +async def test_verify_dpop_proof_htu_fragment_handling(): + """ + Test HTU URL validation ignores fragments. 
+ """ + access_token = "test_token" + + # Test with fragment (should be ignored) + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource#fragment1" # With fragment + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # This should succeed as fragments are ignored + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource#fragment2" # Different fragment + ) + assert result is not None + + +@pytest.mark.asyncio +async def test_verify_dpop_proof_htu_trailing_slash_preserved(): + """ + Test that trailing slashes are preserved when query params and fragments are removed. + """ + access_token = "test_token" + + # Generate proof with trailing slash and query parameters + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource/?abc=def" + ) + + api_client = ApiClient(ApiClientOptions(domain="auth0.local", audience="my-audience")) + + # This should succeed because normalization preserves + result = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource/" # With trailing slash, no query params + ) + + assert result["htu"] == "https://api.example.com/resource/" + + # Additional test with a different combination + dpop_proof2 = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource/?abc=def#fragment" + ) + + result2 = await api_client.verify_dpop_proof( + access_token=access_token, + proof=dpop_proof2, + http_method="GET", + http_url="https://api.example.com/resource/" + ) + + assert result2["htu"] == "https://api.example.com/resource/" + +@pytest.mark.asyncio +async def 
test_verify_dpop_proof_fail_ath_mismatch(): + """ + Test that a DPoP proof with mismatched 'ath' claim fails verification. + """ + access_token = "test_token" + wrong_token = "wrong_token" + + dpop_proof = await generate_dpop_proof( + access_token=wrong_token, # Generate proof for wrong token + http_method="GET", + http_url="https://api.example.com/resource", + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_dpop_proof( + access_token=access_token, # But verify with correct token + proof=dpop_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "ath" in str(err.value).lower() or "hash" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_dpop_proof_with_invalid_signature(): + """Test verify_dpop_proof with invalid signature.""" + access_token = "test_token" + + valid_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + parts = valid_proof.split('.') + if len(parts) == 3: + header, payload, signature = parts + tampered_proof = f"{header}.{payload}.{signature[:-5]}12345" + else: + tampered_proof = valid_proof + + api_client = ApiClient(ApiClientOptions(domain="auth0.local", audience="my-audience")) + with pytest.raises(InvalidDpopProofError) as e: + await api_client.verify_dpop_proof( + access_token=access_token, + proof=tampered_proof, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert "signature verification failed" in str(e.value).lower() + +# VERIFY_REQUEST TESTS + +# --- Success Tests --- + +@pytest.mark.asyncio +async def test_verify_request_bearer_scheme_success(httpx_mock: HTTPXMock): + """ + Test successful Bearer token verification through verify_request. 
+ """ + # Mock OIDC discovery + httpx_mock.add_response( + method="GET", + url="https://auth0.local/.well-known/openid-configuration", + json={ + "jwks_uri": "https://auth0.local/.well-known/jwks.json", + "issuer": "https://auth0.local/", + }, + ) + + # Mock JWKS endpoint + httpx_mock.add_response( + method="GET", + url="https://auth0.local/.well-known/jwks.json", + json={"keys": [PUBLIC_RSA_JWK]}, + ) + + # Generate a valid Bearer token + token = await generate_token( + domain="auth0.local", + user_id="test_user", + audience="my-audience", + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # Test Bearer scheme + result = await api_client.verify_request( + headers={"authorization": f"Bearer {token}"}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "sub" in result + assert result["aud"] == "my-audience" + assert result["iss"] == "https://auth0.local/" + +@pytest.mark.asyncio +async def test_verify_request_dpop_scheme_success(httpx_mock: HTTPXMock): + """ + Test successful DPoP token verification through verify_request. 
+ """ + # Mock OIDC discovery + httpx_mock.add_response( + method="GET", + url="https://auth0.local/.well-known/openid-configuration", + json={ + "jwks_uri": "https://auth0.local/.well-known/jwks.json", + "issuer": "https://auth0.local/", + }, + ) + + # Mock JWKS endpoint + httpx_mock.add_response( + method="GET", + url="https://auth0.local/.well-known/jwks.json", + json={"keys": [PUBLIC_RSA_JWK]}, + ) + + # Generate DPoP bound token and proof + access_token = await generate_token_with_cnf( + domain="auth0.local", + user_id="test_user", + audience="my-audience", + ) + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + # Test DPoP scheme + result = await api_client.verify_request( + headers={"authorization": f"DPoP {access_token}", "dpop": dpop_proof}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "sub" in result + assert result["aud"] == "my-audience" + assert result["iss"] == "https://auth0.local/" + + +# --- Configuration & Error Handling Tests --- + +@pytest.mark.asyncio +async def test_verify_request_fail_dpop_required_mode(): + """ + Test that Bearer tokens are rejected when DPoP is required. 
+ """ + # Generate a valid Bearer token + token = await generate_token( + domain="auth0.local", + user_id="test_user", + audience="my-audience", + ) + + api_client = ApiClient( + ApiClientOptions( + domain="auth0.local", + audience="my-audience", + dpop_required=True # Require DPoP + ) + ) + + with pytest.raises(InvalidAuthSchemeError) as err: + await api_client.verify_request( + headers={"authorization": f"Bearer {token}"}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_dpop_enabled_bearer_with_cnf_conflict(httpx_mock: HTTPXMock): + """ + Test that Bearer tokens with cnf claim are rejected when DPoP is enabled. + """ + # Mock OIDC discovery + httpx_mock.add_response( + method="GET", + url="https://auth0.local/.well-known/openid-configuration", + json={ + "jwks_uri": "https://auth0.local/.well-known/jwks.json", + "issuer": "https://auth0.local/", + }, + ) + + # Mock JWKS endpoint + httpx_mock.add_response( + method="GET", + url="https://auth0.local/.well-known/jwks.json", + json={"keys": [PUBLIC_RSA_JWK]}, + ) + + # Generate a token with cnf claim (DPoP-bound token) + token = await generate_token_with_cnf( + domain="auth0.local", + user_id="test_user", + audience="my-audience", + ) + + api_client = ApiClient( + ApiClientOptions( + domain="auth0.local", + audience="my-audience", + dpop_enabled=True # DPoP enabled + ) + ) + + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_request( + headers={"authorization": f"Bearer {token}"}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "dpop-bound token requires the dpop authentication scheme, not bearer" in str(err.value).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_dpop_disabled(): + """ + Test that DPoP tokens are rejected when DPoP is 
disabled. + """ + access_token = "test_token" + dpop_proof = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions( + domain="auth0.local", + audience="my-audience", + dpop_enabled=False # DPoP disabled + ) + ) + + with pytest.raises(MissingAuthorizationError) as err: + await api_client.verify_request( + headers={"authorization": f"DPoP {access_token}", "dpop": dpop_proof}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_missing_authorization_header(): + """ + Test that requests without Authorization header are rejected. + """ + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(MissingAuthorizationError) as err: + await api_client.verify_request( + headers={}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_unsupported_scheme(): + """ + Test that unsupported authentication schemes are rejected. 
+ """ + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(MissingAuthorizationError) as err: + await api_client.verify_request( + headers={"authorization": "Basic dXNlcjpwYXNz"}, + http_method="GET", + http_url="https://api.example.com/resource" + ) + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_empty_bearer_token(): + """Test verify_request with empty token value.""" + api_client = ApiClient(ApiClientOptions(domain="auth0.local", audience="my-audience")) + with pytest.raises(MissingAuthorizationError) as err: + await api_client.verify_request({"Authorization": "Bearer "}) + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_with_multiple_spaces_in_authorization(): + """Test verify_request with authorization header containing multiple spaces.""" + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + with pytest.raises(InvalidAuthSchemeError) as err: + await api_client.verify_request({"authorization": "Bearer token with extra spaces"}) + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_missing_dpop_header(): + """ + Test that DPoP scheme requests without DPoP header are rejected. 
+ """ + access_token = "test_token" + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidAuthSchemeError) as err: + await api_client.verify_request( + headers={"authorization": f"DPoP {access_token}"}, # Missing DPoP header + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert err.value.get_status_code() == 400 + assert "invalid_request" in str(err.value.get_error_code()).lower() + +@pytest.mark.asyncio +async def test_verify_request_fail_multiple_dpop_proofs(): + """ + Test that requests with multiple DPoP proofs are rejected. + """ + access_token = "test_token" + dpop_proof1 = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + dpop_proof2 = await generate_dpop_proof( + access_token=access_token, + http_method="GET", + http_url="https://api.example.com/resource" + ) + + api_client = ApiClient( + ApiClientOptions(domain="auth0.local", audience="my-audience") + ) + + with pytest.raises(InvalidDpopProofError) as err: + await api_client.verify_request( + headers={"authorization": f"DPoP {access_token}", "dpop": f"{dpop_proof1}, {dpop_proof2}"}, # Multiple proofs + http_method="GET", + http_url="https://api.example.com/resource" + ) + + assert "multiple" in str(err.value).lower() + +