diff --git a/.github/workflows/run-checks.yaml b/.github/workflows/run-checks.yaml
index 112fd93..439dd5f 100644
--- a/.github/workflows/run-checks.yaml
+++ b/.github/workflows/run-checks.yaml
@@ -43,7 +43,7 @@ jobs:
           run_daemon: true
 
     - name: Run pytest with coverage
-      run: uv run pytest --cov=py_hamt tests/ --cov-report=xml
+      run: uv run pytest --ipfs --cov=py_hamt tests/ --cov-report=xml
 
     - name: Upload coverage reports to Codecov
       uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5
diff --git a/README.md b/README.md
index 85861c0..522d991 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ First, make sure you have the ipfs kubo daemon installed and running with the de
 ```sh
 bash run-checks.sh
 ```
-This will run tests with code coverage, and check formatting and linting. Under the hood it will be using the `pre-commit` command to run through all the checks within .pre-commit-config.yaml.
+This will run tests with code coverage, and check formatting and linting. Under the hood it uses the `pre-commit` command to run through all the checks within .pre-commit-config.yaml. If a local ipfs daemon is not running, it will spawn an ipfs Docker container (provided Docker is installed) and run as many integration tests as possible.
 
 We use `pytest` with 100% code coverage, and with test inputs that are both handwritten as well as generated by `hypothesis`. This allows us to try out millions of randomized inputs to create a more robust library.
 
@@ -55,6 +55,33 @@ We use `pytest` with 100% code coverage, and with test inputs that are both hand
 
 > [!NOTE]
 > Due to the restricted performance on GitHub actions runners, you may also sometimes see hypothesis tests running with errors because they exceeded test deadlines. Rerun the action if this happens.
 
+### Tests
+
+The integration tests depend on [IPFS](https://github.com/ipfs/kubo), so running all of them requires a local ipfs daemon. The GitHub Actions workflow in `.github/workflows/run-checks.yaml` uses the `setup-ipfs` step, which ensures that a local ipfs daemon is available. To run the full integration tests locally, make sure a local ipfs daemon is running (`ipfs daemon`, once kubo is installed). If no daemon is running, pytest will spawn a local Docker container to run the ipfs tests; if [Docker](https://www.docker.com/) is not installed either, only the unit tests will run.
+
+**To summarize:**
+
+*In GitHub Actions (IPFS daemon running on default ports):*
+```bash
+uv run pytest --ipfs # All tests run, including test_kubo_default_urls
+```
+
+*Locally with Docker (no local daemon):*
+```bash
+pytest --ipfs # test_kubo_default_urls auto-skips, other tests use Docker
+```
+
+*Locally with IPFS daemon:*
+```bash
+pytest --ipfs # All tests run
+```
+
+*Quick local testing (no IPFS):*
+```bash
+pytest # All IPFS tests skip
+```
+
+
 ## CPU and Memory Profiling
 
 We use python's native `cProfile` for running CPU profiles and snakeviz for visualizing the profile. We use `memray` for the memory profiling. We will walk through using the profiling tools on the test suite.
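For reference, here is a minimal sketch of how a test opts into the IPFS suite introduced by this change. The test name and payload are hypothetical; the `ipfs` marker, the `create_ipfs` fixture, and the `KuboCAS` usage follow the patterns added in the files below.

```python
# Hypothetical example (sketch only): the `ipfs` marker lets the test be
# skipped when --ipfs is not passed, and `create_ipfs` yields the pair
# (rpc_url, gateway_url) from whichever backend is available: an already
# running local daemon, or a Docker-launched Kubo container.
import dag_cbor
import pytest

from py_hamt import KuboCAS


@pytest.mark.ipfs
@pytest.mark.asyncio(loop_scope="session")
async def test_roundtrip_sketch(create_ipfs, global_client_session):
    rpc_url, gateway_url = create_ipfs
    async with KuboCAS(
        rpc_base_url=rpc_url,
        gateway_base_url=gateway_url,
        session=global_client_session,
    ) as cas:
        # Round-trip a small dag-cbor payload through the node.
        cid = await cas.save(dag_cbor.encode({"k": 1}), codec="dag-cbor")
        assert dag_cbor.decode(await cas.load(cid)) == {"k": 1}
```

The fixture checks the `IPFS_RPC_URL` / `IPFS_GATEWAY_URL` environment variables first, then probes for a daemon on the default ports, and only then falls back to Docker; without `--ipfs`, the marker causes the test to be skipped at collection time.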
diff --git a/pyproject.toml b/pyproject.toml
index 87a491e..08ed51a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,6 +32,8 @@ dev = [
     "xarray[complete]>=2025.3.0",
     "mypy>=1.15.0",
     "pandas-stubs>=2.2.3.250527",
+    "docker>=7.1.0",
+    "types-docker>=7.1.0.20250523",
     "pre-commit>=4.2.0",
 ]
diff --git a/run-checks.sh b/run-checks.sh
index 97e2eb5..c2fd395 100644
--- a/run-checks.sh
+++ b/run-checks.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 # Run pytest with coverage
-uv run pytest --cov=py_hamt tests/
+uv run pytest --ipfs --cov=py_hamt tests/
 
 # Check coverage
 uv run coverage report --fail-under=100 --show-missing
diff --git a/tests/conftest.py b/tests/conftest.py
index 845bf38..943a78c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,4 +10,26 @@ async def global_client_session():
     """One aiohttp.ClientSession shared by the whole test run."""
     async with aiohttp.ClientSession() as session:
         yield session
-        # aiohttp’s async context manager awaits session.close() for us
+        # aiohttp's async context manager awaits session.close() for us
+
+
+def pytest_addoption(parser):
+    parser.addoption(
+        "--ipfs",
+        action="store_true",
+        default=False,
+        help="run tests that require a Kubo daemon",
+    )
+
+
+def pytest_configure(config):
+    config.addinivalue_line("markers", "ipfs: tests that need a live IPFS node")
+
+
+def pytest_collection_modifyitems(config, items):
+    if config.getoption("--ipfs"):
+        return  # user explicitly asked → run them
+    skip = pytest.mark.skip(reason="needs --ipfs to run")
+    for item in items:
+        if "ipfs" in item.keywords:
+            item.add_marker(skip)
diff --git a/tests/test_kubo_cas.py b/tests/test_kubo_cas.py
index 6f60db3..1d08136 100644
--- a/tests/test_kubo_cas.py
+++ b/tests/test_kubo_cas.py
@@ -43,94 +43,85 @@ async def test_memory_store_invalid_key_type():
         await s.load(invalid_key)
 
 
+# Runs against either a Docker container or a local daemon
+@pytest.mark.ipfs
 @pytest.mark.asyncio(loop_scope="session")
 @given(data=ipld_strategy())
-@settings(
-    deadline=1000, print_blob=True
-)  # Increased deadline, print_blob for debugging
-async def test_kubo_default_urls(
-    global_client_session, data: IPLDKind
-):  # Inject the session fixture
+@settings(deadline=1000, print_blob=True)
+async def test_kubo_urls_explicit(create_ipfs, global_client_session, data: IPLDKind):
     """
-    Tests KuboCAS using its default URLs and when None is passed for URLs,
-    leveraging a globally managed aiohttp.ClientSession.
+    Tests KuboCAS functionality with explicitly provided URLs.
+    Works with both Docker containers and local IPFS daemons.
     """
-    # Test Case 1: KuboCAS instantiated without explicit URLs (should use its defaults)
-    # We pass the managed global_client_session to it.
-    # KuboCAS itself is responsible for having default URLs if none are provided.
-    async with KuboCAS(session=global_client_session) as kubo_cas_default:
-        # print(f"Testing with default URLs: RPC={kubo_cas_default.rpc_base_url}, Gateway={kubo_cas_default.gateway_base_url}")
+    rpc_url, gateway_url = create_ipfs
+
+    # Test the same functionality but with explicit URLs
+    async with KuboCAS(
+        rpc_base_url=rpc_url,
+        gateway_base_url=gateway_url,
+        session=global_client_session,
+    ) as kubo_cas:
         encoded_data = dag_cbor.encode(data)
+        for codec in ["raw", "dag-cbor"]:
+            codec_typed = cast(Literal["raw", "dag-cbor"], codec)
+            cid = await kubo_cas.save(encoded_data, codec=codec_typed)
+            loaded_encoded_data = await kubo_cas.load(cid)
+            result = dag_cbor.decode(loaded_encoded_data)
+            assert data == result
+
+
+@pytest.mark.ipfs
+@pytest.mark.asyncio(loop_scope="session")
+@given(data=ipld_strategy())
+@settings(deadline=1000, print_blob=True)
+async def test_kubo_default_urls(global_client_session, data: IPLDKind):
+    """
+    Tests KuboCAS using its default URLs and when None is passed for URLs.
+    Requires a local IPFS daemon on default ports.
+    """
+    # Check if a local IPFS daemon is available on the default ports
+    import http.client
+
+    try:
+        conn = http.client.HTTPConnection("127.0.0.1", 5001, timeout=1)
+        conn.request("POST", "/api/v0/version")
+        response = conn.getresponse()
+        if response.status != 200:
+            pytest.skip("No IPFS daemon running on default ports (127.0.0.1:5001)")
+    except Exception:
+        pytest.skip("No IPFS daemon running on default ports (127.0.0.1:5001)")
+
+    # Case 1: no URLs passed to the constructor, so KuboCAS uses its defaults
+    async with KuboCAS(session=global_client_session) as kubo_cas_default:
+        encoded_data = dag_cbor.encode(data)
         for codec in ["raw", "dag-cbor"]:
-            # The codec is a string, but we use Literal to ensure type safety
-            # where codec_raw = "raw" and codec_dag_cbor = "dag-cbor"
-            # necessary because when you iterate over a list of strings,
-            # even if they are literal strings, mypy widens the type to just str
             codec_typed = cast(Literal["raw", "dag-cbor"], codec)
-            # print(f"Saving with codec: {codec}, data: {data}")
             try:
                 cid = await kubo_cas_default.save(encoded_data, codec=codec_typed)
-                # print(f"Saved. CID: {cid}")
                 loaded_encoded_data = await kubo_cas_default.load(cid)
-                # print(f"Loaded encoded data length: {len(loaded_encoded_data)}")
                 result = dag_cbor.decode(loaded_encoded_data)
-                # print(f"Decoded result: {result}")
-                assert data == result, (
-                    f"Data mismatch for codec {codec} with default URLs"
-                )
+                assert data == result
             except Exception as e:
                 pytest.fail(
                     f"Error during KuboCAS default URL test (codec: {codec}): {e}"
                 )
 
-    # Test Case 2: KuboCAS instantiated with None for URLs (should also use its defaults)
-    # We pass the managed global_client_session to it.
     async with KuboCAS(
         rpc_base_url=None, gateway_base_url=None, session=global_client_session
     ) as kubo_cas_none_urls:
-        # print(f"Testing with None URLs: RPC={kubo_cas_none_urls.rpc_base_url}, Gateway={kubo_cas_none_urls.gateway_base_url}")
-        encoded_data = dag_cbor.encode(
-            data
-        )  # Re-encode just in case, though it's the same data
+        encoded_data = dag_cbor.encode(data)
         for codec in ["raw", "dag-cbor"]:
-            # print(f"Saving with codec: {codec}, data: {data}")
             codec_typed = cast(Literal["raw", "dag-cbor"], codec)
             try:
                 cid = await kubo_cas_none_urls.save(encoded_data, codec=codec_typed)
-                # print(f"Saved. 
CID: {cid}") loaded_encoded_data = await kubo_cas_none_urls.load(cid) - # print(f"Loaded encoded data length: {len(loaded_encoded_data)}") result = dag_cbor.decode(loaded_encoded_data) - # print(f"Decoded result: {result}") - assert data == result, f"Data mismatch for codec {codec} with None URLs" + assert data == result except Exception as e: pytest.fail(f"Error during KuboCAS None URL test (codec: {codec}): {e}") -# @given(data=ipld_strategy()) -# @settings(deadline=1000) -# @pytest.mark.asyncio -# async def test_kubo_default_urls(data: IPLDKind): -# try: -# async with KuboCAS() as kubo_cas: -# for codec in ("raw", "dag-cbor"): -# cid = await kubo_cas.save(dag_cbor.encode(data), codec=codec) -# result = dag_cbor.decode(await kubo_cas.load(cid)) -# assert data == result - -# async with KuboCAS(gateway_base_url=None, rpc_base_url=None) as kubo_cas: -# for codec in ("raw", "dag-cbor"): -# cid = await kubo_cas.save(dag_cbor.encode(data), codec=codec) -# result = dag_cbor.decode(await kubo_cas.load(cid)) -# assert data == result -# finally: -# # if Hypothesis cancels early, make sure every open CAS is closed -# for obj in list(globals().values()): -# if isinstance(obj, KuboCAS): -# await obj.aclose() - - @pytest.mark.asyncio @given(data=ipld_strategy()) @settings( diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 7ad3fb9..154f4cf 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -1,12 +1,8 @@ -import json +import http.client import os -import shutil import socket -import subprocess -import tempfile import time -from pathlib import Path -from typing import Any, Generator, Tuple +from urllib.parse import urlparse import pytest from hypothesis import strategies as st @@ -52,66 +48,150 @@ def ipld_strategy() -> SearchStrategy: ], # ensure unique keys, otherwise we can't do the length and size checks when using these KVs for the HAMT ) +# ---------- helpers --------------------------------------------------------- -def find_free_port() -> int: - with socket.socket() as s: - s.bind(("", 0)) # Bind to a free port provided by the host. - return int(s.getsockname()[1]) # Return the port number assigned. 
+def _rpc_is_up(url: str) -> bool: + """POST /api/v0/version on the *RPC* port (5001 by default).""" + p = urlparse(url) + if not p.hostname: + return False + try: + conn = http.client.HTTPConnection(p.hostname, p.port, timeout=1) + conn.request("POST", "/api/v0/version") + return conn.getresponse().status == 200 + except Exception: + return False -@pytest.fixture(scope="module") -def create_ipfs() -> Generator[Tuple[str, str], None, None]: - # Create temporary directory, set it as the IPFS Path - temp_dir: Path = Path(tempfile.mkdtemp()) - custom_env: dict[str, str] = os.environ.copy() - custom_env["IPFS_PATH"] = str(temp_dir) - # IPFS init - subprocess.run( - ["ipfs", "init", "--profile", "pebbleds"], check=True, env=custom_env - ) +def _gw_is_up(url: str) -> bool: + """HEAD / on the *gateway* port (8080 by default).""" + p = urlparse(url) + if not p.hostname: + return False + try: + conn = http.client.HTTPConnection(p.hostname, p.port, timeout=1) + conn.request("HEAD", "/") + return conn.getresponse().status in (200, 404) # 404 = empty root + except Exception: + return False - # Edit the config file so that it serves on randomly selected and available ports to not conflict with any currently running ipfs daemons - swarm_port: int = find_free_port() - rpc_port: int = find_free_port() - gateway_port: int = find_free_port() - config_path: Path = temp_dir / "config" - config: dict[str, Any] - with open(config_path, "r") as f: - config = json.load(f) +def _free_port() -> int: + with socket.socket() as s: + s.bind(("", 0)) + return s.getsockname()[1] - swarm_addrs: list[str] = config["Addresses"]["Swarm"] - new_port_swarm_addrs: list[str] = [ - s.replace("4001", str(swarm_port)) for s in swarm_addrs - ] - config["Addresses"]["Swarm"] = new_port_swarm_addrs - rpc_multiaddr: str = config["Addresses"]["API"] - gateway_multiaddr: str = config["Addresses"]["Gateway"] +try: + import docker +except ImportError: + docker = None - config["Addresses"]["API"] = rpc_multiaddr.replace("5001", str(rpc_port)) - config["Addresses"]["Gateway"] = gateway_multiaddr.replace( - "8080", str(gateway_port) - ) - with open(config_path, "w") as f: - json.dump(config, f, indent=2) +def _docker_client_or_none(): + """Try to get a working Docker client, with macOS Docker Desktop support.""" - # Start the daemon - rpc_uri_stem: str = f"http://127.0.0.1:{rpc_port}" - gateway_uri_stem: str = f"http://127.0.0.1:{gateway_port}" + if docker is None: + return None - ipfs_process: subprocess.Popen[bytes] = subprocess.Popen( - ["ipfs", "daemon"], env=custom_env - ) - # Should be enough time for the ipfs daemon process to start up - time.sleep(5) - - yield rpc_uri_stem, gateway_uri_stem + # Common Docker socket locations (in order of preference) + socket_locations = [ + # Docker Desktop on macOS + f"unix://{os.path.expanduser('~')}/.docker/run/docker.sock", + # Docker Desktop alternative location + f"unix://{os.path.expanduser('~')}/.docker/desktop/docker.sock", + # Standard Linux locations + "unix:///var/run/docker.sock", + "unix:///run/docker.sock", + ] - # Close the daemon - ipfs_process.kill() + # First, try with DOCKER_HOST if it's already set + existing_host = os.environ.get("DOCKER_HOST") + if existing_host: + try: + c = docker.DockerClient(base_url=existing_host) + c.ping() + return c + except Exception: + pass + + # Try each known socket location + for socket_path in socket_locations: + try: + # Check if the socket file exists (for unix sockets) + if socket_path.startswith("unix://"): + socket_file = 
socket_path.replace("unix://", "") + if not os.path.exists(socket_file): + continue + + c = docker.DockerClient(base_url=socket_path) + c.ping() + # If successful, set DOCKER_HOST for any child processes + os.environ["DOCKER_HOST"] = socket_path + return c + except Exception: + continue + + # Last resort: try docker.from_env() which might work with some setups + try: + c = docker.from_env() + c.ping() + return c + except Exception: + pass + + return None + + +# ---------- fixture --------------------------------------------------------- + + +@pytest.fixture(scope="session") +def create_ipfs(): + """Yield `(rpc_url, gateway_url)`. + + Order of preference: + 1. reuse an already-running daemon (checked via RPC probe) + 2. launch Docker container (if docker is installed & running) + 3. skip the IPFS-marked tests + """ + rpc = os.getenv("IPFS_RPC_URL") or "http://127.0.0.1:5001" + gw = os.getenv("IPFS_GATEWAY_URL") or "http://127.0.0.1:8080" + + # 1. reuse existing node ------------------------------------------------- + if _rpc_is_up(rpc) and _gw_is_up(gw): + yield rpc, gw + return + + # 2. fall back to Docker ------------------------------------------------- + client = _docker_client_or_none() + if client is None: + pytest.skip("Neither IPFS daemon nor Docker available – skipping IPFS tests") + + image = "ipfs/kubo:v0.35.0" + rpc_p = _free_port() + gw_p = _free_port() + + container = client.containers.run( + image, + "daemon --init --offline", + ports={"5001/tcp": rpc_p, "8080/tcp": gw_p}, + detach=True, + auto_remove=True, + ) - # Delete the temporary directory - shutil.rmtree(temp_dir) + try: + # Wait for container to be ready + for _ in range(30): # 30 attempts, 0.5s each = 15s max + if _rpc_is_up(f"http://127.0.0.1:{rpc_p}") and _gw_is_up( + f"http://127.0.0.1:{gw_p}" + ): + break + time.sleep(0.5) + else: + raise RuntimeError("IPFS container failed to start within timeout") + + yield f"http://127.0.0.1:{rpc_p}", f"http://127.0.0.1:{gw_p}" + finally: + container.stop(timeout=3) diff --git a/uv.lock b/uv.lock index 623ec9c..02ec99d 100644 --- a/uv.lock +++ b/uv.lock @@ -534,6 +534,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/25/65/89601dcc7383f0e5109e59eab90677daa9abb260d821570cd6089c8894bf/distributed-2025.5.1-py3-none-any.whl", hash = "sha256:74782b965ddb24ce59c6441fa777e944b5962d82325cc41f228537b59bb7fbbe", size = 1014789 }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, +] + [[package]] name = "donfig" version = "0.8.1.post1" @@ -1665,6 +1679,7 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "docker" }, { name = "hypothesis" }, { name = "memray" }, { name = "mypy" }, @@ -1678,6 +1693,7 @@ dev = [ { name = "pytest-cov" }, { name = "ruff" }, { name = "snakeviz" }, + { name = "types-docker" }, { name = "xarray", extra = ["complete"] }, ] @@ -1693,6 +1709,7 @@ requires-dist 
= [ [package.metadata.requires-dev] dev = [ + { name = "docker", specifier = ">=7.1.0" }, { name = "hypothesis", specifier = ">=6.115.5" }, { name = "memray", specifier = ">=1.14.0" }, { name = "mypy", specifier = ">=1.15.0" }, @@ -1706,6 +1723,7 @@ dev = [ { name = "pytest-cov", specifier = ">=6.0.0" }, { name = "ruff", specifier = ">=0.7.1" }, { name = "snakeviz", specifier = ">=2.2.0" }, + { name = "types-docker", specifier = ">=7.1.0.20250523" }, { name = "xarray", extras = ["complete"], specifier = ">=2025.3.0" }, ] @@ -1925,6 +1943,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, ] +[[package]] +name = "pywin32" +version = "310" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239 }, + { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839 }, + { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470 }, + { url = "https://files.pythonhosted.org/packages/1c/09/9c1b978ffc4ae53999e89c19c77ba882d9fce476729f23ef55211ea1c034/pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab", size = 8794384 }, + { url = "https://files.pythonhosted.org/packages/45/3c/b4640f740ffebadd5d34df35fecba0e1cfef8fde9f3e594df91c28ad9b50/pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e", size = 9503039 }, + { url = "https://files.pythonhosted.org/packages/b4/f4/f785020090fb050e7fb6d34b780f2231f302609dc964672f72bfaeb59a28/pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33", size = 8458152 }, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -2186,6 +2217,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/55/a7/535c44c7bea4578e48281d83c615219f3ab19e6abc67625ef637c73987be/tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7", size = 443596 }, ] +[[package]] +name = "types-docker" +version = "7.1.0.20250523" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/d6/7acc13bcd8ee773dbb0dac967091d8afe3ffae19a51c153c9771a8becd8d/types_docker-7.1.0.20250523.tar.gz", hash = "sha256:fd7a2dbc75cbf58170f2ae9ac31d6e810ead646a5b28c016698edb293d43d60d", size = 30995 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/f0/188faad66fff121c0391a91f7fccda496fdae135e407712dfcca17720749/types_docker-7.1.0.20250523-py3-none-any.whl", hash = "sha256:ce6276bec00be41d1b00f87e31d0d39dcd0811a44c18f06b0046def3ee22b96e", size = 45816 }, +] + [[package]] name = 
"types-pytz" version = "2025.2.0.20250516" @@ -2195,6 +2239,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ba/e205cd11c1c7183b23c97e4bcd1de7bc0633e2e867601c32ecfc6ad42675/types_pytz-2025.2.0.20250516-py3-none-any.whl", hash = "sha256:e0e0c8a57e2791c19f718ed99ab2ba623856b11620cb6b637e5f62ce285a7451", size = 10136 }, ] +[[package]] +name = "types-requests" +version = "2.32.0.20250602" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/b0/5321e6eeba5d59e4347fcf9bf06a5052f085c3aa0f4876230566d6a4dc97/types_requests-2.32.0.20250602.tar.gz", hash = "sha256:ee603aeefec42051195ae62ca7667cd909a2f8128fdf8aad9e8a5219ecfab3bf", size = 23042 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/18/9b782980e575c6581d5c0c1c99f4c6f89a1d7173dad072ee96b2756c02e6/types_requests-2.32.0.20250602-py3-none-any.whl", hash = "sha256:f4f335f87779b47ce10b8b8597b409130299f6971ead27fead4fe7ba6ea3e726", size = 20638 }, +] + [[package]] name = "typing-extensions" version = "4.13.2"