From 9e05441c44b0231c022db1a53194fdc3c47eecc6 Mon Sep 17 00:00:00 2001 From: William Easton Date: Wed, 24 Sep 2025 21:43:41 -0500 Subject: [PATCH 01/31] Readme cleanup --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 78f7f767..08e667bd 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # KV Store Adapter -A pluggable, async-first key-value store interface for Python applications with support for multiple backends and TTL (Time To Live) functionality. +A pluggable, async-only key-value store interface for modern Python applications. ## Features -- **Async-first**: Built from the ground up with `async`/`await` support +- **Async-only**: Built from the ground up with `async`/`await` support - **Multiple backends**: Redis, Elasticsearch, In-memory, Disk, and more - **TTL support**: Automatic expiration handling across all store types - **Type-safe**: Full type hints with Protocol-based interfaces From 8324451687e399a239faa4940ee7624b57c64325 Mon Sep 17 00:00:00 2001 From: William Easton Date: Wed, 24 Sep 2025 21:43:57 -0500 Subject: [PATCH 02/31] Add pydantic anyurl test case --- tests/adapters/test_pydantic.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/adapters/test_pydantic.py b/tests/adapters/test_pydantic.py index b2bf81df..6f0c1520 100644 --- a/tests/adapters/test_pydantic.py +++ b/tests/adapters/test_pydantic.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone import pytest -from pydantic import BaseModel +from pydantic import AnyHttpUrl, BaseModel from kv_store_adapter.adapters.pydantic import PydanticAdapter from kv_store_adapter.stores.memory.store import MemoryStore @@ -17,6 +17,7 @@ class Product(BaseModel): name: str price: float quantity: int + url: AnyHttpUrl class Order(BaseModel): @@ -31,7 +32,7 @@ class Order(BaseModel): FIXED_UPDATED_AT: datetime = datetime(year=2021, month=1, day=1, hour=15, minute=0, second=0, tzinfo=timezone.utc) SAMPLE_USER: User = User(name="John Doe", email="john.doe@example.com", age=30) -SAMPLE_PRODUCT: Product = Product(name="Widget", price=29.99, quantity=10) +SAMPLE_PRODUCT: Product = Product(name="Widget", price=29.99, quantity=10, url=AnyHttpUrl("https://example.com")) SAMPLE_ORDER: Order = Order(created_at=datetime.now(), updated_at=datetime.now(), user=SAMPLE_USER, product=SAMPLE_PRODUCT, paid=False) From 9d4c9a5bed2c055c294d64866a66ebf10abbe432 Mon Sep 17 00:00:00 2001 From: William Easton Date: Wed, 24 Sep 2025 21:44:05 -0500 Subject: [PATCH 03/31] docstring updates --- src/kv_store_adapter/stores/disk/store.py | 26 ++++++++++++++----- .../stores/elasticsearch/store.py | 10 ++++--- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/src/kv_store_adapter/stores/disk/store.py b/src/kv_store_adapter/stores/disk/store.py index 41a9d49b..c85295e8 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/src/kv_store_adapter/stores/disk/store.py @@ -18,22 +18,34 @@ class DiskStore(BaseManagedKVStore): _cache: Cache @overload - def __init__(self, *, cache: Cache) -> None: ... + def __init__(self, *, disk_cache: Cache) -> None: + """Initialize the disk cache. + + Args: + disk_cache: An existing diskcache Cache instance to use. + """ @overload - def __init__(self, *, path: Path | str, size_limit: int | None = None) -> None: ... + def __init__(self, *, directory: Path | str, size_limit: int | None = None) -> None: + """Initialize the disk cache. + + Args: + directory: The directory to use for the disk cache. 
+ size_limit: The maximum size of the disk cache. Defaults to 1GB. + """ - def __init__(self, *, cache: Cache | None = None, path: Path | str | None = None, size_limit: int | None = None) -> None: + def __init__(self, *, disk_cache: Cache | None = None, directory: Path | str | None = None, size_limit: int | None = None) -> None: """Initialize the in-memory cache. Args: - disk_cache: The disk cache to use. + disk_cache: An existing diskcache Cache instance to use. + directory: The directory to use for the disk cache. size_limit: The maximum size of the disk cache. Defaults to 1GB. """ - if isinstance(path, Path): - path = str(object=path) + if isinstance(directory, Path): + directory = str(object=directory) - self._cache = cache or Cache(directory=path, size_limit=size_limit or DEFAULT_DISK_STORE_SIZE_LIMIT) + self._cache = disk_cache or Cache(directory=directory, size_limit=size_limit or DEFAULT_DISK_STORE_SIZE_LIMIT) super().__init__() diff --git a/src/kv_store_adapter/stores/elasticsearch/store.py b/src/kv_store_adapter/stores/elasticsearch/store.py index 535d6f28..4393166c 100644 --- a/src/kv_store_adapter/stores/elasticsearch/store.py +++ b/src/kv_store_adapter/stores/elasticsearch/store.py @@ -64,21 +64,23 @@ class ElasticsearchStore(BaseManagedKVStore): _index: str @overload - def __init__(self, *, client: AsyncElasticsearch, index: str) -> None: ... + def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index: str) -> None: ... @overload def __init__(self, *, url: str, api_key: str, index: str) -> None: ... - def __init__(self, *, client: AsyncElasticsearch | None = None, url: str | None = None, api_key: str | None = None, index: str) -> None: + def __init__( + self, *, elasticsearch_client: AsyncElasticsearch | None = None, url: str | None = None, api_key: str | None = None, index: str + ) -> None: """Initialize the elasticsearch store. Args: - client: The elasticsearch client to use. + elasticsearch_client: The elasticsearch client to use. url: The url of the elasticsearch cluster. api_key: The api key to use. index: The index to use. Defaults to "kv-store". """ - self._client = client or AsyncElasticsearch(hosts=[url], api_key=api_key) # pyright: ignore[reportArgumentType] + self._client = elasticsearch_client or AsyncElasticsearch(hosts=[url], api_key=api_key, **ELASTICSEARCH_CLIENT_DEFAULTS) # pyright: ignore[reportArgumentType] self._index = index or DEFAULT_INDEX super().__init__() From bc4e627679d9b5d08b94cbb09d46038909374685 Mon Sep 17 00:00:00 2001 From: William Easton Date: Thu, 25 Sep 2025 08:01:18 -0500 Subject: [PATCH 04/31] Additional clean-up --- .github/copilot-instructions.md | 71 +++++++++++++++++++++++ src/kv_store_adapter/stores/disk/store.py | 2 +- 2 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 .github/copilot-instructions.md diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..e60cc312 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,71 @@ +## Architecture Overview +The `py-kv-store-adapter` project provides a pluggable, async-first interface for various key-value (KV) store backends in Python. Its core purpose is to abstract away the underlying KV store implementation, offering a consistent `KVStoreProtocol` for interacting with different storage solutions like Redis, Elasticsearch, in-memory caches, and disk-based stores. 
The architecture distinguishes between two main store types: `Unmanaged Stores` (`BaseKVStore`) which handle their own TTL and collection management, and `Managed Stores` (`BaseManagedKVStore`) which automatically manage `ManagedEntry` objects for TTL and expiration. The system also supports `Adapters` for transforming data (e.g., Pydantic models) and `Wrappers` for adding cross-cutting concerns like statistics tracking or TTL clamping, which can be chained. Key concepts include collections for namespacing, compound keys for internal storage, and automatic TTL management. + +## Code Style & Conventions +- **Python Version**: Requires Python 3.10 or higher (`pyproject.toml:project.requires-python`). +- **Dependency Management**: Uses `uv` for dependency management (`DEVELOPING.md:L10`). Development dependencies are managed via `uv sync --group dev` (`DEVELOPING.md:L24`). +- **Linting & Formatting**: Enforced by Ruff (`pyproject.toml:[tool.ruff]`). + - Line length: 140 characters (`pyproject.toml:line-length`). + - Fixable issues: All auto-fixable issues are configured to be fixed (`pyproject.toml:lint.fixable`). + - Ignored rules: `COM812`, `PLR0913` (too many arguments) (`pyproject.toml:lint.ignore`). + - Extended select rules: A broad range of linting rules are enabled, including `A`, `ARG`, `B`, `C4`, `COM`, `DTZ`, `E`, `EM`, `F`, `FURB`, `I`, `LOG`, `N`, `PERF`, `PIE`, `PLR`, `PLW`, `PT`, `PTH`, `Q`, `RET`, `RSE`, `RUF`, `S`, `SIM`, `TC`, `TID`, `TRY`, `UP`, `W` (`pyproject.toml:lint.extend-select`). + - Per-file ignores: Test files (`**/tests/*.py`) ignore `S101` (asserts), `DTZ005` (datetime.UTC), `PLR2004` (magic values), `E501` (line length) (`pyproject.toml:[tool.ruff.lint.extend-per-file-ignores]`). +- **Type Checking**: Uses Pyright (`pyproject.toml:[tool.pyright]`). + - Python version: 3.10 (`pyproject.toml:pythonVersion`). + - Type checking mode: `recommended` (`pyproject.toml:typeCheckingMode`). + - `src/` directory is included for type checking (`pyproject.toml:include`). + - Missing type stubs, explicit `Any`, and missing module sources are not reported (`pyproject.toml:reportMissingTypeStubs`, `reportExplicitAny`, `reportMissingModuleSource`). + +## Quick Recipes +| Command | Description | +|---|---| +| Install dependencies | `uv sync --group dev` (`DEVELOPING.md:L24`) | +| Run all tests | `uv run pytest` (`DEVELOPING.md:L169`) | +| Run tests with coverage | `uv run pytest --cov=src/kv_store_adapter --cov-report=html` (`DEVELOPING.md:L172`) | +| Run specific test file | `uv run pytest tests/stores/redis/test_redis.py` (`DEVELOPING.md:L175`) | +| Check code style (lint) | `uv run ruff check` (`DEVELOPING.md:L277`) | +| Fix auto-fixable lint issues | `uv run ruff check --fix` (`DEVELOPING.md:L280`) | +| Format code | `uv run ruff format` (`DEVELOPING.md:L283`) | +| Type check | `pyright` (`DEVELOPING.md:L292`) | +| Start external services for integration tests | `docker-compose up -d` (`DEVELOPING.md:L187`) | +| Stop external services | `docker-compose down` (`DEVELOPING.md:L193`) | + +## Dependencies & Compatibility +- **Critical Runtime Dependencies**: + - `cachetools>=6.0.0` for `MemoryStore` (`pyproject.toml:L26`). + - `diskcache>=5.6.0` for `DiskStore` (`pyproject.toml:L27`). + - `redis>=6.0.0` for `RedisStore` (`pyproject.toml:L28`). + - `elasticsearch>=9.0.0`, `aiohttp>=3.12` for `ElasticsearchStore` (`pyproject.toml:L29`). + - `pydantic>=2.11.9` for `PydanticAdapter` (`pyproject.toml:L30`). +- **Toolchain & Versions**: + - Python: `>=3.10` (`pyproject.toml:L6`). 
+ - `uv`: Used for dependency management and running commands (`DEVELOPING.md:L10`). + - `pytest`: Test runner (`pyproject.toml:L45`). `asyncio_mode = \"auto\"` is configured for async tests (`pyproject.toml:L33`). + - `ruff`: Linter and formatter (`pyproject.toml:L48`). + - `basedpyright`: Type checker (`pyproject.toml:L54`). +- **Observability**: + - The `StatisticsWrapper` (`src/kv_store_adapter/stores/wrappers/statistics.py`) provides in-memory tracking of operation counts, hits, and misses for `get`, `put`, `delete`, `exists`, `keys`, and `clear_collection` operations per collection. It can be enabled during initialization. + +## Unique Workflows +- **Adding New Store Implementations**: Developers can extend the system by creating new store classes that inherit from either `BaseKVStore` (unmanaged) or `BaseManagedKVStore` (managed), implementing abstract methods for `get`, `put`, `delete`, `ttl`, `exists`, `keys`, `clear_collection`, and `list_collections` (`DEVELOPING.md:L298-L349`). +- **Wrapper/Adapter Chaining**: The design allows for chaining multiple wrappers and adapters to compose complex behaviors, such as `PydanticAdapter(SingleCollectionWrapper(store, \"users\"), User)` (`README.md:L174`). +- **CI/CD**: GitHub Actions workflows (`.github/workflows/`) are configured to run tests, linting, type checking, and formatting on pull requests and pushes to `main` (`.github/workflows/test_pull_request.yml`). A separate workflow handles publishing to PyPI on release creation (`.github/workflows/publish-py-kv-store-adapter.yml`). + +## API Surface Map +The primary API surface is defined by the `KVStoreProtocol` (`src/kv_store_adapter/types.py:L26-L44`) and extended by `BaseKVStore` (`src/kv_store_adapter/stores/base/unmanaged.py:L11-L76`) and `BaseManagedKVStore` (`src/kv_store_adapter/stores/base/managed.py:L21-L122`). +- **Core KV Operations**: `get(collection, key)`, `put(collection, key, value, ttl)`, `delete(collection, key)`, `exists(collection, key)`, `ttl(collection, key)`. +- **Management Operations (BaseKVStore)**: `keys(collection)`, `clear_collection(collection)`, `list_collections()`, `cull()`. + + +## Onboarding Steps +- **Understand Core Concepts**: Familiarize yourself with `KVStoreProtocol`, `BaseKVStore`, `BaseManagedKVStore`, `ManagedEntry`, `Collections`, `Compound Keys`, `TTL Management`, `Wrappers`, and `Adapters` by reading `README.md` and `DEVELOPING.md`. +- **Development Setup**: Follow the \"Development Setup\" in `DEVELOPING.md` to clone the repository, install `uv`, sync dependencies (`uv sync --group dev`), activate the virtual environment, and install pre-commit hooks. +- **Testing**: Review `DEVELOPING.md`'s \"Testing\" section for how to run tests, set up test environments using Docker Compose, and write new tests using `BaseKVStoreTestCase` or `BaseManagedKVStoreTestCase` from `tests/cases.py`. +- **Code Quality**: Understand the `ruff` and `pyright` configurations in `pyproject.toml` and how to run them (`uv run ruff check`, `uv run ruff format`, `pyright`). +- **Adding New Stores**: If extending the project, follow the \"Adding New Store Implementations\" guide in `DEVELOPING.md` for detailed steps on choosing a base class, creating the store, structuring the package, and adding tests. + +## Getting Unstuck +- **General Development Issues**: Refer to the \"Development Guide\" in [`DEVELOPING.md`](DEVELOPING.md) for setup, testing, and contribution guidelines. 
+- **Integration Tests with External Services**: If integration tests fail, ensure Docker and Docker Compose are running and the necessary services (Redis, Elasticsearch) are started via `docker-compose up -d` as described in [`DEVELOPING.md:L181-L194`](DEVELOPING.md:L181-L194). Check `.env` file configuration for external services (`DEVELOPING.md:L197-L211`). +- **Redis Test Failures**: The `tests/stores/redis/test_redis.py` fixture `setup_redis` attempts to manage a Dockerized Redis instance. If Redis fails to start, check Docker logs or manually ensure the `redis-test` container is running and accessible. +- **SingleCollectionWrapper Limitations**: This wrapper does not support `clear_collection` or `list_collections` operations, raising `NotImplementedError` if called (`src/kv_store_adapter/stores/wrappers/single_collection.py:L47-L64` \ No newline at end of file diff --git a/src/kv_store_adapter/stores/disk/store.py b/src/kv_store_adapter/stores/disk/store.py index c85295e8..c8a2d170 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/src/kv_store_adapter/stores/disk/store.py @@ -35,7 +35,7 @@ def __init__(self, *, directory: Path | str, size_limit: int | None = None) -> N """ def __init__(self, *, disk_cache: Cache | None = None, directory: Path | str | None = None, size_limit: int | None = None) -> None: - """Initialize the in-memory cache. + """Initialize the disk cache. Args: disk_cache: An existing diskcache Cache instance to use. From afafecb9db93315fd13acffd535c109eec00a353 Mon Sep 17 00:00:00 2001 From: William Easton Date: Thu, 25 Sep 2025 08:03:36 -0500 Subject: [PATCH 05/31] Fix disk init tests --- tests/stores/disk/test_disk.py | 2 +- tests/stores/wrappers/test_passthrough.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/stores/disk/test_disk.py b/tests/stores/disk/test_disk.py index 1bb4e144..7fe3ce9f 100644 --- a/tests/stores/disk/test_disk.py +++ b/tests/stores/disk/test_disk.py @@ -15,4 +15,4 @@ class TestMemoryStore(BaseStoreTests): @pytest.fixture async def store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield DiskStore(path=temp_dir, size_limit=TEST_SIZE_LIMIT) + yield DiskStore(directory=temp_dir, size_limit=TEST_SIZE_LIMIT) diff --git a/tests/stores/wrappers/test_passthrough.py b/tests/stores/wrappers/test_passthrough.py index 051fa859..cdb76be8 100644 --- a/tests/stores/wrappers/test_passthrough.py +++ b/tests/stores/wrappers/test_passthrough.py @@ -16,7 +16,7 @@ class TestPrefixCollectionWrapper(BaseStoreTests): @pytest.fixture async def primary_store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield DiskStore(path=temp_dir, size_limit=DISK_STORE_SIZE_LIMIT) + yield DiskStore(directory=temp_dir, size_limit=DISK_STORE_SIZE_LIMIT) @pytest.fixture async def cache_store(self) -> MemoryStore: From d77e93692053f511645df8f06e72f77834589404 Mon Sep 17 00:00:00 2001 From: William Easton Date: Thu, 25 Sep 2025 08:14:32 -0500 Subject: [PATCH 06/31] Additional clean-up --- .github/copilot-instructions.md | 6 +-- .github/workflows/test_pull_request.yml | 8 ++-- DEVELOPING.md | 2 +- README.md | 12 +++--- src/kv_store_adapter/adapters/pydantic.py | 8 ++-- .../adapters/single_collection.py | 8 ++-- src/kv_store_adapter/types.py | 43 ++++++++++++++++++- .../elasticsearch/test_elasticsearch.py | 2 +- tests/test_types.py | 4 +- 9 files changed, 66 insertions(+), 27 deletions(-) diff --git a/.github/copilot-instructions.md 
b/.github/copilot-instructions.md index e60cc312..039e9057 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,5 +1,5 @@ ## Architecture Overview -The `py-kv-store-adapter` project provides a pluggable, async-first interface for various key-value (KV) store backends in Python. Its core purpose is to abstract away the underlying KV store implementation, offering a consistent `KVStoreProtocol` for interacting with different storage solutions like Redis, Elasticsearch, in-memory caches, and disk-based stores. The architecture distinguishes between two main store types: `Unmanaged Stores` (`BaseKVStore`) which handle their own TTL and collection management, and `Managed Stores` (`BaseManagedKVStore`) which automatically manage `ManagedEntry` objects for TTL and expiration. The system also supports `Adapters` for transforming data (e.g., Pydantic models) and `Wrappers` for adding cross-cutting concerns like statistics tracking or TTL clamping, which can be chained. Key concepts include collections for namespacing, compound keys for internal storage, and automatic TTL management. +The `py-kv-store-adapter` project provides a pluggable, async-first interface for various key-value (KV) store backends in Python. Its core purpose is to abstract away the underlying KV store implementation, offering a consistent `KVStore` for interacting with different storage solutions like Redis, Elasticsearch, in-memory caches, and disk-based stores. The architecture distinguishes between two main store types: `Unmanaged Stores` (`BaseKVStore`) which handle their own TTL and collection management, and `Managed Stores` (`BaseManagedKVStore`) which automatically manage `ManagedEntry` objects for TTL and expiration. The system also supports `Adapters` for transforming data (e.g., Pydantic models) and `Wrappers` for adding cross-cutting concerns like statistics tracking or TTL clamping, which can be chained. Key concepts include collections for namespacing, compound keys for internal storage, and automatic TTL management. ## Code Style & Conventions - **Python Version**: Requires Python 3.10 or higher (`pyproject.toml:project.requires-python`). @@ -52,13 +52,13 @@ The `py-kv-store-adapter` project provides a pluggable, async-first interface fo - **CI/CD**: GitHub Actions workflows (`.github/workflows/`) are configured to run tests, linting, type checking, and formatting on pull requests and pushes to `main` (`.github/workflows/test_pull_request.yml`). A separate workflow handles publishing to PyPI on release creation (`.github/workflows/publish-py-kv-store-adapter.yml`). ## API Surface Map -The primary API surface is defined by the `KVStoreProtocol` (`src/kv_store_adapter/types.py:L26-L44`) and extended by `BaseKVStore` (`src/kv_store_adapter/stores/base/unmanaged.py:L11-L76`) and `BaseManagedKVStore` (`src/kv_store_adapter/stores/base/managed.py:L21-L122`). +The primary API surface is defined by the `KVStore` (`src/kv_store_adapter/types.py:L26-L44`) and extended by `BaseKVStore` (`src/kv_store_adapter/stores/base/unmanaged.py:L11-L76`) and `BaseManagedKVStore` (`src/kv_store_adapter/stores/base/managed.py:L21-L122`). - **Core KV Operations**: `get(collection, key)`, `put(collection, key, value, ttl)`, `delete(collection, key)`, `exists(collection, key)`, `ttl(collection, key)`. - **Management Operations (BaseKVStore)**: `keys(collection)`, `clear_collection(collection)`, `list_collections()`, `cull()`. 
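To make the protocol surface above concrete, here is a minimal usage sketch against the in-memory store (the `MemoryStore` import path and no-argument constructor follow the README at this point in the series; treat them as assumptions if your version differs):

```python
import asyncio

from kv_store_adapter.stores.memory import MemoryStore  # import path as shown in the README


async def main() -> None:
    store = MemoryStore()

    # Core operations from the protocol listed above; ttl on put() is optional.
    await store.put(collection="users", key="123", value={"name": "Alice"}, ttl=60)
    print(await store.get(collection="users", key="123"))     # {'name': 'Alice'}
    print(await store.exists(collection="users", key="123"))  # True
    print(await store.delete(collection="users", key="123"))  # True once deleted


asyncio.run(main())
```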
 ## Onboarding Steps
-- **Understand Core Concepts**: Familiarize yourself with `KVStoreProtocol`, `BaseKVStore`, `BaseManagedKVStore`, `ManagedEntry`, `Collections`, `Compound Keys`, `TTL Management`, `Wrappers`, and `Adapters` by reading `README.md` and `DEVELOPING.md`.
+- **Understand Core Concepts**: Familiarize yourself with `KVStore`, `BaseKVStore`, `BaseManagedKVStore`, `ManagedEntry`, `Collections`, `Compound Keys`, `TTL Management`, `Wrappers`, and `Adapters` by reading `README.md` and `DEVELOPING.md`.
 - **Development Setup**: Follow the \"Development Setup\" in `DEVELOPING.md` to clone the repository, install `uv`, sync dependencies (`uv sync --group dev`), activate the virtual environment, and install pre-commit hooks.
 - **Testing**: Review `DEVELOPING.md`'s \"Testing\" section for how to run tests, set up test environments using Docker Compose, and write new tests using `BaseKVStoreTestCase` or `BaseManagedKVStoreTestCase` from `tests/cases.py`.
 - **Code Quality**: Understand the `ruff` and `pyright` configurations in `pyproject.toml` and how to run them (`uv run ruff check`, `uv run ruff format`, `pyright`).
 - **Adding New Stores**: If extending the project, follow the \"Adding New Store Implementations\" guide in `DEVELOPING.md` for detailed steps on choosing a base class, creating the store, structuring the package, and adding tests.
diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml
index 1c5e5691..9cbf0b46 100644
--- a/.github/workflows/test_pull_request.yml
+++ b/.github/workflows/test_pull_request.yml
@@ -12,9 +12,9 @@ on:
 jobs:
   publish:
     runs-on: ubuntu-latest
-    permissions:
-      id-token: write
-    environment: pypi
+    strategy:
+      matrix:
+        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
 
     steps:
       - name: Checkout repository
@@ -24,7 +24,7 @@ jobs:
         uses: astral-sh/setup-uv@v6
 
       - name: "Install"
-        run: uv sync --locked --group dev
+        run: uv sync --locked --group dev --python ${{ matrix.python-version }}
 
       - name: "Lint"
         run: uv run ruff check --exit-non-zero-on-fix --fix
diff --git a/DEVELOPING.md b/DEVELOPING.md
index 1db0721e..91813384 100644
--- a/DEVELOPING.md
+++ b/DEVELOPING.md
@@ -65,7 +65,7 @@
 
 ## Store Configuration
 
-All stores implement the `KVStoreProtocol` interface. Here are detailed configuration options:
+All stores implement the `KVStore` interface. Here are detailed configuration options:
 
 ### Redis Store
 High-performance store with native TTL support:
diff --git a/README.md b/README.md
index 08e667bd..20ca9e2b 100644
--- a/README.md
+++ b/README.md
@@ -30,12 +30,12 @@ pip install kv-store-adapter[memory,disk,redis,elasticsearch]
 
 # The KV Store Protocol
 
-The simplest way to get started is to use the `KVStoreProtocol` interface, which allows you to write code that works with any supported KV Store:
+The simplest way to get started is to use the `KVStore` interface, which allows you to write code that works with any supported KV Store:
 
 ```python
 import asyncio
 
-from kv_store_adapter.types import KVStoreProtocol
+from kv_store_adapter.types import KVStore
 from kv_store_adapter.stores.redis import RedisStore
 from kv_store_adapter.stores.memory import MemoryStore
 
@@ -56,7 +56,7 @@ asyncio.run(example())
 
 ## Store Implementations
 
-Choose the store that best fits your needs. All stores implement the same `KVStoreProtocol` interface:
+Choose the store that best fits your needs. All stores implement the same `KVStore` interface:
 
 ### Production Stores
 
@@ -74,12 +74,12 @@ For detailed configuration options and all available stores, see [DEVELOPING.md]
 
 ## Atomicity / Consistency
 
-We strive to support atomicity and consistency across all stores and operations in the KVStoreProtocol. That being said,
+We strive to support atomicity and consistency across all stores and operations in the KVStore. That being said,
 there are operations available via the BaseKVStore class which are management operations like listing keys, listing collections, clearing collections, culling expired entries, etc. These operations may not be atomic, may be eventually consistent across stores, or may have other limitations (like limited to returning a certain number of keys).
 
 ## Protocol Adapters
 
-The library provides an adapter pattern simplifying the use of the protocol/store. Adapters themselves do not implement the `KVStoreProtocol` interface and cannot be nested. Adapters can be used with anything that implements the `KVStoreProtocol` interface but do not comply with the full `BaseKVStore` interface and thus lack management operations like listing keys, listing collections, clearing collections, culling expired entries, etc.
+The library provides an adapter pattern simplifying the use of the protocol/store. Adapters themselves do not implement the `KVStore` interface and cannot be nested. Adapters can be used with anything that implements the `KVStore` interface but do not comply with the full `BaseKVStore` interface and thus lack management operations like listing keys, listing collections, clearing collections, culling expired entries, etc.
 
 The following adapters are available:
@@ -111,7 +111,7 @@ asyncio.run(example())
 
 ## Wrappers
 
-The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the `KVStoreProtocol` interface meaning that you can wrap any
+The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the `KVStore` interface meaning that you can wrap any
 store with any wrapper, and chain wrappers together as needed.
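As a sketch of that chaining (assuming `StatisticsWrapper` takes the wrapped store as its only required argument; the import path matches the pre-refactor layout under `stores/wrappers/`):

```python
import asyncio

from kv_store_adapter.stores.memory import MemoryStore
from kv_store_adapter.stores.wrappers.statistics import StatisticsWrapper


async def main() -> None:
    # A wrapper is itself a KVStore, so it can wrap a bare store or another
    # wrapper and be passed anywhere a KVStore is accepted.
    store = StatisticsWrapper(MemoryStore())  # constructor signature assumed

    await store.put(collection="users", key="1", value={"name": "Ada"})
    await store.get(collection="users", key="1")  # tracked as a hit
    await store.get(collection="users", key="2")  # tracked as a miss


asyncio.run(main())
```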
### Statistics Tracking diff --git a/src/kv_store_adapter/adapters/pydantic.py b/src/kv_store_adapter/adapters/pydantic.py index ed91cd1e..5794cc9b 100644 --- a/src/kv_store_adapter/adapters/pydantic.py +++ b/src/kv_store_adapter/adapters/pydantic.py @@ -4,16 +4,16 @@ from pydantic_core import PydanticSerializationError from kv_store_adapter.errors import DeserializationError, SerializationError -from kv_store_adapter.types import KVStoreProtocol +from kv_store_adapter.types import KVStore T = TypeVar("T", bound=BaseModel) class PydanticAdapter(Generic[T]): - """Adapter around a KVStoreProtocol-compliant Store that allows type-safe persistence of Pydantic models.""" + """Adapter around a KVStore-compliant Store that allows type-safe persistence of Pydantic models.""" - def __init__(self, store_protocol: KVStoreProtocol, pydantic_model: type[T]) -> None: - self.store_protocol: KVStoreProtocol = store_protocol + def __init__(self, store_protocol: KVStore, pydantic_model: type[T]) -> None: + self.store_protocol: KVStore = store_protocol self.pydantic_model: type[T] = pydantic_model async def get(self, collection: str, key: str) -> T | None: diff --git a/src/kv_store_adapter/adapters/single_collection.py b/src/kv_store_adapter/adapters/single_collection.py index 23ea6f6e..66e10e1e 100644 --- a/src/kv_store_adapter/adapters/single_collection.py +++ b/src/kv_store_adapter/adapters/single_collection.py @@ -1,13 +1,13 @@ from typing import Any -from kv_store_adapter.types import KVStoreProtocol +from kv_store_adapter.types import KVStore class SingleCollectionAdapter: - """Adapter around a KVStoreProtocol-compliant Store that only allows one collection.""" + """Adapter around a KVStore-compliant Store that only allows one collection.""" - def __init__(self, store: KVStoreProtocol, collection: str) -> None: - self.store: KVStoreProtocol = store + def __init__(self, store: KVStore, collection: str) -> None: + self.store: KVStore = store self.collection: str = collection async def get(self, key: str) -> dict[str, Any] | None: diff --git a/src/kv_store_adapter/types.py b/src/kv_store_adapter/types.py index 7aeedbfd..3230563f 100644 --- a/src/kv_store_adapter/types.py +++ b/src/kv_store_adapter/types.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from datetime import datetime, timezone -from typing import Any, Protocol +from typing import Any, Protocol, runtime_checkable @dataclass @@ -23,7 +23,8 @@ def is_expired(self) -> bool: return self.expires_at <= datetime.now(tz=timezone.utc) -class KVStoreProtocol(Protocol): +@runtime_checkable +class KVStore(Protocol): """Protocol defining the interface for key-value store implementations.""" async def get(self, collection: str, key: str) -> dict[str, Any] | None: @@ -41,3 +42,41 @@ async def delete(self, collection: str, key: str) -> bool: async def exists(self, collection: str, key: str) -> bool: """Check if a key exists in the specified collection.""" ... + + +@runtime_checkable +class BulkKVStore(KVStore, Protocol): + """Protocol defining the interface for bulk key-value store implementations.""" + + async def get_many(self, collection: str, keys: list[str]) -> list[dict[str, Any]]: + """Retrieve multiple values by key from the specified collection.""" + ... + + async def put_many(self, collection: str, keys: list[str], values: list[dict[str, Any]]) -> None: + """Store multiple key-value pairs in the specified collection.""" + ... 
+ + async def delete_many(self, collection: str, keys: list[str]) -> None: + """Delete multiple key-value pairs from the specified collection.""" + ... + + +@runtime_checkable +class ManageKVStore(KVStore, Protocol): + """Protocol defining the interface for managed key-value store implementations.""" + + async def keys(self, collection: str) -> list[str]: + """List all keys in the specified collection.""" + ... + + async def collections(self) -> list[str]: + """List all available collection names (may include empty collections).""" + ... + + async def delete_collection(self, collection: str) -> int: + """Clear all keys in a collection, returning the number of keys deleted.""" + ... + + async def cull(self) -> None: + """Remove all expired entries from the store.""" + ... diff --git a/tests/stores/elasticsearch/test_elasticsearch.py b/tests/stores/elasticsearch/test_elasticsearch.py index 5a6e53c3..440ffcbb 100644 --- a/tests/stores/elasticsearch/test_elasticsearch.py +++ b/tests/stores/elasticsearch/test_elasticsearch.py @@ -38,7 +38,7 @@ async def eventually_consistent(self) -> None: @pytest.fixture async def store(self, elasticsearch_client: AsyncElasticsearch) -> ElasticsearchStore: _ = await elasticsearch_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") - return ElasticsearchStore(client=elasticsearch_client, index="kv-store-e2e-test") + return ElasticsearchStore(elasticsearch_client=elasticsearch_client, index="kv-store-e2e-test") @pytest.mark.skip(reason="Distributed Caches are unbounded") @override diff --git a/tests/test_types.py b/tests/test_types.py index b60ab1c7..c056bcb2 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta, timezone from kv_store_adapter.stores.memory import MemoryStore -from kv_store_adapter.types import KVStoreProtocol, TTLInfo +from kv_store_adapter.types import KVStore, TTLInfo def test_ttl_info(): @@ -24,7 +24,7 @@ def test_ttl_info(): async def test_kv_store_protocol(): - async def test_kv_store_protocol(kv_store: KVStoreProtocol): + async def test_kv_store_protocol(kv_store: KVStore): assert await kv_store.get(collection="test", key="test") is None await kv_store.put(collection="test", key="test", value={"test": "test"}) assert await kv_store.delete(collection="test", key="test") From da814d5e14549a8525c53177e23981daa29d0c38 Mon Sep 17 00:00:00 2001 From: William Easton Date: Thu, 25 Sep 2025 23:55:13 -0500 Subject: [PATCH 07/31] Refactor project --- .github/copilot-instructions.md | 28 +- .github/workflows/test_pull_request.yml | 4 +- DEVELOPING.md | 188 +++++---- README.md | 81 ++-- pyproject.toml | 7 +- src/kv_store_adapter/__init__.py | 2 + src/kv_store_adapter/adapters/__init__.py | 1 + src/kv_store_adapter/adapters/pydantic.py | 91 ++++- .../adapters/raise_on_missing.py | 190 ++++++++++ .../adapters/single_collection.py | 23 -- src/kv_store_adapter/errors.py | 10 + src/kv_store_adapter/stores/base.py | 357 ++++++++++++++++++ src/kv_store_adapter/stores/base/__init__.py | 0 src/kv_store_adapter/stores/base/managed.py | 121 ------ src/kv_store_adapter/stores/base/unmanaged.py | 75 ---- src/kv_store_adapter/stores/disk/__init__.py | 3 +- .../stores/disk/multi_store.py | 136 +++++++ src/kv_store_adapter/stores/disk/store.py | 102 +++-- .../stores/elasticsearch/store.py | 196 +++++----- .../stores/memcached/__init__.py | 3 + .../stores/memcached/store.py | 104 +++++ src/kv_store_adapter/stores/memory/store.py | 183 ++++++--- 
src/kv_store_adapter/stores/null/store.py | 45 +-- src/kv_store_adapter/stores/redis/store.py | 110 +++--- .../stores/simple/__init__.py | 3 +- .../stores/simple/json_store.py | 69 ---- src/kv_store_adapter/stores/simple/store.py | 168 +++------ src/kv_store_adapter/stores/utils/compound.py | 18 +- .../stores/utils/managed_entry.py | 93 +++-- .../stores/utils/time_to_live.py | 36 +- .../stores/wrappers/__init__.py | 6 - .../stores/wrappers/clamp_ttl.py | 69 ---- .../stores/wrappers/passthrough_cache.py | 81 ---- .../stores/wrappers/prefix_collection.py | 76 ---- .../stores/wrappers/prefix_key.py | 69 ---- .../stores/wrappers/single_collection.py | 68 ---- .../stores/wrappers/statistics.py | 197 ---------- src/kv_store_adapter/types.py | 192 +++++++--- src/kv_store_adapter/wrappers/base.py | 51 +++ src/kv_store_adapter/wrappers/clamp_ttl.py | 61 +++ .../wrappers/passthrough_cache.py | 184 +++++++++ .../wrappers/prefix_collections.py | 78 ++++ src/kv_store_adapter/wrappers/prefix_keys.py | 76 ++++ .../wrappers/single_collection.py | 80 ++++ src/kv_store_adapter/wrappers/statistics.py | 214 +++++++++++ tests/adapters/test_pydantic.py | 8 +- tests/adapters/test_raise.py | 37 ++ tests/adapters/test_single_collection.py | 28 -- tests/stores/conftest.py | 189 +++++----- tests/stores/disk/test_disk.py | 4 +- tests/stores/disk/test_multi_disk.py | 19 + .../elasticsearch/test_elasticsearch.py | 11 +- tests/stores/memcached/test_memcached.py | 73 ++++ tests/stores/redis/test_redis.py | 4 +- tests/stores/simple/test_json_store.py | 12 - tests/stores/simple/test_store.py | 5 - tests/stores/wrappers/test_clamp_ttl.py | 49 +-- ...ssthrough.py => test_passthrough_cache.py} | 6 +- .../stores/wrappers/test_prefix_collection.py | 6 +- tests/stores/wrappers/test_prefix_key.py | 6 +- .../stores/wrappers/test_single_collection.py | 21 +- tests/stores/wrappers/test_statistics.py | 14 + tests/test_types.py | 23 +- uv.lock | 81 ++-- 64 files changed, 2755 insertions(+), 1790 deletions(-) create mode 100644 src/kv_store_adapter/adapters/__init__.py create mode 100644 src/kv_store_adapter/adapters/raise_on_missing.py delete mode 100644 src/kv_store_adapter/adapters/single_collection.py create mode 100644 src/kv_store_adapter/stores/base.py delete mode 100644 src/kv_store_adapter/stores/base/__init__.py delete mode 100644 src/kv_store_adapter/stores/base/managed.py delete mode 100644 src/kv_store_adapter/stores/base/unmanaged.py create mode 100644 src/kv_store_adapter/stores/disk/multi_store.py create mode 100644 src/kv_store_adapter/stores/memcached/__init__.py create mode 100644 src/kv_store_adapter/stores/memcached/store.py delete mode 100644 src/kv_store_adapter/stores/simple/json_store.py delete mode 100644 src/kv_store_adapter/stores/wrappers/__init__.py delete mode 100644 src/kv_store_adapter/stores/wrappers/clamp_ttl.py delete mode 100644 src/kv_store_adapter/stores/wrappers/passthrough_cache.py delete mode 100644 src/kv_store_adapter/stores/wrappers/prefix_collection.py delete mode 100644 src/kv_store_adapter/stores/wrappers/prefix_key.py delete mode 100644 src/kv_store_adapter/stores/wrappers/single_collection.py delete mode 100644 src/kv_store_adapter/stores/wrappers/statistics.py create mode 100644 src/kv_store_adapter/wrappers/base.py create mode 100644 src/kv_store_adapter/wrappers/clamp_ttl.py create mode 100644 src/kv_store_adapter/wrappers/passthrough_cache.py create mode 100644 src/kv_store_adapter/wrappers/prefix_collections.py create mode 100644 src/kv_store_adapter/wrappers/prefix_keys.py create 
mode 100644 src/kv_store_adapter/wrappers/single_collection.py create mode 100644 src/kv_store_adapter/wrappers/statistics.py create mode 100644 tests/adapters/test_raise.py delete mode 100644 tests/adapters/test_single_collection.py create mode 100644 tests/stores/disk/test_multi_disk.py create mode 100644 tests/stores/memcached/test_memcached.py delete mode 100644 tests/stores/simple/test_json_store.py rename tests/stores/wrappers/{test_passthrough.py => test_passthrough_cache.py} (78%) create mode 100644 tests/stores/wrappers/test_statistics.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 039e9057..7def6705 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,5 +1,5 @@ ## Architecture Overview -The `py-kv-store-adapter` project provides a pluggable, async-first interface for various key-value (KV) store backends in Python. Its core purpose is to abstract away the underlying KV store implementation, offering a consistent `KVStore` for interacting with different storage solutions like Redis, Elasticsearch, in-memory caches, and disk-based stores. The architecture distinguishes between two main store types: `Unmanaged Stores` (`BaseKVStore`) which handle their own TTL and collection management, and `Managed Stores` (`BaseManagedKVStore`) which automatically manage `ManagedEntry` objects for TTL and expiration. The system also supports `Adapters` for transforming data (e.g., Pydantic models) and `Wrappers` for adding cross-cutting concerns like statistics tracking or TTL clamping, which can be chained. Key concepts include collections for namespacing, compound keys for internal storage, and automatic TTL management. +The `py-kv-store-adapter` project provides a pluggable, async-first interface for various key-value (KV) store backends in Python. Its core purpose is to abstract away the underlying KV store implementation, offering a consistent `KVStore` protocol for interacting with different storage solutions like Redis, Elasticsearch, in-memory caches, and disk-based stores. The architecture uses a unified `BaseStore` class that automatically manages `ManagedEntry` objects for consistent TTL and expiration handling across all store implementations. The system supports `Adapters` for transforming data (e.g., Pydantic models, raise-on-missing behavior) and `Wrappers` for adding cross-cutting concerns like statistics tracking, TTL clamping, key/collection prefixing, or single-collection mapping, which can be chained together. Key concepts include collections for namespacing, compound keys for internal storage in flat stores, automatic TTL management with timezone-aware timestamps, and a separation between adapters (which don't implement KVStore) and wrappers (which do implement KVStore and can be chained). ## Code Style & Conventions - **Python Version**: Requires Python 3.10 or higher (`pyproject.toml:project.requires-python`). @@ -33,7 +33,7 @@ The `py-kv-store-adapter` project provides a pluggable, async-first interface fo ## Dependencies & Compatibility - **Critical Runtime Dependencies**: - `cachetools>=6.0.0` for `MemoryStore` (`pyproject.toml:L26`). - - `diskcache>=5.6.0` for `DiskStore` (`pyproject.toml:L27`). + - `diskcache>=5.6.0`, `pathvalidate>=3.3.1` for `DiskStore` (`pyproject.toml:L27`). - `redis>=6.0.0` for `RedisStore` (`pyproject.toml:L28`). - `elasticsearch>=9.0.0`, `aiohttp>=3.12` for `ElasticsearchStore` (`pyproject.toml:L29`). - `pydantic>=2.11.9` for `PydanticAdapter` (`pyproject.toml:L30`). 
@@ -44,28 +44,30 @@ The `py-kv-store-adapter` project provides a pluggable, async-first interface fo - `ruff`: Linter and formatter (`pyproject.toml:L48`). - `basedpyright`: Type checker (`pyproject.toml:L54`). - **Observability**: - - The `StatisticsWrapper` (`src/kv_store_adapter/stores/wrappers/statistics.py`) provides in-memory tracking of operation counts, hits, and misses for `get`, `put`, `delete`, `exists`, `keys`, and `clear_collection` operations per collection. It can be enabled during initialization. + - The `StatisticsWrapper` (`src/kv_store_adapter/wrappers/statistics.py`) provides in-memory tracking of operation counts, hits, and misses for `get`, `put`, `delete`, and `ttl` operations per collection. It can be enabled during initialization. ## Unique Workflows -- **Adding New Store Implementations**: Developers can extend the system by creating new store classes that inherit from either `BaseKVStore` (unmanaged) or `BaseManagedKVStore` (managed), implementing abstract methods for `get`, `put`, `delete`, `ttl`, `exists`, `keys`, `clear_collection`, and `list_collections` (`DEVELOPING.md:L298-L349`). +- **Adding New Store Implementations**: Developers can extend the system by creating new store classes that inherit from the unified `BaseStore` class, implementing abstract methods `_get_managed_entry`, `_put_managed_entry`, and `_delete_managed_entry` (`DEVELOPING.md:L312-L399`). - **Wrapper/Adapter Chaining**: The design allows for chaining multiple wrappers and adapters to compose complex behaviors, such as `PydanticAdapter(SingleCollectionWrapper(store, \"users\"), User)` (`README.md:L174`). -- **CI/CD**: GitHub Actions workflows (`.github/workflows/`) are configured to run tests, linting, type checking, and formatting on pull requests and pushes to `main` (`.github/workflows/test_pull_request.yml`). A separate workflow handles publishing to PyPI on release creation (`.github/workflows/publish-py-kv-store-adapter.yml`). +- **CI/CD**: GitHub Actions workflows (`.github/workflows/`) are configured to run tests, linting, type checking, and formatting on pull requests and pushes to `main`. ## API Surface Map -The primary API surface is defined by the `KVStore` (`src/kv_store_adapter/types.py:L26-L44`) and extended by `BaseKVStore` (`src/kv_store_adapter/stores/base/unmanaged.py:L11-L76`) and `BaseManagedKVStore` (`src/kv_store_adapter/stores/base/managed.py:L21-L122`). -- **Core KV Operations**: `get(collection, key)`, `put(collection, key, value, ttl)`, `delete(collection, key)`, `exists(collection, key)`, `ttl(collection, key)`. -- **Management Operations (BaseKVStore)**: `keys(collection)`, `clear_collection(collection)`, `list_collections()`, `cull()`. +The primary API surface is defined by the `KVStore` protocol (`src/kv_store_adapter/types.py:L175-L180`) and implemented by the unified `BaseStore` class (`src/kv_store_adapter/stores/base.py:L29-L353`). +- **Core KV Operations**: `get(key, *, collection=None)`, `put(key, value, *, collection=None, ttl=None)`, `delete(key, *, collection=None)`, `ttl(key, *, collection=None)`. +- **Bulk Operations**: `get_many(keys, *, collection=None)`, `put_many(keys, values, *, collection=None, ttl=None)`, `delete_many(keys, *, collection=None)`, `ttl_many(keys, *, collection=None)`. +- **Management Operations (Extended Stores)**: `keys(collection=None, *, limit=None)`, `collections(*, limit=None)`, `destroy()`, `destroy_collection(collection)`, `cull()`. 
+- **Adapters**: `PydanticAdapter` for type-safe Pydantic model handling, `RaiseOnMissingAdapter` for optional exception-based missing key handling. +- **Wrappers**: `StatisticsWrapper`, `ClampTTLWrapper`, `PassthroughCacheWrapper`, `PrefixKeysWrapper`, `PrefixCollectionsWrapper`, `SingleCollectionWrapper`. ## Onboarding Steps -- **Understand Core Concepts**: Familiarize yourself with `KVStore`, `BaseKVStore`, `BaseManagedKVStore`, `ManagedEntry`, `Collections`, `Compound Keys`, `TTL Management`, `Wrappers`, and `Adapters` by reading `README.md` and `DEVELOPING.md`. +- **Understand Core Concepts**: Familiarize yourself with `KVStore`, `BaseStore`, `ManagedEntry`, `Collections`, `Compound Keys`, `TTL Management`, `Wrappers`, and `Adapters` by reading `README.md` and `DEVELOPING.md`. - **Development Setup**: Follow the \"Development Setup\" in `DEVELOPING.md` to clone the repository, install `uv`, sync dependencies (`uv sync --group dev`), activate the virtual environment, and install pre-commit hooks. -- **Testing**: Review `DEVELOPING.md`'s \"Testing\" section for how to run tests, set up test environments using Docker Compose, and write new tests using `BaseKVStoreTestCase` or `BaseManagedKVStoreTestCase` from `tests/cases.py`. +- **Testing**: Review `DEVELOPING.md`'s \"Testing\" section for how to run tests, set up test environments using Docker Compose, and write new tests using `BaseStoreTests` from `tests/stores/conftest.py`. - **Code Quality**: Understand the `ruff` and `pyright` configurations in `pyproject.toml` and how to run them (`uv run ruff check`, `uv run ruff format`, `pyright`). -- **Adding New Stores**: If extending the project, follow the \"Adding New Store Implementations\" guide in `DEVELOPING.md` for detailed steps on choosing a base class, creating the store, structuring the package, and adding tests. +- **Adding New Stores**: If extending the project, follow the \"Adding New Store Implementations\" guide in `DEVELOPING.md` for detailed steps on creating stores that inherit from the unified `BaseStore` class. ## Getting Unstuck - **General Development Issues**: Refer to the \"Development Guide\" in [`DEVELOPING.md`](DEVELOPING.md) for setup, testing, and contribution guidelines. - **Integration Tests with External Services**: If integration tests fail, ensure Docker and Docker Compose are running and the necessary services (Redis, Elasticsearch) are started via `docker-compose up -d` as described in [`DEVELOPING.md:L181-L194`](DEVELOPING.md:L181-L194). Check `.env` file configuration for external services (`DEVELOPING.md:L197-L211`). -- **Redis Test Failures**: The `tests/stores/redis/test_redis.py` fixture `setup_redis` attempts to manage a Dockerized Redis instance. If Redis fails to start, check Docker logs or manually ensure the `redis-test` container is running and accessible. -- **SingleCollectionWrapper Limitations**: This wrapper does not support `clear_collection` or `list_collections` operations, raising `NotImplementedError` if called (`src/kv_store_adapter/stores/wrappers/single_collection.py:L47-L64` \ No newline at end of file +- **Redis Test Failures**: The `tests/stores/redis/test_redis.py` fixture `setup_redis` attempts to manage a Dockerized Redis instance. If Redis fails to start, check Docker logs or manually ensure the `redis-test` container is running and accessible. 
\ No newline at end of file diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml index 9cbf0b46..9a710811 100644 --- a/.github/workflows/test_pull_request.yml +++ b/.github/workflows/test_pull_request.yml @@ -11,10 +11,12 @@ on: jobs: publish: - runs-on: ubuntu-latest strategy: matrix: python-version: [3.10, 3.11, 3.12, 3.13, 3.14] + platform: [ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, windows-2022, windows-latest] + + runs-on: ${{ matrix.platform }} steps: - name: Checkout repository diff --git a/DEVELOPING.md b/DEVELOPING.md index 91813384..4019f757 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -42,19 +42,46 @@ src/kv_store_adapter/ ├── __init__.py # Main package exports ├── types.py # Core types and protocols ├── errors.py # Exception hierarchy +├── adapters/ # Protocol adapters +│ ├── __init__.py # Adapter exports +│ ├── pydantic.py # Pydantic model adapter +│ └── raise_on_missing.py # Raise-on-missing adapter ├── stores/ # Store implementations -│ ├── base/ # Abstract base classes -│ ├── redis/ # Redis implementation -│ ├── memory/ # In-memory TLRU cache -│ ├── disk/ # Disk-based storage -│ ├── elasticsearch/ # Elasticsearch implementation -│ ├── simple/ # Simple dict-based stores -│ ├── null/ # Null object pattern store -│ ├── utils/ # Utility functions -│ │ ├── compound_keys.py # Key composition utilities -│ │ ├── managed_entry.py # ManagedEntry dataclass -│ │ └── time_to_live.py # TTL calculation -│ └── wrappers/ # Wrappers implementations +│ ├── __init__.py # Store exports +│ ├── base.py # Abstract base classes +│ ├── redis/ # Redis implementation +│ │ ├── __init__.py # Redis exports +│ │ └── store.py # RedisStore implementation +│ ├── memory/ # In-memory TLRU cache +│ │ ├── __init__.py # Memory exports +│ │ └── store.py # MemoryStore implementation +│ ├── disk/ # Disk-based storage +│ │ ├── __init__.py # Disk exports +│ │ ├── store.py # DiskStore implementation +│ │ └── multi_store.py # Multi-disk store +│ ├── elasticsearch/ # Elasticsearch implementation +│ │ ├── __init__.py # Elasticsearch exports +│ │ ├── store.py # ElasticsearchStore implementation +│ │ └── utils.py # Elasticsearch utilities +│ ├── simple/ # Simple dict-based stores +│ │ ├── __init__.py # Simple store exports +│ │ └── store.py # SimpleStore implementation +│ ├── null/ # Null object pattern store +│ │ ├── __init__.py # Null store exports +│ │ └── store.py # NullStore implementation +│ └── utils/ # Utility functions +│ ├── compound.py # Key composition utilities +│ ├── managed_entry.py # ManagedEntry dataclass +│ └── time_to_live.py # TTL calculation +├── wrappers/ # Wrapper implementations +│ ├── __init__.py # Wrapper exports +│ ├── base.py # Base wrapper class +│ ├── statistics.py # Statistics tracking wrapper +│ ├── clamp_ttl.py # TTL clamping wrapper +│ ├── passthrough_cache.py # Passthrough cache wrapper +│ ├── prefix_collections.py # Collection prefix wrapper +│ ├── prefix_keys.py # Key prefix wrapper +│ └── single_collection.py # Single collection wrapper tests/ ├── conftest.py # Test configuration @@ -71,7 +98,7 @@ All stores implement the `KVStore` interface. 
Here are detailed configuration op High-performance store with native TTL support: ```python -from kv_store_adapter import RedisStore +from kv_store_adapter.stores.redis.store import RedisStore # Connection options store = RedisStore(host="localhost", port=6379, db=0, password="secret") @@ -83,26 +110,26 @@ store = RedisStore(client=existing_redis_client) In-memory TLRU (Time-aware Least Recently Used) cache: ```python -from kv_store_adapter import MemoryStore +from kv_store_adapter.stores.memory.store import MemoryStore -store = MemoryStore(max_entries=1000) # Default: 1000 entries +store = MemoryStore(max_entries_per_collection=1000) # Default: 1000 entries per collection ``` ### Disk Store Persistent disk-based storage using diskcache: ```python -from kv_store_adapter import DiskStore +from kv_store_adapter.stores.disk.store import DiskStore -store = DiskStore(path="/path/to/cache", size_limit=1024*1024*1024) # 1GB -store = DiskStore(cache=existing_cache_instance) +store = DiskStore(directory="/path/to/cache", size_limit=1024*1024*1024) # 1GB +store = DiskStore(disk_cache=existing_cache_instance) ``` ### Elasticsearch Store Full-text searchable storage with Elasticsearch: ```python -from kv_store_adapter import ElasticsearchStore +from kv_store_adapter.stores.elasticsearch.store import ElasticsearchStore store = ElasticsearchStore( url="https://localhost:9200", @@ -112,27 +139,21 @@ store = ElasticsearchStore( store = ElasticsearchStore(client=existing_client, index="custom-index") ``` -### Simple Stores -Dictionary-based stores for testing and development: +### Simple Store +Dictionary-based store for testing and development: ```python -from kv_store_adapter import SimpleStore, SimpleManagedStore, SimpleJSONStore +from kv_store_adapter.stores.simple.store import SimpleStore -# Basic dictionary store +# Basic managed dictionary store store = SimpleStore(max_entries=1000) - -# Managed store with automatic entry wrapping -managed_store = SimpleManagedStore(max_entries=1000) - -# JSON-serialized storage -json_store = SimpleJSONStore(max_entries=1000) ``` ### Null Store Null object pattern store for testing: ```python -from kv_store_adapter import NullStore +from kv_store_adapter.stores.null.store import NullStore store = NullStore() # Accepts all operations but stores nothing ``` @@ -141,24 +162,21 @@ store = NullStore() # Accepts all operations but stores nothing ### Store Types -The project supports two main store architectures: - -1. **Unmanaged Stores (`BaseKVStore`)** - - Handle their own TTL management - - Directly store user values - - Examples: `SimpleStore`, `NullStore` +All stores now inherit from the unified `BaseStore` class which uses `ManagedEntry` objects: -2. **Managed Stores (`BaseManagedKVStore`)** - - Use `ManagedEntry` wrapper objects +1. **Managed Stores (`BaseStore`)** + - Use `ManagedEntry` wrapper objects for consistent TTL and metadata handling - Automatic TTL handling and expiration checking - - Examples: `RedisStore`, `MemoryStore`, `DiskStore`, `ElasticsearchStore` + - Consistent behavior across all store implementations + - Examples: `RedisStore`, `MemoryStore`, `DiskStore`, `ElasticsearchStore`, `SimpleStore`, `NullStore` ### Key Concepts - **Collections**: Logical namespaces for organizing keys - **Compound Keys**: Internal key format `collection::key` for flat stores - **TTL Management**: Automatic expiration handling with timezone-aware timestamps -- **Wrappers**: Wrapper pattern for adding functionality (statistics, logging, etc.) 
+- **Wrappers**: Wrapper pattern for adding functionality (statistics, TTL clamping, prefixing, etc.) +- **Adapters**: Transform data to/from stores (Pydantic models, raise-on-missing behavior, etc.) ## Testing @@ -219,17 +237,17 @@ Tests are organized by store type and use common test cases: ```python # tests/stores/mystore/test_mystore.py import pytest -from kv_store_adapter.stores.mystore import MyStore -from tests.cases import BaseKVStoreTestCase +from kv_store_adapter.stores.mystore.store import MyStore +from tests.stores.conftest import BaseStoreTests -class TestMyStore(BaseKVStoreTestCase): +class TestMyStore(BaseStoreTests): @pytest.fixture async def store(self): """Provide store instance for testing.""" store = MyStore() yield store # Cleanup if needed - await store.clear_collection("test") + await store.destroy() ``` #### Common Test Cases @@ -237,15 +255,11 @@ class TestMyStore(BaseKVStoreTestCase): Use the provided base test cases for consistency: ```python -from tests.cases import BaseKVStoreTestCase, BaseManagedKVStoreTestCase +from tests.stores.conftest import BaseStoreTests -class TestMyUnmanagedStore(BaseKVStoreTestCase): +class TestMyStore(BaseStoreTests): # Inherits all standard KV store tests pass - -class TestMyManagedStore(BaseManagedKVStoreTestCase): - # Inherits managed store specific tests - pass ``` #### Custom Test Methods @@ -253,7 +267,7 @@ class TestMyManagedStore(BaseManagedKVStoreTestCase): Add store-specific tests as needed: ```python -class TestRedisStore(BaseManagedKVStoreTestCase): +class TestRedisStore(BaseStoreTests): async def test_redis_specific_feature(self, store): """Test Redis-specific functionality.""" # Your test implementation @@ -299,53 +313,83 @@ pyright src/kv_store_adapter/stores/redis/store.py ### 1. Choose Base Class -Decide between `BaseKVStore` (unmanaged) or `BaseManagedKVStore` (managed): +All stores inherit from the unified `BaseStore` class, which provides consistent TTL and metadata handling: ```python -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -# or -from kv_store_adapter.stores.base.managed import BaseManagedKVStore +from kv_store_adapter.stores.base import BaseStore ``` +You can also inherit from specialized base classes for additional functionality: +- `BaseEnumerateKeysStore` - Adds key enumeration support +- `BaseEnumerateCollectionsStore` - Adds collection enumeration support +- `BaseDestroyStore` - Adds store destruction support +- `BaseDestroyCollectionStore` - Adds collection destruction support +- `BaseCullStore` - Adds expired entry culling support + ### 2. Create Store Class ```python # src/kv_store_adapter/stores/mystore/store.py -from typing import Any -from kv_store_adapter.stores.base.managed import BaseManagedKVStore +from typing_extensions import override +from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -class MyStore(BaseManagedKVStore): +class MyStore(BaseStore): """My custom key-value store implementation.""" - def __init__(self, **kwargs): + def __init__(self, *, default_collection: str | None = None, **kwargs): """Initialize store with custom parameters.""" - super().__init__() + super().__init__(default_collection=default_collection) # Your initialization code - async def setup(self) -> None: + async def _setup(self) -> None: """Initialize store (called once before first use).""" # Setup code (connect to database, etc.) 
pass - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: - """Retrieve a managed entry by key from the specified collection.""" + @override + async def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None: + """Retrieve a managed entry by key from the specified collection. + + Returns: + ManagedEntry if found, None if not found or expired. + """ # Your implementation pass - async def put_entry( + @override + async def _put_managed_entry( self, + *, collection: str, key: str, - cache_entry: ManagedEntry, - *, - ttl: float | None = None + managed_entry: ManagedEntry, ) -> None: - """Store a managed entry by key in the specified collection.""" + """Store a managed entry by key in the specified collection. + + Args: + collection: The collection to store in. + key: The key to store under. + managed_entry: The ManagedEntry containing value and metadata. + """ + # Your implementation + pass + + @override + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + """Delete a managed entry by key from the specified collection. + + Args: + key: The key to delete. + collection: The collection to delete from. + + Returns: + True if the key was deleted, False if it didn't exist. + """ # Your implementation pass - # Implement other required methods... + # Implement other optional methods as needed... ``` ### 3. Create Package Structure @@ -368,10 +412,10 @@ __all__ = ["MyStore"] ```python # tests/stores/mystore/test_mystore.py import pytest -from kv_store_adapter.stores.mystore import MyStore -from tests.cases import BaseManagedKVStoreTestCase +from kv_store_adapter.stores.mystore.store import MyStore +from tests.stores.conftest import BaseStoreTests -class TestMyStore(BaseManagedKVStoreTestCase): +class TestMyStore(BaseStoreTests): @pytest.fixture async def store(self): store = MyStore() diff --git a/README.md b/README.md index 20ca9e2b..5805deb1 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ A pluggable, async-only key-value store interface for modern Python applications - **Multiple backends**: Redis, Elasticsearch, In-memory, Disk, and more - **TTL support**: Automatic expiration handling across all store types - **Type-safe**: Full type hints with Protocol-based interfaces -- **Adapters**: Pydantic, Single Collection, and more +- **Adapters**: Pydantic model support, raise-on-missing behavior, and more - **Wrappers**: Statistics tracking and extensible wrapper system - **Collection-based**: Organize keys into logical collections/namespaces - **Pluggable architecture**: Easy to add custom store implementations @@ -23,9 +23,13 @@ pip install kv-store-adapter[redis] pip install kv-store-adapter[elasticsearch] pip install kv-store-adapter[memory] pip install kv-store-adapter[disk] +pip install kv-store-adapter[memcached] # With all backends -pip install kv-store-adapter[memory,disk,redis,elasticsearch] +pip install kv-store-adapter[memory,disk,redis,elasticsearch,memcached] + +# With Pydantic adapter support +pip install kv-store-adapter[pydantic] ``` # The KV Store Protocol @@ -36,20 +40,20 @@ The simplest way to get started is to use the `KVStore` interface, which allows import asyncio from kv_store_adapter.types import KVStore -from kv_store_adapter.stores.redis import RedisStore -from kv_store_adapter.stores.memory import MemoryStore +from kv_store_adapter.stores.redis.store import RedisStore +from kv_store_adapter.stores.memory.store import MemoryStore async def example(): # In-memory store memory_store = 
MemoryStore() - await memory_store.put(collection="users", key="456", value={"name": "Bob"}, ttl=3600) # TTL is supported, but optional! - bob = await memory_store.get(collection="users", key="456") - await memory_store.delete(collection="users", key="456") + await memory_store.put(key="456", value={"name": "Bob"}, collection="users", ttl=3600) # TTL is supported, but optional! + bob = await memory_store.get(key="456", collection="users") + await memory_store.delete(key="456", collection="users") redis_store = RedisStore(url="redis://localhost:6379") - await redis_store.put(collection="products", key="123", value={"name": "Alice"}) - alice = await redis_store.get(collection="products", key="123") - await redis_store.delete(collection="products", key="123") + await redis_store.put(key="123", value={"name": "Alice"}, collection="products") + alice = await redis_store.get(key="123", collection="products") + await redis_store.delete(key="123", collection="products") asyncio.run(example()) ``` @@ -62,8 +66,9 @@ Choose the store that best fits your needs. All stores implement the same `KVSto - **RedisStore**: `RedisStore(url="redis://localhost:6379/0")` - **ElasticsearchStore**: `ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key")` -- **DiskStore**: A sqlite-based store for local persistence `DiskStore(path="./cache")` -- **MemoryStore**: A fast in-memory cache `MemoryStore()` +- **MemcachedStore**: `MemcachedStore(host="localhost", port=11211")` +- **DiskStore**: A disk-based store using diskcache `DiskStore(directory="./cache")`. Also see `MultiDiskStore` for a store that creates one disk store per collection. +- **MemoryStore**: A fast in-memory TLRU cache `MemoryStore()` ### Development/Testing Stores @@ -75,16 +80,16 @@ For detailed configuration options and all available stores, see [DEVELOPING.md] ## Atomicity / Consistency We strive to support atomicity and consistency across all stores and operations in the KVStore. That being said, -there are operations available via the BaseKVStore class which are management operations like listing keys, listing collections, clearing collections, culling expired entries, etc. These operations may not be atomic, may be eventually consistent across stores, or may have other limitations (like limited to returning a certain number of keys). +there are operations available via the BaseStore class which are management operations like listing keys, listing collections, clearing collections, culling expired entries, etc. These operations may not be atomic, may be eventually consistent across stores, or may have other limitations (like limited to returning a certain number of keys). ## Protocol Adapters -The library provides an adapter pattern simplifying the use of the protocol/store. Adapters themselves do not implement the `KVStore` interface and cannot be nested. Adapters can be used with anything that implements the `KVStore` interface but do not comply with the full `BaseKVStore` interface and thus lack management operations like listing keys, listing collections, clearing collections, culling expired entries, etc. +The library provides an adapter pattern simplifying the use of the protocol/store. Adapters themselves do not implement the `KVStore` interface and cannot be nested. As a result, Adapters are the "outer" layer of the store. Adapters are primarily for improved type-safe operations. The following adapters are available: -- **PydanticAdapter**: Converts data to and from a store using Pydantic models. 
-- **SingleCollectionAdapter**: Provides KV operations that do not require a collection parameter. +- **PydanticAdapter**: Type-safe storage and retrieval using Pydantic models with automatic serialization/deserialization. +- **RaiseOnMissingAdapter**: Provides optional raise-on-missing behavior for get, get_many, ttl, and ttl_many operations. For example, the PydanticAdapter can be used to provide type-safe interactions with a store: @@ -92,7 +97,7 @@ For example, the PydanticAdapter can be used to provide type-safe interactions w from pydantic import BaseModel from kv_store_adapter.adapters.pydantic import PydanticAdapter -from kv_store_adapter.stores.memory import MemoryStore +from kv_store_adapter.stores.memory.store import MemoryStore class User(BaseModel): name: str @@ -100,19 +105,18 @@ class User(BaseModel): memory_store = MemoryStore() -user_adapter = PydanticAdapter(store=memory_store, pydantic_model=User) +user_adapter = PydanticAdapter(kv_store=memory_store, pydantic_model=User) async def example(): - await user_adapter.put(collection="users", key="123", value=User(name="John Doe", email="john.doe@example.com")) - user: User | None = await user_adapter.get(collection="users", key="123") + await user_adapter.put(key="123", value=User(name="John Doe", email="john.doe@example.com"), collection="users") + user: User | None = await user_adapter.get(key="123", collection="users") asyncio.run(example()) ``` ## Wrappers -The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the `KVStore` interface meaning that you can wrap any -store with any wrapper, and chain wrappers together as needed. +The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the `KVStore` interface meaning that you can wrap any store with any wrapper, and chain wrappers together as needed. ### Statistics Tracking @@ -121,17 +125,17 @@ Track operation statistics for any store: ```python import asyncio -from kv_store_adapter.stores.wrappers.statistics import StatisticsWrapper -from kv_store_adapter.stores.memory import MemoryStore +from kv_store_adapter.wrappers.statistics import StatisticsWrapper +from kv_store_adapter.stores.memory.store import MemoryStore memory_store = MemoryStore() store = StatisticsWrapper(store=memory_store) async def example(): # Use store normally - statistics are tracked automatically - await store.put("users", "123", {"name": "Alice"}) - await store.get("users", "123") - await store.get("users", "456") # Cache miss + await store.put(key="123", value={"name": "Alice"}, collection="users") + await store.get(key="123", collection="users") + await store.get(key="456", collection="users") # Cache miss # Access statistics stats = store.statistics @@ -145,10 +149,11 @@ asyncio.run(example()) Other wrappers that are available include: +- **ClampTTLWrapper**: Wraps a store and clamps the TTL to a given range. - **TTLClampWrapper**: Wraps a store and clamps the TTL to a given range. -- **PassthroughWrapper**: Wraps two stores, using the primary store as a write-through cache for the secondary store. For example, you could use a RedisStore as a distributed primary store and a MemoryStore as the cache store. -- **PrefixCollectionWrapper**: Wraps a store and prefixes all collections with a given prefix. -- **PrefixKeyWrapper**: Wraps a store and prefixes all keys with a given prefix. +- **PassthroughCacheWrapper**: Wraps two stores to provide a read-through cache. 
Reads go to the cache store first and fall back to the primary store, populating the cache with the primary's TTL; writes evict from the cache and then write to the primary. For example, use a RedisStore as the primary and a MemoryStore as the cache store. +- **PrefixCollectionsWrapper**: Wraps a store and prefixes all collections with a given prefix. +- **PrefixKeysWrapper**: Wraps a store and prefixes all keys with a given prefix. - **SingleCollectionWrapper**: Wraps a store and forces all requests into a single collection. See [DEVELOPING.md](DEVELOPING.md) for more information on how to create your own wrappers. @@ -161,8 +166,8 @@ Imagine you have a service where you want to cache 3 pydantic models in a single import asyncio from kv_store_adapter.adapters.pydantic import PydanticAdapter -from kv_store_adapter.stores.wrappers.single_collection import SingleCollectionWrapper -from kv_store_adapter.stores.memory import MemoryStore +from kv_store_adapter.wrappers.single_collection import SingleCollectionWrapper +from kv_store_adapter.stores.memory.store import MemoryStore from pydantic import BaseModel class User(BaseModel): @@ -171,21 +176,23 @@ class User(BaseModel): store = MemoryStore() -users_store = PydanticAdapter(SingleCollectionWrapper(store, "users"), User) -products_store = PydanticAdapter(SingleCollectionWrapper(store, "products"), Product) -orders_store = PydanticAdapter(SingleCollectionWrapper(store, "orders"), Order) +users_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="users", default_collection="default"), pydantic_model=User) +products_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="products", default_collection="default"), pydantic_model=Product) +orders_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="orders", default_collection="default"), pydantic_model=Order) async def example(): new_user: User = User(name="John Doe", email="john.doe@example.com") - await users_store.put(collection="allowed_users", key="123", value=new_user) + await users_store.put(key="123", value=new_user, collection="allowed_users") - john_doe: User | None = await users_store.get(collection="allowed_users", key="123") + john_doe: User | None = await users_store.get(key="123", collection="allowed_users") asyncio.run(example()) ``` The SingleCollectionWrapper will result in writes to the `allowed_users` collection being redirected to the `users` collection but the keys will be prefixed with the original collection `allowed_users__` name. So the key `123` will be stored as `allowed_users__123` in the `users` collection. +Note: The above example shows the conceptual usage, but you would need to define `Product` and `Order` models as well for the complete example to work. + ## Development See [DEVELOPING.md](DEVELOPING.md) for development setup, testing, and contribution guidelines. 
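A note on composing these layers: because wrappers implement the `KVStore` interface and adapters accept any `KVStore`, a wrapper can sit between a store and an adapter. The sketch below stacks the statistics wrapper under a Pydantic adapter; import paths follow the examples above, and the exact constructor signatures should be treated as assumptions rather than a definitive reference.

```python
import asyncio

from pydantic import BaseModel

from kv_store_adapter.adapters.pydantic import PydanticAdapter
from kv_store_adapter.stores.memory.store import MemoryStore
from kv_store_adapter.wrappers.statistics import StatisticsWrapper


class User(BaseModel):
    name: str
    email: str


# Layering: store -> wrapper(s) -> adapter (adapters are always the outer layer).
wrapped_store = StatisticsWrapper(store=MemoryStore())
users = PydanticAdapter(kv_store=wrapped_store, pydantic_model=User)


async def example() -> None:
    await users.put(key="123", value=User(name="John Doe", email="john.doe@example.com"), collection="users")
    john: User | None = await users.get(key="123", collection="users")

    # Statistics are tracked on the wrapper, not on the adapter.
    print(john, wrapped_store.statistics)


asyncio.run(example())
```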
diff --git a/pyproject.toml b/pyproject.toml index 92b4117d..6bb4e33d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "kv-store-adapter" -version = "0.1.2" +version = "0.2.0" description = "A pluggable interface for KV Stores" readme = "README.md" requires-python = ">=3.10" @@ -24,8 +24,9 @@ build-backend = "hatchling.build" [project.optional-dependencies] memory = ["cachetools>=6.0.0"] -disk = ["diskcache>=5.6.0"] +disk = ["diskcache>=5.6.0", "pathvalidate>=3.3.1",] redis = ["redis>=6.0.0"] +memcached = ["aiomcache>=0.8.0"] elasticsearch = ["elasticsearch>=9.0.0", "aiohttp>=3.12"] pydantic = ["pydantic>=2.11.9"] @@ -40,7 +41,7 @@ env_files = [".env"] [dependency-groups] dev = [ - "kv-store-adapter[memory,disk,redis,elasticsearch]", + "kv-store-adapter[memory,disk,redis,elasticsearch,memcached]", "kv-store-adapter[pydantic]", "pytest", "pytest-mock", diff --git a/src/kv_store_adapter/__init__.py b/src/kv_store_adapter/__init__.py index 8b137891..084ec8ae 100644 --- a/src/kv_store_adapter/__init__.py +++ b/src/kv_store_adapter/__init__.py @@ -1 +1,3 @@ +from .types import KVStore +__all__ = ["KVStore"] diff --git a/src/kv_store_adapter/adapters/__init__.py b/src/kv_store_adapter/adapters/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/src/kv_store_adapter/adapters/__init__.py @@ -0,0 +1 @@ + diff --git a/src/kv_store_adapter/adapters/pydantic.py b/src/kv_store_adapter/adapters/pydantic.py index 5794cc9b..6651f352 100644 --- a/src/kv_store_adapter/adapters/pydantic.py +++ b/src/kv_store_adapter/adapters/pydantic.py @@ -1,3 +1,4 @@ +from collections.abc import Sequence from typing import Any, Generic, TypeVar from pydantic import BaseModel, ValidationError @@ -12,31 +13,85 @@ class PydanticAdapter(Generic[T]): """Adapter around a KVStore-compliant Store that allows type-safe persistence of Pydantic models.""" - def __init__(self, store_protocol: KVStore, pydantic_model: type[T]) -> None: - self.store_protocol: KVStore = store_protocol + def __init__(self, kv_store: KVStore, pydantic_model: type[T]) -> None: + self.kv_store: KVStore = kv_store self.pydantic_model: type[T] = pydantic_model - async def get(self, collection: str, key: str) -> T | None: - if value := await self.store_protocol.get(collection=collection, key=key): - try: - return self.pydantic_model.model_validate(obj=value) - except ValidationError as e: - msg = f"Invalid Pydantic model: {e}" - raise DeserializationError(msg) from e - - return None + def _validate_model(self, value: dict[str, Any]) -> T: + try: + return self.pydantic_model.model_validate(obj=value) + except ValidationError as e: + msg = f"Invalid Pydantic model: {e}" + raise DeserializationError(msg) from e - async def put(self, collection: str, key: str, value: T, *, ttl: float | None = None) -> None: + def _serialize_model(self, value: T) -> dict[str, Any]: try: - value_dict: dict[str, Any] = value.model_dump(mode="json") + return value.model_dump(mode="json") except PydanticSerializationError as e: msg = f"Invalid Pydantic model: {e}" raise SerializationError(msg) from e - await self.store_protocol.put(collection=collection, key=key, value=value_dict, ttl=ttl) + async def get(self, key: str, *, collection: str | None = None) -> T | None: + """Get and validate a model by key. + + Returns the parsed model instance, or None if not present. + Raises DeserializationError if the stored data cannot be validated as the model. 
+ """ + if value := await self.kv_store.get(key=key, collection=collection): + return self._validate_model(value=value) + + return None + + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[T | None]: + """Batch get and validate models by keys, preserving order. + + Each element is either a parsed model instance or None if missing. + """ + values: list[dict[str, Any] | None] = await self.kv_store.get_many(keys=keys, collection=collection) + + return [self._validate_model(value=value) if value else None for value in values] + + async def put(self, key: str, value: T, *, collection: str | None = None, ttl: float | None = None) -> None: + """Serialize and store a model. + + Propagates SerializationError if the model cannot be serialized. + """ + value_dict: dict[str, Any] = self._serialize_model(value=value) + + await self.kv_store.put(key=key, value=value_dict, collection=collection, ttl=ttl) + + async def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: + """Serialize and store multiple models, preserving order alignment with keys.""" + value_dicts: list[dict[str, Any]] = [self._serialize_model(value=value) for value in values] + + await self.kv_store.put_many(keys=keys, values=value_dicts, collection=collection, ttl=ttl) + + async def delete(self, key: str, *, collection: str | None = None) -> bool: + """Delete a model by key. Returns True if a value was deleted, else False.""" + return await self.kv_store.delete(key=key, collection=collection) + + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + """Delete multiple models by key. Returns the count of deleted entries.""" + return await self.kv_store.delete_many(keys=keys, collection=collection) + + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | None, float | None]: + """Get a model and its TTL seconds if present. + + Returns (model, ttl_seconds) or (None, None) if missing. + """ + entry: dict[str, Any] | None + ttl_info: float | None + + entry, ttl_info = await self.kv_store.ttl(key=key, collection=collection) + + if entry is not None: + model_validate: T = self._validate_model(value=entry) + return (model_validate, ttl_info) + + return (None, None) - async def delete(self, collection: str, key: str) -> bool: - return await self.store_protocol.delete(collection=collection, key=key) + async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: + """Batch get models with TTLs. 
Each element is (model|None, ttl_seconds|None)."""
+        entries: list[tuple[dict[str, Any] | None, float | None]] = await self.kv_store.ttl_many(keys=keys, collection=collection)

-    async def exists(self, collection: str, key: str) -> bool:
-        return await self.store_protocol.exists(collection=collection, key=key)
+        return [(self._validate_model(value=entry) if entry else None, ttl_info) for entry, ttl_info in entries]
diff --git a/src/kv_store_adapter/adapters/raise_on_missing.py b/src/kv_store_adapter/adapters/raise_on_missing.py
new file mode 100644
index 00000000..82ac922f
--- /dev/null
+++ b/src/kv_store_adapter/adapters/raise_on_missing.py
@@ -0,0 +1,190 @@
+from collections.abc import Sequence
+from typing import Any, Literal, overload
+
+from kv_store_adapter.errors import MissingKeyError
+from kv_store_adapter.types import KVStore
+
+
+class RaiseOnMissingAdapter:
+    """Adapter around a KVStore that raises on missing values for get/get_many/ttl/ttl_many.
+
+    When `raise_on_missing=True`, methods raise `MissingKeyError` instead of returning None.
+    """
+
+    def __init__(self, kv_store: KVStore) -> None:
+        self.kv_store: KVStore = kv_store
+
+    @overload
+    async def get(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[False] = False) -> dict[str, Any] | None: ...
+
+    @overload
+    async def get(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[True]) -> dict[str, Any]: ...
+
+    async def get(
+        self,
+        key: str,
+        *,
+        collection: str | None = None,
+        raise_on_missing: bool = False,
+    ) -> dict[str, Any] | None:
+        """Retrieve a value by key from the specified collection.
+
+        Args:
+            key: The key to retrieve the value from.
+            collection: The collection to retrieve the value from. If no collection is provided, it will use the default collection.
+            raise_on_missing: Whether to raise a MissingKeyError if the key is not found.
+
+        Returns:
+            The value associated with the key. If the key is not found, None is returned, unless
+            raise_on_missing is True, in which case MissingKeyError is raised.
+        """
+        result = await self.kv_store.get(key=key, collection=collection)
+
+        if result is not None:
+            return result
+
+        if raise_on_missing:
+            raise MissingKeyError(operation="get", collection=collection, key=key)
+
+        return None
+
+    @overload
+    async def get_many(
+        self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False
+    ) -> list[dict[str, Any] | None]: ...
+
+    @overload
+    async def get_many(
+        self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True]
+    ) -> list[dict[str, Any]]: ...
+
+    async def get_many(
+        self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False
+    ) -> list[dict[str, Any]] | list[dict[str, Any] | None]:
+        """Retrieve multiple values by key from the specified collection.
+
+        Args:
+            keys: The keys to retrieve the values from.
+            collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection.
+            raise_on_missing: Whether to raise a MissingKeyError if any key is not found.
+
+        Returns:
+            A list of values aligned with keys, with None for each missing key, unless
+            raise_on_missing is True, in which case MissingKeyError is raised for the first missing key.
+        """
+        results: list[dict[str, Any] | None] = await self.kv_store.get_many(collection=collection, keys=keys)
+
+        for i, key in enumerate(keys):
+            if results[i] is None and raise_on_missing:
+                raise MissingKeyError(operation="get_many", collection=collection, key=key)
+
+        return results
+
+    @overload
+    async def ttl(
+        self, key: str, *, collection: str | None = None, raise_on_missing: Literal[False] = False
+    ) -> tuple[dict[str, Any] | None, float | None]: ...
+ + @overload + async def ttl( + self, key: str, *, collection: str | None = None, raise_on_missing: Literal[True] + ) -> tuple[dict[str, Any], float | None]: ... + + async def ttl( + self, key: str, *, collection: str | None = None, raise_on_missing: bool = False + ) -> tuple[dict[str, Any] | None, float | None]: + """Retrieve the value and TTL information for a key-value pair from the specified collection. + + Args: + key: The key to retrieve the TTL information from. + collection: The collection to retrieve the TTL information from. If no collection is provided, + it will use the default collection. + + Returns: + The value and TTL information for the key. If the key is not found, (None, None) will be returned. + """ + value, ttl = await self.kv_store.ttl(key=key, collection=collection) + + if value is not None: + return value, ttl + + if raise_on_missing: + raise MissingKeyError(operation="ttl", collection=collection, key=key) + + return (None, None) + + @overload + async def ttl_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + ) -> list[tuple[dict[str, Any] | None, float | None]]: ... + + @overload + async def ttl_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True] + ) -> list[tuple[dict[str, Any], float | None]]: ... + + async def ttl_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + ) -> list[tuple[dict[str, Any], float | None]] | list[tuple[dict[str, Any] | None, float | None]]: + """Retrieve multiple values and TTL information by key from the specified collection. + + Args: + keys: The keys to retrieve the values and TTL information from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + """ + results: list[tuple[dict[str, Any] | None, float | None]] = await self.kv_store.ttl_many(collection=collection, keys=keys) + + for i, key in enumerate(keys): + if results[i][0] is None and raise_on_missing: + raise MissingKeyError(operation="ttl_many", collection=collection, key=key) + + return results + + async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + """Store a key-value pair in the specified collection with optional TTL. + + Args: + key: The key to store the value in. + value: The value to store. + collection: The collection to store the value in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pair. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ + return await self.kv_store.put(key=key, value=value, collection=collection, ttl=ttl) + + async def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + """Store multiple key-value pairs in the specified collection. + + Args: + keys: The keys to store the values in. + values: The values to store. + collection: The collection to store keys in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pairs. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. 
+ """ + return await self.kv_store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + async def delete(self, key: str, *, collection: str | None = None) -> bool: + """Delete a key-value pair from the specified collection. + + Args: + key: The key to delete the value from. + collection: The collection to delete the value from. If no collection is provided, it will use the default collection. + """ + return await self.kv_store.delete(key=key, collection=collection) + + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + """Delete multiple key-value pairs from the specified collection. + + Args: + keys: The keys to delete the values from. + collection: The collection to delete keys from. If no collection is provided, it will use the default collection. + + Returns: + The number of keys deleted. + """ + return await self.kv_store.delete_many(keys=keys, collection=collection) diff --git a/src/kv_store_adapter/adapters/single_collection.py b/src/kv_store_adapter/adapters/single_collection.py deleted file mode 100644 index 66e10e1e..00000000 --- a/src/kv_store_adapter/adapters/single_collection.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -from kv_store_adapter.types import KVStore - - -class SingleCollectionAdapter: - """Adapter around a KVStore-compliant Store that only allows one collection.""" - - def __init__(self, store: KVStore, collection: str) -> None: - self.store: KVStore = store - self.collection: str = collection - - async def get(self, key: str) -> dict[str, Any] | None: - return await self.store.get(collection=self.collection, key=key) - - async def put(self, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - await self.store.put(collection=self.collection, key=key, value=value, ttl=ttl) - - async def delete(self, key: str) -> bool: - return await self.store.delete(collection=self.collection, key=key) - - async def exists(self, key: str) -> bool: - return await self.store.exists(collection=self.collection, key=key) diff --git a/src/kv_store_adapter/errors.py b/src/kv_store_adapter/errors.py index 67f73af1..da5425c4 100644 --- a/src/kv_store_adapter/errors.py +++ b/src/kv_store_adapter/errors.py @@ -22,6 +22,16 @@ def __init__(self, message: str | None = None, extra_info: ExtraInfoType | None super().__init__(": ".join(message_parts)) +class MissingKeyError(KVStoreAdapterError): + """Raised when a key is missing from the store.""" + + def __init__(self, operation: str, collection: str | None = None, key: str | None = None): + super().__init__( + message="A key was requested that was required but not found in the store.", + extra_info={"operation": operation, "collection": collection or "default", "key": key}, + ) + + class SetupError(KVStoreAdapterError): """Raised when a store setup fails.""" diff --git a/src/kv_store_adapter/stores/base.py b/src/kv_store_adapter/stores/base.py new file mode 100644 index 00000000..368051ab --- /dev/null +++ b/src/kv_store_adapter/stores/base.py @@ -0,0 +1,357 @@ +""" +Base abstract class for managed key-value store implementations. 
+""" + +import asyncio +from abc import ABC, abstractmethod +from asyncio.locks import Lock +from collections import defaultdict +from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from kv_store_adapter.errors import SetupError +from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.stores.utils.time_to_live import now +from kv_store_adapter.types import ( + CullProtocol, + DestroyCollectionProtocol, + DestroyStoreProtocol, + EnumerateCollectionsProtocol, + EnumerateKeysProtocol, + KeyValueProtocol, +) + +DEFAULT_COLLECTION_NAME = "default_collection" + + +class BaseStore(KeyValueProtocol, ABC): + """An opinionated Abstract base class for managed key-value stores using ManagedEntry objects. + + This class implements all of the methods required for compliance with the KVStore protocol but + requires subclasses to implement the _get_managed_entry, _put_managed_entry, and _delete_managed_entry methods. + + Subclasses can also override the _get_managed_entries, _put_managed_entries, and _delete_managed_entries methods if desired. + + Subclasses can implement the _setup, which will be called once before the first use of the store, and _setup_collection, which will + be called once per collection before the first use of a collection. + """ + + _setup_complete: bool + _setup_lock: asyncio.Lock + + _setup_collection_locks: defaultdict[str, Lock] + _setup_collection_complete: defaultdict[str, bool] + + default_collection: str + + def __init__(self, *, default_collection: str | None = None) -> None: + """Initialize the managed key-value store. + + Args: + default_collection: The default collection to use if no collection is provided. + Defaults to "default_collection". 
+ """ + + self._setup_complete = False + self._setup_lock = asyncio.Lock() + self._setup_collection_locks = defaultdict[str, asyncio.Lock](asyncio.Lock) + self._setup_collection_complete = defaultdict[str, bool](bool) + + self.default_collection = default_collection or DEFAULT_COLLECTION_NAME + + super().__init__() + + async def _setup(self) -> None: + """Initialize the store (called once before first use).""" + + async def _setup_collection(self, *, collection: str) -> None: # pyright: ignore[reportUnusedParameter] + """Initialize the collection (called once before first use of the collection).""" + + async def setup(self) -> None: + if not self._setup_complete: + async with self._setup_lock: + if not self._setup_complete: + try: + await self._setup() + except Exception as e: + raise SetupError(message=f"Failed to setup store: {e}", extra_info={"store": self.__class__.__name__}) from e + self._setup_complete = True + + async def setup_collection(self, *, collection: str) -> None: + await self.setup() + + if not self._setup_collection_complete[collection]: + async with self._setup_collection_locks[collection]: + if not self._setup_collection_complete[collection]: + try: + await self._setup_collection(collection=collection) + except Exception as e: + raise SetupError(message=f"Failed to setup collection: {e}", extra_info={"collection": collection}) from e + self._setup_collection_complete[collection] = True + + @abstractmethod + async def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None: + """Retrieve a cache entry by key from the specified collection.""" + + async def _get_managed_entries(self, *, collection: str, keys: Sequence[str]) -> list[ManagedEntry | None]: + """Retrieve multiple managed entries by key from the specified collection.""" + + return [await self._get_managed_entry(collection=collection, key=key) for key in keys] + + @override + async def get( + self, + key: str, + *, + collection: str | None = None, + ) -> dict[str, Any] | None: + """Retrieve a value by key from the specified collection. + + Args: + collection: The collection to retrieve the value from. If no collection is provided, it will use the default collection. + key: The key to retrieve the value from. + + Returns: + The value associated with the key, or None if not found or expired. 
+ """ + collection = collection or self.default_collection + await self.setup_collection(collection=collection) + + managed_entry: ManagedEntry | None = await self._get_managed_entry(collection=collection, key=key) + + if not managed_entry: + return None + + if managed_entry.is_expired: + return None + + return managed_entry.value + + @override + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + collection = collection or self.default_collection + await self.setup_collection(collection=collection) + + entries = await self._get_managed_entries(keys=keys, collection=collection) + return [entry.value if entry and not entry.is_expired else None for entry in entries] + + @override + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + collection = collection or self.default_collection + await self.setup_collection(collection=collection) + + managed_entry: ManagedEntry | None = await self._get_managed_entry(collection=collection, key=key) + + if not managed_entry or managed_entry.is_expired: + return (None, None) + + return (managed_entry.value, managed_entry.ttl) + + @override + async def ttl_many( + self, + keys: Sequence[str], + *, + collection: str | None = None, + ) -> list[tuple[dict[str, Any] | None, float | None]]: + """Retrieve multiple values and TTLs by key from the specified collection. + + Returns a list of tuples of the form (value, ttl_seconds). Missing or expired + entries are represented as (None, None). + """ + collection = collection or self.default_collection + await self.setup_collection(collection=collection) + + entries = await self._get_managed_entries(keys=keys, collection=collection) + return [(entry.value, entry.ttl) if entry and not entry.is_expired else (None, None) for entry in entries] + + @abstractmethod + async def _put_managed_entry(self, *, collection: str, key: str, managed_entry: ManagedEntry) -> None: + """Store a managed entry by key in the specified collection.""" + ... 
+
+    async def _put_managed_entries(self, *, collection: str, keys: Sequence[str], managed_entries: Sequence[ManagedEntry]) -> None:
+        """Store multiple managed entries by key in the specified collection."""
+
+        for key, managed_entry in zip(keys, managed_entries, strict=True):
+            await self._put_managed_entry(
+                collection=collection,
+                key=key,
+                managed_entry=managed_entry,
+            )
+
+    @override
+    async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        """Store a key-value pair in the specified collection with optional TTL."""
+        collection = collection or self.default_collection
+        await self.setup_collection(collection=collection)
+
+        managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=ttl, created_at=now())
+
+        await self._put_managed_entry(
+            collection=collection,
+            key=key,
+            managed_entry=managed_entry,
+        )
+
+    @override
+    async def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        """Store multiple key-value pairs in the specified collection."""
+        if len(keys) != len(values):
+            msg = "put_many called but a different number of keys and values were provided"
+            raise ValueError(msg) from None
+
+        if ttl and isinstance(ttl, Sequence) and len(ttl) != len(keys):
+            msg = "put_many called but a different number of keys and ttl values were provided"
+            raise ValueError(msg) from None
+
+        collection = collection or self.default_collection
+        await self.setup_collection(collection=collection)
+
+        ttl_for_entries: list[float | None] = []
+
+        if ttl is None:
+            ttl_for_entries = [None for _ in range(len(keys))]
+
+        if isinstance(ttl, Sequence):
+            ttl_for_entries.extend(ttl)
+
+        # Accept ints as well as floats: an int TTL satisfies the `float | None` hint,
+        # but `isinstance(ttl, float)` alone would leave ttl_for_entries empty.
+        if isinstance(ttl, (int, float)):
+            ttl_for_entries.extend([ttl for _ in range(len(keys))])
+
+        managed_entries: list[ManagedEntry] = []
+
+        for value, value_ttl in zip(values, ttl_for_entries, strict=True):
+            managed_entries.append(ManagedEntry(value=value, ttl=value_ttl, created_at=now()))
+
+        await self._put_managed_entries(collection=collection, keys=keys, managed_entries=managed_entries)
+
+    @abstractmethod
+    async def _delete_managed_entry(self, *, key: str, collection: str) -> bool:
+        """Delete a managed entry by key from the specified collection."""
+        ...
+
+    async def _delete_managed_entries(self, *, keys: Sequence[str], collection: str) -> int:
+        """Delete multiple managed entries by key from the specified collection."""
+
+        deleted_count: int = 0
+
+        for key in keys:
+            if await self._delete_managed_entry(key=key, collection=collection):
+                deleted_count += 1
+
+        return deleted_count
+
+    @override
+    async def delete(self, key: str, *, collection: str | None = None) -> bool:
+        collection = collection or self.default_collection
+        await self.setup_collection(collection=collection)
+
+        return await self._delete_managed_entry(key=key, collection=collection)
+
+    @override
+    async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int:
+        """Delete multiple managed entries by key from the specified collection."""
+        collection = collection or self.default_collection
+        await self.setup_collection(collection=collection)
+
+        return await self._delete_managed_entries(keys=keys, collection=collection)
+
+
+class BaseEnumerateKeysStore(BaseStore, EnumerateKeysProtocol, ABC):
+    """An abstract base class for key-value stores that support key enumeration.
+ + Subclasses must implement the get_collection_keys and get_collection_names methods. + """ + + @override + async def keys(self, collection: str | None = None, *, limit: int | None = None) -> list[str]: + """List all keys in the specified collection.""" + + collection = collection or self.default_collection + await self.setup_collection(collection=collection) + + return await self._get_collection_keys(collection=collection, limit=limit) + + @abstractmethod + async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + """List all keys in the specified collection.""" + + +class BaseEnumerateCollectionsStore(BaseStore, EnumerateCollectionsProtocol, ABC): + @override + async def collections(self, *, limit: int | None = None) -> list[str]: + """List all available collection names (may include empty collections).""" + await self.setup() + + return await self._get_collection_names(limit=limit) + + @abstractmethod + async def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + """List all available collection names (may include empty collections).""" + + +class BaseDestroyStore(BaseStore, DestroyStoreProtocol, ABC): + """An abstract base class for destroyable stores. + + Subclasses must implement the delete_store method. + """ + + @override + async def destroy(self) -> bool: + """Destroy the store.""" + await self.setup() + + return await self._delete_store() + + @abstractmethod + async def _delete_store(self) -> bool: + """Delete the store.""" + ... + + +class BaseDestroyCollectionStore(BaseStore, DestroyCollectionProtocol, ABC): + """An abstract base class for destroyable collections. + + Subclasses must implement the delete_collection method. + """ + + @override + async def destroy_collection(self, collection: str) -> bool: + """Destroy the collection.""" + await self.setup() + + return await self._delete_collection(collection=collection) + + @abstractmethod + async def _delete_collection(self, *, collection: str) -> bool: + """Delete the collection.""" + ... + + +class BaseCullStore(BaseStore, CullProtocol, ABC): + """An abstract base class for cullable stores. + + Subclasses must implement the cull method. + """ + + @override + async def cull(self) -> None: + """Cull the store.""" + await self.setup() + + return await self._cull() + + @abstractmethod + async def _cull(self) -> None: + """Cull the store.""" + ... diff --git a/src/kv_store_adapter/stores/base/__init__.py b/src/kv_store_adapter/stores/base/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/kv_store_adapter/stores/base/managed.py b/src/kv_store_adapter/stores/base/managed.py deleted file mode 100644 index 16bf3f4c..00000000 --- a/src/kv_store_adapter/stores/base/managed.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -Base abstract class for managed key-value store implementations. 
-""" - -import asyncio -from abc import ABC, abstractmethod -from asyncio.locks import Lock -from collections import defaultdict -from datetime import datetime, timezone -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.errors import SetupError -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -from kv_store_adapter.stores.utils.time_to_live import calculate_expires_at -from kv_store_adapter.types import TTLInfo - - -class BaseManagedKVStore(BaseKVStore, ABC): - """An opinionated Abstract base class for managed key-value stores using ManagedEntry objects. - - This class handles TTL management, expiration checking, and entry wrapping automatically. - Implementations only need to handle storage and retrieval of ManagedEntry objects and culling of expired entries. - """ - - _setup_complete: bool - _setup_lock: asyncio.Lock - - _setup_collection_locks: defaultdict[str, Lock] - _setup_collection_complete: defaultdict[str, bool] - - def __init__(self) -> None: - self._setup_complete = False - self._setup_lock = asyncio.Lock() - self._setup_collection_locks = defaultdict[str, asyncio.Lock](asyncio.Lock) - self._setup_collection_complete = defaultdict[str, bool](bool) - - async def setup(self) -> None: - """Initialize the store (called once before first use).""" - - async def setup_collection(self, collection: str) -> None: # pyright: ignore[reportUnusedParameter] - """Initialize the collection (called once before first use of the collection).""" - - async def setup_collection_once(self, collection: str) -> None: - await self.setup_once() - - if not self._setup_collection_complete[collection]: - async with self._setup_collection_locks[collection]: - if not self._setup_collection_complete[collection]: - try: - await self.setup_collection(collection=collection) - except Exception as e: - raise SetupError(message=f"Failed to setup collection: {e}", extra_info={"collection": collection}) from e - self._setup_collection_complete[collection] = True - - async def setup_once(self) -> None: - if not self._setup_complete: - async with self._setup_lock: - if not self._setup_complete: - try: - await self.setup() - except Exception as e: - raise SetupError(message=f"Failed to setup store: {e}", extra_info={"store": self.__class__.__name__}) from e - self._setup_complete = True - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - """Retrieve a non-expired value by key from the specified collection.""" - await self.setup_collection_once(collection=collection) - - if cache_entry := await self.get_entry(collection=collection, key=key): - if cache_entry.is_expired: - # _ = await self.delete(collection=collection, key=key) - return None - - return cache_entry.value - return None - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - await self.setup_collection_once(collection=collection) - - if cache_entry := await self.get_entry(collection=collection, key=key): - return cache_entry.to_ttl_info() - - return None - - @abstractmethod - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: - """Retrieve a cache entry by key from the specified collection.""" - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - """Store a key-value pair in the specified collection with optional TTL.""" - await 
self.setup_collection_once(collection=collection) - - created_at: datetime = datetime.now(tz=timezone.utc) - - cache_entry: ManagedEntry = ManagedEntry( - created_at=created_at, - expires_at=calculate_expires_at(created_at=created_at, ttl=ttl), - ttl=ttl, - collection=collection, - key=key, - value=value, - ) - - await self.put_entry(collection=collection, key=key, cache_entry=cache_entry, ttl=ttl) - - @abstractmethod - async def put_entry(self, collection: str, key: str, cache_entry: ManagedEntry, *, ttl: float | None = None) -> None: - """Store a managed entry by key in the specified collection.""" - ... - - @override - async def exists(self, collection: str, key: str) -> bool: - await self.setup_collection_once(collection=collection) - - return await super().exists(collection=collection, key=key) diff --git a/src/kv_store_adapter/stores/base/unmanaged.py b/src/kv_store_adapter/stores/base/unmanaged.py deleted file mode 100644 index c9a157da..00000000 --- a/src/kv_store_adapter/stores/base/unmanaged.py +++ /dev/null @@ -1,75 +0,0 @@ -""" -Base abstract class for unmanaged key-value store implementations. -""" - -from abc import ABC, abstractmethod -from typing import Any - -from kv_store_adapter.types import TTLInfo - - -class BaseKVStore(ABC): - """Abstract base class for key-value store implementations. - - The "value" passed to the implementation will be a dictionary of the value to store. - - When using this ABC, your implementation will: - 1. Implement `get` and `set` to get and save values - 2. Self-manage Expiration - 3. Self-manage Collections - 4. Self-manage Expired Entry Culling - """ - - @abstractmethod - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - """Retrieve a non-expired value by key from the specified collection.""" - ... - - @abstractmethod - async def put( - self, - collection: str, - key: str, - value: dict[str, Any], - *, - ttl: float | None = None, - ) -> None: - """Store a key-value pair in the specified collection with optional TTL.""" - ... - - @abstractmethod - async def delete(self, collection: str, key: str) -> bool: - """Delete a key from the specified collection, returning True if it existed.""" - ... - - @abstractmethod - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - """Get TTL information for a key, or None if the key doesn't exist.""" - ... - - @abstractmethod - async def exists(self, collection: str, key: str) -> bool: - """Check if a key exists in the specified collection.""" - - return await self.get(collection=collection, key=key) is not None - - @abstractmethod - async def keys(self, collection: str) -> list[str]: - """List all keys in the specified collection.""" - - ... - - @abstractmethod - async def clear_collection(self, collection: str) -> int: - """Clear all keys in a collection, returning the number of keys deleted.""" - ... - - @abstractmethod - async def list_collections(self) -> list[str]: - """List all available collection names (may include empty collections).""" - ... - - @abstractmethod - async def cull(self) -> None: - """Remove all expired entries from the store.""" - ... 
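To make the new `BaseStore` contract concrete: a backend now only moves `ManagedEntry` objects in and out of storage, while expiry checks, default-collection resolution, and the setup locking shown above are inherited. Below is a minimal dict-backed subclass as a sketch; it is illustrative only and not part of this patch.

```python
from typing_extensions import override

from kv_store_adapter.stores.base import BaseStore
from kv_store_adapter.stores.utils.compound import compound_key
from kv_store_adapter.stores.utils.managed_entry import ManagedEntry


class DictStore(BaseStore):
    """Toy store backed by a flat dict keyed by `collection::key` compound keys."""

    def __init__(self, *, default_collection: str | None = None) -> None:
        self._data: dict[str, ManagedEntry] = {}
        super().__init__(default_collection=default_collection)

    @override
    async def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None:
        # BaseStore.get() filters out expired entries, so no expiry logic is needed here.
        return self._data.get(compound_key(collection=collection, key=key))

    @override
    async def _put_managed_entry(self, *, collection: str, key: str, managed_entry: ManagedEntry) -> None:
        self._data[compound_key(collection=collection, key=key)] = managed_entry

    @override
    async def _delete_managed_entry(self, *, key: str, collection: str) -> bool:
        return self._data.pop(compound_key(collection=collection, key=key), None) is not None
```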
diff --git a/src/kv_store_adapter/stores/disk/__init__.py b/src/kv_store_adapter/stores/disk/__init__.py index 795e2cdd..54d2e329 100644 --- a/src/kv_store_adapter/stores/disk/__init__.py +++ b/src/kv_store_adapter/stores/disk/__init__.py @@ -1,3 +1,4 @@ +from .multi_store import MultiDiskStore from .store import DiskStore -__all__ = ["DiskStore"] +__all__ = ["DiskStore", "MultiDiskStore"] diff --git a/src/kv_store_adapter/stores/disk/multi_store.py b/src/kv_store_adapter/stores/disk/multi_store.py new file mode 100644 index 00000000..ba5bcd7e --- /dev/null +++ b/src/kv_store_adapter/stores/disk/multi_store.py @@ -0,0 +1,136 @@ +import time +from collections.abc import Callable +from pathlib import Path +from typing import overload + +from typing_extensions import override + +from kv_store_adapter.stores.base import BaseStore +from kv_store_adapter.stores.utils.compound import compound_key +from kv_store_adapter.stores.utils.managed_entry import ManagedEntry + +try: + from diskcache import Cache + from pathvalidate import sanitize_filename +except ImportError as e: + msg = "DiskStore requires py-kv-store-adapter[disk]" + raise ImportError(msg) from e + +DEFAULT_DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 * 1024 # 1GB + +CacheFactory = Callable[[str], Cache] + + +def _sanitize_collection_for_filesystem(collection: str) -> str: + """Sanitize the collection name so that it can be used as a directory name on the filesystem.""" + + return sanitize_filename(filename=collection) + + +class MultiDiskStore(BaseStore): + """A disk-based store that uses the diskcache library to store data. The MultiDiskStore creates one diskcache Cache + instance per collection.""" + + _cache: dict[str, Cache] + + _disk_cache_factory: CacheFactory + + _base_directory: Path + + _max_size: int | None + + @overload + def __init__(self, *, disk_cache_factory: CacheFactory, default_collection: str | None = None) -> None: + """Initialize the disk caches. + + Args: + disk_cache_factory: A factory function that creates a diskcache Cache instance for a given collection. + default_collection: The default collection to use if no collection is provided. + """ + + @overload + def __init__(self, *, base_directory: Path, max_size: int | None = None, default_collection: str | None = None) -> None: + """Initialize the disk caches. + + Args: + base_directory: The directory to use for the disk caches. + max_size: The maximum size of the disk caches. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. + """ + + def __init__( + self, + *, + disk_cache_factory: CacheFactory | None = None, + base_directory: Path | None = None, + max_size: int | None = None, + default_collection: str | None = None, + ) -> None: + """Initialize the disk caches. + + Args: + disk_cache_factory: A factory function that creates a diskcache Cache instance for a given collection. + base_directory: The directory to use for the disk caches. + max_size: The maximum size of the disk caches. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. 
+ """ + if disk_cache_factory is None and base_directory is None: + msg = "Either disk_cache_factory or base_directory must be provided" + raise ValueError(msg) + + if base_directory is None: + base_directory = Path.cwd() + + self._max_size = max_size + + self._base_directory = base_directory.resolve() + + def default_disk_cache_factory(collection: str) -> Cache: + sanitized_collection: str = _sanitize_collection_for_filesystem(collection=collection) + + return Cache(directory=self._base_directory / sanitized_collection, size_limit=self._max_size or DEFAULT_DISK_STORE_SIZE_LIMIT) + + self._disk_cache_factory = disk_cache_factory or default_disk_cache_factory + + self._cache = {} + + super().__init__(default_collection=default_collection) + + @override + async def _setup_collection(self, *, collection: str) -> None: + self._cache[collection] = self._disk_cache_factory(collection) + + @override + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = compound_key(collection=collection, key=key) + + expire_epoch: float + + managed_entry_str, expire_epoch = self._cache[collection].get(key=combo_key, expire_time=True) # pyright: ignore[reportAny] + + if not isinstance(managed_entry_str, str): + return None + + ttl = (expire_epoch - time.time()) if expire_epoch else None + + managed_entry: ManagedEntry = ManagedEntry.from_json(json_str=managed_entry_str, ttl=ttl) + + return managed_entry + + @override + async def _put_managed_entry( + self, + *, + key: str, + collection: str, + managed_entry: ManagedEntry, + ) -> None: + combo_key: str = compound_key(collection=collection, key=key) + + _ = self._cache[collection].set(key=combo_key, value=managed_entry.to_json(include_expiration=False), expire=managed_entry.ttl) + + @override + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = compound_key(collection=collection, key=key) + + return self._cache[collection].delete(key=combo_key, retry=True) diff --git a/src/kv_store_adapter/stores/disk/store.py b/src/kv_store_adapter/stores/disk/store.py index c8a2d170..1b175562 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/src/kv_store_adapter/stores/disk/store.py @@ -1,108 +1,100 @@ +import time from pathlib import Path -from typing import Any, overload +from typing import overload -from diskcache import Cache from typing_extensions import override -from kv_store_adapter.stores.base.managed import BaseManagedKVStore -from kv_store_adapter.stores.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys +from kv_store_adapter.stores.base import BaseStore +from kv_store_adapter.stores.utils.compound import compound_key from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -DEFAULT_DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 * 1024 # 1GB +try: + from diskcache import Cache +except ImportError as e: + msg = "DiskStore requires py-kv-store-adapter[disk]" + raise ImportError(msg) from e +DEFAULT_DISK_STORE_MAX_SIZE = 1 * 1024 * 1024 * 1024 # 1GB -class DiskStore(BaseManagedKVStore): - """A disk-based store that uses the diskcache library to store data. 
The diskcache library is a syncronous implementation of an LRU - cache and may not be suitable for high-traffic applications.""" + +class DiskStore(BaseStore): + """A disk-based store that uses the diskcache library to store data.""" _cache: Cache @overload - def __init__(self, *, disk_cache: Cache) -> None: + def __init__(self, *, disk_cache: Cache, default_collection: str | None = None) -> None: """Initialize the disk cache. Args: disk_cache: An existing diskcache Cache instance to use. + default_collection: The default collection to use if no collection is provided. """ @overload - def __init__(self, *, directory: Path | str, size_limit: int | None = None) -> None: + def __init__(self, *, directory: Path | str, max_size: int | None = None, default_collection: str | None = None) -> None: """Initialize the disk cache. Args: directory: The directory to use for the disk cache. - size_limit: The maximum size of the disk cache. Defaults to 1GB. + max_size: The maximum size of the disk cache. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. """ - def __init__(self, *, disk_cache: Cache | None = None, directory: Path | str | None = None, size_limit: int | None = None) -> None: + def __init__( + self, + *, + disk_cache: Cache | None = None, + directory: Path | str | None = None, + max_size: int | None = None, + default_collection: str | None = None, + ) -> None: """Initialize the disk cache. Args: disk_cache: An existing diskcache Cache instance to use. directory: The directory to use for the disk cache. - size_limit: The maximum size of the disk cache. Defaults to 1GB. + max_size: The maximum size of the disk cache. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. 
""" if isinstance(directory, Path): directory = str(object=directory) - self._cache = disk_cache or Cache(directory=directory, size_limit=size_limit or DEFAULT_DISK_STORE_SIZE_LIMIT) - - super().__init__() + self._cache = disk_cache or Cache(directory=directory, size_limit=max_size or DEFAULT_DISK_STORE_MAX_SIZE) - @override - async def setup(self) -> None: - pass + super().__init__(default_collection=default_collection) @override - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: combo_key: str = compound_key(collection=collection, key=key) - cache_entry: Any = self._cache.get(combo_key) # pyright: ignore[reportAny] + expire_epoch: float | None - if not isinstance(cache_entry, str): + managed_entry_str, expire_epoch = self._cache.get(key=combo_key, expire_time=True) # pyright: ignore[reportAny] + + if not isinstance(managed_entry_str, str): return None - return ManagedEntry.from_json(json_str=cache_entry) + ttl = (expire_epoch - time.time()) if expire_epoch else None + + managed_entry: ManagedEntry = ManagedEntry.from_json(json_str=managed_entry_str, ttl=ttl) + + return managed_entry @override - async def put_entry( + async def _put_managed_entry( self, - collection: str, - key: str, - cache_entry: ManagedEntry, *, - ttl: float | None = None, + key: str, + collection: str, + managed_entry: ManagedEntry, ) -> None: combo_key: str = compound_key(collection=collection, key=key) - _ = self._cache.set(key=combo_key, value=cache_entry.to_json(), expire=ttl) + _ = self._cache.set(key=combo_key, value=managed_entry.to_json(), expire=managed_entry.ttl) @override - async def delete(self, collection: str, key: str) -> bool: + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) - return self._cache.delete(key=combo_key) - - @override - async def keys(self, collection: str) -> list[str]: - compound_strings: list[str] = list(self._cache.iterkeys()) - - return get_keys_from_compound_keys(compound_keys=compound_strings, collection=collection) - - @override - async def clear_collection(self, collection: str) -> int: - cleared_count: int = 0 - for key in await self.keys(collection=collection): - _ = await self.delete(collection=collection, key=key) - cleared_count += 1 - - return cleared_count - - @override - async def list_collections(self) -> list[str]: - compound_strings: list[str] = list(self._cache.iterkeys()) - return get_collections_from_compound_keys(compound_keys=compound_strings) - - @override - async def cull(self) -> None: - _ = self._cache.cull() + return self._cache.delete(key=combo_key, retry=True) diff --git a/src/kv_store_adapter/stores/elasticsearch/store.py b/src/kv_store_adapter/stores/elasticsearch/store.py index 4393166c..804a4e33 100644 --- a/src/kv_store_adapter/stores/elasticsearch/store.py +++ b/src/kv_store_adapter/stores/elasticsearch/store.py @@ -1,24 +1,37 @@ -from datetime import datetime, timezone +import hashlib from typing import TYPE_CHECKING, Any, overload -from elasticsearch import AsyncElasticsearch from typing_extensions import override -from kv_store_adapter.stores.base.managed import BaseManagedKVStore -from kv_store_adapter.stores.elasticsearch.utils import ( - get_aggregations_from_body, - get_body_from_response, - get_first_value_from_field_in_hit, - get_hits_from_response, - get_source_from_body, +from kv_store_adapter.stores.base import ( + BaseCullStore, + 
BaseDestroyCollectionStore,
+    BaseEnumerateCollectionsStore,
+    BaseEnumerateKeysStore,
+    BaseStore,
 )
 from kv_store_adapter.stores.utils.compound import compound_key
-from kv_store_adapter.stores.utils.managed_entry import ManagedEntry, dump_to_json, load_from_json
+from kv_store_adapter.stores.utils.managed_entry import ManagedEntry, load_from_json
+from kv_store_adapter.stores.utils.time_to_live import now_as_epoch, try_parse_datetime
+
+try:
+    from elasticsearch import AsyncElasticsearch
+
+    from kv_store_adapter.stores.elasticsearch.utils import (
+        get_aggregations_from_body,
+        get_body_from_response,
+        get_first_value_from_field_in_hit,
+        get_hits_from_response,
+        get_source_from_body,
+    )
+except ImportError as e:
+    msg = "ElasticsearchStore requires py-kv-store-adapter[elasticsearch]"
+    raise ImportError(msg) from e
 
 if TYPE_CHECKING:
-    from elastic_transport import ObjectApiResponse
+    from datetime import datetime
 
-DEFAULT_DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 * 1024 # 1GB
+    from elastic_transport import ObjectApiResponse
 
 ELASTICSEARCH_CLIENT_DEFAULTS = {
     "http_compress": True,
@@ -37,9 +50,6 @@
         "expires_at": {
             "type": "date",
         },
-        "ttl": {
-            "type": "float",
-        },
         "collection": {
             "type": "keyword",
         },
@@ -55,8 +65,13 @@
     },
 }
 
+DEFAULT_PAGE_SIZE = 10000
+PAGE_LIMIT = 10000
+
+MAX_KEY_LENGTH = 256
 
-class ElasticsearchStore(BaseManagedKVStore):
+
+class ElasticsearchStore(BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDestroyCollectionStore, BaseCullStore, BaseStore):
     """An Elasticsearch-based store."""
 
     _client: AsyncElasticsearch
 
@@ -64,13 +79,19 @@ class ElasticsearchStore(BaseManagedKVStore):
     _index: str
 
     @overload
-    def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index: str) -> None: ...
+    def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index: str, default_collection: str | None = None) -> None: ...
 
     @overload
-    def __init__(self, *, url: str, api_key: str, index: str) -> None: ...
+    def __init__(self, *, url: str, api_key: str, index: str, default_collection: str | None = None) -> None: ...
 
     def __init__(
-        self, *, elasticsearch_client: AsyncElasticsearch | None = None, url: str | None = None, api_key: str | None = None, index: str
+        self,
+        *,
+        elasticsearch_client: AsyncElasticsearch | None = None,
+        url: str | None = None,
+        api_key: str | None = None,
+        index: str,
+        default_collection: str | None = None,
     ) -> None:
         """Initialize the Elasticsearch store.
 
@@ -78,11 +99,12 @@ def __init__(
             elasticsearch_client: The Elasticsearch client to use.
             url: The URL of the Elasticsearch cluster.
             api_key: The API key to use.
-            index: The index to use. Defaults to "kv-store".
+            index: The index to use.
+            default_collection: The default collection to use if no collection is provided.
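+
+        Example (editorial sketch; the URL, API key, and index name are placeholders):
+            store = ElasticsearchStore(url="https://localhost:9200", api_key="<api-key>", index="kv-store")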
""" self._client = elasticsearch_client or AsyncElasticsearch(hosts=[url], api_key=api_key, **ELASTICSEARCH_CLIENT_DEFAULTS) # pyright: ignore[reportArgumentType] self._index = index or DEFAULT_INDEX - super().__init__() + super().__init__(default_collection=default_collection) @override async def setup(self) -> None: @@ -95,14 +117,22 @@ async def setup(self) -> None: ) @override - async def setup_collection(self, collection: str) -> None: + async def _setup_collection(self, *, collection: str) -> None: pass + def sanitize_document_id(self, key: str) -> str: + if len(key) > MAX_KEY_LENGTH: + sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() + return sha256_hash[:256] + return key + @override - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: combo_key: str = compound_key(collection=collection, key=key) - elasticsearch_response = await self._client.options(ignore_status=404).get(index=self._index, id=combo_key) + elasticsearch_response = await self._client.options(ignore_status=404).get( + index=self._index, id=self.sanitize_document_id(key=combo_key) + ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) @@ -112,58 +142,48 @@ async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: if not (value_str := source.get("value")) or not isinstance(value_str, str): return None - if not (created_at := source.get("created_at")) or not isinstance(created_at, str): - return None - - ttl: Any | float | int | None = source.get("ttl") - expires_at: Any | str | None = source.get("expires_at") - - if not isinstance(ttl, float | int | None): - return None - - if not isinstance(expires_at, str | None): - return None + created_at: datetime | None = try_parse_datetime(value=source.get("created_at")) + expires_at: datetime | None = try_parse_datetime(value=source.get("expires_at")) return ManagedEntry( - collection=collection, - key=key, value=load_from_json(value_str), - created_at=datetime.fromisoformat(created_at), - ttl=float(ttl) if ttl else None, - expires_at=datetime.fromisoformat(expires_at) if expires_at else None, + created_at=created_at, + expires_at=expires_at, ) @override - async def put_entry( + async def _put_managed_entry( self, - collection: str, - key: str, - cache_entry: ManagedEntry, *, - ttl: float | None = None, + key: str, + collection: str, + managed_entry: ManagedEntry, ) -> None: combo_key: str = compound_key(collection=collection, key=key) + document: dict[str, Any] = { + "collection": collection, + "key": key, + "value": managed_entry.to_json(include_metadata=False), + } + + if managed_entry.created_at: + document["created_at"] = managed_entry.created_at.isoformat() + if managed_entry.expires_at: + document["expires_at"] = managed_entry.expires_at.isoformat() + _ = await self._client.index( index=self._index, - id=combo_key, - body={ - "collection": collection, - "key": key, - "value": dump_to_json(cache_entry.value), - "created_at": cache_entry.created_at.isoformat() if cache_entry.created_at else None, - "expires_at": cache_entry.expires_at.isoformat() if cache_entry.expires_at else None, - "ttl": cache_entry.ttl, - }, + id=self.sanitize_document_id(key=combo_key), + body=document, ) @override - async def delete(self, collection: str, key: str) -> bool: - await self.setup_collection_once(collection=collection) - + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = 
compound_key(collection=collection, key=key) + elasticsearch_response: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).delete( - index=self._index, id=combo_key + index=self._index, id=self.sanitize_document_id(key=combo_key) ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) @@ -174,9 +194,10 @@ async def delete(self, collection: str, key: str) -> bool: return result == "deleted" @override - async def keys(self, collection: str) -> list[str]: + async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: """Get up to 10,000 keys in the specified collection (eventually consistent).""" - await self.setup_collection_once(collection=collection) + + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( index=self._index, @@ -189,7 +210,7 @@ async def keys(self, collection: str) -> list[str]: }, }, source_includes=[], - size=10000, + size=limit, ) if not (hits := get_hits_from_response(response=result)): @@ -206,61 +227,58 @@ async def keys(self, collection: str) -> list[str]: return all_keys @override - async def clear_collection(self, collection: str) -> int: - await self.setup_collection_once(collection=collection) + async def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + """List up to 10,000 collections in the elasticsearch store (eventually consistent).""" - result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).delete_by_query( + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + + search_response: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( index=self._index, - body={ - "query": { - "term": { - "collection": collection, + aggregations={ + "collections": { + "terms": { + "field": "collection", }, }, }, + size=limit, ) - body: dict[str, Any] = get_body_from_response(response=result) + body: dict[str, Any] = get_body_from_response(response=search_response) + aggregations: dict[str, Any] = get_aggregations_from_body(body=body) - if not (deleted := body.get("deleted")) or not isinstance(deleted, int): - return 0 + buckets: list[Any] = aggregations["collections"]["buckets"] # pyright: ignore[reportAny] - return deleted + return [bucket["key"] for bucket in buckets] # pyright: ignore[reportAny] @override - async def list_collections(self) -> list[str]: - """List up to 10,000 collections in the elasticsearch store (eventually consistent).""" - await self.setup_once() - - result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( + async def _delete_collection(self, *, collection: str) -> bool: + result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).delete_by_query( index=self._index, - aggregations={ - "collections": { - "terms": { - "field": "collection", + body={ + "query": { + "term": { + "collection": collection, }, }, }, - size=10000, ) body: dict[str, Any] = get_body_from_response(response=result) - aggregations: dict[str, Any] = get_aggregations_from_body(body=body) - buckets: list[Any] = aggregations["collections"]["buckets"] # pyright: ignore[reportAny] + if not (deleted := body.get("deleted")) or not isinstance(deleted, int): + return False - return [bucket["key"] for bucket in buckets] # pyright: ignore[reportAny] + return deleted > 0 @override - async def cull(self) -> None: - await self.setup_once() - + async def _cull(self) -> None: _ = await 
self._client.options(ignore_status=404).delete_by_query( index=self._index, body={ "query": { "range": { - "expires_at": {"lt": datetime.now(tz=timezone.utc).timestamp()}, + "expires_at": {"lt": now_as_epoch()}, }, }, }, diff --git a/src/kv_store_adapter/stores/memcached/__init__.py b/src/kv_store_adapter/stores/memcached/__init__.py new file mode 100644 index 00000000..a70097ac --- /dev/null +++ b/src/kv_store_adapter/stores/memcached/__init__.py @@ -0,0 +1,3 @@ +from .store import MemcachedStore + +__all__ = ["MemcachedStore"] diff --git a/src/kv_store_adapter/stores/memcached/store.py b/src/kv_store_adapter/stores/memcached/store.py new file mode 100644 index 00000000..cdbb9066 --- /dev/null +++ b/src/kv_store_adapter/stores/memcached/store.py @@ -0,0 +1,104 @@ +import hashlib +from typing import overload + +from typing_extensions import override + +from kv_store_adapter.stores.base import BaseDestroyStore, BaseStore +from kv_store_adapter.stores.utils.compound import compound_key +from kv_store_adapter.stores.utils.managed_entry import ManagedEntry + +try: + from aiomcache import Client +except ImportError as e: + msg = "MemcachedStore requires py-kv-store-adapter[memcached]" + raise ImportError(msg) from e + +MAX_KEY_LENGTH = 240 + + +class MemcachedStore(BaseDestroyStore, BaseStore): + """Memcached-based key-value store using aiomcache.""" + + _client: Client + + @overload + def __init__(self, *, client: Client, default_collection: str | None = None) -> None: ... + + @overload + def __init__(self, *, host: str = "127.0.0.1", port: int = 11211, default_collection: str | None = None) -> None: ... + + def __init__( + self, + *, + client: Client | None = None, + host: str = "127.0.0.1", + port: int = 11211, + default_collection: str | None = None, + ) -> None: + """Initialize the Memcached store. + + Args: + client: An existing aiomcache client to use. + host: Memcached host. Defaults to 127.0.0.1. + port: Memcached port. Defaults to 11211. + default_collection: The default collection to use if no collection is provided. + """ + self._client = client or Client(host=host, port=port) + + super().__init__(default_collection=default_collection) + + def sanitize_key(self, key: str) -> str: + if len(key) > MAX_KEY_LENGTH: + sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() + return sha256_hash[:256] + return key + + @override + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = self.sanitize_key(compound_key(collection=collection, key=key)) + + raw_value: bytes | None = await self._client.get(combo_key.encode("utf-8")) + + if not isinstance(raw_value, (bytes, bytearray)): + return None + + json_str: str = raw_value.decode(encoding="utf-8") + + return ManagedEntry.from_json(json_str=json_str) + + @override + async def _put_managed_entry( + self, + *, + key: str, + collection: str, + managed_entry: ManagedEntry, + ) -> None: + combo_key: str = self.sanitize_key(compound_key(collection=collection, key=key)) + + # Memcached treats 0 as no-expiration. Do not pass <= 0 (other than 0) to avoid permanence errors. 
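+        # Editorial sketch: a sub-second TTL such as 0.4 would truncate to int 0 and
+        # accidentally make the entry permanent; max(int(0.4), 1) == 1 keeps it expiring.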
+ exptime: int + + if managed_entry.ttl is None: # noqa: SIM108 + exptime = 0 + else: + exptime = max(int(managed_entry.ttl), 1) + + json_value: str = managed_entry.to_json() + + _ = await self._client.set( + key=combo_key.encode(encoding="utf-8"), + value=json_value.encode(encoding="utf-8"), + exptime=exptime, + ) + + @override + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = self.sanitize_key(compound_key(collection=collection, key=key)) + + return await self._client.delete(key=combo_key.encode(encoding="utf-8")) + + @override + async def _delete_store(self) -> bool: + _ = await self._client.flush_all() + return True diff --git a/src/kv_store_adapter/stores/memory/store.py b/src/kv_store_adapter/stores/memory/store.py index 7f0a371a..fac7553a 100644 --- a/src/kv_store_adapter/stores/memory/store.py +++ b/src/kv_store_adapter/stores/memory/store.py @@ -1,109 +1,170 @@ import sys +from dataclasses import dataclass, field +from datetime import datetime from typing import Any -from cachetools import TLRUCache -from typing_extensions import override +from typing_extensions import Self, override -from kv_store_adapter.stores.base.managed import BaseManagedKVStore -from kv_store_adapter.stores.utils.compound import compound_key, uncompound_key +from kv_store_adapter.stores.base import ( + BaseDestroyCollectionStore, + BaseDestroyStore, + BaseEnumerateCollectionsStore, + BaseEnumerateKeysStore, +) from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.stores.utils.time_to_live import epoch_to_datetime +try: + from cachetools import TLRUCache +except ImportError as e: + msg = "MemoryStore requires py-kv-store-adapter[memory]" + raise ImportError(msg) from e -def _memory_cache_ttu(_key: Any, value: ManagedEntry, now: float) -> float: # pyright: ignore[reportAny] + +@dataclass +class MemoryCacheEntry: + json_str: str + + expires_at: datetime | None + + ttl_at_insert: float | None = field(default=None) + + @classmethod + def from_managed_entry(cls, managed_entry: ManagedEntry, ttl: float | None = None) -> Self: + return cls( + json_str=managed_entry.to_json(), + expires_at=managed_entry.expires_at, + ttl_at_insert=ttl, + ) + + def to_managed_entry(self) -> ManagedEntry: + return ManagedEntry.from_json(json_str=self.json_str) + + +def _memory_cache_ttu(_key: Any, value: MemoryCacheEntry, now: float) -> float: # pyright: ignore[reportAny] """Calculate time-to-use for cache entries based on their TTL.""" - return now + value.ttl if value.ttl else sys.maxsize + if value.ttl_at_insert is None: + return sys.maxsize + + expiration_epoch: float = now + value.ttl_at_insert + + value.expires_at = epoch_to_datetime(epoch=expiration_epoch) + + return expiration_epoch -def _memory_cache_getsizeof(value: ManagedEntry) -> int: # pyright: ignore[reportUnusedParameter] # noqa: ARG001 +def _memory_cache_getsizeof(value: MemoryCacheEntry) -> int: # pyright: ignore[reportUnusedParameter] # noqa: ARG001 """Return size of cache entry (always 1 for entry counting).""" return 1 DEFAULT_MEMORY_CACHE_LIMIT = 1000 +DEFAULT_PAGE_SIZE = 10000 +PAGE_LIMIT = 10000 -class MemoryStore(BaseManagedKVStore): - """In-memory key-value store using TLRU (Time-aware Least Recently Used) cache.""" - max_entries: int - _cache: TLRUCache[str, ManagedEntry] +class MemoryCollection: + _cache: TLRUCache[str, MemoryCacheEntry] def __init__(self, max_entries: int = DEFAULT_MEMORY_CACHE_LIMIT): - """Initialize the in-memory cache. 
- - Args: - max_entries: The maximum number of entries to store in the cache. Defaults to 1000. - """ - self.max_entries = max_entries - self._cache = TLRUCache[str, ManagedEntry]( + self._cache = TLRUCache[str, MemoryCacheEntry]( maxsize=max_entries, ttu=_memory_cache_ttu, getsizeof=_memory_cache_getsizeof, ) - super().__init__() + def get(self, key: str) -> ManagedEntry | None: + managed_entry_str: MemoryCacheEntry | None = self._cache.get(key) - @override - async def setup(self) -> None: - pass + if managed_entry_str is None: + return None + + managed_entry: ManagedEntry = managed_entry_str.to_managed_entry() + + return managed_entry + + def put(self, key: str, value: ManagedEntry) -> None: + self._cache[key] = MemoryCacheEntry.from_managed_entry(managed_entry=value, ttl=value.ttl) + + def delete(self, key: str) -> bool: + return self._cache.pop(key, None) is not None + + def keys(self, *, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + return list(self._cache.keys())[:limit] + + +class MemoryStore(BaseDestroyStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore): + """In-memory key-value store using TLRU (Time-aware Least Recently Used) cache.""" + + max_entries_per_collection: int + + _cache: dict[str, MemoryCollection] + + def __init__(self, *, max_entries_per_collection: int = DEFAULT_MEMORY_CACHE_LIMIT, default_collection: str | None = None): + """Initialize the in-memory cache. + + Args: + max_entries_per_collection: The maximum number of entries per collection. Defaults to 1000. + """ + + self.max_entries_per_collection = max_entries_per_collection + + self._cache = {} + + super().__init__(default_collection=default_collection) @override - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: - combo_key: str = compound_key(collection=collection, key=key) + async def _setup_collection(self, *, collection: str) -> None: + self._cache[collection] = MemoryCollection(max_entries=self.max_entries_per_collection) - if cache_entry := self._cache.get(combo_key): - return cache_entry + @override + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + collection_cache: MemoryCollection = self._cache[collection] - return None + return collection_cache.get(key=key) @override - async def put_entry( + async def _put_managed_entry( self, - collection: str, - key: str, - cache_entry: ManagedEntry, *, - ttl: float | None = None, + key: str, + collection: str, + managed_entry: ManagedEntry, ) -> None: - combo_key: str = compound_key(collection=collection, key=key) - self._cache[combo_key] = cache_entry + collection_cache: MemoryCollection = self._cache[collection] + + collection_cache.put(key=key, value=managed_entry) @override - async def delete(self, collection: str, key: str) -> bool: - combo_key: str = compound_key(collection=collection, key=key) - return self._cache.pop(combo_key, None) is not None + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + collection_cache: MemoryCollection = self._cache[collection] + + return collection_cache.delete(key=key) @override - async def keys(self, collection: str) -> list[str]: - keys: list[str] = [] + async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + collection_cache: MemoryCollection = self._cache[collection] - for key in self._cache: - entry_collection, entry_key = uncompound_key(key=key) - if entry_collection == collection: - 
keys.append(entry_key) + return collection_cache.keys(limit=limit) - return keys + @override + async def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + return list(self._cache.keys())[:limit] @override - async def clear_collection(self, collection: str) -> int: - cleared_count: int = 0 + async def _delete_collection(self, *, collection: str) -> bool: + if collection not in self._cache: + return False - for key in await self.keys(collection=collection): - _ = await self.delete(collection=collection, key=key) - cleared_count += 1 + del self._cache[collection] - return cleared_count + return True @override - async def list_collections(self) -> list[str]: - collections: set[str] = set() - for key in self._cache: - entry_collection, _ = uncompound_key(key=key) - collections.add(entry_collection) - return list(collections) + async def _delete_store(self) -> bool: + self._cache.clear() - @override - async def cull(self) -> None: - for collection in await self.list_collections(): - for key in await self.keys(collection=collection): - _ = await self.get_entry(collection=collection, key=key) + return True diff --git a/src/kv_store_adapter/stores/null/store.py b/src/kv_store_adapter/stores/null/store.py index 8e1d0e98..1772d020 100644 --- a/src/kv_store_adapter/stores/null/store.py +++ b/src/kv_store_adapter/stores/null/store.py @@ -1,53 +1,26 @@ -from typing import Any - from typing_extensions import override -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.types import TTLInfo +from kv_store_adapter.stores.base import BaseStore +from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -class NullStore(BaseKVStore): +class NullStore(BaseStore): """Null object pattern store that accepts all operations but stores nothing.""" @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: return None @override - async def put( + async def _put_managed_entry( self, - collection: str, - key: str, - value: dict[str, Any], *, - ttl: float | None = None, + key: str, + collection: str, + managed_entry: ManagedEntry, ) -> None: pass @override - async def delete(self, collection: str, key: str) -> bool: - return False - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - return None - - @override - async def keys(self, collection: str) -> list[str]: - return [] - - @override - async def clear_collection(self, collection: str) -> int: - return 0 - - @override - async def list_collections(self) -> list[str]: - return [] - - @override - async def cull(self) -> None: - pass - - @override - async def exists(self, collection: str, key: str) -> bool: + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: return False diff --git a/src/kv_store_adapter/stores/redis/store.py b/src/kv_store_adapter/stores/redis/store.py index 875ef27c..ca96282d 100644 --- a/src/kv_store_adapter/stores/redis/store.py +++ b/src/kv_store_adapter/stores/redis/store.py @@ -1,33 +1,43 @@ from typing import Any, overload from urllib.parse import urlparse -from redis.asyncio import Redis from typing_extensions import override -from kv_store_adapter.errors import StoreConnectionError -from kv_store_adapter.stores.base.managed import BaseManagedKVStore -from kv_store_adapter.stores.utils.compound import compound_key, get_keys_from_compound_keys, 
uncompound_key +from kv_store_adapter.stores.base import BaseDestroyStore, BaseEnumerateKeysStore, BaseStore +from kv_store_adapter.stores.utils.compound import compound_key, get_keys_from_compound_keys from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +try: + from redis.asyncio import Redis +except ImportError as e: + msg = "RedisStore requires py-kv-store-adapter[redis]" + raise ImportError(msg) from e -class RedisStore(BaseManagedKVStore): +DEFAULT_PAGE_SIZE = 10000 +PAGE_LIMIT = 10000 + + +class RedisStore(BaseDestroyStore, BaseEnumerateKeysStore, BaseStore): """Redis-based key-value store.""" _client: Redis @overload - def __init__(self, *, client: Redis) -> None: ... + def __init__(self, *, client: Redis, default_collection: str | None = None) -> None: ... @overload - def __init__(self, *, url: str) -> None: ... + def __init__(self, *, url: str, default_collection: str | None = None) -> None: ... @overload - def __init__(self, *, host: str = "localhost", port: int = 6379, db: int = 0, password: str | None = None) -> None: ... + def __init__( + self, *, host: str = "localhost", port: int = 6379, db: int = 0, password: str | None = None, default_collection: str | None = None + ) -> None: ... def __init__( self, *, client: Redis | None = None, + default_collection: str | None = None, url: str | None = None, host: str = "localhost", port: int = 6379, @@ -43,6 +53,7 @@ def __init__( port: Redis port. Defaults to 6379. db: Redis database number. Defaults to 0. password: Redis password. Defaults to None. + default_collection: The default collection to use if no collection is provided. """ if client: self._client = client @@ -64,95 +75,60 @@ def __init__( decode_responses=True, ) - super().__init__() + super().__init__(default_collection=default_collection) @override - async def setup(self) -> None: - if not await self._client.ping(): # pyright: ignore[reportUnknownMemberType] - raise StoreConnectionError(message="Failed to connect to Redis") - - @override - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: combo_key: str = compound_key(collection=collection, key=key) - cache_entry: Any = await self._client.get(name=combo_key) # pyright: ignore[reportAny] + redis_response: Any = await self._client.get(name=combo_key) # pyright: ignore[reportAny] - if cache_entry is None: + if not isinstance(redis_response, str): return None - if not isinstance(cache_entry, str): - return None + managed_entry: ManagedEntry = ManagedEntry.from_json(json_str=redis_response) - return ManagedEntry.from_json(json_str=cache_entry) + return managed_entry @override - async def put_entry( + async def _put_managed_entry( self, - collection: str, - key: str, - cache_entry: ManagedEntry, *, - ttl: float | None = None, + key: str, + collection: str, + managed_entry: ManagedEntry, ) -> None: combo_key: str = compound_key(collection=collection, key=key) - json_value: str = cache_entry.to_json() + json_value: str = managed_entry.to_json() - if ttl is not None: + if managed_entry.ttl is not None: # Redis does not support <= 0 TTLs - ttl = max(int(ttl), 1) + ttl = max(int(managed_entry.ttl), 1) _ = await self._client.setex(name=combo_key, time=ttl, value=json_value) # pyright: ignore[reportAny] else: _ = await self._client.set(name=combo_key, value=json_value) # pyright: ignore[reportAny] @override - async def delete(self, collection: str, key: str) -> bool: - await 
self.setup_collection_once(collection=collection) - + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) - return await self._client.delete(combo_key) != 0 # pyright: ignore[reportAny] - - @override - async def keys(self, collection: str) -> list[str]: - await self.setup_collection_once(collection=collection) - - pattern = compound_key(collection=collection, key="*") - compound_keys: list[str] = await self._client.keys(pattern) # pyright: ignore[reportUnknownMemberType, reportAny] - return get_keys_from_compound_keys(compound_keys=compound_keys, collection=collection) + return await self._client.delete(combo_key) != 0 # pyright: ignore[reportAny] @override - async def clear_collection(self, collection: str) -> int: - await self.setup_collection_once(collection=collection) + async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) pattern = compound_key(collection=collection, key="*") - deleted_count: int = 0 - - async for key in self._client.scan_iter(name=pattern): # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType] - if not isinstance(key, str): - continue - - deleted_count += await self._client.delete(key) # pyright: ignore[reportAny] - - return deleted_count - - @override - async def list_collections(self) -> list[str]: - await self.setup_once() - - pattern: str = compound_key(collection="*", key="*") - - collections: set[str] = set() - - async for key in self._client.scan_iter(name=pattern): # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType] - if not isinstance(key, str): - continue - - collections.add(uncompound_key(key=key)[0]) + # redis.asyncio scan returns tuple(cursor, keys) + _cursor: int + keys: list[str] + _cursor, keys = await self._client.scan(cursor=0, match=pattern, count=limit) # pyright: ignore[reportUnknownMemberType, reportAny] - return list[str](collections) + return get_keys_from_compound_keys(compound_keys=keys, collection=collection) @override - async def cull(self) -> None: ... 
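+    # Editorial note: flushdb() clears the entire selected Redis database, not just
+    # this store's compound-key namespace; a narrower sweep would SCAN for this
+    # store's keys and DELETE them in batches.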
+ async def _delete_store(self) -> bool: + return await self._client.flushdb() # pyright: ignore[reportUnknownMemberType, reportAny] diff --git a/src/kv_store_adapter/stores/simple/__init__.py b/src/kv_store_adapter/stores/simple/__init__.py index dbbcf5af..a0c59924 100644 --- a/src/kv_store_adapter/stores/simple/__init__.py +++ b/src/kv_store_adapter/stores/simple/__init__.py @@ -1,4 +1,3 @@ -from .json_store import SimpleJSONStore from .store import SimpleStore -__all__ = ["SimpleJSONStore", "SimpleStore"] +__all__ = ["SimpleStore"] diff --git a/src/kv_store_adapter/stores/simple/json_store.py b/src/kv_store_adapter/stores/simple/json_store.py deleted file mode 100644 index 858ec362..00000000 --- a/src/kv_store_adapter/stores/simple/json_store.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing_extensions import override - -from kv_store_adapter.stores.base.managed import BaseManagedKVStore -from kv_store_adapter.stores.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry - -DEFAULT_SIMPLE_JSON_STORE_MAX_ENTRIES = 1000 - - -class SimpleJSONStore(BaseManagedKVStore): - """Simple JSON-serialized dictionary-based key-value store for testing.""" - - max_entries: int - _data: dict[str, str] - - def __init__(self, max_entries: int = DEFAULT_SIMPLE_JSON_STORE_MAX_ENTRIES): - super().__init__() - self.max_entries = max_entries - self._data = {} - - @override - async def setup(self) -> None: - pass - - @override - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: - combo_key: str = compound_key(collection=collection, key=key) - - if not (data := self._data.get(combo_key)): - return None - - return ManagedEntry.from_json(json_str=data) - - @override - async def put_entry(self, collection: str, key: str, cache_entry: ManagedEntry, *, ttl: float | None = None) -> None: - combo_key: str = compound_key(collection=collection, key=key) - - if len(self._data) >= self.max_entries: - _ = self._data.pop(next(iter(self._data))) - - self._data[combo_key] = cache_entry.to_json() - - @override - async def delete(self, collection: str, key: str) -> bool: - combo_key: str = compound_key(collection=collection, key=key) - return self._data.pop(combo_key, None) is not None - - @override - async def keys(self, collection: str) -> list[str]: - return get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) - - @override - async def clear_collection(self, collection: str) -> int: - keys: list[str] = get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) - - for key in keys: - _ = self._data.pop(key) - - return len(keys) - - @override - async def list_collections(self) -> list[str]: - return get_collections_from_compound_keys(compound_keys=list(self._data.keys())) - - @override - async def cull(self) -> None: - for collection in await self.list_collections(): - for key in get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection): - _ = await self.get_entry(collection=collection, key=key) diff --git a/src/kv_store_adapter/stores/simple/store.py b/src/kv_store_adapter/stores/simple/store.py index a3859d4f..101b560f 100644 --- a/src/kv_store_adapter/stores/simple/store.py +++ b/src/kv_store_adapter/stores/simple/store.py @@ -1,166 +1,98 @@ from collections import defaultdict -from datetime import datetime, timezone -from typing import Any +from dataclasses import dataclass +from datetime 
import datetime from typing_extensions import override -from kv_store_adapter.stores.base.managed import BaseManagedKVStore -from kv_store_adapter.stores.base.unmanaged import BaseKVStore +from kv_store_adapter.stores.base import ( + BaseDestroyStore, + BaseEnumerateCollectionsStore, + BaseEnumerateKeysStore, + BaseStore, +) from kv_store_adapter.stores.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -from kv_store_adapter.stores.utils.time_to_live import calculate_expires_at -from kv_store_adapter.types import TTLInfo +from kv_store_adapter.stores.utils.managed_entry import ManagedEntry, load_from_json +from kv_store_adapter.stores.utils.time_to_live import seconds_to DEFAULT_SIMPLE_MANAGED_STORE_MAX_ENTRIES = 1000 DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 1000 -class SimpleStore(BaseKVStore): - """Simple dictionary-based key-value store for testing and development.""" +@dataclass +class SimpleStoreEntry: + json_str: str - max_entries: int - _data: dict[str, dict[str, Any]] - _expirations: dict[str, datetime] + created_at: datetime | None + expires_at: datetime | None - def __init__(self, max_entries: int = DEFAULT_SIMPLE_STORE_MAX_ENTRIES): - super().__init__() - self.max_entries = max_entries - self._data = defaultdict[str, dict[str, Any]](dict) - self._expirations = defaultdict[str, datetime]() + @property + def current_ttl(self) -> float | None: + if self.expires_at is None: + return None - async def setup(self) -> None: - pass + return seconds_to(datetime=self.expires_at) - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - combo_key: str = compound_key(collection=collection, key=key) + def to_managed_entry(self) -> ManagedEntry: + managed_entry: ManagedEntry = ManagedEntry( + value=load_from_json(json_str=self.json_str), + expires_at=self.expires_at, + created_at=self.created_at, + ) - if not (data := self._data.get(combo_key)): - return None + return managed_entry - if not (expiration := self._expirations.get(combo_key)): - return data - if expiration <= datetime.now(tz=timezone.utc): - del self._data[combo_key] - del self._expirations[combo_key] - return None - - return data +class SimpleStore(BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDestroyStore, BaseStore): + """Simple managed dictionary-based key-value store for testing and development.""" - @override - async def exists(self, collection: str, key: str) -> bool: - return await self.get(collection=collection, key=key) is not None + max_entries: int - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - combo_key: str = compound_key(collection=collection, key=key) + _data: dict[str, SimpleStoreEntry] - if len(self._data) >= self.max_entries: - _ = self._data.pop(next(iter(self._data))) + def __init__(self, max_entries: int = DEFAULT_SIMPLE_MANAGED_STORE_MAX_ENTRIES, default_collection: str | None = None): + self.max_entries = max_entries - _ = self._data[combo_key] = value + self._data = defaultdict[str, SimpleStoreEntry]() - if expires_at := calculate_expires_at(ttl=ttl): - _ = self._expirations[combo_key] = expires_at + super().__init__(default_collection=default_collection) @override - async def delete(self, collection: str, key: str) -> bool: + async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: combo_key: str = compound_key(collection=collection, key=key) 
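+
+        # Editorial note: entries persist as raw JSON plus created_at/expires_at, so each
+        # read rebuilds a ManagedEntry whose live TTL is derived from expires_at.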
- return self._data.pop(combo_key, None) is not None - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - combo_key: str = compound_key(collection=collection, key=key) + store_entry: SimpleStoreEntry | None = self._data.get(combo_key) - if not (expiration := self._expirations.get(combo_key)): + if store_entry is None: return None - return TTLInfo(collection=collection, key=key, created_at=None, ttl=None, expires_at=expiration) + return store_entry.to_managed_entry() @override - async def keys(self, collection: str) -> list[str]: - return get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) - - @override - async def clear_collection(self, collection: str) -> int: - keys: list[str] = get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) - - for key in keys: - _ = self._data.pop(key) - _ = self._expirations.pop(key) - - return len(keys) - - @override - async def list_collections(self) -> list[str]: - return get_collections_from_compound_keys(compound_keys=list(self._data.keys())) - - @override - async def cull(self) -> None: - for collection in await self.list_collections(): - for key in get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection): - if not (expiration := self._expirations.get(key)): - continue - - if expiration <= datetime.now(tz=timezone.utc): - _ = self._data.pop(key) - _ = self._expirations.pop(key) - - -class SimpleManagedStore(BaseManagedKVStore): - """Simple managed dictionary-based key-value store for testing and development.""" - - max_entries: int - _data: dict[str, ManagedEntry] - - def __init__(self, max_entries: int = DEFAULT_SIMPLE_MANAGED_STORE_MAX_ENTRIES): - super().__init__() - self.max_entries = max_entries - self._data = defaultdict[str, ManagedEntry]() - - @override - async def setup(self) -> None: - pass - - @override - async def get_entry(self, collection: str, key: str) -> ManagedEntry | None: - combo_key: str = compound_key(collection=collection, key=key) - return self._data.get(combo_key) - - @override - async def put_entry(self, collection: str, key: str, cache_entry: ManagedEntry, *, ttl: float | None = None) -> None: + async def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: combo_key: str = compound_key(collection=collection, key=key) if len(self._data) >= self.max_entries: _ = self._data.pop(next(iter(self._data))) - self._data[combo_key] = cache_entry + self._data[combo_key] = SimpleStoreEntry( + json_str=managed_entry.to_json(include_metadata=False), expires_at=managed_entry.expires_at, created_at=managed_entry.created_at + ) @override - async def delete(self, collection: str, key: str) -> bool: + async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) + return self._data.pop(combo_key, None) is not None @override - async def keys(self, collection: str) -> list[str]: + async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: return get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) @override - async def clear_collection(self, collection: str) -> int: - keys: list[str] = get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) - - for key in keys: - _ = self._data.pop(key) - - return len(keys) - - @override - async def list_collections(self) -> list[str]: + async def 
_get_collection_names(self, *, limit: int | None = None) -> list[str]: return get_collections_from_compound_keys(compound_keys=list(self._data.keys())) @override - async def cull(self) -> None: - for collection in await self.list_collections(): - for key in get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection): - _ = await self.get_entry(collection=collection, key=key) + async def _delete_store(self) -> bool: + self._data.clear() + return True diff --git a/src/kv_store_adapter/stores/utils/compound.py b/src/kv_store_adapter/stores/utils/compound.py index c7231621..75aa068e 100644 --- a/src/kv_store_adapter/stores/utils/compound.py +++ b/src/kv_store_adapter/stores/utils/compound.py @@ -42,9 +42,12 @@ def prefix_key(key: str, prefix: str, separator: str | None = None) -> str: return compound_string(first=prefix, second=key, separator=separator) -def unprefix_key(key: str, separator: str | None = None) -> str: +def unprefix_key(key: str, prefix: str, separator: str | None = None) -> str: separator = separator or DEFAULT_PREFIX_SEPARATOR - return uncompound_string(string=key, separator=separator)[1] + if not key.startswith(prefix + separator): + msg = f"Key {key} is not prefixed with {prefix}" + raise ValueError(msg) + return key[len(prefix + separator) :] def prefix_collection(collection: str, prefix: str, separator: str | None = None) -> str: @@ -52,18 +55,21 @@ def prefix_collection(collection: str, prefix: str, separator: str | None = None return compound_string(first=prefix, second=collection, separator=separator) -def unprefix_collection(collection: str, separator: str | None = None) -> str: +def unprefix_collection(collection: str, prefix: str, separator: str | None = None) -> str: separator = separator or DEFAULT_PREFIX_SEPARATOR - return uncompound_string(string=collection, separator=separator)[1] + if not collection.startswith(prefix + separator): + msg = f"Collection {collection} is not prefixed with {prefix}" + raise ValueError(msg) + return collection[len(prefix + separator) :] def get_collections_from_compound_keys(compound_keys: list[str], separator: str | None = None) -> list[str]: - """Returns a unique list of collections from a list of compound keys.""" + """Return a unique list of collections from a list of compound keys.""" separator = separator or DEFAULT_COMPOUND_SEPARATOR return list({key_collection for key_collection, _ in uncompound_strings(strings=compound_keys)}) def get_keys_from_compound_keys(compound_keys: list[str], collection: str, separator: str | None = None) -> list[str]: - """Returns a list of keys from a list of compound keys for a given collection.""" + """Return all keys from a list of compound keys for a given collection.""" separator = separator or DEFAULT_COMPOUND_SEPARATOR return [key for key_collection, key in uncompound_strings(strings=compound_keys) if key_collection == collection] diff --git a/src/kv_store_adapter/stores/utils/managed_entry.py b/src/kv_store_adapter/stores/utils/managed_entry.py index 170a11ae..5b53b01b 100644 --- a/src/kv_store_adapter/stores/utils/managed_entry.py +++ b/src/kv_store_adapter/stores/utils/managed_entry.py @@ -1,67 +1,90 @@ import json -from dataclasses import dataclass +from dataclasses import dataclass, field from datetime import datetime from typing import Any, cast from typing_extensions import Self from kv_store_adapter.errors import DeserializationError, SerializationError -from kv_store_adapter.types import TTLInfo +from kv_store_adapter.stores.utils.time_to_live import 
now, now_plus, try_parse_datetime -@dataclass +@dataclass(kw_only=True) class ManagedEntry: - """A managed cache entry containing value data and TTL metadata.""" + """A managed cache entry containing value data and TTL metadata. - collection: str - key: str + The entry supports either TTL seconds or absolute expiration datetime. On init: + - If `ttl` is provided but `expires_at` is not, an `expires_at` will be computed. + - If `expires_at` is provided but `ttl` is not, a live TTL will be computed on access. + """ value: dict[str, Any] - created_at: datetime | None - ttl: float | None - expires_at: datetime | None + created_at: datetime | None = field(default=None) + ttl: float | None = field(default=None) + expires_at: datetime | None = field(default=None) + + def __post_init__(self) -> None: + if self.ttl is not None and self.expires_at is None: + self.expires_at = now_plus(seconds=self.ttl) + + elif self.expires_at is not None and self.ttl is None: + self.recalculate_ttl() @property def is_expired(self) -> bool: - return self.to_ttl_info().is_expired - - def to_ttl_info(self) -> TTLInfo: - return TTLInfo(collection=self.collection, key=self.key, created_at=self.created_at, ttl=self.ttl, expires_at=self.expires_at) - - def to_json(self) -> str: - return dump_to_json( - obj={ - "created_at": self.created_at.isoformat() if self.created_at else None, - "ttl": self.ttl, - "expires_at": self.expires_at.isoformat() if self.expires_at else None, - "collection": self.collection, - "key": self.key, - "value": self.value, - } - ) + if self.expires_at is None: + return False + return self.expires_at <= now() + + def recalculate_ttl(self) -> None: + if self.expires_at is not None and self.ttl is None: + self.ttl = (self.expires_at - now()).total_seconds() + + def to_json(self, include_metadata: bool = True, include_expiration: bool = True, include_creation: bool = True) -> str: + data: dict[str, Any] = {} + + if include_metadata: + data["value"] = self.value + if include_creation and self.created_at: + data["created_at"] = self.created_at.isoformat() + if include_expiration and self.expires_at: + data["expires_at"] = self.expires_at.isoformat() + else: + data = self.value + + return dump_to_json(obj=data) @classmethod - def from_json(cls, json_str: str) -> Self: + def from_json(cls, json_str: str, includes_metadata: bool = True, ttl: float | None = None) -> Self: data: dict[str, Any] = load_from_json(json_str=json_str) - created_at: str | None = data.get("created_at") - expires_at: str | None = data.get("expires_at") - ttl: float | None = data.get("ttl") + + if not includes_metadata: + return cls( + value=data, + ) + + created_at: datetime | None = try_parse_datetime(value=data.get("created_at")) + expires_at: datetime | None = try_parse_datetime(value=data.get("expires_at")) + + value: dict[str, Any] | None = data.get("value") + + if value is None: + msg = "Value is None" + raise DeserializationError(msg) return cls( - created_at=datetime.fromisoformat(created_at) if created_at else None, + created_at=created_at, + expires_at=expires_at, ttl=ttl, - expires_at=datetime.fromisoformat(expires_at) if expires_at else None, - collection=data["collection"], # pyright: ignore[reportAny] - key=data["key"], # pyright: ignore[reportAny] - value=data["value"], # pyright: ignore[reportAny] + value=value, ) def dump_to_json(obj: dict[str, Any]) -> str: try: return json.dumps(obj) - except json.JSONDecodeError as e: + except (json.JSONDecodeError, TypeError) as e: msg: str = f"Failed to serialize object to JSON: 
{e}" raise SerializationError(msg) from e diff --git a/src/kv_store_adapter/stores/utils/time_to_live.py b/src/kv_store_adapter/stores/utils/time_to_live.py index 4eee4d80..d6373a46 100644 --- a/src/kv_store_adapter/stores/utils/time_to_live.py +++ b/src/kv_store_adapter/stores/utils/time_to_live.py @@ -1,10 +1,34 @@ +import time from datetime import datetime, timedelta, timezone +from typing import Any -def calculate_expires_at(created_at: datetime | None = None, ttl: float | None = None) -> datetime | None: - """Calculate expiration timestamp from creation time and TTL seconds.""" - if ttl is None: - return None +def epoch_to_datetime(epoch: float) -> datetime: + """Convert an epoch timestamp to a datetime object.""" + return datetime.fromtimestamp(epoch, tz=timezone.utc) - expires_at: datetime = (created_at or datetime.now(tz=timezone.utc)) + timedelta(seconds=ttl) - return expires_at + +def now_as_epoch() -> float: + """Get the current time as epoch seconds.""" + return time.time() + + +def now() -> datetime: + """Get the current time as a datetime object.""" + return datetime.now(tz=timezone.utc) + + +def seconds_to(datetime: datetime) -> float: + """Get the number of seconds between the current time and a datetime object.""" + return (datetime - now()).total_seconds() + + +def now_plus(seconds: float) -> datetime: + """Get the current time plus a number of seconds as a datetime object.""" + return datetime.now(tz=timezone.utc) + timedelta(seconds=seconds) + + +def try_parse_datetime(value: Any) -> datetime | None: # pyright: ignore[reportAny] + if isinstance(value, str): + return datetime.fromisoformat(value) + return None diff --git a/src/kv_store_adapter/stores/wrappers/__init__.py b/src/kv_store_adapter/stores/wrappers/__init__.py deleted file mode 100644 index b7e7f975..00000000 --- a/src/kv_store_adapter/stores/wrappers/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .prefix_collection import PrefixCollectionWrapper -from .prefix_key import PrefixKeyWrapper -from .single_collection import SingleCollectionWrapper -from .statistics import StatisticsWrapper - -__all__ = ["PrefixCollectionWrapper", "PrefixKeyWrapper", "SingleCollectionWrapper", "StatisticsWrapper"] diff --git a/src/kv_store_adapter/stores/wrappers/clamp_ttl.py b/src/kv_store_adapter/stores/wrappers/clamp_ttl.py deleted file mode 100644 index 59ec24b4..00000000 --- a/src/kv_store_adapter/stores/wrappers/clamp_ttl.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.types import TTLInfo - - -class TTLClampWrapper(BaseKVStore): - """Wrapper that enforces a maximum TTL for puts into the store.""" - - def __init__(self, store: BaseKVStore, min_ttl: float, max_ttl: float, missing_ttl: float | None = None) -> None: - """Initialize the TTL clamp wrapper. - - Args: - store: The store to wrap. - min_ttl: The minimum TTL for puts into the store. - max_ttl: The maximum TTL for puts into the store. - missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to None. 
- """ - self.store: BaseKVStore = store - self.min_ttl: float = min_ttl - self.max_ttl: float = max_ttl - self.missing_ttl: float | None = missing_ttl - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - return await self.store.get(collection=collection, key=key) - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - if ttl is None and self.missing_ttl: - ttl = self.missing_ttl - - if ttl and ttl < self.min_ttl: - ttl = self.min_ttl - - if ttl and ttl > self.max_ttl: - ttl = self.max_ttl - - await self.store.put(collection=collection, key=key, value=value, ttl=ttl) - - @override - async def delete(self, collection: str, key: str) -> bool: - return await self.store.delete(collection=collection, key=key) - - @override - async def exists(self, collection: str, key: str) -> bool: - return await self.store.exists(collection=collection, key=key) - - @override - async def keys(self, collection: str) -> list[str]: - return await self.store.keys(collection=collection) - - @override - async def clear_collection(self, collection: str) -> int: - return await self.store.clear_collection(collection=collection) - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - return await self.store.ttl(collection=collection, key=key) - - @override - async def list_collections(self) -> list[str]: - return await self.store.list_collections() - - @override - async def cull(self) -> None: - await self.store.cull() diff --git a/src/kv_store_adapter/stores/wrappers/passthrough_cache.py b/src/kv_store_adapter/stores/wrappers/passthrough_cache.py deleted file mode 100644 index 713bf037..00000000 --- a/src/kv_store_adapter/stores/wrappers/passthrough_cache.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.types import TTLInfo - - -class PassthroughCacheWrapper(BaseKVStore): - """Wrapper that users two stores, ideal for combining a local and distributed store.""" - - def __init__(self, primary_store: BaseKVStore, cache_store: BaseKVStore) -> None: - """Initialize the passthrough cache wrapper. Items are first checked in the primary store and if not found, are - checked in the secondary store. Operations are performed on both stores but are not atomic. - - Operations like expiry culling against the primary store will not be reflected in the cache store. This may - lead to stale data in the cache store. One way to combat this is to use a TTLClampWrapper on the cache store to - enforce a lower TTL on the cache store than the primary store. - - Args: - primary_store: The primary store the data will live in. - cache_store: The write-through (likely ephemeral) cache to use. 
- """ - self.cache_store: BaseKVStore = cache_store - self.primary_store: BaseKVStore = primary_store - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - if cache_store_value := await self.cache_store.get(collection=collection, key=key): - return cache_store_value - - if primary_store_value := await self.primary_store.get(collection=collection, key=key): - ttl_info: TTLInfo | None = await self.primary_store.ttl(collection=collection, key=key) - - await self.cache_store.put(collection=collection, key=key, value=primary_store_value, ttl=ttl_info.ttl if ttl_info else None) - - return primary_store_value - return None - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - _ = await self.cache_store.delete(collection=collection, key=key) - await self.primary_store.put(collection=collection, key=key, value=value, ttl=ttl) - - @override - async def delete(self, collection: str, key: str) -> bool: - deleted = await self.primary_store.delete(collection=collection, key=key) - _ = await self.cache_store.delete(collection=collection, key=key) - return deleted - - @override - async def exists(self, collection: str, key: str) -> bool: - return await self.get(collection=collection, key=key) is not None - - @override - async def keys(self, collection: str) -> list[str]: - return await self.primary_store.keys(collection=collection) - - @override - async def clear_collection(self, collection: str) -> int: - removed: int = await self.primary_store.clear_collection(collection=collection) - _ = await self.cache_store.clear_collection(collection=collection) - return removed - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - if ttl_info := await self.cache_store.ttl(collection=collection, key=key): - return ttl_info - - return await self.primary_store.ttl(collection=collection, key=key) - - @override - async def list_collections(self) -> list[str]: - collections: list[str] = await self.primary_store.list_collections() - - return collections - - @override - async def cull(self) -> None: - await self.primary_store.cull() - await self.cache_store.cull() diff --git a/src/kv_store_adapter/stores/wrappers/prefix_collection.py b/src/kv_store_adapter/stores/wrappers/prefix_collection.py deleted file mode 100644 index 6488e611..00000000 --- a/src/kv_store_adapter/stores/wrappers/prefix_collection.py +++ /dev/null @@ -1,76 +0,0 @@ -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.stores.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_collection, unprefix_collection -from kv_store_adapter.types import TTLInfo - - -class PrefixCollectionWrapper(BaseKVStore): - """Wrapper that prefixes all collections with a given prefix.""" - - def __init__(self, store: BaseKVStore, prefix: str, separator: str | None = None) -> None: - """Initialize the prefix collection wrapper. - - Args: - store: The store to wrap. - prefix: The prefix to add to all collections. - separator: The separator to use between the prefix and the collection. Defaults to "__". 
- """ - self.store: BaseKVStore = store - self.prefix: str = prefix - self.separator: str = separator or DEFAULT_PREFIX_SEPARATOR - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - return await self.store.get(collection=prefixed_collection, key=key) - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - await self.store.put(collection=prefixed_collection, key=key, value=value, ttl=ttl) - - @override - async def delete(self, collection: str, key: str) -> bool: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - return await self.store.delete(collection=prefixed_collection, key=key) - - @override - async def exists(self, collection: str, key: str) -> bool: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - return await self.store.exists(collection=prefixed_collection, key=key) - - @override - async def keys(self, collection: str) -> list[str]: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - return await self.store.keys(collection=prefixed_collection) - - @override - async def clear_collection(self, collection: str) -> int: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - return await self.store.clear_collection(collection=prefixed_collection) - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - prefixed_collection: str = prefix_collection(collection=collection, prefix=self.prefix, separator=self.separator) - ttl_info: TTLInfo | None = await self.store.ttl(collection=prefixed_collection, key=key) - if ttl_info: - ttl_info.collection = collection - ttl_info.key = key - return ttl_info - - @override - async def list_collections(self) -> list[str]: - collections: list[str] = await self.store.list_collections() - - return [ - unprefix_collection(collection=collection, separator=self.separator) - for collection in collections - if collection.startswith(self.prefix) - ] - - @override - async def cull(self) -> None: - await self.store.cull() diff --git a/src/kv_store_adapter/stores/wrappers/prefix_key.py b/src/kv_store_adapter/stores/wrappers/prefix_key.py deleted file mode 100644 index a7c43fe2..00000000 --- a/src/kv_store_adapter/stores/wrappers/prefix_key.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.stores.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key -from kv_store_adapter.types import TTLInfo - - -class PrefixKeyWrapper(BaseKVStore): - """Wrapper that prefixes all keys with a given prefix.""" - - def __init__(self, store: BaseKVStore, prefix: str, separator: str | None = None) -> None: - """Initialize the prefix key wrapper. - - Args: - store: The store to wrap. - prefix: The prefix to add to all keys. - separator: The separator to use between the prefix and the key. Defaults to "__". 
- """ - self.store: BaseKVStore = store - self.prefix: str = prefix - self.separator: str = separator or DEFAULT_PREFIX_SEPARATOR - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - prefixed_key: str = prefix_key(key=key, prefix=self.prefix, separator=self.separator) - return await self.store.get(collection=collection, key=prefixed_key) - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - prefixed_key: str = prefix_key(key=key, prefix=self.prefix, separator=self.separator) - await self.store.put(collection=collection, key=prefixed_key, value=value, ttl=ttl) - - @override - async def delete(self, collection: str, key: str) -> bool: - prefixed_key: str = prefix_key(key=key, prefix=self.prefix, separator=self.separator) - return await self.store.delete(collection=collection, key=prefixed_key) - - @override - async def exists(self, collection: str, key: str) -> bool: - prefixed_key: str = prefix_key(key=key, prefix=self.prefix, separator=self.separator) - return await self.store.exists(collection=collection, key=prefixed_key) - - @override - async def keys(self, collection: str) -> list[str]: - keys: list[str] = await self.store.keys(collection=collection) - return [unprefix_key(key=key, separator=self.separator) for key in keys] - - @override - async def clear_collection(self, collection: str) -> int: - return await self.store.clear_collection(collection=collection) - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - prefixed_key: str = prefix_key(key=key, prefix=self.prefix, separator=self.separator) - ttl_info: TTLInfo | None = await self.store.ttl(collection=collection, key=prefixed_key) - if ttl_info: - ttl_info.collection = collection - ttl_info.key = key - return ttl_info - - @override - async def list_collections(self) -> list[str]: - return await self.store.list_collections() - - @override - async def cull(self) -> None: - await self.store.cull() diff --git a/src/kv_store_adapter/stores/wrappers/single_collection.py b/src/kv_store_adapter/stores/wrappers/single_collection.py deleted file mode 100644 index 6806a6cc..00000000 --- a/src/kv_store_adapter/stores/wrappers/single_collection.py +++ /dev/null @@ -1,68 +0,0 @@ -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.stores.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key -from kv_store_adapter.types import TTLInfo - - -class SingleCollectionWrapper(BaseKVStore): - """Wrapper that forces all requests into a single collection, prefixes the keys with the original collection name. 
- - The single collection wrapper does not support collection operations.""" - - def __init__(self, store: BaseKVStore, collection: str, prefix_separator: str | None = None) -> None: - self.collection: str = collection - self.prefix_separator: str = prefix_separator or DEFAULT_PREFIX_SEPARATOR - self.store: BaseKVStore = store - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - prefixed_key: str = prefix_key(key=key, prefix=collection, separator=self.prefix_separator) - return await self.store.get(collection=self.collection, key=prefixed_key) - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - prefixed_key: str = prefix_key(key=key, prefix=collection, separator=self.prefix_separator) - await self.store.put(collection=self.collection, key=prefixed_key, value=value, ttl=ttl) - - @override - async def delete(self, collection: str, key: str) -> bool: - prefixed_key: str = prefix_key(key=key, prefix=collection, separator=self.prefix_separator) - return await self.store.delete(collection=self.collection, key=prefixed_key) - - @override - async def exists(self, collection: str, key: str) -> bool: - prefixed_key: str = prefix_key(key=key, prefix=collection, separator=self.prefix_separator) - return await self.store.exists(collection=self.collection, key=prefixed_key) - - @override - async def keys(self, collection: str) -> list[str]: - keys: list[str] = await self.store.keys(collection=collection) - return [unprefix_key(key=key, separator=self.prefix_separator) for key in keys] - - @override - async def clear_collection(self, collection: str) -> int: - msg = "Clearing a collection is not supported for SingleCollectionWrapper" - raise NotImplementedError(msg) - - # return await self.store.clear_collection(collection=self.collection) - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - prefixed_key: str = prefix_key(key=key, prefix=collection, separator=self.prefix_separator) - ttl: TTLInfo | None = await self.store.ttl(collection=self.collection, key=prefixed_key) - if ttl: - ttl.collection = collection - ttl.key = key - return ttl - - @override - async def list_collections(self) -> list[str]: - msg = "Listing collections is not supported for SingleCollectionWrapper" - raise NotImplementedError(msg) - - @override - async def cull(self) -> None: - await self.store.cull() diff --git a/src/kv_store_adapter/stores/wrappers/statistics.py b/src/kv_store_adapter/stores/wrappers/statistics.py deleted file mode 100644 index 5163808f..00000000 --- a/src/kv_store_adapter/stores/wrappers/statistics.py +++ /dev/null @@ -1,197 +0,0 @@ -from dataclasses import dataclass, field -from typing import Any - -from typing_extensions import override - -from kv_store_adapter.stores.base.unmanaged import BaseKVStore -from kv_store_adapter.types import TTLInfo - - -@dataclass -class BaseStatistics: - """Base statistics container with operation counting.""" - - count: int = field(default=0) - """The number of operations.""" - - def increment(self) -> None: - self.count += 1 - - -@dataclass -class BaseHitMissStatistics(BaseStatistics): - """Statistics container with hit/miss tracking for cache-like operations.""" - - hit: int = field(default=0) - """The number of hits.""" - miss: int = field(default=0) - """The number of misses.""" - - def increment_hit(self) -> None: - self.increment() - self.hit += 1 - - def increment_miss(self) -> None: - self.increment() - self.miss += 1 - - 
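An aside on the hit/miss counters being removed here (a similar shape returns later in this series): the two counters compose into a hit rate with no extra bookkeeping. A minimal sketch using a stand-in dataclass — the names mirror this file but the snippet is illustrative, not part of the diff:

    from dataclasses import dataclass

    @dataclass
    class HitMiss:
        count: int = 0  # total operations
        hit: int = 0    # operations that found a value
        miss: int = 0   # operations that did not

    stats = HitMiss(count=10, hit=7, miss=3)
    hit_rate = stats.hit / stats.count if stats.count else 0.0  # 0.7 for this sample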
-@dataclass -class GetStatistics(BaseHitMissStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class SetStatistics(BaseStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class DeleteStatistics(BaseHitMissStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class ExistsStatistics(BaseHitMissStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class KeysStatistics(BaseStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class ClearCollectionStatistics(BaseHitMissStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class ListCollectionsStatistics(BaseStatistics): - """A class for statistics about a KV Store collection.""" - - -@dataclass -class KVStoreCollectionStatistics(BaseStatistics): - """A class for statistics about a KV Store collection.""" - - get: GetStatistics = field(default_factory=GetStatistics) - """The statistics for the get operation.""" - - set: SetStatistics = field(default_factory=SetStatistics) - """The statistics for the set operation.""" - - delete: DeleteStatistics = field(default_factory=DeleteStatistics) - """The statistics for the delete operation.""" - - exists: ExistsStatistics = field(default_factory=ExistsStatistics) - """The statistics for the exists operation.""" - - keys: KeysStatistics = field(default_factory=KeysStatistics) - """The statistics for the keys operation.""" - - clear_collection: ClearCollectionStatistics = field(default_factory=ClearCollectionStatistics) - """The statistics for the clear collection operation.""" - - list_collections: ListCollectionsStatistics = field(default_factory=ListCollectionsStatistics) - """The statistics for the list collections operation.""" - - -@dataclass -class KVStoreStatistics: - """Statistics container for a KV Store.""" - - collections: dict[str, KVStoreCollectionStatistics] = field(default_factory=dict) - - def get_collection(self, collection: str) -> KVStoreCollectionStatistics: - if collection not in self.collections: - self.collections[collection] = KVStoreCollectionStatistics() - return self.collections[collection] - - -class StatisticsWrapper(BaseKVStore): - """Statistics wrapper around a KV Store that tracks operation statistics.""" - - def __init__(self, store: BaseKVStore, track_statistics: bool = True) -> None: - self.store: BaseKVStore = store - self._statistics: KVStoreStatistics | None = KVStoreStatistics() if track_statistics else None - - @property - def statistics(self) -> KVStoreStatistics | None: - return self._statistics - - @override - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - if value := await self.store.get(collection=collection, key=key): - if self.statistics: - self.statistics.get_collection(collection).get.increment_hit() - return value - - if self.statistics: - self.statistics.get_collection(collection).get.increment_miss() - - return None - - @override - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - await self.store.put(collection=collection, key=key, value=value, ttl=ttl) - - if self.statistics: - self.statistics.get_collection(collection).set.increment() - - @override - async def delete(self, collection: str, key: str) -> bool: - if await self.store.delete(collection=collection, key=key): - if self.statistics: - 
self.statistics.get_collection(collection).delete.increment_hit() - return True - - if self.statistics: - self.statistics.get_collection(collection).delete.increment_miss() - - return False - - @override - async def exists(self, collection: str, key: str) -> bool: - if await self.store.exists(collection=collection, key=key): - if self.statistics: - self.statistics.get_collection(collection).exists.increment_hit() - return True - - if self.statistics: - self.statistics.get_collection(collection).exists.increment_miss() - - return False - - @override - async def keys(self, collection: str) -> list[str]: - keys: list[str] = await self.store.keys(collection) - - if self.statistics: - self.statistics.get_collection(collection).keys.increment() - - return keys - - @override - async def clear_collection(self, collection: str) -> int: - if count := await self.store.clear_collection(collection): - if self.statistics: - self.statistics.get_collection(collection).clear_collection.increment_hit() - return count - - if self.statistics: - self.statistics.get_collection(collection).clear_collection.increment_miss() - - return 0 - - @override - async def ttl(self, collection: str, key: str) -> TTLInfo | None: - return await self.store.ttl(collection=collection, key=key) - - @override - async def list_collections(self) -> list[str]: - return await self.store.list_collections() - - @override - async def cull(self) -> None: - await self.store.cull() diff --git a/src/kv_store_adapter/types.py b/src/kv_store_adapter/types.py index 3230563f..ad3b9cf6 100644 --- a/src/kv_store_adapter/types.py +++ b/src/kv_store_adapter/types.py @@ -1,82 +1,180 @@ -from dataclasses import dataclass -from datetime import datetime, timezone +from collections.abc import Sequence from typing import Any, Protocol, runtime_checkable -@dataclass -class TTLInfo: - """TTL (Time To Live) information for a key-value pair in a collection.""" +@runtime_checkable +class KeyValueProtocol(Protocol): + """A subset of KV operations: get/put/delete and TTL variants, including bulk calls.""" + + async def get( + self, + key: str, + *, + collection: str | None = None, + ) -> dict[str, Any] | None: + """Retrieve a value by key from the specified collection. + + Args: + key: The key to retrieve the value from. + collection: The collection to retrieve the value from. If no collection is provided, it will use the default collection. + + Returns: + The value associated with the key. If the key is not found, None will be returned. + """ + ... - collection: str - key: str - created_at: datetime | None + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + """Retrieve the value and TTL information for a key-value pair from the specified collection. - ttl: float | None - expires_at: datetime | None + Args: + key: The key to retrieve the TTL information from. + collection: The collection to retrieve the TTL information from. If no collection is provided, + it will use the default collection. - @property - def is_expired(self) -> bool: - """Check if the key-value pair has expired based on its TTL.""" - if self.expires_at is None: - return False + Returns: + The value and TTL information for the key. If the key is not found, (None, None) will be returned. + """ + ... 
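A usage sketch of the (value, ttl) contract introduced above — `store` is assumed to be any implementation of this protocol, and the key, collection, and threshold values are illustrative:

    from typing import Any

    async def refresh_if_expiring(store: Any, key: str) -> None:
        # Missing keys come back as (None, None) per the docstring above.
        value, ttl = await store.ttl(key, collection="sessions")
        if value is not None and ttl is not None and ttl < 5:
            # Re-put with a fresh TTL; the backend converts it to its own format.
            await store.put(key, value, collection="sessions", ttl=60)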
- return self.expires_at <= datetime.now(tz=timezone.utc) + async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + """Store a key-value pair in the specified collection with optional TTL. + Args: + key: The key to store the value in. + value: The value to store. + collection: The collection to store the value in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pair. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ + ... -@runtime_checkable -class KVStore(Protocol): - """Protocol defining the interface for key-value store implementations.""" + async def delete(self, key: str, *, collection: str | None = None) -> bool: + """Delete a key-value pair from the specified collection. - async def get(self, collection: str, key: str) -> dict[str, Any] | None: - """Retrieve a value by key from the specified collection.""" + Args: + key: The key to delete the value from. + collection: The collection to delete the value from. If no collection is provided, it will use the default collection. + """ ... - async def put(self, collection: str, key: str, value: dict[str, Any], *, ttl: float | None = None) -> None: - """Store a key-value pair in the specified collection with optional TTL.""" + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + """Retrieve multiple values by key from the specified collection. + + Args: + keys: The keys to retrieve the values from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + + Returns: + A list of values for the keys. Each value is either a dict or None if the key is not found. + """ ... - async def delete(self, collection: str, key: str) -> bool: - """Delete a key-value pair from the specified collection.""" + async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + """Retrieve multiple values and TTL information by key from the specified collection. + + Args: + keys: The keys to retrieve the values and TTL information from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + + Returns: + A list of tuples containing (value, ttl) for each key. Each tuple contains either (dict, float) or (None, None) if the + key is not found. + """ ... - async def exists(self, collection: str, key: str) -> bool: - """Check if a key exists in the specified collection.""" + async def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + """Store multiple key-value pairs in the specified collection. + + Args: + keys: The keys to store the values in. + values: The values to store. + collection: The collection to store keys in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pairs. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ ... + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + """Delete multiple key-value pairs from the specified collection. 
-@runtime_checkable -class BulkKVStore(KVStore, Protocol): - """Protocol defining the interface for bulk key-value store implementations.""" + Args: + keys: The keys to delete the values from. + collection: The collection to delete keys from. If no collection is provided, it will use the default collection. - async def get_many(self, collection: str, keys: list[str]) -> list[dict[str, Any]]: - """Retrieve multiple values by key from the specified collection.""" + Returns: + The number of keys deleted. + """ ... - async def put_many(self, collection: str, keys: list[str], values: list[dict[str, Any]]) -> None: - """Store multiple key-value pairs in the specified collection.""" - ... - async def delete_many(self, collection: str, keys: list[str]) -> None: - """Delete multiple key-value pairs from the specified collection.""" +@runtime_checkable +class CullProtocol(Protocol): + async def cull(self) -> None: + """Cull the store. + + This will remove all expired keys from the store. + """ ... @runtime_checkable -class ManageKVStore(KVStore, Protocol): - """Protocol defining the interface for managed key-value store implementations.""" +class EnumerateKeysProtocol(Protocol): + """Protocol segment to enumerate keys in a collection.""" + + async def keys(self, collection: str | None = None, *, limit: int | None = None) -> list[str]: + """List all keys in the specified collection. - async def keys(self, collection: str) -> list[str]: - """List all keys in the specified collection.""" + Args: + collection: The collection to list the keys from. If no collection is provided, it will use the default collection. + limit: The maximum number of keys to list. The behavior when no limit is provided is store-dependent. + """ ... - async def collections(self) -> list[str]: - """List all available collection names (may include empty collections).""" + +@runtime_checkable +class EnumerateCollectionsProtocol(Protocol): + async def collections(self, *, limit: int | None = None) -> list[str]: + """List all available collection names (may include empty collections). + + Args: + limit: The maximum number of collections to list. The behavior when no limit is provided is store-dependent. + """ ... - async def delete_collection(self, collection: str) -> int: - """Clear all keys in a collection, returning the number of keys deleted.""" + +@runtime_checkable +class DestroyStoreProtocol(Protocol): + """Protocol segment for store-destruction semantics.""" + + async def destroy(self) -> bool: + """Destroy the keystore. + + This will clear all collections and keys from the store. + """ ... - async def cull(self) -> None: - """Remove all expired entries from the store.""" + +@runtime_checkable +class DestroyCollectionProtocol(Protocol): + async def destroy_collection(self, collection: str) -> bool: + """Destroy the specified collection. + + Args: + collection: The collection to destroy. + """ ... + + +class KVStore(KeyValueProtocol, Protocol): + """A protocol for key-value store operations. + + Includes basic operations: get, put, delete, ttl + Includes bulk operations: get_many, put_many, delete_many, ttl_many. 
+ """ diff --git a/src/kv_store_adapter/wrappers/base.py b/src/kv_store_adapter/wrappers/base.py new file mode 100644 index 00000000..fcfbb6dc --- /dev/null +++ b/src/kv_store_adapter/wrappers/base.py @@ -0,0 +1,51 @@ +from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from kv_store_adapter.types import KVStore + + +class BaseWrapper(KVStore): + """A base wrapper for KVStore implementations that passes through to the underlying store.""" + + store: KVStore + + @override + async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + return await self.store.get(collection=collection, key=key) + + @override + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + return await self.store.get_many(collection=collection, keys=keys) + + @override + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + return await self.store.ttl(collection=collection, key=key) + + @override + async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + return await self.store.ttl_many(collection=collection, keys=keys) + + @override + async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + return await self.store.put(collection=collection, key=key, value=value, ttl=ttl) + + @override + async def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + return await self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + @override + async def delete(self, key: str, *, collection: str | None = None) -> bool: + return await self.store.delete(collection=collection, key=key) + + @override + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + return await self.store.delete_many(keys=keys, collection=collection) diff --git a/src/kv_store_adapter/wrappers/clamp_ttl.py b/src/kv_store_adapter/wrappers/clamp_ttl.py new file mode 100644 index 00000000..c19cffc7 --- /dev/null +++ b/src/kv_store_adapter/wrappers/clamp_ttl.py @@ -0,0 +1,61 @@ +from collections.abc import Sequence +from typing import Any, overload + +from typing_extensions import override + +from kv_store_adapter.types import KVStore +from kv_store_adapter.wrappers.base import BaseWrapper + + +class TTLClampWrapper(BaseWrapper): + """Wrapper that enforces a maximum TTL for puts into the store.""" + + def __init__(self, store: KVStore, min_ttl: float, max_ttl: float, missing_ttl: float | None = None) -> None: + """Initialize the TTL clamp wrapper. + + Args: + store: The store to wrap. + min_ttl: The minimum TTL for puts into the store. + max_ttl: The maximum TTL for puts into the store. + missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to None. + """ + self.store: KVStore = store + self.min_ttl: float = min_ttl + self.max_ttl: float = max_ttl + self.missing_ttl: float | None = missing_ttl + + super().__init__() + + @overload + def _clamp_ttl(self, ttl: float) -> float: ... + + @overload + def _clamp_ttl(self, ttl: float | None) -> float | None: ... 
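A behavior sketch of the clamping policy these overloads feed into (constructor arguments are illustrative, and `backing_store` stands in for any KVStore):

    clamp = TTLClampWrapper(store=backing_store, min_ttl=60, max_ttl=3600, missing_ttl=300)
    # _clamp_ttl(None)  -> 300   missing_ttl applies when no TTL is given
    # _clamp_ttl(10)    -> 60    raised to min_ttl
    # _clamp_ttl(900)   -> 900   already within [min_ttl, max_ttl]
    # _clamp_ttl(86400) -> 3600  capped at max_ttl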
+
+    def _clamp_ttl(self, ttl: float | None) -> float | None:
+        if ttl is None:
+            return self.missing_ttl
+
+        return max(self.min_ttl, min(ttl, self.max_ttl))
+
+    @override
+    async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        await self.store.put(collection=collection, key=key, value=value, ttl=self._clamp_ttl(ttl=ttl))
+
+    @override
+    async def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        clamped_ttl: Sequence[float | None] | float | None = None
+
+        if isinstance(ttl, Sequence):
+            clamped_ttl = [self._clamp_ttl(ttl=t) for t in ttl]
+        else:
+            clamped_ttl = self._clamp_ttl(ttl=ttl)
+
+        await self.store.put_many(keys=keys, values=values, collection=collection, ttl=clamped_ttl)
diff --git a/src/kv_store_adapter/wrappers/passthrough_cache.py b/src/kv_store_adapter/wrappers/passthrough_cache.py
new file mode 100644
index 00000000..d0439276
--- /dev/null
+++ b/src/kv_store_adapter/wrappers/passthrough_cache.py
@@ -0,0 +1,184 @@
+from collections.abc import Sequence
+from typing import Any
+
+from typing_extensions import override
+
+from kv_store_adapter.types import KVStore
+from kv_store_adapter.wrappers.base import BaseWrapper
+from kv_store_adapter.wrappers.clamp_ttl import TTLClampWrapper
+
+DEFAULT_MAX_TTL: float = 30 * 60
+DEFAULT_MISSING_TTL: float = 30 * 60
+
+
+class PassthroughCacheWrapper(BaseWrapper):
+    """Two-tier wrapper: reads from cache store, falls back to primary and populates cache.
+
+    TTLs from the primary are respected when writing into the cache using a clamped TTL policy.
+    """
+
+    def __init__(
+        self,
+        primary_store: KVStore,
+        cache_store: KVStore,
+        maximum_ttl: float | None = None,
+        missing_ttl: float | None = None,
+    ) -> None:
+        """Initialize the passthrough cache wrapper.
+
+        Args:
+            primary_store: The primary store to wrap.
+            cache_store: The cache store to wrap.
+            maximum_ttl: The maximum TTL for puts into the cache store. Defaults to 30 minutes.
+            missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to 30 minutes.
+ """ + self.store: KVStore = primary_store + self.cache_store: KVStore = cache_store + + self.cache_store = TTLClampWrapper( + store=cache_store, + min_ttl=0, + max_ttl=maximum_ttl or DEFAULT_MAX_TTL, + missing_ttl=missing_ttl or DEFAULT_MISSING_TTL, + ) + + super().__init__() + + @override + async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + if managed_entry := await self.cache_store.get(collection=collection, key=key): + return managed_entry + + uncached_entry, ttl = await self.store.ttl(collection=collection, key=key) + + if not uncached_entry: + return None + + await self.cache_store.put(collection=collection, key=key, value=uncached_entry, ttl=ttl) + + return uncached_entry + + @override + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + key_to_value: dict[str, dict[str, Any] | None] = dict.fromkeys(keys, None) + + # First check the cache store for the entries + cached_entries: list[dict[str, Any] | None] = await self.cache_store.get_many(collection=collection, keys=keys) + + for i, key in enumerate[str](iterable=keys): + key_to_value[key] = cached_entries[i] + + uncached_keys = [key for key, value in key_to_value.items() if value is None] + + uncached_entries: list[tuple[dict[str, Any] | None, float | None]] = await self.store.ttl_many( + collection=collection, keys=uncached_keys + ) + + entries_to_cache: list[dict[str, Any]] = [] + entries_to_cache_keys: list[str] = [] + entries_to_cache_ttls: list[float | None] = [] + + for i, key in enumerate[str](iterable=uncached_keys): + entry, ttl = uncached_entries[i] + if entry is not None: + entries_to_cache_keys.append(key) + entries_to_cache.append(entry) + entries_to_cache_ttls.append(ttl) + + key_to_value[key] = entry + + if entries_to_cache: + await self.cache_store.put_many( + collection=collection, + keys=entries_to_cache_keys, + values=entries_to_cache, + ttl=entries_to_cache_ttls, + ) + + return [key_to_value[key] for key in keys] + + @override + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + cached_entry, ttl = await self.cache_store.ttl(collection=collection, key=key) + + if cached_entry: + return cached_entry, ttl + + uncached_entry, ttl = await self.store.ttl(collection=collection, key=key) + + if not uncached_entry: + return (None, None) + + await self.cache_store.put(collection=collection, key=key, value=uncached_entry, ttl=ttl) + + return uncached_entry, ttl + + @override + async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + key_to_value: dict[str, tuple[dict[str, Any] | None, float | None]] = dict.fromkeys(keys, (None, None)) # type: ignore + + # First check the cache store for the entries + cached_entries: list[tuple[dict[str, Any] | None, float | None]] = await self.cache_store.ttl_many(collection=collection, keys=keys) + + for i, key in enumerate[str](iterable=keys): + key_to_value[key] = (cached_entries[i][0], cached_entries[i][1]) + + uncached_keys = [key for key, value in key_to_value.items() if value == (None, None)] + + uncached_entries: list[tuple[dict[str, Any] | None, float | None]] = await self.store.ttl_many( + collection=collection, keys=uncached_keys + ) + + entries_to_cache: list[dict[str, Any]] = [] + entries_to_cache_keys: list[str] = [] + entries_to_cache_ttls: list[float | None] = [] + + for i, key in enumerate[str](iterable=uncached_keys): + entry, ttl 
= uncached_entries[i] + if entry is not None: + entries_to_cache_keys.append(key) + entries_to_cache.append(entry) + entries_to_cache_ttls.append(ttl) + + key_to_value[key] = (entry, ttl) + + if entries_to_cache: + await self.cache_store.put_many( + collection=collection, + keys=entries_to_cache_keys, + values=entries_to_cache, + ttl=entries_to_cache_ttls, + ) + + return [key_to_value[key] for key in keys] + + @override + async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + _ = await self.cache_store.delete(collection=collection, key=key) + + await self.store.put(collection=collection, key=key, value=value, ttl=ttl) + + @override + async def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + _ = await self.cache_store.delete_many(collection=collection, keys=keys) + + await self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + @override + async def delete(self, key: str, *, collection: str | None = None) -> bool: + _ = await self.cache_store.delete(collection=collection, key=key) + + return await self.store.delete(collection=collection, key=key) + + @override + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + _ = await self.cache_store.delete_many(collection=collection, keys=keys) + + return await self.store.delete_many(collection=collection, keys=keys) diff --git a/src/kv_store_adapter/wrappers/prefix_collections.py b/src/kv_store_adapter/wrappers/prefix_collections.py new file mode 100644 index 00000000..fe762193 --- /dev/null +++ b/src/kv_store_adapter/wrappers/prefix_collections.py @@ -0,0 +1,78 @@ +from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from kv_store_adapter.stores.utils.compound import prefix_collection, unprefix_collection +from kv_store_adapter.types import KVStore +from kv_store_adapter.wrappers.base import BaseWrapper + + +class PrefixCollectionsWrapper(BaseWrapper): + """A wrapper that prefixes collection names before delegating to the underlying store.""" + + def __init__(self, store: KVStore, prefix: str, default_collection: str) -> None: + """Initialize the prefix collections wrapper. + + Args: + store: The store to wrap. + prefix: The prefix to add to the collections. + default_collection: The default collection to use if no collection is provided. 
Will be automatically prefixed with the `prefix` + """ + self.store: KVStore = store + self.prefix: str = prefix + self.default_collection: str = default_collection + super().__init__() + + def _prefix_collection(self, collection: str | None) -> str: + return prefix_collection(prefix=self.prefix, collection=collection or self.default_collection) + + def _unprefix_collection(self, collection: str) -> str: + return unprefix_collection(prefix=self.prefix, collection=collection) + + @override + async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.get(key=key, collection=new_collection) + + @override + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.get_many(keys=keys, collection=new_collection) + + @override + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.ttl(key=key, collection=new_collection) + + @override + async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.ttl_many(keys=keys, collection=new_collection) + + @override + async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.put(key=key, value=value, collection=new_collection, ttl=ttl) + + @override + async def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.put_many(keys=keys, values=values, collection=new_collection, ttl=ttl) + + @override + async def delete(self, key: str, *, collection: str | None = None) -> bool: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.delete(key=key, collection=new_collection) + + @override + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + new_collection: str = self._prefix_collection(collection=collection) + return await self.store.delete_many(keys=keys, collection=new_collection) diff --git a/src/kv_store_adapter/wrappers/prefix_keys.py b/src/kv_store_adapter/wrappers/prefix_keys.py new file mode 100644 index 00000000..933813a9 --- /dev/null +++ b/src/kv_store_adapter/wrappers/prefix_keys.py @@ -0,0 +1,76 @@ +from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from kv_store_adapter.stores.utils.compound import prefix_key, unprefix_key +from kv_store_adapter.types import KVStore +from kv_store_adapter.wrappers.base import BaseWrapper + + +class PrefixKeysWrapper(BaseWrapper): + """A wrapper for prefixing keys in a KVStore.""" + + def __init__(self, store: KVStore, prefix: str) -> None: + """Initialize the prefix keys wrapper. + + Args: + store: The store to wrap. + prefix: The prefix to add to the keys. 
+        """
+        self.store: KVStore = store
+        self.prefix: str = prefix
+        super().__init__()
+
+    def _prefix_key(self, key: str) -> str:
+        return prefix_key(prefix=self.prefix, key=key)
+
+    def _unprefix_key(self, key: str) -> str:
+        return unprefix_key(prefix=self.prefix, key=key)
+
+    @override
+    async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None:
+        new_key: str = self._prefix_key(key=key)
+        return await self.store.get(key=new_key, collection=collection)
+
+    @override
+    async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]:
+        new_keys: list[str] = [self._prefix_key(key=key) for key in keys]
+        return await self.store.get_many(keys=new_keys, collection=collection)
+
+    @override
+    async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]:
+        new_key: str = self._prefix_key(key=key)
+        return await self.store.ttl(key=new_key, collection=collection)
+
+    @override
+    async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]:
+        new_keys: list[str] = [self._prefix_key(key=key) for key in keys]
+        return await self.store.ttl_many(keys=new_keys, collection=collection)
+
+    @override
+    async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        new_key: str = self._prefix_key(key=key)
+        return await self.store.put(key=new_key, value=value, collection=collection, ttl=ttl)
+
+    @override
+    async def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        new_keys: list[str] = [self._prefix_key(key=key) for key in keys]
+        return await self.store.put_many(keys=new_keys, values=values, collection=collection, ttl=ttl)
+
+    @override
+    async def delete(self, key: str, *, collection: str | None = None) -> bool:
+        new_key: str = self._prefix_key(key=key)
+        return await self.store.delete(key=new_key, collection=collection)
+
+    @override
+    async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int:
+        new_keys: list[str] = [self._prefix_key(key=key) for key in keys]
+        return await self.store.delete_many(keys=new_keys, collection=collection)
diff --git a/src/kv_store_adapter/wrappers/single_collection.py b/src/kv_store_adapter/wrappers/single_collection.py
new file mode 100644
index 00000000..3312c9c4
--- /dev/null
+++ b/src/kv_store_adapter/wrappers/single_collection.py
@@ -0,0 +1,81 @@
+from collections.abc import Sequence
+from typing import Any
+
+from typing_extensions import override
+
+from kv_store_adapter.stores.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key
+from kv_store_adapter.types import KVStore
+from kv_store_adapter.wrappers.base import BaseWrapper
+
+
+class SingleCollectionWrapper(BaseWrapper):
+    """A wrapper that stores all collections within a single backing collection via key prefixing."""
+
+    def __init__(self, store: KVStore, single_collection: str, default_collection: str, separator: str | None = None) -> None:
+        """Initialize the single collection wrapper.
+
+        Args:
+            store: The store to wrap.
+            single_collection: The single collection to use to store all collections.
+            default_collection: The default collection to use if no collection is provided.
+            separator: The separator to use between the collection prefix and the key. Defaults to "__".
+        """
+        self.store: KVStore = store
+        self.single_collection: str = single_collection
+        self.default_collection: str = default_collection
+        self.separator: str = separator or DEFAULT_PREFIX_SEPARATOR
+        super().__init__()
+
+    def _prefix_key(self, key: str, collection: str | None = None) -> str:
+        collection_to_use = collection or self.default_collection
+        return prefix_key(prefix=collection_to_use, key=key, separator=self.separator)
+
+    def _unprefix_key(self, key: str) -> str:
+        return unprefix_key(prefix=self.single_collection, key=key, separator=self.separator)
+
+    @override
+    async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return await self.store.get(key=new_key, collection=self.single_collection)
+
+    @override
+    async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return await self.store.get_many(keys=new_keys, collection=self.single_collection)
+
+    @override
+    async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return await self.store.ttl(key=new_key, collection=self.single_collection)
+
+    @override
+    async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return await self.store.ttl_many(keys=new_keys, collection=self.single_collection)
+
+    @override
+    async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return await self.store.put(key=new_key, value=value, collection=self.single_collection, ttl=ttl)
+
+    @override
+    async def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return await self.store.put_many(keys=new_keys, values=values, collection=self.single_collection, ttl=ttl)
+
+    @override
+    async def delete(self, key: str, *, collection: str | None = None) -> bool:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return await self.store.delete(key=new_key, collection=self.single_collection)
+
+    @override
+    async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return await self.store.delete_many(keys=new_keys, collection=self.single_collection)
diff --git a/src/kv_store_adapter/wrappers/statistics.py b/src/kv_store_adapter/wrappers/statistics.py
new file mode 100644
index 00000000..1d6f1558
--- /dev/null
+++ b/src/kv_store_adapter/wrappers/statistics.py
@@ -0,0 +1,214 @@
+from collections.abc import Sequence
+from dataclasses import dataclass, field
+from typing import Any
+
+from typing_extensions import override
+
+from kv_store_adapter.types import KVStore
+from kv_store_adapter.wrappers.base import BaseWrapper
+
+
+@dataclass
+class BaseStatistics:
+    """Base statistics container with operation counting."""
+
+    count: int = field(default=0)
+    """The number of operations."""
+
+    def increment(self, *, increment: int = 1) -> None:
+        self.count += increment
+
+
+@dataclass
+class BaseHitMissStatistics(BaseStatistics):
+    """Statistics container with hit/miss tracking for cache-like operations."""
+
+    hit: int = field(default=0)
+    """The number of hits."""
+    miss: int = field(default=0)
+    """The number of misses."""
+
+    def increment_hit(self, *, increment: int = 1) -> None:
+        self.increment(increment=increment)
+        self.hit += increment
+
+    def increment_miss(self, *, increment: int = 1) -> None:
+        self.increment(increment=increment)
+        self.miss += increment
+
+
+@dataclass
+class GetStatistics(BaseHitMissStatistics):
+    """A class for statistics about a KV Store collection."""
+
+
+@dataclass
+class PutStatistics(BaseStatistics):
+    """A class for statistics about a KV Store collection."""
+
+
+@dataclass
+class DeleteStatistics(BaseHitMissStatistics):
+    """A class for statistics about a KV Store collection."""
+
+
+@dataclass
+class ExistsStatistics(BaseHitMissStatistics):
+    """A class for statistics about a KV Store collection."""
+
+
+@dataclass
+class TTLStatistics(BaseHitMissStatistics):
+    """A class for statistics about a KV Store collection."""
+
+
+@dataclass
+class KVStoreCollectionStatistics(BaseStatistics):
+    """A class for statistics about a KV Store collection."""
+
+    get: GetStatistics = field(default_factory=GetStatistics)
+    """The statistics for the get operation."""
+
+    ttl: TTLStatistics = field(default_factory=TTLStatistics)
+    """The statistics for the ttl operation."""
+
+    put: PutStatistics = field(default_factory=PutStatistics)
+    """The statistics for the put operation."""
+
+    delete: DeleteStatistics = field(default_factory=DeleteStatistics)
+    """The statistics for the delete operation."""
+
+    exists: ExistsStatistics = field(default_factory=ExistsStatistics)
+    """The statistics for the exists operation."""
+
+
+@dataclass
+class KVStoreStatistics:
+    """Statistics container for a KV Store."""
+
+    collections: dict[str, KVStoreCollectionStatistics] = field(default_factory=dict)
+
+    def get_collection(self, collection: str) -> KVStoreCollectionStatistics:
+        if collection not in self.collections:
+            self.collections[collection] = KVStoreCollectionStatistics()
+        return self.collections[collection]
+
+
+DEFAULT_COLLECTION_NAME = "__no_collection__"
+
+
+class StatisticsWrapper(BaseWrapper):
+    """Statistics wrapper around a KV Store that tracks operation statistics.
+
+    Note: enumeration and destroy operations are not tracked by this wrapper.
+ """ + + def __init__(self, store: KVStore) -> None: + self.store: KVStore = store + self._statistics: KVStoreStatistics = KVStoreStatistics() + + @property + def statistics(self) -> KVStoreStatistics: + return self._statistics + + @override + async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + collection = collection or DEFAULT_COLLECTION_NAME + + if value := await self.store.get(collection=collection, key=key): + self.statistics.get_collection(collection=collection).get.increment_hit() + return value + + self.statistics.get_collection(collection=collection).get.increment_miss() + + return None + + @override + async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + collection = collection or DEFAULT_COLLECTION_NAME + + value, ttl = await self.store.ttl(collection=collection, key=key) + + if value: + self.statistics.get_collection(collection=collection).ttl.increment_hit() + return value, ttl + + self.statistics.get_collection(collection=collection).ttl.increment_miss() + return None, None + + @override + async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + collection = collection or DEFAULT_COLLECTION_NAME + + await self.store.put(collection=collection, key=key, value=value, ttl=ttl) + + self.statistics.get_collection(collection=collection).put.increment() + + @override + async def delete(self, key: str, *, collection: str | None = None) -> bool: + collection = collection or DEFAULT_COLLECTION_NAME + + if await self.store.delete(collection=collection, key=key): + self.statistics.get_collection(collection=collection).delete.increment_hit() + return True + + self.statistics.get_collection(collection=collection).delete.increment_miss() + + return False + + @override + async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + collection = collection or DEFAULT_COLLECTION_NAME + + results: list[dict[str, Any] | None] = await self.store.get_many(keys=keys, collection=collection) + + hits = len([result for result in results if result is not None]) + misses = len([result for result in results if result is None]) + + self.statistics.get_collection(collection=collection).get.increment_hit(increment=hits) + self.statistics.get_collection(collection=collection).get.increment_miss(increment=misses) + + return results + + @override + async def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + collection = collection or DEFAULT_COLLECTION_NAME + + await self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + self.statistics.get_collection(collection=collection).put.increment(increment=len(keys)) + + @override + async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + collection = collection or DEFAULT_COLLECTION_NAME + + deleted_count: int = await self.store.delete_many(keys=keys, collection=collection) + + hits = deleted_count + misses = len(keys) - deleted_count + + self.statistics.get_collection(collection=collection).delete.increment_hit(increment=hits) + self.statistics.get_collection(collection=collection).delete.increment_miss(increment=misses) + + return deleted_count + + @override + async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> 
list[tuple[dict[str, Any] | None, float | None]]: + collection = collection or DEFAULT_COLLECTION_NAME + + results: list[tuple[dict[str, Any] | None, float | None]] = await self.store.ttl_many(keys=keys, collection=collection) + + hits = len([result for result in results if result[0] is not None]) + misses = len([result for result in results if result[0] is None]) + + self.statistics.get_collection(collection=collection).ttl.increment_hit(increment=hits) + self.statistics.get_collection(collection=collection).ttl.increment_miss(increment=misses) + + return results diff --git a/tests/adapters/test_pydantic.py b/tests/adapters/test_pydantic.py index 6f0c1520..19c20bc9 100644 --- a/tests/adapters/test_pydantic.py +++ b/tests/adapters/test_pydantic.py @@ -32,7 +32,7 @@ class Order(BaseModel): FIXED_UPDATED_AT: datetime = datetime(year=2021, month=1, day=1, hour=15, minute=0, second=0, tzinfo=timezone.utc) SAMPLE_USER: User = User(name="John Doe", email="john.doe@example.com", age=30) -SAMPLE_PRODUCT: Product = Product(name="Widget", price=29.99, quantity=10, url=AnyHttpUrl("https://example.com")) +SAMPLE_PRODUCT: Product = Product(name="Widget", price=29.99, quantity=10, url=AnyHttpUrl(url="https://example.com")) SAMPLE_ORDER: Order = Order(created_at=datetime.now(), updated_at=datetime.now(), user=SAMPLE_USER, product=SAMPLE_PRODUCT, paid=False) @@ -43,15 +43,15 @@ async def store(self) -> MemoryStore: @pytest.fixture async def user_adapter(self, store: MemoryStore) -> PydanticAdapter[User]: - return PydanticAdapter[User](store_protocol=store, pydantic_model=User) + return PydanticAdapter[User](kv_store=store, pydantic_model=User) @pytest.fixture async def product_adapter(self, store: MemoryStore) -> PydanticAdapter[Product]: - return PydanticAdapter[Product](store_protocol=store, pydantic_model=Product) + return PydanticAdapter[Product](kv_store=store, pydantic_model=Product) @pytest.fixture async def order_adapter(self, store: MemoryStore) -> PydanticAdapter[Order]: - return PydanticAdapter[Order](store_protocol=store, pydantic_model=Order) + return PydanticAdapter[Order](kv_store=store, pydantic_model=Order) async def test_simple_adapter(self, user_adapter: PydanticAdapter[User]): await user_adapter.put(collection="test", key="test", value=SAMPLE_USER) diff --git a/tests/adapters/test_raise.py b/tests/adapters/test_raise.py new file mode 100644 index 00000000..ed2a1f1f --- /dev/null +++ b/tests/adapters/test_raise.py @@ -0,0 +1,37 @@ +import pytest + +from kv_store_adapter.adapters.raise_on_missing import RaiseOnMissingAdapter +from kv_store_adapter.errors import MissingKeyError +from kv_store_adapter.stores.memory.store import MemoryStore + + +@pytest.fixture +async def store() -> MemoryStore: + return MemoryStore() + + +@pytest.fixture +async def adapter(store: MemoryStore) -> RaiseOnMissingAdapter: + return RaiseOnMissingAdapter(kv_store=store) + + +async def test_get(adapter: RaiseOnMissingAdapter): + await adapter.put(collection="test", key="test", value={"test": "test"}) + assert await adapter.get(collection="test", key="test") == {"test": "test"} + + +async def test_get_missing(adapter: RaiseOnMissingAdapter): + with pytest.raises(MissingKeyError): + _ = await adapter.get(collection="test", key="test", raise_on_missing=True) + + +async def test_get_many(adapter: RaiseOnMissingAdapter): + await adapter.put(collection="test", key="test", value={"test": "test"}) + await adapter.put(collection="test", key="test_2", value={"test": "test_2"}) + assert await 
adapter.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + + +async def test_get_many_missing(adapter: RaiseOnMissingAdapter): + await adapter.put(collection="test", key="test", value={"test": "test"}) + with pytest.raises(MissingKeyError): + _ = await adapter.get_many(collection="test", keys=["test", "test_2"], raise_on_missing=True) diff --git a/tests/adapters/test_single_collection.py b/tests/adapters/test_single_collection.py deleted file mode 100644 index 8c18b7d2..00000000 --- a/tests/adapters/test_single_collection.py +++ /dev/null @@ -1,28 +0,0 @@ -import pytest - -from kv_store_adapter.adapters.single_collection import SingleCollectionAdapter -from kv_store_adapter.stores.memory.store import MemoryStore - - -class TestSingleCollectionAdapter: - @pytest.fixture - async def adapter(self) -> SingleCollectionAdapter: - memory_store: MemoryStore = MemoryStore() - return SingleCollectionAdapter(store=memory_store, collection="test") - - async def test_get(self, adapter: SingleCollectionAdapter): - assert await adapter.get(key="test") is None - - async def test_put_get(self, adapter: SingleCollectionAdapter): - await adapter.put(key="test", value={"test": "test"}) - assert await adapter.get(key="test") == {"test": "test"} - - async def test_delete_get(self, adapter: SingleCollectionAdapter): - _ = await adapter.delete(key="test") - assert await adapter.get(key="test") is None - - async def test_put_exists_delete_exists(self, adapter: SingleCollectionAdapter): - await adapter.put(key="test", value={"test": "test"}) - assert await adapter.exists(key="test") - assert await adapter.delete(key="test") - assert await adapter.exists(key="test") is False diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index 498e210c..e4b7ebd3 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -3,15 +3,12 @@ from abc import ABC, abstractmethod from collections.abc import AsyncGenerator from datetime import datetime, timedelta, timezone -from typing import TYPE_CHECKING import pytest -from dirty_equals import IsDatetime, IsList +from pydantic import AnyHttpUrl -from kv_store_adapter.stores.base.unmanaged import BaseKVStore - -if TYPE_CHECKING: - from kv_store_adapter.types import TTLInfo +from kv_store_adapter.errors import SerializationError +from kv_store_adapter.stores.base import BaseStore def now() -> datetime: @@ -22,86 +19,86 @@ def now_plus(seconds: int) -> datetime: return now() + timedelta(seconds=seconds) +def is_around(value: float, delta: float = 1) -> bool: + return value - delta < value < value + delta + + class BaseStoreTests(ABC): async def eventually_consistent(self) -> None: # noqa: B027 """Subclasses can override this to wait for eventually consistent operations.""" @pytest.fixture @abstractmethod - async def store(self) -> BaseKVStore | AsyncGenerator[BaseKVStore, None]: ... + async def store(self) -> BaseStore | AsyncGenerator[BaseStore, None]: ... 
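For context, each backend's test class is expected to subclass this shared suite and supply the abstract fixture above. A minimal sketch with MemoryStore as an example backend — the import wiring is illustrative, not part of the diff:

    import pytest

    from kv_store_adapter.stores.memory.store import MemoryStore
    from tests.stores.conftest import BaseStoreTests

    class TestMemoryStore(BaseStoreTests):
        @pytest.fixture
        async def store(self) -> MemoryStore:
            # Fresh store per test; every inherited test then runs against this backend.
            return MemoryStore()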
- async def test_empty_get(self, store: BaseKVStore): + async def test_empty_get(self, store: BaseStore): """Tests that the get method returns None from an empty store.""" assert await store.get(collection="test", key="test") is None - async def test_empty_set(self, store: BaseKVStore): - """Tests that the set method does not raise an exception when called on a new store.""" + async def test_empty_put(self, store: BaseStore): + """Tests that the put method does not raise an exception when called on a new store.""" await store.put(collection="test", key="test", value={"test": "test"}) - async def test_empty_exists(self, store: BaseKVStore): - """Tests that the exists method returns False from an empty store.""" - assert await store.exists(collection="test", key="test") is False - - async def test_empty_ttl(self, store: BaseKVStore): + async def test_empty_ttl(self, store: BaseStore): """Tests that the ttl method returns None from an empty store.""" - assert await store.ttl(collection="test", key="test") is None + assert await store.ttl(collection="test", key="test") == (None, None) - async def test_empty_keys(self, store: BaseKVStore): - """Tests that the keys method returns an empty list from an empty store.""" - assert await store.keys(collection="test") == [] + async def test_put_serialization_errors(self, store: BaseStore): + """Tests that the put method raises a SerializationError for a value it cannot serialize.""" + with pytest.raises(SerializationError): + await store.put(collection="test", key="test", value={"test": AnyHttpUrl("https://test.com")}) - async def test_empty_clear_collection(self, store: BaseKVStore): - """Tests that the clear collection method returns 0 from an empty store.""" - assert await store.clear_collection(collection="test") == 0 - - async def test_empty_list_collections(self, store: BaseKVStore): - """Tests that the list collections method returns an empty list from an empty store.""" - assert await store.list_collections() == [] - - async def test_empty_cull(self, store: BaseKVStore): - """Tests that the cull method does not raise an exception when called on an empty store.""" - await store.cull() - - async def test_get_set_get(self, store: BaseKVStore): + async def test_get_put_get(self, store: BaseStore): assert await store.get(collection="test", key="test") is None await store.put(collection="test", key="test", value={"test": "test"}) assert await store.get(collection="test", key="test") == {"test": "test"} - async def test_set_exists_delete_exists(self, store: BaseKVStore): - await store.put(collection="test", key="test", value={"test": "test"}) - assert await store.exists(collection="test", key="test") - assert await store.delete(collection="test", key="test") - assert await store.exists(collection="test", key="test") is False + async def test_put_many_get(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get(collection="test", key="test") == {"test": "test"} + assert await store.get(collection="test", key="test_2") == {"test": "test_2"} - async def test_get_set_get_delete_get(self, store: BaseKVStore): - """Tests that the get, set, delete, and get methods work together to store and retrieve a value from an empty store.""" + async def test_put_many_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test",
keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - assert await store.ttl(collection="test", key="test") is None + async def test_put_put_get_many(self, store: BaseStore): + await store.put(collection="test", key="test", value={"test": "test"}) + await store.put(collection="test", key="test_2", value={"test": "test_2"}) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + async def test_put_put_get_many_missing_one(self, store: BaseStore): await store.put(collection="test", key="test", value={"test": "test"}) + await store.put(collection="test", key="test_2", value={"test": "test_2"}) + assert await store.get_many(collection="test", keys=["test", "test_2", "test_3"]) == [{"test": "test"}, {"test": "test_2"}, None] + async def test_put_get_delete_get(self, store: BaseStore): + await store.put(collection="test", key="test", value={"test": "test"}) assert await store.get(collection="test", key="test") == {"test": "test"} - assert await store.delete(collection="test", key="test") - assert await store.get(collection="test", key="test") is None - async def test_get_set_keys_delete_keys_get(self, store: BaseKVStore): - """Tests that the get, set, keys, delete, keys, clear, and get methods work together to store and retrieve a value from an empty store.""" + async def test_put_many_get_many_delete_many_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert await store.delete_many(collection="test", keys=["test", "test_2"]) == 2 + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + async def test_get_put_get_delete_get(self, store: BaseStore): + """Tests that the get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" + + assert await store.get(collection="test", key="test") is None await store.put(collection="test", key="test", value={"test": "test"}) + assert await store.get(collection="test", key="test") == {"test": "test"} - assert await store.keys(collection="test") == ["test"] assert await store.delete(collection="test", key="test") - await self.eventually_consistent() - assert await store.keys(collection="test") == [] - assert await store.get(collection="test", key="test") is None - async def test_get_set_get_set_delete_get(self, store: BaseKVStore): - """Tests that the get, set, get, set, delete, and get methods work together to store and retrieve a value from an empty store.""" + async def test_get_put_get_put_delete_get(self, store: BaseStore): + """Tests that the get, put, get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" await store.put(collection="test", key="test", value={"test": "test"}) assert await store.get(collection="test", key="test") == {"test": "test"} @@ -111,53 +108,53 @@ async def test_get_set_get_set_delete_get(self, store: BaseKVStore): assert await store.delete(collection="test", key="test") assert await store.get(collection="test", key="test") is None - async def test_set_ttl_get_ttl(self, store: BaseKVStore): - """Tests that the set and get ttl methods work together to store and retrieve a ttl from an empty store.""" + async def test_put_many_delete_delete_get_many(self, store: BaseStore): + await store.put_many(collection="test", 
keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert await store.delete(collection="test", key="test") + assert await store.delete(collection="test", key="test_2") + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + async def test_put_ttl_get_ttl(self, store: BaseStore): + """Tests that the put and get ttl methods work together to store and retrieve a ttl from an empty store.""" await store.put(collection="test", key="test", value={"test": "test"}, ttl=100) - ttl_info: TTLInfo | None = await store.ttl(collection="test", key="test") - assert ttl_info is not None - assert ttl_info.ttl == 100 - - assert ttl_info.created_at is not None - assert ttl_info.created_at == IsDatetime(approx=now()) - assert ttl_info.expires_at is not None - assert ttl_info.expires_at == IsDatetime(approx=now_plus(seconds=100)) - - assert ttl_info.collection == "test" - assert ttl_info.key == "test" - - async def test_list_collections(self, store: BaseKVStore): - """Tests that the list collections method returns an empty list from an empty store.""" - assert await store.list_collections() == [] - - async def test_cull(self, store: BaseKVStore): - """Tests that the cull method does not raise an exception when called on an empty store.""" - await store.cull() - - async def test_set_set_list_collections(self, store: BaseKVStore): - """Tests that a list collections call after adding keys to two distinct collections returns the correct collections.""" - await store.put(collection="test_one", key="test_one", value={"test": "test"}) - await self.eventually_consistent() - assert await store.list_collections() == IsList("test_one", check_order=False) - - assert await store.get(collection="test_one", key="test_one") == {"test": "test"} - await self.eventually_consistent() - assert await store.list_collections() == IsList("test_one", check_order=False) - - await store.put(collection="test_two", key="test_two", value={"test": "test"}) - await self.eventually_consistent() - assert await store.list_collections() == IsList("test_one", "test_two", check_order=False) - - assert await store.get(collection="test_two", key="test_two") == {"test": "test"} - await self.eventually_consistent() - assert await store.list_collections() == IsList("test_one", "test_two", check_order=False) - - async def test_set_expired_get_none(self, store: BaseKVStore): - """Tests that a set call with a negative ttl will return None when getting the key.""" + value, ttl = await store.ttl(collection="test", key="test") + + assert value == {"test": "test"} + assert ttl is not None + assert ttl < 100 + assert ttl > 90 + + async def test_negative_ttl(self, store: BaseStore): + """Tests that a negative ttl will return None when getting the key.""" + await store.put(collection="test", key="test", value={"test": "test"}, ttl=-100) + + async def test_put_expired_get_none(self, store: BaseStore): + """Tests that a put call with a negative ttl will return None when getting the key.""" await store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=-100) assert await store.get(collection="test_collection", key="test_key") is None - async def test_not_unbounded(self, store: BaseKVStore): + async def test_long_collection_name(self, store: BaseStore): + """Tests that a long collection name will not raise an error.""" + await store.put(collection="test_collection" * 
100, key="test_key", value={"test": "test"}) + assert await store.get(collection="test_collection" * 100, key="test_key") == {"test": "test"} + + async def test_special_characters_in_collection_name(self, store: BaseStore): + """Tests that a special characters in the collection name will not raise an error.""" + await store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"}) + assert await store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} + + async def test_long_key_name(self, store: BaseStore): + """Tests that a long key name will not raise an error.""" + await store.put(collection="test_collection", key="test_key" * 100, value={"test": "test"}) + assert await store.get(collection="test_collection", key="test_key" * 100) == {"test": "test"} + + async def test_special_characters_in_key_name(self, store: BaseStore): + """Tests that a special characters in the key name will not raise an error.""" + await store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) + assert await store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} + + async def test_not_unbounded(self, store: BaseStore): """Tests that the store is not unbounded.""" for i in range(5000): @@ -167,10 +164,10 @@ async def test_not_unbounded(self, store: BaseKVStore): assert await store.get(collection="test_collection", key="test_key_0") is None assert await store.get(collection="test_collection", key="test_key_4999") is not None - async def test_concurrent_operations(self, store: BaseKVStore): + async def test_concurrent_operations(self, store: BaseStore): """Tests that the store can handle concurrent operations.""" - async def worker(store: BaseKVStore, worker_id: int): + async def worker(store: BaseStore, worker_id: int): for i in range(100): assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None @@ -184,5 +181,3 @@ async def worker(store: BaseKVStore, worker_id: int): assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(1)]) - - assert await store.keys(collection="test_collection") == [] diff --git a/tests/stores/disk/test_disk.py b/tests/stores/disk/test_disk.py index 7fe3ce9f..af7f3bc2 100644 --- a/tests/stores/disk/test_disk.py +++ b/tests/stores/disk/test_disk.py @@ -10,9 +10,9 @@ TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB -class TestMemoryStore(BaseStoreTests): +class TestDiskStore(BaseStoreTests): @override @pytest.fixture async def store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield DiskStore(directory=temp_dir, size_limit=TEST_SIZE_LIMIT) + yield DiskStore(directory=(temp_dir), max_size=TEST_SIZE_LIMIT) diff --git a/tests/stores/disk/test_multi_disk.py b/tests/stores/disk/test_multi_disk.py new file mode 100644 index 00000000..dce16b02 --- /dev/null +++ b/tests/stores/disk/test_multi_disk.py @@ -0,0 +1,19 @@ +import tempfile +from collections.abc import AsyncGenerator +from pathlib import Path + +import pytest +from typing_extensions import override + +from kv_store_adapter.stores.disk.multi_store import MultiDiskStore +from tests.stores.conftest import BaseStoreTests + +TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB + + +class TestMultiDiskStore(BaseStoreTests): + @override + @pytest.fixture + async def store(self) -> AsyncGenerator[MultiDiskStore, None]: + with tempfile.TemporaryDirectory() as temp_dir: 
+ yield MultiDiskStore(base_directory=Path(temp_dir), max_size=TEST_SIZE_LIMIT) diff --git a/tests/stores/elasticsearch/test_elasticsearch.py b/tests/stores/elasticsearch/test_elasticsearch.py index 440ffcbb..d4f79756 100644 --- a/tests/stores/elasticsearch/test_elasticsearch.py +++ b/tests/stores/elasticsearch/test_elasticsearch.py @@ -1,4 +1,3 @@ -import asyncio import os from collections.abc import AsyncGenerator @@ -6,7 +5,7 @@ from elasticsearch import AsyncElasticsearch from typing_extensions import override -from kv_store_adapter.stores.base.unmanaged import BaseKVStore +from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.elasticsearch import ElasticsearchStore from tests.stores.conftest import BaseStoreTests @@ -30,10 +29,6 @@ async def elasticsearch_client() -> AsyncGenerator[AsyncElasticsearch, None]: @pytest.mark.skipif(os.getenv("ES_URL") is None, reason="Elasticsearch is not configured") class TestElasticsearchStore(BaseStoreTests): - @override - async def eventually_consistent(self) -> None: - await asyncio.sleep(5) - @override @pytest.fixture async def store(self, elasticsearch_client: AsyncElasticsearch) -> ElasticsearchStore: @@ -42,8 +37,8 @@ async def store(self, elasticsearch_client: AsyncElasticsearch) -> Elasticsearch @pytest.mark.skip(reason="Distributed Caches are unbounded") @override - async def test_not_unbounded(self, store: BaseKVStore): ... + async def test_not_unbounded(self, store: BaseStore): ... @pytest.mark.skip(reason="Skip concurrent tests on distributed caches") @override - async def test_concurrent_operations(self, store: BaseKVStore): ... + async def test_concurrent_operations(self, store: BaseStore): ... diff --git a/tests/stores/memcached/test_memcached.py b/tests/stores/memcached/test_memcached.py new file mode 100644 index 00000000..d24464ac --- /dev/null +++ b/tests/stores/memcached/test_memcached.py @@ -0,0 +1,73 @@ +import asyncio +import contextlib +from collections.abc import AsyncGenerator + +import pytest +from aiomcache import Client +from typing_extensions import override + +from kv_store_adapter.stores.base import BaseStore +from kv_store_adapter.stores.memcached import MemcachedStore +from tests.stores.conftest import BaseStoreTests + +# Memcached test configuration +MEMCACHED_HOST = "localhost" +MEMCACHED_PORT = 11211 + +WAIT_FOR_MEMCACHED_TIMEOUT = 30 + + +async def ping_memcached() -> bool: + client = Client(host=MEMCACHED_HOST, port=MEMCACHED_PORT) + try: + _ = await client.set(b"ping", b"1", exptime=1) + _ = await client.get(b"ping") + except Exception: + return False + else: + return True + finally: + with contextlib.suppress(Exception): + await client.close() + + +async def wait_memcached() -> bool: + for _ in range(WAIT_FOR_MEMCACHED_TIMEOUT): + if await ping_memcached(): + return True + await asyncio.sleep(delay=1) + return False + + +class MemcachedFailedToStartError(Exception): + pass + + +class TestMemcachedStore(BaseStoreTests): + @pytest.fixture(autouse=True, scope="session") + async def setup_memcached(self) -> AsyncGenerator[None, None]: + _ = await asyncio.create_subprocess_exec("docker", "stop", "memcached-test") + _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "memcached-test") + + process = await asyncio.create_subprocess_exec( + "docker", "run", "-d", "--name", "memcached-test", "-p", "11211:11211", "memcached:1.6-alpine" + ) + _ = await process.wait() + if not await wait_memcached(): + msg = "Memcached failed to start" + raise MemcachedFailedToStartError(msg) + try: + 
yield + finally: + _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "memcached-test") + + @override + @pytest.fixture + async def store(self, setup_memcached: None) -> MemcachedStore: + store = MemcachedStore(host=MEMCACHED_HOST, port=MEMCACHED_PORT) + _ = await store._client.flush_all() # pyright: ignore[reportPrivateUsage] + return store + + @pytest.mark.skip(reason="Distributed Caches are unbounded") + @override + async def test_not_unbounded(self, store: BaseStore): ... diff --git a/tests/stores/redis/test_redis.py b/tests/stores/redis/test_redis.py index 762a1520..d634a708 100644 --- a/tests/stores/redis/test_redis.py +++ b/tests/stores/redis/test_redis.py @@ -5,7 +5,7 @@ from redis.asyncio import Redis from typing_extensions import override -from kv_store_adapter.stores.base.unmanaged import BaseKVStore +from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.redis import RedisStore from tests.stores.conftest import BaseStoreTests @@ -85,4 +85,4 @@ async def test_redis_client_connection(self): @pytest.mark.skip(reason="Distributed Caches are unbounded") @override - async def test_not_unbounded(self, store: BaseKVStore): ... + async def test_not_unbounded(self, store: BaseStore): ... diff --git a/tests/stores/simple/test_json_store.py b/tests/stores/simple/test_json_store.py deleted file mode 100644 index dd40967d..00000000 --- a/tests/stores/simple/test_json_store.py +++ /dev/null @@ -1,12 +0,0 @@ -import pytest -from typing_extensions import override - -from kv_store_adapter.stores.simple.json_store import SimpleJSONStore -from tests.stores.conftest import BaseStoreTests - - -class TestSimpleJSONStore(BaseStoreTests): - @override - @pytest.fixture - async def store(self) -> SimpleJSONStore: - return SimpleJSONStore() diff --git a/tests/stores/simple/test_store.py b/tests/stores/simple/test_store.py index 91af7305..f45165fc 100644 --- a/tests/stores/simple/test_store.py +++ b/tests/stores/simple/test_store.py @@ -1,7 +1,6 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.base.unmanaged import BaseKVStore from kv_store_adapter.stores.simple.store import SimpleStore from tests.stores.conftest import BaseStoreTests @@ -11,7 +10,3 @@ class TestSimpleStore(BaseStoreTests): @pytest.fixture async def store(self) -> SimpleStore: return SimpleStore() - - @pytest.mark.skip(reason="SimpleStore does not track TTL explicitly") - @override - async def test_set_ttl_get_ttl(self, store: BaseKVStore): ...
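The wrapper tests that follow exercise the reworked TTL API from this series: store.ttl(...) now returns a (value, ttl_seconds) tuple instead of a TTLInfo object, and TTLClampWrapper clamps requested TTLs into [min_ttl, max_ttl]. A minimal usage sketch, assuming only the constructor arguments and behavior these diffs assert (illustrative, not part of any patch):

    import asyncio

    from kv_store_adapter.stores.memory.store import MemoryStore
    from kv_store_adapter.wrappers.clamp_ttl import TTLClampWrapper


    async def main() -> None:
        # Clamp every requested TTL into [min_ttl, max_ttl]; entries written
        # without a TTL get missing_ttl instead.
        store = TTLClampWrapper(store=MemoryStore(), min_ttl=0, max_ttl=100, missing_ttl=50)

        # ttl=1000 is clamped down to max_ttl=100 on the way in.
        await store.put(collection="demo", key="k", value={"n": 1}, ttl=1000)
        value, ttl = await store.ttl(collection="demo", key="k")
        assert value == {"n": 1}
        assert ttl is not None and ttl <= 100

        # No TTL supplied, so missing_ttl=50 is applied.
        await store.put(collection="demo", key="k2", value={"n": 2}, ttl=None)
        _, ttl2 = await store.ttl(collection="demo", key="k2")
        assert ttl2 is not None and ttl2 <= 50


    asyncio.run(main())

The missing_ttl argument is what test_put_missing_ttl below checks: a put with ttl=None still comes back with a concrete expiry.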
diff --git a/tests/stores/wrappers/test_clamp_ttl.py b/tests/stores/wrappers/test_clamp_ttl.py index 65c4c8bc..1fc48695 100644 --- a/tests/stores/wrappers/test_clamp_ttl.py +++ b/tests/stores/wrappers/test_clamp_ttl.py @@ -1,15 +1,10 @@ -from typing import TYPE_CHECKING - import pytest -from dirty_equals import IsDatetime +from dirty_equals import IsFloat from typing_extensions import override from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.stores.wrappers.clamp_ttl import TTLClampWrapper -from tests.stores.conftest import BaseStoreTests, now, now_plus - -if TYPE_CHECKING: - from kv_store_adapter.types import TTLInfo +from kv_store_adapter.wrappers.clamp_ttl import TTLClampWrapper +from tests.stores.conftest import BaseStoreTests class TestTTLClampWrapper(BaseStoreTests): @@ -28,15 +23,10 @@ async def test_put_below_min_ttl(self, memory_store: MemoryStore): await ttl_clamp_store.put(collection="test", key="test", value={"test": "test"}, ttl=5) assert await ttl_clamp_store.get(collection="test", key="test") is not None - ttl_info: TTLInfo | None = await ttl_clamp_store.ttl(collection="test", key="test") - assert ttl_info is not None - assert ttl_info.ttl == 50 - - assert ttl_info.created_at is not None - assert ttl_info.created_at == IsDatetime(approx=now()) - - assert ttl_info.expires_at is not None - assert ttl_info.expires_at == IsDatetime(approx=now_plus(seconds=50)) + value, ttl = await ttl_clamp_store.ttl(collection="test", key="test") + assert value is not None + assert ttl is not None + assert ttl == IsFloat(approx=50) async def test_put_above_max_ttl(self, memory_store: MemoryStore): ttl_clamp_store: TTLClampWrapper = TTLClampWrapper(store=memory_store, min_ttl=0, max_ttl=100) @@ -44,15 +34,10 @@ async def test_put_above_max_ttl(self, memory_store: MemoryStore): await ttl_clamp_store.put(collection="test", key="test", value={"test": "test"}, ttl=1000) assert await ttl_clamp_store.get(collection="test", key="test") is not None - ttl_info: TTLInfo | None = await ttl_clamp_store.ttl(collection="test", key="test") - assert ttl_info is not None - assert ttl_info.ttl == 100 - - assert ttl_info.created_at is not None - assert ttl_info.created_at == IsDatetime(approx=now()) - - assert ttl_info.expires_at is not None - assert ttl_info.expires_at == IsDatetime(approx=now_plus(seconds=100)) + value, ttl = await ttl_clamp_store.ttl(collection="test", key="test") + assert value is not None + assert ttl is not None + assert ttl == IsFloat(approx=100) async def test_put_missing_ttl(self, memory_store: MemoryStore): ttl_clamp_store: TTLClampWrapper = TTLClampWrapper(store=memory_store, min_ttl=0, max_ttl=100, missing_ttl=50) @@ -60,12 +45,8 @@ async def test_put_missing_ttl(self, memory_store: MemoryStore): await ttl_clamp_store.put(collection="test", key="test", value={"test": "test"}, ttl=None) assert await ttl_clamp_store.get(collection="test", key="test") is not None - ttl_info: TTLInfo | None = await ttl_clamp_store.ttl(collection="test", key="test") - assert ttl_info is not None - assert ttl_info.ttl == 50 - - assert ttl_info.expires_at is not None - assert ttl_info.expires_at == IsDatetime(approx=now_plus(seconds=50)) + value, ttl = await ttl_clamp_store.ttl(collection="test", key="test") + assert value is not None + assert ttl is not None - assert ttl_info.created_at is not None - assert ttl_info.created_at == IsDatetime(approx=now()) + assert ttl == IsFloat(approx=50) diff --git a/tests/stores/wrappers/test_passthrough.py 
b/tests/stores/wrappers/test_passthrough_cache.py similarity index 78% rename from tests/stores/wrappers/test_passthrough.py rename to tests/stores/wrappers/test_passthrough_cache.py index cdb76be8..46dc4892 100644 --- a/tests/stores/wrappers/test_passthrough.py +++ b/tests/stores/wrappers/test_passthrough_cache.py @@ -6,17 +6,17 @@ from kv_store_adapter.stores.disk.store import DiskStore from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.stores.wrappers.passthrough_cache import PassthroughCacheWrapper +from kv_store_adapter.wrappers.passthrough_cache import PassthroughCacheWrapper from tests.stores.conftest import BaseStoreTests DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB -class TestPrefixCollectionWrapper(BaseStoreTests): +class TestPassthroughCacheWrapper(BaseStoreTests): @pytest.fixture async def primary_store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield DiskStore(directory=temp_dir, size_limit=DISK_STORE_SIZE_LIMIT) + yield DiskStore(directory=temp_dir, max_size=DISK_STORE_SIZE_LIMIT) @pytest.fixture async def cache_store(self) -> MemoryStore: diff --git a/tests/stores/wrappers/test_prefix_collection.py b/tests/stores/wrappers/test_prefix_collection.py index 15fc19f3..353cfecd 100644 --- a/tests/stores/wrappers/test_prefix_collection.py +++ b/tests/stores/wrappers/test_prefix_collection.py @@ -2,13 +2,13 @@ from typing_extensions import override from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.stores.wrappers.prefix_collection import PrefixCollectionWrapper +from kv_store_adapter.wrappers.prefix_collections import PrefixCollectionsWrapper from tests.stores.conftest import BaseStoreTests class TestPrefixCollectionWrapper(BaseStoreTests): @override @pytest.fixture - async def store(self) -> PrefixCollectionWrapper: + async def store(self) -> PrefixCollectionsWrapper: memory_store: MemoryStore = MemoryStore() - return PrefixCollectionWrapper(store=memory_store, prefix="collection_prefix") + return PrefixCollectionsWrapper(store=memory_store, prefix="collection_prefix", default_collection="default_collection") diff --git a/tests/stores/wrappers/test_prefix_key.py b/tests/stores/wrappers/test_prefix_key.py index 3868e420..1e6ffa18 100644 --- a/tests/stores/wrappers/test_prefix_key.py +++ b/tests/stores/wrappers/test_prefix_key.py @@ -2,13 +2,13 @@ from typing_extensions import override from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.stores.wrappers.prefix_key import PrefixKeyWrapper +from kv_store_adapter.wrappers.prefix_keys import PrefixKeysWrapper from tests.stores.conftest import BaseStoreTests class TestPrefixKeyWrapper(BaseStoreTests): @override @pytest.fixture - async def store(self) -> PrefixKeyWrapper: + async def store(self) -> PrefixKeysWrapper: memory_store: MemoryStore = MemoryStore() - return PrefixKeyWrapper(store=memory_store, prefix="key_prefix") + return PrefixKeysWrapper(store=memory_store, prefix="key_prefix") diff --git a/tests/stores/wrappers/test_single_collection.py b/tests/stores/wrappers/test_single_collection.py index 963a4f29..42f98805 100644 --- a/tests/stores/wrappers/test_single_collection.py +++ b/tests/stores/wrappers/test_single_collection.py @@ -1,9 +1,8 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.base.unmanaged import BaseKVStore from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.stores.wrappers.single_collection import 
SingleCollectionWrapper +from kv_store_adapter.wrappers.single_collection import SingleCollectionWrapper from tests.stores.conftest import BaseStoreTests @@ -12,20 +11,4 @@ class TestSingleCollectionWrapper(BaseStoreTests): @pytest.fixture async def store(self) -> SingleCollectionWrapper: memory_store: MemoryStore = MemoryStore() - return SingleCollectionWrapper(store=memory_store, collection="test") - - @pytest.mark.skip(reason="SingleCollectionWrapper does not support collection operations") - @override - async def test_empty_clear_collection(self, store: BaseKVStore): ... - - @pytest.mark.skip(reason="SingleCollectionWrapper does not support collection operations") - @override - async def test_empty_list_collections(self, store: BaseKVStore): ... - - @pytest.mark.skip(reason="SingleCollectionWrapper does not support collection operations") - @override - async def test_list_collections(self, store: BaseKVStore): ... - - @pytest.mark.skip(reason="SingleCollectionWrapper does not support collection operations") - @override - async def test_set_set_list_collections(self, store: BaseKVStore): ... + return SingleCollectionWrapper(store=memory_store, single_collection="test", default_collection="test") diff --git a/tests/stores/wrappers/test_statistics.py b/tests/stores/wrappers/test_statistics.py new file mode 100644 index 00000000..6b6983ef --- /dev/null +++ b/tests/stores/wrappers/test_statistics.py @@ -0,0 +1,14 @@ +import pytest +from typing_extensions import override + +from kv_store_adapter.stores.memory.store import MemoryStore +from kv_store_adapter.wrappers.statistics import StatisticsWrapper +from tests.stores.conftest import BaseStoreTests + + +class TestStatisticsWrapper(BaseStoreTests): + @override + @pytest.fixture + async def store(self) -> StatisticsWrapper: + memory_store: MemoryStore = MemoryStore() + return StatisticsWrapper(store=memory_store) diff --git a/tests/test_types.py b/tests/test_types.py index c056bcb2..e3dd6cb4 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -1,26 +1,5 @@ -from datetime import datetime, timedelta, timezone - from kv_store_adapter.stores.memory import MemoryStore -from kv_store_adapter.types import KVStore, TTLInfo - - -def test_ttl_info(): - created_at = datetime.now(tz=timezone.utc) - expires_at = datetime.now(tz=timezone.utc) + timedelta(seconds=100) - ttl_info = TTLInfo(collection="test", key="test", created_at=created_at, ttl=100, expires_at=expires_at) - - assert ttl_info.expires_at is not None - assert ttl_info.expires_at > datetime.now(tz=timezone.utc) - assert ttl_info.expires_at < datetime.now(tz=timezone.utc) + timedelta(seconds=100) - - assert ttl_info.created_at is not None - assert ttl_info.created_at < datetime.now(tz=timezone.utc) - assert ttl_info.created_at > datetime.now(tz=timezone.utc) - timedelta(seconds=5) - - assert ttl_info.collection == "test" - assert ttl_info.key == "test" - - assert ttl_info.is_expired is False +from kv_store_adapter.types import KVStore async def test_kv_store_protocol(): diff --git a/uv.lock b/uv.lock index 7504c8d1..b0bf54d5 100644 --- a/uv.lock +++ b/uv.lock @@ -97,6 +97,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, ] +[[package]] +name = "aiomcache" +version = "0.8.2" +source = { registry = "https://pypi.org/simple" 
} +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/0a/914d8df1002d88ca70679d192f6e16d113e6b5cbcc13c51008db9230025f/aiomcache-0.8.2.tar.gz", hash = "sha256:43b220d7f499a32a71871c4f457116eb23460fa216e69c1d32b81e3209e51359", size = 10640, upload-time = "2024-05-07T15:03:14.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/f8/78455f6377cbe85f335f4dbd40a807dafb72bd5fa05eb946f2ad0cec3d40/aiomcache-0.8.2-py3-none-any.whl", hash = "sha256:9d78d6b6e74e775df18b350b1cddfa96bd2f0a44d49ad27fa87759a3469cef5e", size = 10145, upload-time = "2024-05-07T15:03:12.003Z" }, +] + [[package]] name = "aiosignal" version = "1.4.0" @@ -386,7 +398,7 @@ wheels = [ [[package]] name = "inline-snapshot" -version = "0.29.0" +version = "0.29.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asttokens" }, @@ -395,24 +407,28 @@ dependencies = [ { name = "rich" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/4d/8e3b89f00df7925942acb091809ca32395373dc579517abacec5e242e8bd/inline_snapshot-0.29.0.tar.gz", hash = "sha256:8bac016fc8ff4638a6cdebca96d7042fecde471f0574d360de11f552ba77d6b5", size = 349586, upload-time = "2025-09-15T07:03:05.455Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/76/b48796a7b97a6f3286dc0a0b9f2e7e5dea71d8c86dca7106bb91c1484d0d/inline_snapshot-0.29.1.tar.gz", hash = "sha256:17e73cb6864fa067aa94c2c1f290bbdeb25b2b807c4bdf53eee39a144f92a5a7", size = 350236, upload-time = "2025-09-24T19:47:15.838Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/eb/5ab8628a3289fab7ab28ccd59ef6d3ef4b28706c3065388df9f975ed29b6/inline_snapshot-0.29.0-py3-none-any.whl", hash = "sha256:aaea04480f1b5ec741b9025da45c00cb166d8791f01bed0f5ea7eabd1f9784cd", size = 70235, upload-time = "2025-09-15T07:03:03.616Z" }, + { url = "https://files.pythonhosted.org/packages/f2/46/1938d92fca179c0c81268c68073bef6339054be5779cf3f7de00bad6bf91/inline_snapshot-0.29.1-py3-none-any.whl", hash = "sha256:3fd02adb25be551a6245c9787c90fea33a578e051524804ef92fab5017cf4f16", size = 70763, upload-time = "2025-09-24T19:47:14.589Z" }, ] [[package]] name = "kv-store-adapter" -version = "0.1.2" +version = "0.2.0" source = { editable = "." 
} [package.optional-dependencies] disk = [ { name = "diskcache" }, + { name = "pathvalidate" }, ] elasticsearch = [ { name = "aiohttp" }, { name = "elasticsearch" }, ] +memcached = [ + { name = "aiomcache" }, +] memory = [ { name = "cachetools" }, ] @@ -429,7 +445,7 @@ dev = [ { name = "dirty-equals" }, { name = "diskcache-stubs" }, { name = "inline-snapshot" }, - { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memory", "pydantic", "redis"] }, + { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memcached", "memory", "pydantic", "redis"] }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-dotenv" }, @@ -444,13 +460,15 @@ lint = [ [package.metadata] requires-dist = [ { name = "aiohttp", marker = "extra == 'elasticsearch'", specifier = ">=3.12" }, + { name = "aiomcache", marker = "extra == 'memcached'", specifier = ">=0.8.0" }, { name = "cachetools", marker = "extra == 'memory'", specifier = ">=6.0.0" }, { name = "diskcache", marker = "extra == 'disk'", specifier = ">=5.6.0" }, { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, + { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, ] -provides-extras = ["memory", "disk", "redis", "elasticsearch", "pydantic"] +provides-extras = ["memory", "disk", "redis", "memcached", "elasticsearch", "pydantic"] [package.metadata.requires-dev] dev = [ @@ -458,7 +476,7 @@ dev = [ { name = "dirty-equals", specifier = ">=0.10.0" }, { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, { name = "inline-snapshot", specifier = ">=0.29.0" }, - { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch"] }, + { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch", "memcached"] }, { name = "kv-store-adapter", extras = ["pydantic"] }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -629,6 +647,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathvalidate" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/2a/52a8da6fe965dea6192eb716b357558e103aea0a1e9a8352ad575a8406ca/pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177", size = 63262, upload-time = "2025-06-15T09:07:20.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/70/875f4a23bfc4731703a5835487d0d2fb999031bd415e7d17c0ae615c18b7/pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f", size = 24305, upload-time = "2025-06-15T09:07:19.117Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -983,28 +1010,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.13.1" +version = "0.13.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ab/33/c8e89216845615d14d2d42ba2bee404e7206a8db782f33400754f3799f05/ruff-0.13.1.tar.gz", hash = "sha256:88074c3849087f153d4bb22e92243ad4c1b366d7055f98726bc19aa08dc12d51", size = 5397987, upload-time = 
"2025-09-18T19:52:44.33Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/41/ca37e340938f45cfb8557a97a5c347e718ef34702546b174e5300dbb1f28/ruff-0.13.1-py3-none-linux_armv6l.whl", hash = "sha256:b2abff595cc3cbfa55e509d89439b5a09a6ee3c252d92020bd2de240836cf45b", size = 12304308, upload-time = "2025-09-18T19:51:56.253Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/ba378ef4129415066c3e1c80d84e539a0d52feb250685091f874804f28af/ruff-0.13.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4ee9f4249bf7f8bb3984c41bfaf6a658162cdb1b22e3103eabc7dd1dc5579334", size = 12937258, upload-time = "2025-09-18T19:52:00.184Z" }, - { url = "https://files.pythonhosted.org/packages/8d/b6/ec5e4559ae0ad955515c176910d6d7c93edcbc0ed1a3195a41179c58431d/ruff-0.13.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c5da4af5f6418c07d75e6f3224e08147441f5d1eac2e6ce10dcce5e616a3bae", size = 12214554, upload-time = "2025-09-18T19:52:02.753Z" }, - { url = "https://files.pythonhosted.org/packages/70/d6/cb3e3b4f03b9b0c4d4d8f06126d34b3394f6b4d764912fe80a1300696ef6/ruff-0.13.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80524f84a01355a59a93cef98d804e2137639823bcee2931f5028e71134a954e", size = 12448181, upload-time = "2025-09-18T19:52:05.279Z" }, - { url = "https://files.pythonhosted.org/packages/d2/ea/bf60cb46d7ade706a246cd3fb99e4cfe854efa3dfbe530d049c684da24ff/ruff-0.13.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff7f5ce8d7988767dd46a148192a14d0f48d1baea733f055d9064875c7d50389", size = 12104599, upload-time = "2025-09-18T19:52:07.497Z" }, - { url = "https://files.pythonhosted.org/packages/2d/3e/05f72f4c3d3a69e65d55a13e1dd1ade76c106d8546e7e54501d31f1dc54a/ruff-0.13.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c55d84715061f8b05469cdc9a446aa6c7294cd4bd55e86a89e572dba14374f8c", size = 13791178, upload-time = "2025-09-18T19:52:10.189Z" }, - { url = "https://files.pythonhosted.org/packages/81/e7/01b1fc403dd45d6cfe600725270ecc6a8f8a48a55bc6521ad820ed3ceaf8/ruff-0.13.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ac57fed932d90fa1624c946dc67a0a3388d65a7edc7d2d8e4ca7bddaa789b3b0", size = 14814474, upload-time = "2025-09-18T19:52:12.866Z" }, - { url = "https://files.pythonhosted.org/packages/fa/92/d9e183d4ed6185a8df2ce9faa3f22e80e95b5f88d9cc3d86a6d94331da3f/ruff-0.13.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c366a71d5b4f41f86a008694f7a0d75fe409ec298685ff72dc882f882d532e36", size = 14217531, upload-time = "2025-09-18T19:52:15.245Z" }, - { url = "https://files.pythonhosted.org/packages/3b/4a/6ddb1b11d60888be224d721e01bdd2d81faaf1720592858ab8bac3600466/ruff-0.13.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ea9d1b5ad3e7a83ee8ebb1229c33e5fe771e833d6d3dcfca7b77d95b060d38", size = 13265267, upload-time = "2025-09-18T19:52:17.649Z" }, - { url = "https://files.pythonhosted.org/packages/81/98/3f1d18a8d9ea33ef2ad508f0417fcb182c99b23258ec5e53d15db8289809/ruff-0.13.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f70202996055b555d3d74b626406476cc692f37b13bac8828acff058c9966a", size = 13243120, upload-time = "2025-09-18T19:52:20.332Z" }, 
- { url = "https://files.pythonhosted.org/packages/8d/86/b6ce62ce9c12765fa6c65078d1938d2490b2b1d9273d0de384952b43c490/ruff-0.13.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f8cff7a105dad631085d9505b491db33848007d6b487c3c1979dd8d9b2963783", size = 13443084, upload-time = "2025-09-18T19:52:23.032Z" }, - { url = "https://files.pythonhosted.org/packages/a1/6e/af7943466a41338d04503fb5a81b2fd07251bd272f546622e5b1599a7976/ruff-0.13.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9761e84255443316a258dd7dfbd9bfb59c756e52237ed42494917b2577697c6a", size = 12295105, upload-time = "2025-09-18T19:52:25.263Z" }, - { url = "https://files.pythonhosted.org/packages/3f/97/0249b9a24f0f3ebd12f007e81c87cec6d311de566885e9309fcbac5b24cc/ruff-0.13.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3d376a88c3102ef228b102211ef4a6d13df330cb0f5ca56fdac04ccec2a99700", size = 12072284, upload-time = "2025-09-18T19:52:27.478Z" }, - { url = "https://files.pythonhosted.org/packages/f6/85/0b64693b2c99d62ae65236ef74508ba39c3febd01466ef7f354885e5050c/ruff-0.13.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cbefd60082b517a82c6ec8836989775ac05f8991715d228b3c1d86ccc7df7dae", size = 12970314, upload-time = "2025-09-18T19:52:30.212Z" }, - { url = "https://files.pythonhosted.org/packages/96/fc/342e9f28179915d28b3747b7654f932ca472afbf7090fc0c4011e802f494/ruff-0.13.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd16b9a5a499fe73f3c2ef09a7885cb1d97058614d601809d37c422ed1525317", size = 13422360, upload-time = "2025-09-18T19:52:32.676Z" }, - { url = "https://files.pythonhosted.org/packages/37/54/6177a0dc10bce6f43e392a2192e6018755473283d0cf43cc7e6afc182aea/ruff-0.13.1-py3-none-win32.whl", hash = "sha256:55e9efa692d7cb18580279f1fbb525146adc401f40735edf0aaeabd93099f9a0", size = 12178448, upload-time = "2025-09-18T19:52:35.545Z" }, - { url = "https://files.pythonhosted.org/packages/64/51/c6a3a33d9938007b8bdc8ca852ecc8d810a407fb513ab08e34af12dc7c24/ruff-0.13.1-py3-none-win_amd64.whl", hash = "sha256:3a3fb595287ee556de947183489f636b9f76a72f0fa9c028bdcabf5bab2cc5e5", size = 13286458, upload-time = "2025-09-18T19:52:38.198Z" }, - { url = "https://files.pythonhosted.org/packages/fd/04/afc078a12cf68592345b1e2d6ecdff837d286bac023d7a22c54c7a698c5b/ruff-0.13.1-py3-none-win_arm64.whl", hash = "sha256:c0bae9ffd92d54e03c2bf266f466da0a65e145f298ee5b5846ed435f6a00518a", size = 12437893, upload-time = "2025-09-18T19:52:41.283Z" }, + { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" }, + { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" }, + { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" }, + { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" }, + { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" }, + { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" }, + { url = "https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" }, + { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" }, + { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" }, + { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" }, + { url = "https://files.pythonhosted.org/packages/f7/00/e7f1501e81e8ec290e79527827af1d88f541d8d26151751b46108978dade/ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac", size = 12245990, upload-time = "2025-09-25T14:54:03.647Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bd/d9f33a73de84fafd0146c6fba4f497c4565fe8fa8b46874b8e438869abc2/ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585", size = 13324004, upload-time = "2025-09-25T14:54:06.05Z" }, + { url = "https://files.pythonhosted.org/packages/c3/12/28fa2f597a605884deb0f65c1b1ae05111051b2a7030f5d8a4ff7f4599ba/ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7", size = 12484437, upload-time = "2025-09-25T14:54:08.022Z" }, ] [[package]] From c71485b43d05b167d36b372e9154f271ebf83848 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 07:42:04 -0500 Subject: [PATCH 08/31] Quote python versions --- .github/workflows/test_pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml index 9a710811..f34e3d96 100644 --- a/.github/workflows/test_pull_request.yml +++ b/.github/workflows/test_pull_request.yml @@ -13,7 +13,7 @@ jobs: publish: strategy: matrix: - python-version: [3.10, 3.11, 3.12, 3.13, 3.14] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] platform: [ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, windows-2022, windows-latest] runs-on: ${{ matrix.platform }} From 59200d967f82532fc8b54f08156dfc726d6c42c1 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 07:46:39 -0500 Subject: [PATCH 09/31] skip docker tests on platforms without docker --- tests/stores/conftest.py | 5 +++++ tests/stores/memcached/test_memcached.py | 4 ++-- tests/stores/redis/test_redis.py | 4 ++-- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index e4b7ebd3..7723e0c6 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -1,5 +1,7 @@ import asyncio import hashlib +import os +import subprocess from abc import ABC, abstractmethod from collections.abc import AsyncGenerator from datetime import datetime, timedelta, timezone @@ -22,6 +24,9 @@ def now_plus(seconds: int) -> datetime: def is_around(value: float, delta: float = 1) -> bool: return value - delta < value < value + delta +def detect_docker() -> bool: + docker_ps = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 + return docker_ps.returncode == 0 class BaseStoreTests(ABC): async def eventually_consistent(self) -> None: # noqa: B027 diff --git a/tests/stores/memcached/test_memcached.py b/tests/stores/memcached/test_memcached.py index d24464ac..79c924b2 100644 --- a/tests/stores/memcached/test_memcached.py +++ b/tests/stores/memcached/test_memcached.py @@ -8,7 +8,7 @@ from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.memcached import MemcachedStore -from tests.stores.conftest import BaseStoreTests +from tests.stores.conftest import BaseStoreTests, detect_docker # Memcached test configuration MEMCACHED_HOST = "localhost" @@ -42,7 +42,7 @@ async def wait_memcached() -> bool: 
class MemcachedFailedToStartError(Exception): pass - +@pytest.mark.skipif(not detect_docker(), reason="Docker is not available") class TestMemcachedStore(BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_memcached(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/redis/test_redis.py b/tests/stores/redis/test_redis.py index d634a708..a7d4b485 100644 --- a/tests/stores/redis/test_redis.py +++ b/tests/stores/redis/test_redis.py @@ -7,7 +7,7 @@ from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.redis import RedisStore -from tests.stores.conftest import BaseStoreTests +from tests.stores.conftest import BaseStoreTests, detect_docker # Redis test configuration REDIS_HOST = "localhost" @@ -38,7 +38,7 @@ async def wait_redis() -> bool: class RedisFailedToStartError(Exception): pass - +@pytest.mark.skipif(not detect_docker(), reason="Docker is not running") class TestRedisStore(BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_redis(self) -> AsyncGenerator[None, None]: From 56add19b08b40382bbeaa7a206f0fd11bccf02d6 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 07:50:31 -0500 Subject: [PATCH 10/31] Add timeout, lint check --- .github/workflows/publish-py-kv-store-adapter.yml | 1 + .github/workflows/test_pull_request.yml | 2 ++ pyproject.toml | 1 + tests/stores/conftest.py | 3 ++- tests/stores/memcached/test_memcached.py | 1 + tests/stores/redis/test_redis.py | 7 ++++--- 6 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-py-kv-store-adapter.yml b/.github/workflows/publish-py-kv-store-adapter.yml index 0ff28ab8..5d95c69a 100644 --- a/.github/workflows/publish-py-kv-store-adapter.yml +++ b/.github/workflows/publish-py-kv-store-adapter.yml @@ -8,6 +8,7 @@ on: jobs: publish: runs-on: ubuntu-latest + timeout-minutes: 10 permissions: id-token: write environment: pypi diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml index f34e3d96..df7310a3 100644 --- a/.github/workflows/test_pull_request.yml +++ b/.github/workflows/test_pull_request.yml @@ -11,10 +11,12 @@ on: jobs: publish: + timeout-minutes: 10 strategy: matrix: python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] platform: [ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, windows-2022, windows-latest] + runs-on: ${{ matrix.platform }} diff --git a/pyproject.toml b/pyproject.toml index 6bb4e33d..d5506896 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ addopts = ["--inline-snapshot=create,fix","-vv","-s"] markers = [ "skip_on_ci: Skip running the test when running on CI", ] +timeout = 5 env_files = [".env"] diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index 7723e0c6..a018770c 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -1,6 +1,5 @@ import asyncio import hashlib -import os import subprocess from abc import ABC, abstractmethod from collections.abc import AsyncGenerator @@ -24,10 +23,12 @@ def now_plus(seconds: int) -> datetime: def is_around(value: float, delta: float = 1) -> bool: return value - delta < value < value + delta + def detect_docker() -> bool: docker_ps = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 return docker_ps.returncode == 0 + class BaseStoreTests(ABC): async def eventually_consistent(self) -> None: # noqa: B027 """Subclasses can override this to wait for eventually consistent operations.""" diff --git 
a/tests/stores/memcached/test_memcached.py b/tests/stores/memcached/test_memcached.py index 79c924b2..b959503f 100644 --- a/tests/stores/memcached/test_memcached.py +++ b/tests/stores/memcached/test_memcached.py @@ -42,6 +42,7 @@ async def wait_memcached() -> bool: class MemcachedFailedToStartError(Exception): pass + @pytest.mark.skipif(not detect_docker(), reason="Docker is not available") class TestMemcachedStore(BaseStoreTests): @pytest.fixture(autouse=True, scope="session") diff --git a/tests/stores/redis/test_redis.py b/tests/stores/redis/test_redis.py index a7d4b485..3ebfab63 100644 --- a/tests/stores/redis/test_redis.py +++ b/tests/stores/redis/test_redis.py @@ -38,6 +38,7 @@ async def wait_redis() -> bool: class RedisFailedToStartError(Exception): pass + @pytest.mark.skipif(not detect_docker(), reason="Docker is not running") class TestRedisStore(BaseStoreTests): @pytest.fixture(autouse=True, scope="session") @@ -61,14 +62,14 @@ async def store(self, setup_redis: RedisStore) -> RedisStore: """Create a Redis store for testing.""" # Create the store with test database redis_store = RedisStore(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB) - _ = await redis_store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType] + _ = await redis_store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType, reportAny] return redis_store async def test_redis_url_connection(self): """Test Redis store creation with URL.""" redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}" store = RedisStore(url=redis_url) - _ = await store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType] + _ = await store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType, reportAny] await store.put(collection="test", key="url_test", value={"test": "value"}) result = await store.get(collection="test", key="url_test") assert result == {"test": "value"} @@ -78,7 +79,7 @@ async def test_redis_client_connection(self): client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) store = RedisStore(client=client) - _ = await store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType] + _ = await store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType, reportAny] await store.put(collection="test", key="client_test", value={"test": "value"}) result = await store.get(collection="test", key="client_test") assert result == {"test": "value"} From f00fe1ac61719ce3e275d18569842e0715a891ef Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 07:53:47 -0500 Subject: [PATCH 11/31] Remove python 3.14 add pre-step for python 3.13 test --- .github/workflows/test_pull_request.yml | 38 ++++++++++++++----- .../elasticsearch/test_elasticsearch.py | 3 +- 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml index df7310a3..32d2d44a 100644 --- a/.github/workflows/test_pull_request.yml +++ b/.github/workflows/test_pull_request.yml @@ -10,15 +10,9 @@ on: workflow_dispatch: jobs: - publish: + test_ubuntu_python_310: timeout-minutes: 10 - strategy: - matrix: - python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] - platform: [ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, windows-2022, windows-latest] - - - runs-on: ${{ matrix.platform }} + runs-on: ubuntu-latest steps: - name: Checkout repository @@ -28,7 +22,7 @@ jobs: uses: astral-sh/setup-uv@v6 
- name: "Install" - run: uv sync --locked --group dev --python ${{ matrix.python-version }} + run: uv sync --locked --group dev --python 3.10 - name: "Lint" run: uv run ruff check --exit-non-zero-on-fix --fix @@ -44,3 +38,29 @@ jobs: - name: "Build" run: uv build + + test_all_platforms: + timeout-minutes: 10 + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + platform: [ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, windows-2022, windows-latest] + + + runs-on: ${{ matrix.platform }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: "Install uv" + uses: astral-sh/setup-uv@v6 + + - name: "Install" + run: uv sync --locked --group dev --python ${{ matrix.python-version }} + + - name: "Test" + run: uv run pytest tests + + - name: "Build" + run: uv build diff --git a/tests/stores/elasticsearch/test_elasticsearch.py b/tests/stores/elasticsearch/test_elasticsearch.py index d4f79756..e5e08dc6 100644 --- a/tests/stores/elasticsearch/test_elasticsearch.py +++ b/tests/stores/elasticsearch/test_elasticsearch.py @@ -7,7 +7,7 @@ from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.elasticsearch import ElasticsearchStore -from tests.stores.conftest import BaseStoreTests +from tests.stores.conftest import BaseStoreTests, detect_docker TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB @@ -28,6 +28,7 @@ async def elasticsearch_client() -> AsyncGenerator[AsyncElasticsearch, None]: @pytest.mark.skipif(os.getenv("ES_URL") is None, reason="Elasticsearch is not configured") +@pytest.mark.skipif(not detect_docker(), reason="Docker is not available") class TestElasticsearchStore(BaseStoreTests): @override @pytest.fixture From 89cb2ee83f5747be43231dbad013add94aebe038 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 08:12:02 -0500 Subject: [PATCH 12/31] PR Feedback --- src/kv_store_adapter/stores/elasticsearch/store.py | 2 +- src/kv_store_adapter/stores/memcached/store.py | 2 +- src/kv_store_adapter/stores/utils/compound.py | 4 ++-- src/kv_store_adapter/wrappers/passthrough_cache.py | 8 ++++---- src/kv_store_adapter/wrappers/prefix_collections.py | 5 +++-- src/kv_store_adapter/wrappers/prefix_keys.py | 2 +- src/kv_store_adapter/wrappers/single_collection.py | 5 +++-- tests/stores/conftest.py | 8 ++++++-- tests/stores/disk/test_disk.py | 2 +- tests/stores/wrappers/test_prefix_collection.py | 2 +- tests/stores/wrappers/test_single_collection.py | 2 +- tests/test_types.py | 4 ++-- 12 files changed, 26 insertions(+), 20 deletions(-) diff --git a/src/kv_store_adapter/stores/elasticsearch/store.py b/src/kv_store_adapter/stores/elasticsearch/store.py index 804a4e33..27f8094c 100644 --- a/src/kv_store_adapter/stores/elasticsearch/store.py +++ b/src/kv_store_adapter/stores/elasticsearch/store.py @@ -123,7 +123,7 @@ async def _setup_collection(self, *, collection: str) -> None: def sanitize_document_id(self, key: str) -> str: if len(key) > MAX_KEY_LENGTH: sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() - return sha256_hash[:256] + return sha256_hash[:64] return key @override diff --git a/src/kv_store_adapter/stores/memcached/store.py b/src/kv_store_adapter/stores/memcached/store.py index cdbb9066..dd76b9d2 100644 --- a/src/kv_store_adapter/stores/memcached/store.py +++ b/src/kv_store_adapter/stores/memcached/store.py @@ -50,7 +50,7 @@ def __init__( def sanitize_key(self, key: str) -> str: if len(key) > MAX_KEY_LENGTH: sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() - return sha256_hash[:256] + return 
sha256_hash[:64] return key @override diff --git a/src/kv_store_adapter/stores/utils/compound.py b/src/kv_store_adapter/stores/utils/compound.py index 75aa068e..4b1d3ef5 100644 --- a/src/kv_store_adapter/stores/utils/compound.py +++ b/src/kv_store_adapter/stores/utils/compound.py @@ -66,10 +66,10 @@ def unprefix_collection(collection: str, prefix: str, separator: str | None = No def get_collections_from_compound_keys(compound_keys: list[str], separator: str | None = None) -> list[str]: """Return a unique list of collections from a list of compound keys.""" separator = separator or DEFAULT_COMPOUND_SEPARATOR - return list({key_collection for key_collection, _ in uncompound_strings(strings=compound_keys)}) + return list({key_collection for key_collection, _ in uncompound_strings(strings=compound_keys, separator=separator)}) def get_keys_from_compound_keys(compound_keys: list[str], collection: str, separator: str | None = None) -> list[str]: """Return all keys from a list of compound keys for a given collection.""" separator = separator or DEFAULT_COMPOUND_SEPARATOR - return [key for key_collection, key in uncompound_strings(strings=compound_keys) if key_collection == collection] + return [key for key_collection, key in uncompound_strings(strings=compound_keys, separator=separator) if key_collection == collection] diff --git a/src/kv_store_adapter/wrappers/passthrough_cache.py b/src/kv_store_adapter/wrappers/passthrough_cache.py index d0439276..3ec25b75 100644 --- a/src/kv_store_adapter/wrappers/passthrough_cache.py +++ b/src/kv_store_adapter/wrappers/passthrough_cache.py @@ -65,7 +65,7 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) # First check the cache store for the entries cached_entries: list[dict[str, Any] | None] = await self.cache_store.get_many(collection=collection, keys=keys) - for i, key in enumerate[str](iterable=keys): + for i, key in enumerate(iterable=keys): key_to_value[key] = cached_entries[i] uncached_keys = [key for key, value in key_to_value.items() if value is None] @@ -78,7 +78,7 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) entries_to_cache_keys: list[str] = [] entries_to_cache_ttls: list[float | None] = [] - for i, key in enumerate[str](iterable=uncached_keys): + for i, key in enumerate(iterable=uncached_keys): entry, ttl = uncached_entries[i] if entry is not None: entries_to_cache_keys.append(key) @@ -120,7 +120,7 @@ async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) # First check the cache store for the entries cached_entries: list[tuple[dict[str, Any] | None, float | None]] = await self.cache_store.ttl_many(collection=collection, keys=keys) - for i, key in enumerate[str](iterable=keys): + for i, key in enumerate(iterable=keys): key_to_value[key] = (cached_entries[i][0], cached_entries[i][1]) uncached_keys = [key for key, value in key_to_value.items() if value == (None, None)] @@ -133,7 +133,7 @@ async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) entries_to_cache_keys: list[str] = [] entries_to_cache_ttls: list[float | None] = [] - for i, key in enumerate[str](iterable=uncached_keys): + for i, key in enumerate(iterable=uncached_keys): entry, ttl = uncached_entries[i] if entry is not None: entries_to_cache_keys.append(key) diff --git a/src/kv_store_adapter/wrappers/prefix_collections.py b/src/kv_store_adapter/wrappers/prefix_collections.py index fe762193..a1dc436c 100644 --- a/src/kv_store_adapter/wrappers/prefix_collections.py +++ 
b/src/kv_store_adapter/wrappers/prefix_collections.py @@ -3,6 +3,7 @@ from typing_extensions import override +from kv_store_adapter.stores.base import DEFAULT_COLLECTION_NAME from kv_store_adapter.stores.utils.compound import prefix_collection, unprefix_collection from kv_store_adapter.types import KVStore from kv_store_adapter.wrappers.base import BaseWrapper @@ -11,7 +12,7 @@ class PrefixCollectionsWrapper(BaseWrapper): """A wrapper that prefixes collection names before delegating to the underlying store.""" - def __init__(self, store: KVStore, prefix: str, default_collection: str) -> None: + def __init__(self, store: KVStore, prefix: str, default_collection: str | None = None) -> None: """Initialize the prefix collections wrapper. Args: @@ -21,7 +22,7 @@ def __init__(self, store: KVStore, prefix: str, default_collection: str) -> None """ self.store: KVStore = store self.prefix: str = prefix - self.default_collection: str = default_collection + self.default_collection: str = default_collection or DEFAULT_COLLECTION_NAME super().__init__() def _prefix_collection(self, collection: str | None) -> str: diff --git a/src/kv_store_adapter/wrappers/prefix_keys.py b/src/kv_store_adapter/wrappers/prefix_keys.py index 933813a9..78d13c30 100644 --- a/src/kv_store_adapter/wrappers/prefix_keys.py +++ b/src/kv_store_adapter/wrappers/prefix_keys.py @@ -9,7 +9,7 @@ class PrefixKeysWrapper(BaseWrapper): - """A wrapper for prefixing keys in a KVStore.""" + """A wrapper that prefixes key names before delegating to the underlying store.""" def __init__(self, store: KVStore, prefix: str) -> None: """Initialize the prefix keys wrapper. diff --git a/src/kv_store_adapter/wrappers/single_collection.py b/src/kv_store_adapter/wrappers/single_collection.py index 3312c9c4..28bbe8a7 100644 --- a/src/kv_store_adapter/wrappers/single_collection.py +++ b/src/kv_store_adapter/wrappers/single_collection.py @@ -3,6 +3,7 @@ from typing_extensions import override +from kv_store_adapter.stores.base import DEFAULT_COLLECTION_NAME from kv_store_adapter.stores.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key from kv_store_adapter.types import KVStore from kv_store_adapter.wrappers.base import BaseWrapper @@ -11,7 +12,7 @@ class SingleCollectionWrapper(BaseWrapper): """A wrapper that stores all collections within a single backing collection via key prefixing.""" - def __init__(self, store: KVStore, single_collection: str, default_collection: str, separator: str | None = None) -> None: + def __init__(self, store: KVStore, single_collection: str, default_collection: str | None = None, separator: str | None = None) -> None: """Initialize the prefix collections wrapper. 
Args: @@ -21,7 +22,7 @@ def __init__(self, store: KVStore, single_collection: str, default_collection: s """ self.store: KVStore = store self.single_collection: str = single_collection - self.default_collection: str = default_collection + self.default_collection: str = default_collection or DEFAULT_COLLECTION_NAME self.separator: str = separator or DEFAULT_PREFIX_SEPARATOR super().__init__() diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index a018770c..51fca1bc 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -25,8 +25,12 @@ def is_around(value: float, delta: float = 1) -> bool: def detect_docker() -> bool: - docker_ps = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 - return docker_ps.returncode == 0 + try: + result = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 + except Exception: + return False + else: + return result.returncode == 0 class BaseStoreTests(ABC): diff --git a/tests/stores/disk/test_disk.py b/tests/stores/disk/test_disk.py index af7f3bc2..9c2b1017 100644 --- a/tests/stores/disk/test_disk.py +++ b/tests/stores/disk/test_disk.py @@ -15,4 +15,4 @@ class TestDiskStore(BaseStoreTests): @pytest.fixture async def store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield DiskStore(directory=(temp_dir), max_size=TEST_SIZE_LIMIT) + yield DiskStore(directory=temp_dir, max_size=TEST_SIZE_LIMIT) diff --git a/tests/stores/wrappers/test_prefix_collection.py b/tests/stores/wrappers/test_prefix_collection.py index 353cfecd..df2dd07b 100644 --- a/tests/stores/wrappers/test_prefix_collection.py +++ b/tests/stores/wrappers/test_prefix_collection.py @@ -11,4 +11,4 @@ class TestPrefixCollectionWrapper(BaseStoreTests): @pytest.fixture async def store(self) -> PrefixCollectionsWrapper: memory_store: MemoryStore = MemoryStore() - return PrefixCollectionsWrapper(store=memory_store, prefix="collection_prefix", default_collection="default_collection") + return PrefixCollectionsWrapper(store=memory_store, prefix="collection_prefix") diff --git a/tests/stores/wrappers/test_single_collection.py b/tests/stores/wrappers/test_single_collection.py index 42f98805..1022cb54 100644 --- a/tests/stores/wrappers/test_single_collection.py +++ b/tests/stores/wrappers/test_single_collection.py @@ -11,4 +11,4 @@ class TestSingleCollectionWrapper(BaseStoreTests): @pytest.fixture async def store(self) -> SingleCollectionWrapper: memory_store: MemoryStore = MemoryStore() - return SingleCollectionWrapper(store=memory_store, single_collection="test", default_collection="test") + return SingleCollectionWrapper(store=memory_store, single_collection="test") diff --git a/tests/test_types.py b/tests/test_types.py index e3dd6cb4..c213c049 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -3,7 +3,7 @@ async def test_kv_store_protocol(): - async def test_kv_store_protocol(kv_store: KVStore): + async def test_protocol(kv_store: KVStore): assert await kv_store.get(collection="test", key="test") is None await kv_store.put(collection="test", key="test", value={"test": "test"}) assert await kv_store.delete(collection="test", key="test") @@ -11,7 +11,7 @@ async def test_kv_store_protocol(kv_store: KVStore): memory_store = MemoryStore() - await test_kv_store_protocol(kv_store=memory_store) + await test_protocol(kv_store=memory_store) assert await memory_store.get(collection="test", key="test") is None assert await memory_store.get(collection="test", 
key="test_2") == {"test": "test"} From 0add7f35d974e7080948d159e486d1b1ee7accfb Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 09:05:04 -0500 Subject: [PATCH 13/31] Close Disk Cache on deletion --- src/kv_store_adapter/stores/disk/store.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/kv_store_adapter/stores/disk/store.py b/src/kv_store_adapter/stores/disk/store.py index 1b175562..3e6aec06 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/src/kv_store_adapter/stores/disk/store.py @@ -98,3 +98,6 @@ async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) return self._cache.delete(key=combo_key, retry=True) + + def __del__(self) -> None: + self._cache.close() From 830116ea9e2a99164e6ab8804a6f605208dae0ef Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 12:57:43 -0500 Subject: [PATCH 14/31] Updates to keystores, add valkey and memcache --- .github/workflows/test_pull_request.yml | 12 +- DEVELOPING.md | 2 +- README.md | 20 +- pyproject.toml | 5 +- src/kv_store_adapter/errors.py | 14 +- src/kv_store_adapter/stores/base.py | 60 +++++- .../stores/disk/multi_store.py | 19 +- src/kv_store_adapter/stores/disk/store.py | 12 +- .../stores/elasticsearch/store.py | 39 ++-- .../stores/memcached/store.py | 12 +- src/kv_store_adapter/stores/memory/store.py | 4 +- .../stores/mongodb/__init__.py | 3 + src/kv_store_adapter/stores/mongodb/store.py | 193 ++++++++++++++++++ src/kv_store_adapter/stores/null/store.py | 2 +- src/kv_store_adapter/stores/redis/store.py | 12 +- src/kv_store_adapter/stores/simple/store.py | 6 +- .../stores/valkey/__init__.py | 3 + src/kv_store_adapter/stores/valkey/store.py | 127 ++++++++++++ .../{stores => }/utils/compound.py | 4 +- .../{stores => }/utils/managed_entry.py | 6 +- src/kv_store_adapter/utils/sanitize.py | 160 +++++++++++++++ .../{stores => }/utils/time_to_live.py | 10 +- .../wrappers/prefix_collections.py | 2 +- src/kv_store_adapter/wrappers/prefix_keys.py | 2 +- .../wrappers/single_collection.py | 2 +- tests/stores/conftest.py | 48 ++++- tests/stores/disk/test_disk.py | 8 +- tests/stores/disk/test_multi_disk.py | 4 +- .../elasticsearch/test_elasticsearch.py | 5 +- tests/stores/memcached/test_memcached.py | 6 +- tests/stores/mongodb/test_mongodb.py | 88 ++++++++ tests/stores/redis/test_redis.py | 6 +- tests/stores/valkey/test_valkey.py | 81 ++++++++ .../test_managed_entry.py} | 2 +- tests/utils/test_sanitize.py | 88 ++++++++ uv.lock | 173 +++++++++++++++- 36 files changed, 1146 insertions(+), 94 deletions(-) create mode 100644 src/kv_store_adapter/stores/mongodb/__init__.py create mode 100644 src/kv_store_adapter/stores/mongodb/store.py create mode 100644 src/kv_store_adapter/stores/valkey/__init__.py create mode 100644 src/kv_store_adapter/stores/valkey/store.py rename src/kv_store_adapter/{stores => }/utils/compound.py (97%) rename src/kv_store_adapter/{stores => }/utils/managed_entry.py (92%) create mode 100644 src/kv_store_adapter/utils/sanitize.py rename src/kv_store_adapter/{stores => }/utils/time_to_live.py (79%) create mode 100644 tests/stores/mongodb/test_mongodb.py create mode 100644 tests/stores/valkey/test_valkey.py rename tests/{stores/base/test_kv_json_store.py => utils/test_managed_entry.py} (93%) create mode 100644 tests/utils/test_sanitize.py diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml index 32d2d44a..e896a8e7 100644 --- 
a/.github/workflows/test_pull_request.yml
+++ b/.github/workflows/test_pull_request.yml
@@ -10,10 +10,14 @@ on:
   workflow_dispatch:
 
 jobs:
-  test_ubuntu_python_310:
+  test_small_subset_platforms:
     timeout-minutes: 10
-    runs-on: ubuntu-latest
-
+    strategy:
+      matrix:
+        python-version: ["3.10"]
+        platform: [ubuntu-latest, macos-latest, windows-latest]
+
+    runs-on: ${{ matrix.platform }}
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
@@ -22,7 +26,7 @@ jobs:
         uses: astral-sh/setup-uv@v6
 
       - name: "Install"
-        run: uv sync --locked --group dev --python 3.10
+        run: uv sync --locked --group dev --python ${{ matrix.python-version }}
 
       - name: "Lint"
         run: uv run ruff check --exit-non-zero-on-fix --fix
diff --git a/DEVELOPING.md b/DEVELOPING.md
index 4019f757..5c5982e1 100644
--- a/DEVELOPING.md
+++ b/DEVELOPING.md
@@ -332,7 +332,7 @@ You can also inherit from specialized base classes for additional functionality:
 # src/kv_store_adapter/stores/mystore/store.py
 from typing_extensions import override
 from kv_store_adapter.stores.base import BaseStore
-from kv_store_adapter.stores.utils.managed_entry import ManagedEntry
+from kv_store_adapter.utils.managed_entry import ManagedEntry
 
 class MyStore(BaseStore):
     """My custom key-value store implementation."""
diff --git a/README.md b/README.md
index 5805deb1..54bfffa9 100644
--- a/README.md
+++ b/README.md
@@ -5,10 +5,10 @@ A pluggable, async-only key-value store interface for modern Python applications
 ## Features
 
 - **Async-only**: Built from the ground up with `async`/`await` support
-- **Multiple backends**: Redis, Elasticsearch, In-memory, Disk, and more
+- **Multiple backends**: Elasticsearch, Memcached, MongoDB, Redis, and Valkey, plus In-memory and Disk stores
 - **TTL support**: Automatic expiration handling across all store types
 - **Type-safe**: Full type hints with Protocol-based interfaces
-- **Adapters**: Pydantic model support, raise-on-missing behavior, and more
+- **Adapters**: Pydantic model support, raise-on-missing behavior, and others
 - **Wrappers**: Statistics tracking and extensible wrapper system
 - **Collection-based**: Organize keys into logical collections/namespaces
 - **Pluggable architecture**: Easy to add custom store implementations
@@ -24,9 +24,11 @@ pip install kv-store-adapter[elasticsearch]
 pip install kv-store-adapter[memory]
 pip install kv-store-adapter[disk]
 pip install kv-store-adapter[memcached]
+pip install kv-store-adapter[mongodb]
+pip install kv-store-adapter[valkey]
 
 # With all backends
-pip install kv-store-adapter[memory,disk,redis,elasticsearch,memcached]
+pip install kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb,valkey]
 
 # With Pydantic adapter support
 pip install kv-store-adapter[pydantic]
@@ -64,9 +66,11 @@ Choose the store that best fits your needs. All stores implement the same `KVSto
 ### Production Stores
 
-- **RedisStore**: `RedisStore(url="redis://localhost:6379/0")`
 - **ElasticsearchStore**: `ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key")`
-- **MemcachedStore**: `MemcachedStore(host="localhost", port=11211")`
+- **RedisStore**: `RedisStore(url="redis://localhost:6379/0")`
+- **MongoDBStore**: `MongoDBStore(url="mongodb://localhost:27017/test")`
+- **ValkeyStore**: `ValkeyStore(host="localhost", port=6379)`
+- **MemcachedStore**: `MemcachedStore(host="localhost", port=11211)`
 - **DiskStore**: A disk-based store using diskcache `DiskStore(directory="./cache")`. Also see `MultiDiskStore` for a store that creates one disk store per collection.
 - **MemoryStore**: A fast in-memory TLRU cache `MemoryStore()`
@@ -79,8 +83,7 @@ For detailed configuration options and all available stores, see [DEVELOPING.md]
 
 ## Atomicity / Consistency
 
-We strive to support atomicity and consistency across all stores and operations in the KVStore. That being said,
-there are operations available via the BaseStore class which are management operations like listing keys, listing collections, clearing collections, culling expired entries, etc. These operations may not be atomic, may be eventually consistent across stores, or may have other limitations (like limited to returning a certain number of keys).
+We strive to support atomicity and consistency for basic key-value operations across all stores. That said, each store offers different atomicity and consistency guarantees, especially distributed stores like MongoDB and Redis, and especially for bulk and management operations.
 
 ## Protocol Adapters
 
@@ -151,10 +154,11 @@ Other wrappers that are available include:
 
 - **ClampTTLWrapper**: Wraps a store and clamps the TTL to a given range.
 - **TTLClampWrapper**: Wraps a store and clamps the TTL to a given range.
-- **PassthroughCacheWrapper**: Wraps two stores to provide a read-through cache. Reads go to the cache store first and fall back to the primary store, populating the cache with the primary's TTL; writes evict from the cache and then write to the primary. For example, use a RedisStore as the primary and a MemoryStore as the cache store.
+- **PassthroughCacheWrapper**: Wraps two stores to provide a read-through cache. Reads go to the cache store first and fall back to the primary store, populating the cache with the entry from the primary; writes evict from the cache and then write to the primary. For example, use a RedisStore or a DiskStore as the primary and a MemoryStore as the cache store (see the sketch after this list).
 - **PrefixCollectionsWrapper**: Wraps a store and prefixes all collections with a given prefix.
 - **PrefixKeysWrapper**: Wraps a store and prefixes all keys with a given prefix.
 - **SingleCollectionWrapper**: Wraps a store and forces all requests into a single collection.
+- **StatisticsWrapper**: Wraps a store and tracks hit/miss statistics for the store.
 
 See [DEVELOPING.md](DEVELOPING.md) for more information on how to create your own wrappers.
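To make the wrapper composition this README describes concrete, here is a minimal sketch of a read-through cache over a disk-backed primary. The `PassthroughCacheWrapper` parameter names (`store`, `cache_store`) are assumptions for illustration, inferred from the other wrappers' signatures; check the constructor in your version before using it.

```python
import asyncio

from kv_store_adapter.stores.disk import DiskStore
from kv_store_adapter.stores.memory.store import MemoryStore
from kv_store_adapter.wrappers.passthrough_cache import PassthroughCacheWrapper


async def main() -> None:
    primary = DiskStore(directory="./cache")  # durable primary store
    cache = MemoryStore()  # fast in-memory cache

    # Assumed parameter names: reads check the cache first, fall back to the
    # primary on a miss, and populate the cache with the fetched entry.
    store = PassthroughCacheWrapper(store=primary, cache_store=cache)

    await store.put(collection="users", key="123", value={"name": "Ada"}, ttl=300)
    print(await store.get(collection="users", key="123"))


asyncio.run(main())
```

Because every wrapper exposes the same protocol as a store, wrappers can also be stacked, for example a `PrefixCollectionsWrapper` around a `PassthroughCacheWrapper`.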
diff --git a/pyproject.toml b/pyproject.toml index d5506896..302a7162 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,8 @@ build-backend = "hatchling.build" memory = ["cachetools>=6.0.0"] disk = ["diskcache>=5.6.0", "pathvalidate>=3.3.1",] redis = ["redis>=6.0.0"] +mongodb = ["pymongo>=4.15.0"] +valkey = ["valkey-glide>=2.1.0"] memcached = ["aiomcache>=0.8.0"] elasticsearch = ["elasticsearch>=9.0.0", "aiohttp>=3.12"] pydantic = ["pydantic>=2.11.9"] @@ -42,7 +44,7 @@ env_files = [".env"] [dependency-groups] dev = [ - "kv-store-adapter[memory,disk,redis,elasticsearch,memcached]", + "kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb,valkey]", "kv-store-adapter[pydantic]", "pytest", "pytest-mock", @@ -54,6 +56,7 @@ dev = [ "inline-snapshot>=0.29.0", "pytest-redis>=3.1.3", "basedpyright>=1.31.5", + "pytest-timeout>=2.4.0", ] lint = [ "ruff" diff --git a/src/kv_store_adapter/errors.py b/src/kv_store_adapter/errors.py index da5425c4..3eea56bf 100644 --- a/src/kv_store_adapter/errors.py +++ b/src/kv_store_adapter/errors.py @@ -1,6 +1,6 @@ from typing import Any -ExtraInfoType = dict[str, Any] +ExtraInfoType = dict[str, str | int | float | bool | None] class KVStoreAdapterError(Exception): @@ -13,7 +13,7 @@ def __init__(self, message: str | None = None, extra_info: ExtraInfoType | None message_parts.append(message) if extra_info: - extra_info_str = ";".join(f"{k}: {v}" for k, v in extra_info.items()) # pyright: ignore[reportAny] + extra_info_str = ";".join(f"{k}: {v}" for k, v in extra_info.items()) if message: extra_info_str = "(" + extra_info_str + ")" @@ -32,6 +32,16 @@ def __init__(self, operation: str, collection: str | None = None, key: str | Non ) +class InvalidTTLError(KVStoreAdapterError): + """Raised when a TTL is invalid.""" + + def __init__(self, ttl: float): + super().__init__( + message="A TTL is invalid.", + extra_info={"ttl": ttl}, + ) + + class SetupError(KVStoreAdapterError): """Raised when a store setup fails.""" diff --git a/src/kv_store_adapter/stores/base.py b/src/kv_store_adapter/stores/base.py index 368051ab..1aa20fe5 100644 --- a/src/kv_store_adapter/stores/base.py +++ b/src/kv_store_adapter/stores/base.py @@ -7,13 +7,12 @@ from asyncio.locks import Lock from collections import defaultdict from collections.abc import Sequence +from types import TracebackType from typing import Any -from typing_extensions import override +from typing_extensions import Self, override -from kv_store_adapter.errors import SetupError -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -from kv_store_adapter.stores.utils.time_to_live import now +from kv_store_adapter.errors import InvalidTTLError, SetupError from kv_store_adapter.types import ( CullProtocol, DestroyCollectionProtocol, @@ -22,10 +21,28 @@ EnumerateKeysProtocol, KeyValueProtocol, ) +from kv_store_adapter.utils.managed_entry import ManagedEntry +from kv_store_adapter.utils.time_to_live import now DEFAULT_COLLECTION_NAME = "default_collection" +def validate_one_ttl(t: float | None, raise_error: bool = False) -> bool: + if t is None: + return True + if t <= 0: + if raise_error: + raise InvalidTTLError(ttl=t) + return False + return True + + +def validate_ttls(t: list[float | None] | float | None, raise_error: bool = False) -> bool: + if not isinstance(t, (Sequence)): + t = [t] + return all(validate_one_ttl(t=ttl, raise_error=raise_error) for ttl in t) + + class BaseStore(KeyValueProtocol, ABC): """An opinionated Abstract base class for managed key-value stores using ManagedEntry 
objects. @@ -188,6 +205,8 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = collection = collection or self.default_collection await self.setup_collection(collection=collection) + _ = validate_ttls(t=ttl, raise_error=True) + managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=ttl, created_at=now()) await self._put_managed_entry( @@ -220,13 +239,13 @@ async def put_many( ttl_for_entries: list[float | None] = [] if ttl is None: - ttl_for_entries = [None for _ in range(len(keys))] - - if isinstance(ttl, Sequence): - ttl_for_entries.extend(ttl) + ttl_for_entries = [None] * len(keys) + elif isinstance(ttl, Sequence): + ttl_for_entries = list(ttl) + elif isinstance(ttl, float): + ttl_for_entries = [ttl] * len(keys) - if isinstance(ttl, float): - ttl_for_entries.extend([ttl for _ in range(len(keys))]) + _ = validate_ttls(t=ttl_for_entries, raise_error=True) managed_entries: list[ManagedEntry] = [] @@ -287,6 +306,27 @@ async def _get_collection_keys(self, *, collection: str, limit: int | None = Non """List all keys in the specified collection.""" +class BaseContextManagerStore(BaseStore, ABC): + """An abstract base class for context manager stores.""" + + async def __aenter__(self) -> Self: + await self.setup() + return self + + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: + await self._close() + + async def close(self) -> None: + await self._close() + + @abstractmethod + async def _close(self) -> None: + """Close the store.""" + ... + + class BaseEnumerateCollectionsStore(BaseStore, EnumerateCollectionsProtocol, ABC): @override async def collections(self, *, limit: int | None = None) -> list[str]: diff --git a/src/kv_store_adapter/stores/disk/multi_store.py b/src/kv_store_adapter/stores/disk/multi_store.py index ba5bcd7e..e4a06f1f 100644 --- a/src/kv_store_adapter/stores/disk/multi_store.py +++ b/src/kv_store_adapter/stores/disk/multi_store.py @@ -5,9 +5,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.utils.compound import compound_key -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore +from kv_store_adapter.utils.compound import compound_key +from kv_store_adapter.utils.managed_entry import ManagedEntry try: from diskcache import Cache @@ -27,7 +27,7 @@ def _sanitize_collection_for_filesystem(collection: str) -> str: return sanitize_filename(filename=collection) -class MultiDiskStore(BaseStore): +class MultiDiskStore(BaseContextManagerStore, BaseStore): """A disk-based store that uses the diskcache library to store data. 
The MultiDiskStore creates one diskcache Cache instance per collection.""" @@ -134,3 +134,14 @@ async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) return self._cache[collection].delete(key=combo_key, retry=True) + + def _sync_close(self) -> None: + for cache in self._cache.values(): + cache.close() + + @override + async def _close(self) -> None: + self._sync_close() + + def __del__(self) -> None: + self._sync_close() diff --git a/src/kv_store_adapter/stores/disk/store.py b/src/kv_store_adapter/stores/disk/store.py index 3e6aec06..30fd4961 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/src/kv_store_adapter/stores/disk/store.py @@ -4,9 +4,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.utils.compound import compound_key -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore +from kv_store_adapter.utils.compound import compound_key +from kv_store_adapter.utils.managed_entry import ManagedEntry try: from diskcache import Cache @@ -17,7 +17,7 @@ DEFAULT_DISK_STORE_MAX_SIZE = 1 * 1024 * 1024 * 1024 # 1GB -class DiskStore(BaseStore): +class DiskStore(BaseContextManagerStore, BaseStore): """A disk-based store that uses the diskcache library to store data.""" _cache: Cache @@ -99,5 +99,9 @@ async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: return self._cache.delete(key=combo_key, retry=True) + @override + async def _close(self) -> None: + self._cache.close() + def __del__(self) -> None: self._cache.close() diff --git a/src/kv_store_adapter/stores/elasticsearch/store.py b/src/kv_store_adapter/stores/elasticsearch/store.py index 27f8094c..9d791834 100644 --- a/src/kv_store_adapter/stores/elasticsearch/store.py +++ b/src/kv_store_adapter/stores/elasticsearch/store.py @@ -4,15 +4,16 @@ from typing_extensions import override from kv_store_adapter.stores.base import ( + BaseContextManagerStore, BaseCullStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseStore, ) -from kv_store_adapter.stores.utils.compound import compound_key -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry, load_from_json -from kv_store_adapter.stores.utils.time_to_live import now_as_epoch, try_parse_datetime +from kv_store_adapter.utils.compound import compound_key +from kv_store_adapter.utils.managed_entry import ManagedEntry, load_from_json +from kv_store_adapter.utils.time_to_live import now_as_epoch, try_parse_datetime_str try: from elasticsearch import AsyncElasticsearch @@ -33,13 +34,6 @@ from elastic_transport import ObjectApiResponse -ELASTICSEARCH_CLIENT_DEFAULTS = { - "http_compress": True, - "timeout": 10, - "retry_on_timeout": True, - "max_retries": 3, -} - DEFAULT_INDEX = "kv-store" DEFAULT_MAPPING = { @@ -71,7 +65,9 @@ MAX_KEY_LENGTH = 256 -class ElasticsearchStore(BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDestroyCollectionStore, BaseCullStore, BaseStore): +class ElasticsearchStore( + BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDestroyCollectionStore, BaseCullStore, BaseContextManagerStore, BaseStore +): """A elasticsearch-based store.""" _client: AsyncElasticsearch @@ -102,7 +98,18 @@ def __init__( index: The index to use. default_collection: The default collection to use if no collection is provided. 
""" - self._client = elasticsearch_client or AsyncElasticsearch(hosts=[url], api_key=api_key, **ELASTICSEARCH_CLIENT_DEFAULTS) # pyright: ignore[reportArgumentType] + if elasticsearch_client is None and url is None: + msg = "Either elasticsearch_client or url must be provided" + raise ValueError(msg) + + if elasticsearch_client: + self._client = elasticsearch_client + elif url: + self._client = AsyncElasticsearch(hosts=[url], api_key=api_key, http_compress=True, request_timeout=10, retry_on_timeout=True, max_retries=3) + else: + msg = "Either elasticsearch_client or url must be provided" + raise ValueError(msg) + self._index = index or DEFAULT_INDEX super().__init__(default_collection=default_collection) @@ -142,8 +149,8 @@ async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry if not (value_str := source.get("value")) or not isinstance(value_str, str): return None - created_at: datetime | None = try_parse_datetime(value=source.get("created_at")) - expires_at: datetime | None = try_parse_datetime(value=source.get("expires_at")) + created_at: datetime | None = try_parse_datetime_str(value=source.get("created_at")) + expires_at: datetime | None = try_parse_datetime_str(value=source.get("expires_at")) return ManagedEntry( value=load_from_json(value_str), @@ -283,3 +290,7 @@ async def _cull(self) -> None: }, }, ) + + @override + async def _close(self) -> None: + await self._client.close() diff --git a/src/kv_store_adapter/stores/memcached/store.py b/src/kv_store_adapter/stores/memcached/store.py index dd76b9d2..307f4a08 100644 --- a/src/kv_store_adapter/stores/memcached/store.py +++ b/src/kv_store_adapter/stores/memcached/store.py @@ -3,9 +3,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseDestroyStore, BaseStore -from kv_store_adapter.stores.utils.compound import compound_key -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseStore +from kv_store_adapter.utils.compound import compound_key +from kv_store_adapter.utils.managed_entry import ManagedEntry try: from aiomcache import Client @@ -16,7 +16,7 @@ MAX_KEY_LENGTH = 240 -class MemcachedStore(BaseDestroyStore, BaseStore): +class MemcachedStore(BaseDestroyStore, BaseContextManagerStore, BaseStore): """Memcached-based key-value store using aiomcache.""" _client: Client @@ -102,3 +102,7 @@ async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: async def _delete_store(self) -> bool: _ = await self._client.flush_all() return True + + @override + async def _close(self) -> None: + await self._client.close() diff --git a/src/kv_store_adapter/stores/memory/store.py b/src/kv_store_adapter/stores/memory/store.py index fac7553a..eb7d0b55 100644 --- a/src/kv_store_adapter/stores/memory/store.py +++ b/src/kv_store_adapter/stores/memory/store.py @@ -11,8 +11,8 @@ BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, ) -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry -from kv_store_adapter.stores.utils.time_to_live import epoch_to_datetime +from kv_store_adapter.utils.managed_entry import ManagedEntry +from kv_store_adapter.utils.time_to_live import epoch_to_datetime try: from cachetools import TLRUCache diff --git a/src/kv_store_adapter/stores/mongodb/__init__.py b/src/kv_store_adapter/stores/mongodb/__init__.py new file mode 100644 index 00000000..3941e70d --- /dev/null +++ b/src/kv_store_adapter/stores/mongodb/__init__.py @@ -0,0 +1,3 @@ 
+from .store import MongoDBStore
+
+__all__ = ["MongoDBStore"]
diff --git a/src/kv_store_adapter/stores/mongodb/store.py b/src/kv_store_adapter/stores/mongodb/store.py
new file mode 100644
index 00000000..02cd743f
--- /dev/null
+++ b/src/kv_store_adapter/stores/mongodb/store.py
@@ -0,0 +1,194 @@
+from datetime import datetime
+from typing import TYPE_CHECKING, Any, TypedDict, overload
+
+from pymongo.asynchronous.collection import AsyncCollection
+from pymongo.asynchronous.database import AsyncDatabase
+from typing_extensions import Self, override
+
+from kv_store_adapter.stores.base import BaseContextManagerStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseStore
+from kv_store_adapter.utils.managed_entry import ManagedEntry
+from kv_store_adapter.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string
+from kv_store_adapter.utils.time_to_live import now
+
+if TYPE_CHECKING:
+    from pymongo.results import DeleteResult
+
+try:
+    from pymongo import AsyncMongoClient
+except ImportError as e:
+    msg = "MongoDBStore requires py-kv-store-adapter[mongodb]"
+    raise ImportError(msg) from e
+
+
+DEFAULT_DB = "kv-store-adapter"
+DEFAULT_COLLECTION = "kv"
+
+DEFAULT_PAGE_SIZE = 10000
+PAGE_LIMIT = 10000
+
+# MongoDB collection name length limit
+# https://www.mongodb.com/docs/manual/reference/limits/
+# For unsharded collections and views, the namespace length limit is 255 bytes.
+# For sharded collections, the namespace length limit is 235 bytes.
+# So limit the collection name to 200 bytes
+MAX_COLLECTION_LENGTH = 200
+COLLECTION_ALLOWED_CHARACTERS = ALPHANUMERIC_CHARACTERS + "_"
+
+
+class MongoDBStoreDocument(TypedDict):
+    value: dict[str, Any]
+
+    created_at: datetime | None
+    expires_at: datetime | None
+
+
+class MongoDBStore(BaseEnumerateCollectionsStore, BaseDestroyCollectionStore, BaseContextManagerStore, BaseStore):
+    """MongoDB-based key-value store using PyMongo's asynchronous client."""
+
+    _client: AsyncMongoClient[dict[str, Any]]
+    _db: AsyncDatabase[dict[str, Any]]
+    _collections_by_name: dict[str, AsyncCollection[dict[str, Any]]]
+
+    @overload
+    def __init__(
+        self,
+        *,
+        client: AsyncMongoClient[dict[str, Any]],
+        db_name: str | None = None,
+        coll_name: str | None = None,
+        default_collection: str | None = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(
+        self, *, url: str, db_name: str | None = None, coll_name: str | None = None, default_collection: str | None = None
+    ) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        client: AsyncMongoClient[dict[str, Any]] | None = None,
+        url: str | None = None,
+        db_name: str | None = None,
+        coll_name: str | None = None,
+        default_collection: str | None = None,
+    ) -> None:
+        """Initialize the MongoDB store.
+
+        The store backs each adapter collection with its own MongoDB collection, created on first use. Each
+        entry is stored as a document keyed by `key`, with its value serialized to a JSON string payload and
+        any TTL persisted as ISO timestamps alongside it to maintain consistent semantics across backends.
+        """
+
+        if client:
+            self._client = client
+        elif url:
+            self._client = AsyncMongoClient(url)
+        else:
+            # Defaults to localhost
+            self._client = AsyncMongoClient()
+
+        db_name = db_name or DEFAULT_DB
+        coll_name = coll_name or DEFAULT_COLLECTION
+
+        self._db = self._client[db_name]
+        self._collections_by_name = {}
+
+        super().__init__(default_collection=default_collection)
+
+    @override
+    async def __aenter__(self) -> Self:
+        _ = await self._client.__aenter__()
+        return self
+
+    @override
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:  # pyright: ignore[reportAny]
+        await self._client.__aexit__(exc_type, exc_val, exc_tb)
+
+    def _sanitize_collection_name(self, collection: str) -> str:
+        return sanitize_string(value=collection, max_length=MAX_COLLECTION_LENGTH, allowed_characters=COLLECTION_ALLOWED_CHARACTERS)
+
+    @override
+    async def _setup_collection(self, *, collection: str) -> None:
+        # Create the backing collection and its key index on first use
+        collection = self._sanitize_collection_name(collection=collection)
+
+        collection_filter: dict[str, str] = {"name": collection}
+        matching_collections: list[str] = await self._db.list_collection_names(filter=collection_filter)
+
+        if matching_collections:
+            self._collections_by_name[collection] = self._db[collection]
+            return
+
+        new_collection: AsyncCollection[dict[str, Any]] = await self._db.create_collection(name=collection)
+
+        _ = await new_collection.create_index(keys="key")
+
+        self._collections_by_name[collection] = new_collection
+
+    @override
+    async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None:
+        collection = self._sanitize_collection_name(collection=collection)
+
+        doc: dict[str, Any] | None = await self._collections_by_name[collection].find_one(filter={"key": key})
+
+        if not doc:
+            return None
+
+        json_value: str | None = doc.get("value")
+
+        if not isinstance(json_value, str):
+            return None
+
+        return ManagedEntry.from_json(json_str=json_value)
+
+    @override
+    async def _put_managed_entry(
+        self,
+        *,
+        key: str,
+        collection: str,
+        managed_entry: ManagedEntry,
+    ) -> None:
+        json_value: str = managed_entry.to_json()
+
+        collection = self._sanitize_collection_name(collection=collection)
+
+        _ = await self._collections_by_name[collection].update_one(
+            filter={"key": key},
+            update={
+                "$set": {
+                    "collection": collection,
+                    "key": key,
+                    "value": json_value,
+                    "created_at": managed_entry.created_at.isoformat() if managed_entry.created_at else None,
+                    "expires_at": managed_entry.expires_at.isoformat() if managed_entry.expires_at else None,
+                    "updated_at": now().isoformat(),
+                }
+            },
+            upsert=True,
+        )
+
+    @override
+    async def _delete_managed_entry(self, *, key: str, collection: str) -> bool:
+        collection = self._sanitize_collection_name(collection=collection)
+
+        result: DeleteResult = await self._collections_by_name[collection].delete_one(filter={"key": key})
+        return bool(result.deleted_count)
+
+    @override
+    async def _get_collection_names(self, *, limit: int | None = None) -> list[str]:
+        limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT)
+
+        return list(self._collections_by_name.keys())[:limit]
+
+    @override
+    async def _delete_collection(self, *, collection: str) -> bool:
+        collection = self._sanitize_collection_name(collection=collection)
+
+        _ = await self._db.drop_collection(name_or_collection=collection)
+        return True
+
+    @override
+    async def _close(self) -> None:
+        await self._client.close()
diff --git a/src/kv_store_adapter/stores/null/store.py b/src/kv_store_adapter/stores/null/store.py
index 1772d020..3652e518 100644
---
a/src/kv_store_adapter/stores/null/store.py +++ b/src/kv_store_adapter/stores/null/store.py @@ -1,7 +1,7 @@ from typing_extensions import override from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.utils.managed_entry import ManagedEntry class NullStore(BaseStore): diff --git a/src/kv_store_adapter/stores/redis/store.py b/src/kv_store_adapter/stores/redis/store.py index ca96282d..db3d7265 100644 --- a/src/kv_store_adapter/stores/redis/store.py +++ b/src/kv_store_adapter/stores/redis/store.py @@ -3,9 +3,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseDestroyStore, BaseEnumerateKeysStore, BaseStore -from kv_store_adapter.stores.utils.compound import compound_key, get_keys_from_compound_keys -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry +from kv_store_adapter.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseEnumerateKeysStore, BaseStore +from kv_store_adapter.utils.compound import compound_key, get_keys_from_compound_keys +from kv_store_adapter.utils.managed_entry import ManagedEntry try: from redis.asyncio import Redis @@ -17,7 +17,7 @@ PAGE_LIMIT = 10000 -class RedisStore(BaseDestroyStore, BaseEnumerateKeysStore, BaseStore): +class RedisStore(BaseDestroyStore, BaseEnumerateKeysStore, BaseContextManagerStore, BaseStore): """Redis-based key-value store.""" _client: Redis @@ -132,3 +132,7 @@ async def _get_collection_keys(self, *, collection: str, limit: int | None = Non @override async def _delete_store(self) -> bool: return await self._client.flushdb() # pyright: ignore[reportUnknownMemberType, reportAny] + + @override + async def _close(self) -> None: + await self._client.close() diff --git a/src/kv_store_adapter/stores/simple/store.py b/src/kv_store_adapter/stores/simple/store.py index 101b560f..7cd559a7 100644 --- a/src/kv_store_adapter/stores/simple/store.py +++ b/src/kv_store_adapter/stores/simple/store.py @@ -10,9 +10,9 @@ BaseEnumerateKeysStore, BaseStore, ) -from kv_store_adapter.stores.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys -from kv_store_adapter.stores.utils.managed_entry import ManagedEntry, load_from_json -from kv_store_adapter.stores.utils.time_to_live import seconds_to +from kv_store_adapter.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys +from kv_store_adapter.utils.managed_entry import ManagedEntry, load_from_json +from kv_store_adapter.utils.time_to_live import seconds_to DEFAULT_SIMPLE_MANAGED_STORE_MAX_ENTRIES = 1000 DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 1000 diff --git a/src/kv_store_adapter/stores/valkey/__init__.py b/src/kv_store_adapter/stores/valkey/__init__.py new file mode 100644 index 00000000..281d9a58 --- /dev/null +++ b/src/kv_store_adapter/stores/valkey/__init__.py @@ -0,0 +1,3 @@ +from .store import ValkeyStore + +__all__ = ["ValkeyStore"] diff --git a/src/kv_store_adapter/stores/valkey/store.py b/src/kv_store_adapter/stores/valkey/store.py new file mode 100644 index 00000000..572a8bd6 --- /dev/null +++ b/src/kv_store_adapter/stores/valkey/store.py @@ -0,0 +1,127 @@ +from typing import overload + +from glide.glide_client import BaseClient, ServerCredentials +from glide_shared.commands.core_options import ExpirySet, ExpiryType +from glide_shared.config import GlideClientConfiguration, NodeAddress +from typing_extensions import override + +from kv_store_adapter.stores.base import 
BaseContextManagerStore, BaseStore
+from kv_store_adapter.utils.compound import compound_key
+from kv_store_adapter.utils.managed_entry import ManagedEntry
+
+try:
+    # Use the valkey-glide async client to communicate with a Valkey server
+    from glide.glide_client import GlideClient
+except ImportError as e:
+    msg = "ValkeyStore requires py-kv-store-adapter[valkey]"
+    raise ImportError(msg) from e
+
+
+DEFAULT_PAGE_SIZE = 10000
+PAGE_LIMIT = 10000
+
+
+class ValkeyStore(BaseContextManagerStore, BaseStore):
+    """Valkey-based key-value store (Redis protocol compatible)."""
+
+    _connected_client: BaseClient | None
+    _client_config: GlideClientConfiguration | None
+
+    @overload
+    def __init__(self, *, client: BaseClient, default_collection: str | None = None) -> None: ...
+
+    @overload
+    def __init__(
+        self,
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        db: int = 0,
+        username: str | None = None,
+        password: str | None = None,
+        default_collection: str | None = None,
+    ) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        client: BaseClient | None = None,
+        default_collection: str | None = None,
+        host: str = "localhost",
+        port: int = 6379,
+        db: int = 0,
+        username: str | None = None,
+        password: str | None = None,
+    ) -> None:
+        if client is not None:
+            self._connected_client = client
+        else:
+            # Build a Glide client configuration from the host/port/credential details
+            addresses: list[NodeAddress] = [NodeAddress(host=host, port=port)]
+            credentials: ServerCredentials | None = ServerCredentials(password=password, username=username) if password else None
+            self._client_config = GlideClientConfiguration(addresses=addresses, database_id=db, credentials=credentials)
+            self._connected_client = None
+
+        super().__init__(default_collection=default_collection)
+
+    @override
+    async def _setup(self) -> None:
+        if self._connected_client is None:
+            if self._client_config is None:
+                # This should never happen, makes the type checker happy though
+                msg = "Client configuration is not set"
+                raise ValueError(msg)
+
+            self._connected_client = await GlideClient.create(config=self._client_config)
+
+    @property
+    def _client(self) -> BaseClient:
+        if self._connected_client is None:
+            # This should never happen, makes the type checker happy though
+            msg = "Client is not connected"
+            raise ValueError(msg)
+        return self._connected_client
+
+    @override
+    async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None:
+        combo_key: str = compound_key(collection=collection, key=key)
+
+        response: bytes | None = await self._client.get(key=combo_key)
+        if not isinstance(response, bytes):
+            return None
+        decoded_response: str = response.decode("utf-8")
+        return ManagedEntry.from_json(json_str=decoded_response)
+
+    @override
+    async def _put_managed_entry(
+        self,
+        *,
+        key: str,
+        collection: str,
+        managed_entry: ManagedEntry,
+    ) -> None:
+        combo_key: str = compound_key(collection=collection, key=key)
+
+        json_value: str = managed_entry.to_json()
+
+        expiry: ExpirySet | None = ExpirySet(expiry_type=ExpiryType.SEC, value=int(managed_entry.ttl)) if managed_entry.ttl else None
+
+        _ = await self._client.set(key=combo_key, value=json_value, expiry=expiry)
+
+    @override
+    async def _delete_managed_entry(self, *, key: str, collection: str) -> bool:
+        combo_key: str = compound_key(collection=collection, key=key)
+        return await self._client.delete(keys=[combo_key]) != 0
+
+    @override
+    async def _close(self) -> None:
+        await self._client.close()
+
+    # @override
+    # async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]:
+    #     limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT)
+    #     pattern = compound_key(collection=collection, key="*")
+    #     _cursor: int
+    #     keys: list[str]
+    #     _cursor, keys = await self._client.scan(cursor=0, match=pattern, count=limit)
+    #     return get_keys_from_compound_keys(compound_keys=keys, collection=collection)
diff --git a/src/kv_store_adapter/stores/utils/compound.py b/src/kv_store_adapter/utils/compound.py
similarity index 97%
rename from src/kv_store_adapter/stores/utils/compound.py
rename to src/kv_store_adapter/utils/compound.py
index 4b1d3ef5..a28fdac0 100644
--- a/src/kv_store_adapter/stores/utils/compound.py
+++ b/src/kv_store_adapter/utils/compound.py
@@ -45,7 +45,7 @@ def prefix_key(key: str, prefix: str, separator: str | None = None) -> str:
 def unprefix_key(key: str, prefix: str, separator: str | None = None) -> str:
     separator = separator or DEFAULT_PREFIX_SEPARATOR
     if not key.startswith(prefix + separator):
-        msg = f"Key {key} is not prefixed with {prefix}"
+        msg = f"Key {key} is not prefixed with {prefix}{separator}"
         raise ValueError(msg)
 
     return key[len(prefix + separator) :]
@@ -58,7 +58,7 @@ def prefix_collection(collection: str, prefix: str, separator: str | None = None
 def unprefix_collection(collection: str, prefix: str, separator: str | None = None) -> str:
     separator = separator or DEFAULT_PREFIX_SEPARATOR
     if not collection.startswith(prefix + separator):
-        msg = f"Collection {collection} is not prefixed with {prefix}"
+        msg = f"Collection {collection} is not prefixed with {prefix}{separator}"
         raise ValueError(msg)
 
     return collection[len(prefix + separator) :]
diff --git a/src/kv_store_adapter/stores/utils/managed_entry.py b/src/kv_store_adapter/utils/managed_entry.py
similarity index 92%
rename from src/kv_store_adapter/stores/utils/managed_entry.py
rename to src/kv_store_adapter/utils/managed_entry.py
index 5b53b01b..6f696fd2 100644
--- a/src/kv_store_adapter/stores/utils/managed_entry.py
+++ b/src/kv_store_adapter/utils/managed_entry.py
@@ -6,7 +6,7 @@ from typing_extensions import Self
 
 from kv_store_adapter.errors import DeserializationError, SerializationError
-from kv_store_adapter.stores.utils.time_to_live import now, now_plus, try_parse_datetime
+from kv_store_adapter.utils.time_to_live import now, now_plus, try_parse_datetime_str
 
 
 @dataclass(kw_only=True)
@@ -64,8 +64,8 @@ def from_json(cls, json_str: str, includes_metadata: bool = True, ttl: float | N
                 value=data,
             )
 
-        created_at: datetime | None = try_parse_datetime(value=data.get("created_at"))
-        expires_at: datetime | None = try_parse_datetime(value=data.get("expires_at"))
+        created_at: datetime | None = try_parse_datetime_str(value=data.get("created_at"))
+        expires_at: datetime | None = try_parse_datetime_str(value=data.get("expires_at"))
 
         value: dict[str, Any] | None = data.get("value")
diff --git a/src/kv_store_adapter/utils/sanitize.py b/src/kv_store_adapter/utils/sanitize.py
new file mode 100644
index 00000000..102e5819
--- /dev/null
+++ b/src/kv_store_adapter/utils/sanitize.py
@@ -0,0 +1,160 @@
+import hashlib
+from enum import Enum
+
+MINIMUM_MAX_LENGTH = 16
+
+DEFAULT_HASH_FRAGMENT_SIZE = 8
+
+DEFAULT_HASH_FRAGMENT_SEPARATOR = "-"
+DEFAULT_REPLACEMENT_CHARACTER = "_"
+
+LOWERCASE_ALPHABET = "abcdefghijklmnopqrstuvwxyz"
+UPPERCASE_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+NUMBERS = "0123456789"
+ALPHANUMERIC_CHARACTERS = LOWERCASE_ALPHABET + UPPERCASE_ALPHABET + NUMBERS
+
+
+def generate_hash_fragment(
+    value: str,
+    size: int = DEFAULT_HASH_FRAGMENT_SIZE,
+) -> str:
+    """Generate a hash fragment of the value."""
+
+    return hashlib.sha256(value.encode()).hexdigest()[:size]
+
+
+class HashFragmentMode(str, Enum):
+    ALWAYS = "always"
+    NEVER = "never"
+    ONLY_IF_CHANGED = "only_if_changed"
+
+
+def sanitize_characters_in_string(value: str, allowed_characters: str, replace_with: str) -> str:
+    """Replace disallowed characters in a string. If multiple disallowed characters appear in a row, only
+    the first one will be replaced; the rest will be removed. If every character is disallowed, an
+    empty string will be returned.
+
+    Args:
+        value: The value to replace characters in.
+        allowed_characters: The characters that are allowed to remain.
+        replace_with: The string substituted for each run of disallowed characters.
+    """
+    new_value = ""
+    last_char_was_replaced = False
+
+    for char in value:
+        if char in allowed_characters:
+            new_value += char
+            last_char_was_replaced = False
+        else:
+            if last_char_was_replaced:
+                continue
+
+            new_value += replace_with
+            last_char_was_replaced = True
+
+    if len(new_value) == 1 and last_char_was_replaced:
+        return ""
+
+    return new_value
+
+
+def sanitize_string(
+    value: str,
+    max_length: int,
+    allowed_characters: str | None = None,
+    replacement_character: str = DEFAULT_REPLACEMENT_CHARACTER,
+    hash_fragment_separator: str = DEFAULT_HASH_FRAGMENT_SEPARATOR,
+    hash_fragment_mode: HashFragmentMode = HashFragmentMode.ONLY_IF_CHANGED,
+    hash_fragment_length: int = DEFAULT_HASH_FRAGMENT_SIZE,
+) -> str:
+    """Sanitize the value, replacing disallowed characters and optionally appending a hash fragment of the original value.
+
+    If the entire value is sanitized away and hash_fragment_mode is HashFragmentMode.ALWAYS or HashFragmentMode.ONLY_IF_CHANGED,
+    the value returned will be the hash fragment only.
+
+    If the entire value is sanitized away and hash_fragment_mode is HashFragmentMode.NEVER, an error will be raised.
+
+    Args:
+        value: The value to sanitize.
+        allowed_characters: The characters allowed to remain in the value.
+        max_length: The maximum length of the value (with the hash fragment added).
+        hash_fragment_separator: The separator to add between the value and the hash fragment.
+        hash_fragment_mode: When to append the hash fragment.
+    """
+    if max_length < MINIMUM_MAX_LENGTH:
+        msg = f"max_length must be greater than or equal to {MINIMUM_MAX_LENGTH}"
+        raise ValueError(msg)
+
+    if hash_fragment_length > max_length // 2:
+        msg = "hash_fragment_length must be less than or equal to half of max_length"
+        raise ValueError(msg)
+
+    hash_fragment: str = generate_hash_fragment(value=value, size=hash_fragment_length)
+    hash_fragment_size_required: int = len(hash_fragment_separator) + len(hash_fragment)
+
+    sanitized_value: str = (
+        sanitize_characters_in_string(value=value, allowed_characters=allowed_characters, replace_with=replacement_character)
+        if allowed_characters
+        else value
+    )
+
+    actual_max_length: int
+
+    if hash_fragment_mode == HashFragmentMode.ALWAYS:
+        actual_max_length = max_length - hash_fragment_size_required
+
+        sanitized_value = sanitized_value[:actual_max_length]
+
+        if not sanitized_value:
+            return hash_fragment
+
+        return sanitized_value + hash_fragment_separator + hash_fragment
+
+    if hash_fragment_mode == HashFragmentMode.ONLY_IF_CHANGED:
+        sanitized_value = sanitized_value[:max_length]
+
+        if value == sanitized_value:
+            return value
+
+        actual_max_length = max_length - hash_fragment_size_required
+
+        sanitized_value = sanitized_value[:actual_max_length]
+
+        if not sanitized_value:
+            return hash_fragment
+
+        return sanitized_value + hash_fragment_separator + hash_fragment
+
+    if not sanitized_value:
+        msg = "Entire value was sanitized and hash_fragment_mode is HashFragmentMode.NEVER"
+        raise ValueError(msg)
+
+    return sanitized_value
+
+
+def hash_excess_length(value: str, max_length: int) -> str:
+    """Hash part of the value if it exceeds the maximum length. This operation
+    will truncate the value to the maximum length minus 8 characters and will swap
+    the last 8 characters with the first 8 characters of the generated hash.
+
+    Args:
+        value: The value to hash.
+        max_length: The maximum length of the value. Must be greater than MINIMUM_MAX_LENGTH (16).
+
+    Returns:
+        The hashed value if the value exceeds the maximum length, otherwise the original value.
+    """
+    if max_length <= MINIMUM_MAX_LENGTH:
+        msg = f"max_length must be greater than {MINIMUM_MAX_LENGTH}"
+        raise ValueError(msg)
+
+    if len(value) <= max_length:
+        return value
+
+    truncated_value = value[: max_length - 8]
+
+    hash_of_value = hashlib.sha256(value.encode()).hexdigest()
+    first_eight_of_hash = hash_of_value[:8]
+
+    return truncated_value + first_eight_of_hash
diff --git a/src/kv_store_adapter/stores/utils/time_to_live.py b/src/kv_store_adapter/utils/time_to_live.py
similarity index 79%
rename from src/kv_store_adapter/stores/utils/time_to_live.py
rename to src/kv_store_adapter/utils/time_to_live.py
index d6373a46..b99d6e84 100644
--- a/src/kv_store_adapter/stores/utils/time_to_live.py
+++ b/src/kv_store_adapter/utils/time_to_live.py
@@ -28,7 +28,11 @@ def now_plus(seconds: float) -> datetime:
     return datetime.now(tz=timezone.utc) + timedelta(seconds=seconds)
 
 
-def try_parse_datetime(value: Any) -> datetime | None:  # pyright: ignore[reportAny]
-    if isinstance(value, str):
-        return datetime.fromisoformat(value)
+def try_parse_datetime_str(value: Any) -> datetime | None:  # pyright: ignore[reportAny]
+    try:
+        if isinstance(value, str):
+            return datetime.fromisoformat(value)
+    except ValueError:
+        return None
+
     return None
diff --git a/src/kv_store_adapter/wrappers/prefix_collections.py b/src/kv_store_adapter/wrappers/prefix_collections.py
index a1dc436c..f5f79b48 100644
--- a/src/kv_store_adapter/wrappers/prefix_collections.py
+++ b/src/kv_store_adapter/wrappers/prefix_collections.py
@@ -4,8 +4,8 @@
 from typing_extensions import override
 
 from kv_store_adapter.stores.base import DEFAULT_COLLECTION_NAME
-from kv_store_adapter.stores.utils.compound import prefix_collection, unprefix_collection
 from kv_store_adapter.types import KVStore
+from kv_store_adapter.utils.compound import prefix_collection, unprefix_collection
 from kv_store_adapter.wrappers.base import BaseWrapper
diff --git a/src/kv_store_adapter/wrappers/prefix_keys.py b/src/kv_store_adapter/wrappers/prefix_keys.py
index 78d13c30..8db1f391 100644
--- a/src/kv_store_adapter/wrappers/prefix_keys.py
+++ b/src/kv_store_adapter/wrappers/prefix_keys.py
@@ -3,8 +3,8 @@
 
 from typing_extensions import override
 
-from kv_store_adapter.stores.utils.compound import prefix_key, unprefix_key
 from kv_store_adapter.types import KVStore
+from kv_store_adapter.utils.compound import prefix_key, unprefix_key
 from kv_store_adapter.wrappers.base import BaseWrapper
diff --git a/src/kv_store_adapter/wrappers/single_collection.py b/src/kv_store_adapter/wrappers/single_collection.py
index 28bbe8a7..627c30c0 100644
--- a/src/kv_store_adapter/wrappers/single_collection.py
+++ b/src/kv_store_adapter/wrappers/single_collection.py
@@ -4,8 +4,8 @@
 from typing_extensions import override
 
 from kv_store_adapter.stores.base import DEFAULT_COLLECTION_NAME
-from kv_store_adapter.stores.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key
 from kv_store_adapter.types import KVStore
+from kv_store_adapter.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key
 from kv_store_adapter.wrappers.base import BaseWrapper
diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py
index 51fca1bc..5814f9d8 100644
--- a/tests/stores/conftest.py
+++ b/tests/stores/conftest.py
@@ -1,5 +1,7 @@
 import asyncio
 import hashlib
+import os
 import subprocess
+import sys
 from abc import ABC, abstractmethod
 from collections.abc import AsyncGenerator
@@ -8,8 +10,8 @@
 import pytest
 from pydantic import AnyHttpUrl
 
-from kv_store_adapter.errors import SerializationError
-from kv_store_adapter.stores.base import BaseStore
+from kv_store_adapter.errors import InvalidTTLError, SerializationError
+from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore
 
 
 def now() -> datetime:
@@ -33,6 +35,28 @@ def detect_docker() -> bool:
     return result.returncode == 0
 
 
+def detect_on_ci() -> bool:
+    return os.getenv("CI", "false") == "true"
+
+
+def detect_on_windows() -> bool:
+    return os.name == "nt"
+
+
+def detect_on_macos() -> bool:
+    return sys.platform == "darwin"
+
+
+def should_run_docker_tests() -> bool:
+    if detect_on_ci():
+        return all([detect_docker(), not detect_on_windows(), not detect_on_macos()])
+    return detect_docker()
+
+
+def should_skip_docker_tests() -> bool:
+    return not should_run_docker_tests()
+
+
 class BaseStoreTests(ABC):
     async def eventually_consistent(self) -> None:  # noqa: B027
         """Subclasses can override this to wait for eventually consistent operations."""
@@ -137,11 +161,13 @@ async def test_put_ttl_get_ttl(self, store: BaseStore):
 
     async def test_negative_ttl(self, store: BaseStore):
-        """Tests that a negative ttl will return None when getting the key."""
-        await store.put(collection="test", key="test", value={"test": "test"}, ttl=-100)
+        """Tests that putting a key with a negative ttl raises an InvalidTTLError."""
+        with pytest.raises(InvalidTTLError):
+            await store.put(collection="test", key="test", value={"test": "test"}, ttl=-100)
 
     async def test_put_expired_get_none(self, store: BaseStore):
-        """Tests that a put call with a negative ttl will return None when getting the key."""
-        await store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=-100)
+        """Tests that an entry with a short ttl returns None once it has expired."""
+        await store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1)
+        await asyncio.sleep(3)
         assert await store.get(collection="test_collection", key="test_key") is None
 
     async def test_long_collection_name(self, store: BaseStore):
@@ -191,3 +217,18 @@ async def worker(store: BaseStore, worker_id: int):
             assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None
 
     _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(1)])
+
+
+class ContextManagerStoreTestMixin:
+    @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True)
+    async def enter_exit_store(
+        self, request: pytest.FixtureRequest, store: BaseContextManagerStore
+    ) -> AsyncGenerator[BaseContextManagerStore, None]:
+        context_manager = request.param  # pyright: ignore[reportAny]
+
+        if context_manager:
+            async with store:
+                yield store
+        else:
+            yield store
+            await store.close()
diff --git a/tests/stores/disk/test_disk.py b/tests/stores/disk/test_disk.py
index 9c2b1017..b1e30a12 100644
--- a/tests/stores/disk/test_disk.py
+++ b/tests/stores/disk/test_disk.py
@@ -5,14 +5,16 @@
 from typing_extensions import override
 
 from kv_store_adapter.stores.disk import DiskStore
-from tests.stores.conftest import BaseStoreTests
+from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin
 
 TEST_SIZE_LIMIT = 1 * 1024 * 1024  # 1MB
 
 
-class TestDiskStore(BaseStoreTests):
+class TestDiskStore(ContextManagerStoreTestMixin, BaseStoreTests):
     @override
     @pytest.fixture
    async def store(self) -> AsyncGenerator[DiskStore, None]:
         with tempfile.TemporaryDirectory() as temp_dir:
-            yield DiskStore(directory=temp_dir, max_size=TEST_SIZE_LIMIT)
+            store = DiskStore(directory=temp_dir, max_size=TEST_SIZE_LIMIT)
+
+            yield store
diff --git a/tests/stores/disk/test_multi_disk.py b/tests/stores/disk/test_multi_disk.py
index dce16b02..eeb7ed5c 100644
--- 
a/tests/stores/disk/test_multi_disk.py +++ b/tests/stores/disk/test_multi_disk.py @@ -16,4 +16,6 @@ class TestMultiDiskStore(BaseStoreTests): @pytest.fixture async def store(self) -> AsyncGenerator[MultiDiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield MultiDiskStore(base_directory=Path(temp_dir), max_size=TEST_SIZE_LIMIT) + disk_store = MultiDiskStore(base_directory=Path(temp_dir), max_size=TEST_SIZE_LIMIT) + + yield disk_store diff --git a/tests/stores/elasticsearch/test_elasticsearch.py b/tests/stores/elasticsearch/test_elasticsearch.py index e5e08dc6..8f7908d3 100644 --- a/tests/stores/elasticsearch/test_elasticsearch.py +++ b/tests/stores/elasticsearch/test_elasticsearch.py @@ -7,7 +7,7 @@ from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.elasticsearch import ElasticsearchStore -from tests.stores.conftest import BaseStoreTests, detect_docker +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB @@ -28,8 +28,7 @@ async def elasticsearch_client() -> AsyncGenerator[AsyncElasticsearch, None]: @pytest.mark.skipif(os.getenv("ES_URL") is None, reason="Elasticsearch is not configured") -@pytest.mark.skipif(not detect_docker(), reason="Docker is not available") -class TestElasticsearchStore(BaseStoreTests): +class TestElasticsearchStore(ContextManagerStoreTestMixin, BaseStoreTests): @override @pytest.fixture async def store(self, elasticsearch_client: AsyncElasticsearch) -> ElasticsearchStore: diff --git a/tests/stores/memcached/test_memcached.py b/tests/stores/memcached/test_memcached.py index b959503f..b2fbe558 100644 --- a/tests/stores/memcached/test_memcached.py +++ b/tests/stores/memcached/test_memcached.py @@ -8,7 +8,7 @@ from kv_store_adapter.stores.base import BaseStore from kv_store_adapter.stores.memcached import MemcachedStore -from tests.stores.conftest import BaseStoreTests, detect_docker +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # Memcached test configuration MEMCACHED_HOST = "localhost" @@ -43,8 +43,8 @@ class MemcachedFailedToStartError(Exception): pass -@pytest.mark.skipif(not detect_docker(), reason="Docker is not available") -class TestMemcachedStore(BaseStoreTests): +@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available") +class TestMemcachedStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_memcached(self) -> AsyncGenerator[None, None]: _ = await asyncio.create_subprocess_exec("docker", "stop", "memcached-test") diff --git a/tests/stores/mongodb/test_mongodb.py b/tests/stores/mongodb/test_mongodb.py new file mode 100644 index 00000000..1e7cf493 --- /dev/null +++ b/tests/stores/mongodb/test_mongodb.py @@ -0,0 +1,88 @@ +import asyncio +import contextlib +from collections.abc import AsyncGenerator +from typing import Any + +import pytest +from inline_snapshot import snapshot +from pymongo import AsyncMongoClient +from typing_extensions import override + +from kv_store_adapter.stores.base import BaseStore +from kv_store_adapter.stores.mongodb import MongoDBStore +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests + +# MongoDB test configuration +MONGODB_HOST = "localhost" +MONGODB_HOST_PORT = 27017 +MONGODB_TEST_DB = "kv-store-adapter-tests" + +WAIT_FOR_MONGODB_TIMEOUT = 30 + + +async def ping_mongodb() -> bool: + try: + client: AsyncMongoClient[Any] = 
AsyncMongoClient[Any](host=MONGODB_HOST, port=MONGODB_HOST_PORT)
+        _ = await client.list_database_names()
+    except Exception:
+        return False
+
+    return True
+
+
+async def wait_mongodb() -> bool:
+    for _ in range(WAIT_FOR_MONGODB_TIMEOUT):
+        if await ping_mongodb():
+            return True
+        await asyncio.sleep(delay=1)
+    return False
+
+
+class MongoDBFailedToStartError(Exception):
+    pass
+
+
+@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available")
+class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests):
+    @pytest.fixture(autouse=True, scope="session")
+    async def setup_mongodb(self) -> AsyncGenerator[None, None]:
+        _ = await asyncio.create_subprocess_exec("docker", "stop", "mongodb-test")
+        _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "mongodb-test")
+
+        process = await asyncio.create_subprocess_exec(
+            "docker", "run", "-d", "--name", "mongodb-test", "-p", f"{MONGODB_HOST_PORT}:27017", "mongo:7"
+        )
+        _ = await process.wait()
+        if not await wait_mongodb():
+            msg = "MongoDB failed to start"
+            raise MongoDBFailedToStartError(msg)
+        try:
+            yield
+        finally:
+            _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "mongodb-test")
+
+    @override
+    @pytest.fixture
+    async def store(self, setup_mongodb: None) -> MongoDBStore:
+        store = MongoDBStore(url=f"mongodb://{MONGODB_HOST}:{MONGODB_HOST_PORT}", db_name=MONGODB_TEST_DB)
+        # Ensure a clean run by dropping the test database if it already exists
+        with contextlib.suppress(Exception):
+            _ = await store._client.drop_database(name_or_database=MONGODB_TEST_DB)  # pyright: ignore[reportPrivateUsage]
+
+        return store
+
+    @pytest.fixture
+    async def mongodb_store(self, store: MongoDBStore) -> MongoDBStore:
+        return store
+
+    @pytest.mark.skip(reason="Distributed Caches are unbounded")
+    @override
+    async def test_not_unbounded(self, store: BaseStore): ...
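+
+    # MongoDB restricts the characters and length allowed in collection names, so
+    # the store sanitizes the requested name and appends a short hash fragment to
+    # keep sanitized names unique; the test below pins that behavior.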
+
+    async def test_mongodb_collection_name_sanitization(self, mongodb_store: MongoDBStore):
+        """Tests that special characters in the collection name do not raise an error."""
+        await mongodb_store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"})
+        assert await mongodb_store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"}
+
+        collections = await mongodb_store.collections()
+        assert collections == snapshot(["test_collection_-daf4a2ec"])
diff --git a/tests/stores/redis/test_redis.py b/tests/stores/redis/test_redis.py
index 3ebfab63..59ed3959 100644
--- a/tests/stores/redis/test_redis.py
+++ b/tests/stores/redis/test_redis.py
@@ -7,7 +7,7 @@
 
 from kv_store_adapter.stores.base import BaseStore
 from kv_store_adapter.stores.redis import RedisStore
-from tests.stores.conftest import BaseStoreTests, detect_docker
+from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests
 
 # Redis test configuration
 REDIS_HOST = "localhost"
@@ -39,8 +39,8 @@ class RedisFailedToStartError(Exception):
     pass
 
 
-@pytest.mark.skipif(not detect_docker(), reason="Docker is not running")
-class TestRedisStore(BaseStoreTests):
+@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running")
+class TestRedisStore(ContextManagerStoreTestMixin, BaseStoreTests):
     @pytest.fixture(autouse=True, scope="session")
     async def setup_redis(self) -> AsyncGenerator[None, None]:
         _ = await asyncio.create_subprocess_exec("docker", "stop", "redis-test")
diff --git a/tests/stores/valkey/test_valkey.py b/tests/stores/valkey/test_valkey.py
new file mode 100644
index 00000000..852143c9
--- /dev/null
+++ b/tests/stores/valkey/test_valkey.py
@@ -0,0 +1,81 @@
+import asyncio
+from collections.abc import AsyncGenerator
+
+import pytest
+from glide.glide_client import GlideClient
+from glide_shared.config import GlideClientConfiguration, NodeAddress
+from typing_extensions import override
+
+from kv_store_adapter.stores.base import BaseStore
+from kv_store_adapter.stores.valkey import ValkeyStore
+from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests
+
+# Valkey test configuration
+VALKEY_HOST = "localhost"
+VALKEY_PORT = 6380  # host port; avoids clashing with the Redis tests on 6379
+VALKEY_DB = 15
+
+WAIT_FOR_VALKEY_TIMEOUT = 30
+
+
+async def get_valkey_client() -> GlideClient:
+    client_config: GlideClientConfiguration = GlideClientConfiguration(
+        addresses=[NodeAddress(host=VALKEY_HOST, port=VALKEY_PORT)], database_id=VALKEY_DB
+    )
+    return await GlideClient.create(config=client_config)
+
+
+async def ping_valkey() -> bool:
+    try:
+        client = await get_valkey_client()
+        _ = await client.ping()
+    except Exception:
+        return False
+
+    return True
+
+
+async def wait_valkey() -> bool:
+    for _ in range(WAIT_FOR_VALKEY_TIMEOUT):
+        if await ping_valkey():
+            return True
+        await asyncio.sleep(delay=1)
+    return False
+
+
+class ValkeyFailedToStartError(Exception):
+    pass
+
+
+@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running")
+class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests):
+    @pytest.fixture(autouse=True, scope="session")
+    async def setup_valkey(self) -> AsyncGenerator[None, None]:
+        _ = await asyncio.create_subprocess_exec("docker", "stop", "valkey-test")
+        _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "valkey-test")
+
+        process = await asyncio.create_subprocess_exec(
+            "docker", "run", "-d", "--name", "valkey-test", "-p", 
f"{VALKEY_PORT}:6379", "valkey/valkey:latest" + ) + _ = await process.wait() + if not await wait_valkey(): + msg = "Valkey failed to start" + raise ValkeyFailedToStartError(msg) + try: + yield + finally: + _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "valkey-test") + + @override + @pytest.fixture + async def store(self, setup_valkey: None) -> ValkeyStore: + store: ValkeyStore = ValkeyStore(host=VALKEY_HOST, port=VALKEY_PORT, db=VALKEY_DB) + + client: GlideClient = await get_valkey_client() + _ = await client.flushdb() + + return store + + @pytest.mark.skip(reason="Distributed Caches are unbounded") + @override + async def test_not_unbounded(self, store: BaseStore): ... diff --git a/tests/stores/base/test_kv_json_store.py b/tests/utils/test_managed_entry.py similarity index 93% rename from tests/stores/base/test_kv_json_store.py rename to tests/utils/test_managed_entry.py index 8d0fb26d..b6eb8a63 100644 --- a/tests/stores/base/test_kv_json_store.py +++ b/tests/utils/test_managed_entry.py @@ -3,7 +3,7 @@ import pytest -from kv_store_adapter.stores.utils.managed_entry import dump_to_json, load_from_json +from kv_store_adapter.utils.managed_entry import dump_to_json, load_from_json from tests.cases import DICTIONARY_TO_JSON_TEST_CASES, DICTIONARY_TO_JSON_TEST_CASES_NAMES FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) diff --git a/tests/utils/test_sanitize.py b/tests/utils/test_sanitize.py new file mode 100644 index 00000000..11784aa1 --- /dev/null +++ b/tests/utils/test_sanitize.py @@ -0,0 +1,88 @@ +import pytest +from inline_snapshot import snapshot + +from kv_store_adapter.utils.sanitize import ( + ALPHANUMERIC_CHARACTERS, + LOWERCASE_ALPHABET, + NUMBERS, + UPPERCASE_ALPHABET, + HashFragmentMode, + sanitize_string, +) + +ALWAYS_HASH = HashFragmentMode.ALWAYS +ONLY_IF_CHANGED_HASH = HashFragmentMode.ONLY_IF_CHANGED +NEVER_HASH = HashFragmentMode.NEVER + + +def test_sanitize_string(): + sanitized_string = sanitize_string(value="test string", max_length=16) + assert sanitized_string == snapshot("test string") + + sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=ALWAYS_HASH) + assert sanitized_string == snapshot("test st-d5579c46") + + sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=ONLY_IF_CHANGED_HASH) + assert sanitized_string == snapshot("test string") + + sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=NEVER_HASH) + assert sanitized_string == snapshot("test string") + + +@pytest.mark.parametrize( + argnames=("hash_fragment_mode"), + argvalues=[(ONLY_IF_CHANGED_HASH), (NEVER_HASH)], +) +@pytest.mark.parametrize( + argnames=("max_length"), + argvalues=[16, 32], +) +@pytest.mark.parametrize( + argnames=("value", "allowed_chars"), + argvalues=[ + ("test", None), + ("test", "test"), + ("test_test", "test_"), + ("!@#$%^&*()", "!@#$%^&*()"), + ("test", LOWERCASE_ALPHABET), + ("test", ALPHANUMERIC_CHARACTERS), + ], +) +def test_unchanged_strings(value: str, allowed_chars: str | None, max_length: int, hash_fragment_mode: HashFragmentMode): + sanitized_string = sanitize_string( + value=value, allowed_characters=allowed_chars, max_length=max_length, hash_fragment_mode=hash_fragment_mode + ) + assert sanitized_string == value + + +@pytest.mark.parametrize( + argnames=("hash_fragment_mode"), + argvalues=[(ONLY_IF_CHANGED_HASH), (ALWAYS_HASH)], +) +def test_changed_strings(hash_fragment_mode: HashFragmentMode): + def process_string(value: str, 
allowed_characters: str | None) -> str: + return sanitize_string(value=value, allowed_characters=allowed_characters, max_length=16, hash_fragment_mode=hash_fragment_mode) + + sanitized_string = process_string(value="test", allowed_characters=NUMBERS) + assert sanitized_string == snapshot("9f86d081") + + sanitized_string = process_string(value="test", allowed_characters=UPPERCASE_ALPHABET) + assert sanitized_string == snapshot("9f86d081") + + sanitized_string = process_string(value="test with spaces", allowed_characters=LOWERCASE_ALPHABET) + assert sanitized_string == snapshot("test_wi-ed2daf39") + + sanitized_string = process_string(value="test too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) + assert sanitized_string == snapshot("test_to-479b94c3") + + sanitized_string = process_string(value="test too long with spaces", allowed_characters=None) + assert sanitized_string == snapshot("test to-479b94c3") + + sanitized_string = process_string(value="test too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) + assert sanitized_string == snapshot("test_to-479b94c3") + + sanitized_string = process_string(value="test way too long with spaces", allowed_characters=None) + assert sanitized_string == snapshot("test wa-3d014b9b") + + sanitized_string = process_string(value="test way too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) + assert sanitized_string == snapshot("test_wa-3d014b9b") diff --git a/uv.lock b/uv.lock index b0bf54d5..75d3e56a 100644 --- a/uv.lock +++ b/uv.lock @@ -131,6 +131,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + [[package]] name = "asttokens" version = "3.0.0" @@ -236,6 +251,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6d/76/288d91c284ac1787f01c8260af5ea89dcfa6c0abc9acd601d01cf6f72f86/diskcache_stubs-5.6.3.6.20240818-py3-none-any.whl", hash = "sha256:e1db90940b344140730976abe79f57f5b43ca296cbb43fa95da0c69b12d5de4f", size = 18391, upload-time = "2024-08-18T07:50:10.723Z" }, ] +[[package]] +name = "dnspython" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = 
"2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + [[package]] name = "elastic-transport" version = "9.1.0" @@ -432,12 +456,18 @@ memcached = [ memory = [ { name = "cachetools" }, ] +mongodb = [ + { name = "pymongo" }, +] pydantic = [ { name = "pydantic" }, ] redis = [ { name = "redis" }, ] +valkey = [ + { name = "valkey-glide" }, +] [package.dev-dependencies] dev = [ @@ -445,12 +475,13 @@ dev = [ { name = "dirty-equals" }, { name = "diskcache-stubs" }, { name = "inline-snapshot" }, - { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memcached", "memory", "pydantic", "redis"] }, + { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis", "valkey"] }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-dotenv" }, { name = "pytest-mock" }, { name = "pytest-redis" }, + { name = "pytest-timeout" }, { name = "ruff" }, ] lint = [ @@ -466,9 +497,11 @@ requires-dist = [ { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, + { name = "pymongo", marker = "extra == 'mongodb'", specifier = ">=4.15.0" }, { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, + { name = "valkey-glide", marker = "extra == 'valkey'", specifier = ">=2.1.0" }, ] -provides-extras = ["memory", "disk", "redis", "memcached", "elasticsearch", "pydantic"] +provides-extras = ["memory", "disk", "redis", "mongodb", "valkey", "memcached", "elasticsearch", "pydantic"] [package.metadata.requires-dev] dev = [ @@ -476,13 +509,14 @@ dev = [ { name = "dirty-equals", specifier = ">=0.10.0" }, { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, { name = "inline-snapshot", specifier = ">=0.29.0" }, - { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch", "memcached"] }, + { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb", "valkey"] }, { name = "kv-store-adapter", extras = ["pydantic"] }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-dotenv", specifier = ">=0.5.2" }, { name = "pytest-mock" }, { name = "pytest-redis", specifier = ">=3.1.3" }, + { name = "pytest-timeout", specifier = ">=2.4.0" }, { name = "ruff" }, ] lint = [{ name = "ruff" }] @@ -763,6 +797,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, ] +[[package]] +name = "protobuf" +version = "6.32.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/a4/cc17347aa2897568beece2e674674359f911d6fe21b0b8d6268cd42727ac/protobuf-6.32.1.tar.gz", hash = "sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d", size = 440635, upload-time = "2025-09-11T21:38:42.935Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c0/98/645183ea03ab3995d29086b8bf4f7562ebd3d10c9a4b14ee3f20d47cfe50/protobuf-6.32.1-cp310-abi3-win32.whl", hash = "sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085", size = 424411, upload-time = "2025-09-11T21:38:27.427Z" }, + { url = "https://files.pythonhosted.org/packages/8c/f3/6f58f841f6ebafe076cebeae33fc336e900619d34b1c93e4b5c97a81fdfa/protobuf-6.32.1-cp310-abi3-win_amd64.whl", hash = "sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1", size = 435738, upload-time = "2025-09-11T21:38:30.959Z" }, + { url = "https://files.pythonhosted.org/packages/10/56/a8a3f4e7190837139e68c7002ec749190a163af3e330f65d90309145a210/protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281", size = 426454, upload-time = "2025-09-11T21:38:34.076Z" }, + { url = "https://files.pythonhosted.org/packages/3f/be/8dd0a927c559b37d7a6c8ab79034fd167dcc1f851595f2e641ad62be8643/protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4", size = 322874, upload-time = "2025-09-11T21:38:35.509Z" }, + { url = "https://files.pythonhosted.org/packages/5c/f6/88d77011b605ef979aace37b7703e4eefad066f7e84d935e5a696515c2dd/protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710", size = 322013, upload-time = "2025-09-11T21:38:37.017Z" }, + { url = "https://files.pythonhosted.org/packages/97/b7/15cc7d93443d6c6a84626ae3258a91f4c6ac8c0edd5df35ea7658f71b79c/protobuf-6.32.1-py3-none-any.whl", hash = "sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346", size = 169289, upload-time = "2025-09-11T21:38:41.234Z" }, +] + [[package]] name = "psutil" version = "7.1.0" @@ -890,6 +938,67 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pymongo" +version = "4.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/c0c6732fbd358b75a07e17d7e588fd23d481b9812ca96ceeff90bbf879fc/pymongo-4.15.1.tar.gz", hash = "sha256:b9f379a4333dc3779a6bf7adfd077d4387404ed1561472743486a9c58286f705", size = 2470613, upload-time = "2025-09-16T16:39:47.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/19/2de6086e3974f3a95a1fc41fd082bc4a58dc9b70268cbfd7c84067d184f2/pymongo-4.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97ccf8222abd5b79daa29811f64ef8b6bb678b9c9a1c1a2cfa0a277f89facd1d", size = 811020, upload-time = "2025-09-16T16:37:57.329Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a4/a340dde32818dd5c95b1c373bc4a27cef5863009faa328388ddc899527fe/pymongo-4.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f130b3d7540749a8788a254ceb199a03ede4ee080061bfa5e20e28237c87f2d7", size = 811313, upload-time = "2025-09-16T16:37:59.312Z" }, + { url = "https://files.pythonhosted.org/packages/e2/d9/7d64fdc9e87ec38bd36395bc730848ef56e1cd4bd29ab065d53c27559ace/pymongo-4.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fbe6a044a306ed974bd1788f3ceffc2f5e13f81fdb786a28c948c047f4cea38", size 
= 1188666, upload-time = "2025-09-16T16:38:00.896Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d9/47cc69d3b22c9d971b1486e3a80d6a5d0bbf2dec6c9c4d5e39a129ee8125/pymongo-4.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b96768741e0e03451ef7b07c4857490cc43999e01c7f8da704fe00b3fe5d4d3", size = 1222891, upload-time = "2025-09-16T16:38:02.574Z" }, + { url = "https://files.pythonhosted.org/packages/a9/73/a57594c956bf276069a438056330a346871b2f5e3cae4e3bcc257cffc788/pymongo-4.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50b18ad6e4a55a75c30f0e669bd15ed1ceb18f9994d6835b4f5d5218592b4a0", size = 1205824, upload-time = "2025-09-16T16:38:04.277Z" }, + { url = "https://files.pythonhosted.org/packages/37/d5/1ae77ddcc376ebce0139614d51ec1fd0ba666d7cc1f198ec88272cfdac36/pymongo-4.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e8e2a33613b2880d516d9c8616b64d27957c488de2f8e591945cf12094336a5", size = 1191838, upload-time = "2025-09-16T16:38:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/35/07/ae3fc20a809066b35bbf470bda79d34a72948603d9f29a425bf1d0ef2cb7/pymongo-4.15.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a2a439395f3d4c9d3dc33ba4575d52b6dd285d57db54e32062ae8ef557cab10", size = 1170996, upload-time = "2025-09-16T16:38:09.084Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0f/eb654cea7586588704151ac4894cd3fb2582c0db458cd615cad1c7fe4c59/pymongo-4.15.1-cp310-cp310-win32.whl", hash = "sha256:142abf2fbd4667a3c8f4ce2e30fdbd287c015f52a838f4845d7476a45340208d", size = 798249, upload-time = "2025-09-16T16:38:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/9f/6b/38184382c32695f914a5474d8de0c9f3714b7d8f4c66f090b3836d70273d/pymongo-4.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:8baf46384c97f774bc84178662e1fc6e32a2755fbc8e259f424780c2a11a3566", size = 807990, upload-time = "2025-09-16T16:38:12.525Z" }, + { url = "https://files.pythonhosted.org/packages/38/eb/77a4d37b2a0673c010dd97b9911438f17bb05f407235cc9f02074175855d/pymongo-4.15.1-cp310-cp310-win_arm64.whl", hash = "sha256:b5b837df8e414e2a173722395107da981d178ba7e648f612fa49b7ab4e240852", size = 800875, upload-time = "2025-09-16T16:38:14.532Z" }, + { url = "https://files.pythonhosted.org/packages/c9/da/89066930a70b4299844f1155fc23baaa7e30e77c8a0cbf62a2ae06ee34a5/pymongo-4.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:363445cc0e899b9e55ac9904a868c8a16a6c81f71c48dbadfd78c98e0b54de27", size = 865410, upload-time = "2025-09-16T16:38:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/99/8f/a1d0402d52e5ebd14283718abefdc0c16f308cf10bee56cdff04b1f5119b/pymongo-4.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:da0a13f345f4b101776dbab92cec66f0b75015df0b007b47bd73bfd0305cc56a", size = 865695, upload-time = "2025-09-16T16:38:18.015Z" }, + { url = "https://files.pythonhosted.org/packages/53/38/d1ef69028923f86fd00638d9eb16400d4e60a89eabd2011fe631fd3186cf/pymongo-4.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9481a492851e432122a83755d4e69c06aeb087bbf8370bac9f96d112ac1303fd", size = 1434758, upload-time = "2025-09-16T16:38:20.141Z" }, + { url = "https://files.pythonhosted.org/packages/b0/eb/a8d5dff748a2dd333610b2e4c8120b623e38ea2b5e30ad190d0ce2803840/pymongo-4.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:625dec3e9cd7c3d336285a20728c01bfc56d37230a99ec537a6a8625af783a43", size = 1485716, upload-time = "2025-09-16T16:38:21.607Z" }, + { url = "https://files.pythonhosted.org/packages/c4/d4/17ba457a828b733182ddc01a202872fef3006eed6b54450b20dc95a2f77d/pymongo-4.15.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26a31af455bffcc64537a7f67e2f84833a57855a82d05a085a1030c471138990", size = 1460160, upload-time = "2025-09-16T16:38:23.509Z" }, + { url = "https://files.pythonhosted.org/packages/c3/25/42b8662c09f5ca9c81d18d160f48e58842e0fa4c314ea02613c5e5d54542/pymongo-4.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea4415970d2a074d5890696af10e174d84cb735f1fa7673020c7538431e1cb6e", size = 1439284, upload-time = "2025-09-16T16:38:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/b3/bb/46b9d978161828eb91973bd441a3f05f73c789203e976332a8de2832d5db/pymongo-4.15.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51ee050a2e026e2b224d2ed382830194be20a81c78e1ef98f467e469071df3ac", size = 1407933, upload-time = "2025-09-16T16:38:27.045Z" }, + { url = "https://files.pythonhosted.org/packages/4b/55/bd5af98f675001f4b06f7314b3918e45809424a7ad3510f823f6703cd8f2/pymongo-4.15.1-cp311-cp311-win32.whl", hash = "sha256:9aef07d33839f6429dc24f2ef36e4ec906979cb4f628c57a1c2676cc66625711", size = 844328, upload-time = "2025-09-16T16:38:28.513Z" }, + { url = "https://files.pythonhosted.org/packages/c3/78/90989a290dd458ed43a8a04fa561ac9c7b3391f395cdacd42e21f0f22ce4/pymongo-4.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ea6e5ff4d6747e7b64966629a964db3089e9c1e0206d8f9cc8720c90f5a7af1", size = 858951, upload-time = "2025-09-16T16:38:30.074Z" }, + { url = "https://files.pythonhosted.org/packages/de/bb/d4d23f06e166cd773f2324cff73841a62d78a1ad16fb799cf7c5490ce32c/pymongo-4.15.1-cp311-cp311-win_arm64.whl", hash = "sha256:bb783d9001b464a6ef3ee76c30ebbb6f977caee7bbc3a9bb1bd2ff596e818c46", size = 848290, upload-time = "2025-09-16T16:38:31.741Z" }, + { url = "https://files.pythonhosted.org/packages/7e/31/bc4525312083706a59fffe6e8de868054472308230fdee8db0c452c2b831/pymongo-4.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bab357c5ff36ba2340dfc94f3338ef399032089d35c3d257ce0c48630b7848b2", size = 920261, upload-time = "2025-09-16T16:38:33.614Z" }, + { url = "https://files.pythonhosted.org/packages/ae/55/4d99aec625494f21151b8b31e12e06b8ccd3b9dcff609b0dd1acf9bbbc0e/pymongo-4.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46d1af3eb2c274f07815372b5a68f99ecd48750e8ab54d5c3ff36a280fb41c8e", size = 919956, upload-time = "2025-09-16T16:38:35.121Z" }, + { url = "https://files.pythonhosted.org/packages/be/60/8f1afa41521df950e13f6490ecdef48155fc63b78f926e7649045e07afd1/pymongo-4.15.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dc31357379318881186213dc5fc49b62601c955504f65c8e72032b5048950a1", size = 1698596, upload-time = "2025-09-16T16:38:36.586Z" }, + { url = "https://files.pythonhosted.org/packages/bc/3f/e48d50ee8d6aa0a4cda7889dd73076ec2ab79a232716a5eb0b9df070ffcf/pymongo-4.15.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12140d29da1ecbaefee2a9e65433ef15d6c2c38f97bc6dab0ff246a96f9d20cd", size = 1762833, upload-time = "2025-09-16T16:38:38.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/87/db976859efc617f608754e051e1468459d9a818fe1ad5d0862e8af57720b/pymongo-4.15.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf193d2dcd91fa1d1dfa1fd036a3b54f792915a4842d323c0548d23d30461b59", size = 1731875, upload-time = "2025-09-16T16:38:39.742Z" }, + { url = "https://files.pythonhosted.org/packages/18/59/3643ad52a5064ad3ef8c32910de6da28eb658234c25f2db5366f16bffbfb/pymongo-4.15.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2c0bdcf4d57e4861ed323ba430b585ad98c010a83e46cb8aa3b29c248a82be1", size = 1701853, upload-time = "2025-09-16T16:38:41.333Z" }, + { url = "https://files.pythonhosted.org/packages/d8/96/441c190823f855fc6445ea574b39dca41156acf723c5e6a69ee718421700/pymongo-4.15.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43fcfc19446e0706bbfe86f683a477d1e699b02369dd9c114ec17c7182d1fe2b", size = 1660978, upload-time = "2025-09-16T16:38:42.877Z" }, + { url = "https://files.pythonhosted.org/packages/47/49/bd7e783fb78aaf9bdaa3f88cc238449be5bc5546e930ec98845ef235f809/pymongo-4.15.1-cp312-cp312-win32.whl", hash = "sha256:e5fedea0e7b3747da836cd5f88b0fa3e2ec5a394371f9b6a6b15927cfeb5455d", size = 891175, upload-time = "2025-09-16T16:38:44.658Z" }, + { url = "https://files.pythonhosted.org/packages/2e/28/7de5858bdeaa07ea4b277f9eb06123ea358003659fe55e72e4e7c898b321/pymongo-4.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:330a17c1c89e2c3bf03ed391108f928d5881298c17692199d3e0cdf097a20082", size = 910619, upload-time = "2025-09-16T16:38:46.124Z" }, + { url = "https://files.pythonhosted.org/packages/17/87/c39f4f8415e7c65f8b66413f53a9272211ff7dfe78a5128b27027bf88864/pymongo-4.15.1-cp312-cp312-win_arm64.whl", hash = "sha256:756b7a2a80ec3dd5b89cd62e9d13c573afd456452a53d05663e8ad0c5ff6632b", size = 896229, upload-time = "2025-09-16T16:38:48.563Z" }, + { url = "https://files.pythonhosted.org/packages/a6/22/02ac885d8accb4c86ae92e99681a09f3fd310c431843fc850e141b42ab17/pymongo-4.15.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:622957eed757e44d9605c43b576ef90affb61176d9e8be7356c1a2948812cb84", size = 974492, upload-time = "2025-09-16T16:38:50.437Z" }, + { url = "https://files.pythonhosted.org/packages/56/bf/71685b6b2d085dbaadf029b1ea4a1bc7a1bc483452513dea283b47a5f7c0/pymongo-4.15.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c5283dffcf601b793a57bb86819a467473bbb1bf21cd170c0b9648f933f22131", size = 974191, upload-time = "2025-09-16T16:38:52.725Z" }, + { url = "https://files.pythonhosted.org/packages/df/98/141edc92fa97af96b4c691e10a7225ac3e552914e88b7a8d439bd6bc9fcc/pymongo-4.15.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:def51dea1f8e336aed807eb5d2f2a416c5613e97ec64f07479681d05044c217c", size = 1962311, upload-time = "2025-09-16T16:38:54.319Z" }, + { url = "https://files.pythonhosted.org/packages/f8/a9/601b91607af1dec8035b46ba67a5a023c819ccedd40d6f6232e15bf76030/pymongo-4.15.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:24171b2015052b2f0a3f8cbfa38b973fa87f6474e88236a4dfeb735983f9f49e", size = 2039667, upload-time = "2025-09-16T16:38:55.958Z" }, + { url = "https://files.pythonhosted.org/packages/4f/71/02e9a5248e0a9dfc371fd7350f8b11eac03d9eb3662328978f37613d319a/pymongo-4.15.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64b60ed7220c52f8c78c7af8d2c58f7e415732e21b3ff7e642169efa6e0b11e7", size = 2003579, upload-time = 
"2025-09-16T16:38:57.576Z" }, + { url = "https://files.pythonhosted.org/packages/f9/d1/b1a9520b33e022ed1c0d2d43e8805ba18d3d686fc9c9d89a507593f6dd86/pymongo-4.15.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58236ce5ba3a79748c1813221b07b411847fd8849ff34c2891ba56f807cce3e5", size = 1964307, upload-time = "2025-09-16T16:38:59.219Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d1/1d205a762020f056c05899a912364c48bac0f3438502b36d057aa1da3ca5/pymongo-4.15.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7461e777b3da96568c1f077b1fbf9e0c15667ac4d8b9a1cf90d80a69fe3be609", size = 1913879, upload-time = "2025-09-16T16:39:01.266Z" }, + { url = "https://files.pythonhosted.org/packages/44/d1/0a3ab2440ea00b6423f33c84e6433022fd51f3561dede9346f54f39cf4dd/pymongo-4.15.1-cp313-cp313-win32.whl", hash = "sha256:45f0a2fb09704ca5e0df08a794076d21cbe5521d3a8ceb8ad6d51cef12f5f4e7", size = 938007, upload-time = "2025-09-16T16:39:03.427Z" }, + { url = "https://files.pythonhosted.org/packages/7b/61/e9ea839af2caadfde91774549a6f72450b72efdc92117995e7117d4b1270/pymongo-4.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:b70201a6dbe19d0d10a886989d3ba4b857ea6ef402a22a61c8ca387b937cc065", size = 962236, upload-time = "2025-09-16T16:39:05.018Z" }, + { url = "https://files.pythonhosted.org/packages/ad/f8/0a92a72993b2e1c110ee532650624ca7ae15c5e45906dbae4f063a2fd32a/pymongo-4.15.1-cp313-cp313-win_arm64.whl", hash = "sha256:6892ebf8b2bc345cacfe1301724195d87162f02d01c417175e9f27d276a2f198", size = 944138, upload-time = "2025-09-16T16:39:07.035Z" }, + { url = "https://files.pythonhosted.org/packages/e5/eb/2ba257482844bb2e3c82c6b266d6e811bc610fa80408133e352cc1afb3c9/pymongo-4.15.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:db439288516514713c8ee09c9baaf66bc4b0188fbe4cd578ef3433ee27699aab", size = 1030987, upload-time = "2025-09-16T16:39:08.914Z" }, + { url = "https://files.pythonhosted.org/packages/0d/86/8c6eab3767251ba77a3604d3b6b0826d0af246bd04b2d16aced3a54f08b0/pymongo-4.15.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:234c80a5f21c8854cc5d6c2f5541ff17dd645b99643587c5e7ed1e21d42003b6", size = 1030996, upload-time = "2025-09-16T16:39:10.429Z" }, + { url = "https://files.pythonhosted.org/packages/5b/26/c1bc0bcb64f39b9891b8b537f21cc37d668edd8b93f47ed930af7f95649c/pymongo-4.15.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b570dc8179dcab980259b885116b14462bcf39170e30d8cbcce6f17f28a2ac5b", size = 2290670, upload-time = "2025-09-16T16:39:12.348Z" }, + { url = "https://files.pythonhosted.org/packages/82/af/f5e8b6c404a3678a99bf0b704f7b19fa14a71edb42d724eb09147aa1d3be/pymongo-4.15.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb6321bde02308d4d313b487d19bfae62ea4d37749fc2325b1c12388e05e4c31", size = 2377711, upload-time = "2025-09-16T16:39:13.992Z" }, + { url = "https://files.pythonhosted.org/packages/af/f4/63bcc1760bf3e0925cb6cb91b2b3ba756c113b1674a14b41efe7e3738b8d/pymongo-4.15.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc808588289f693aba80fae8272af4582a7d6edc4e95fb8fbf65fe6f634116ce", size = 2337097, upload-time = "2025-09-16T16:39:15.717Z" }, + { url = "https://files.pythonhosted.org/packages/d0/dc/0cfada0426556b4b04144fb00ce6a1e7535ab49623d4d9dd052d27ea46c0/pymongo-4.15.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99236fd0e0cf6b048a4370d0df6820963dc94f935ad55a2e29af752272abd6c9", 
size = 2288295, upload-time = "2025-09-16T16:39:17.385Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a8/081a80f60042d2b8cd6a1c091ecaa186f1ef216b587d06acd0743e1016c6/pymongo-4.15.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2277548bb093424742325b2a88861d913d8990f358fc71fd26004d1b87029bb8", size = 2227616, upload-time = "2025-09-16T16:39:19.025Z" }, + { url = "https://files.pythonhosted.org/packages/56/d0/a6007e0c3c5727391ac5ea40e93a1e7d14146c65ac4ca731c0680962eb48/pymongo-4.15.1-cp313-cp313t-win32.whl", hash = "sha256:754a5d75c33d49691e2b09a4e0dc75959e271a38cbfd92c6b36f7e4eafc4608e", size = 987225, upload-time = "2025-09-16T16:39:20.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/97/c9bf6dcd647a8cf7abbad5814dfb7d8a16e6ab92a3e56343b3bcb454a6d3/pymongo-4.15.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8d62e68ad21661e536555d0683087a14bf5c74b242a4446c602d16080eb9e293", size = 1017521, upload-time = "2025-09-16T16:39:22.319Z" }, + { url = "https://files.pythonhosted.org/packages/31/ea/102f7c9477302fa05e5303dd504781ac82400e01aab91bfba9c290253bd6/pymongo-4.15.1-cp313-cp313t-win_arm64.whl", hash = "sha256:56bbfb79b51e95f4b1324a5a7665f3629f4d27c18e2002cfaa60c907cc5369d9", size = 992963, upload-time = "2025-09-16T16:39:23.957Z" }, +] + [[package]] name = "pytest" version = "8.4.2" @@ -962,6 +1071,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/90/5f/d9e617368aeee75609e43c66ff22e9d216c761f5b4290d56927d493ec618/pytest_redis-3.1.3-py3-none-any.whl", hash = "sha256:7fd6eb54ed0878590b857e1011b031c38aa3e230a53771739e845d3fc6b05d79", size = 32856, upload-time = "2024-11-27T08:42:19.837Z" }, ] +[[package]] +name = "pytest-timeout" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1043,6 +1164,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] 
+ [[package]] name = "tomli" version = "2.2.1" @@ -1112,6 +1242,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] +[[package]] +name = "valkey-glide" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "protobuf" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/b7/1006f51e45b5a392371307db0ed3a3c85cb8664df0065838dfd51c51acb5/valkey_glide-2.1.0.tar.gz", hash = "sha256:858c7931e4b6807fd17a18da62e48ca84c5d66892bcc2ecfa183548a23fa3e49", size = 675795, upload-time = "2025-09-17T14:59:05.231Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/4f/42d7aa81501cf4ad1aa79e688118409acc7059c802ad4ef6c8691574f22d/valkey_glide-2.1.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:44d40949e63bdc13c49bd9743d73ce26696214f5d88ae7fdbf631058b23f698e", size = 5103998, upload-time = "2025-09-17T14:57:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c7/fcf41deb04d01a880fab6067819c5f87ae6c1d9d255c5bae68f876f44c2f/valkey_glide-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b11425088244a6e4bec2c5661378a9c6ce43a980d5b1b5dfbba5ff55b34b600a", size = 4762822, upload-time = "2025-09-17T14:57:59.519Z" }, + { url = "https://files.pythonhosted.org/packages/94/77/9e027874070919a2322a8303b2142766fe90d1650d6f2e78c54f6c99034a/valkey_glide-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99cc77ea1e50251ec1945239e915e884dfe5383b6c77df3cb7b70b5c22e9d697", size = 4802218, upload-time = "2025-09-17T14:58:01.674Z" }, + { url = "https://files.pythonhosted.org/packages/3e/9b/115e97717afac7915664d5c31738e5e1d69786f2d12a3dab67176dc7188d/valkey_glide-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6795207935616684f6972373202faac8cb03e46e44a9a73a8902492eb7804cc7", size = 5134610, upload-time = "2025-09-17T14:58:03.634Z" }, + { url = "https://files.pythonhosted.org/packages/88/a6/f699b2abe6625742e0a6ed3a97b271bcc5e3b18b2331a5144bfdfe25c5db/valkey_glide-2.1.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:54b60fd282df287f49a44a6a2a0d0f279da78ea90a1b19b59b017377a6afc6d8", size = 5104083, upload-time = "2025-09-17T14:58:05.838Z" }, + { url = "https://files.pythonhosted.org/packages/13/34/804b5ed850b739ed1580e31f39f06b86a0827b0d6f2e145fd63ae161f72f/valkey_glide-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7b50a4b4c3aca6978e19aa266b26885ed1d4759cc5eff5cbece2ad07f6568c94", size = 4762815, upload-time = "2025-09-17T14:58:08.144Z" }, + { url = "https://files.pythonhosted.org/packages/aa/9c/62c74b312eb0dd7ce5ac8b6e08c589cacce79fb77e6d4b2361d41ee710f4/valkey_glide-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cf0f88284b5aec401796416d97c5920a6389eb265c43cfc838f35a420fcf551", size = 4800863, upload-time = "2025-09-17T14:58:10.305Z" }, + { url = "https://files.pythonhosted.org/packages/b8/59/6fb06bd58696a8ddcb5b599526ac8d4daf45167396e8f7f61ee8e125cca6/valkey_glide-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bbd75c0382c64c5f356b12476ac2689d9efb1cb5bc48c23d8ddd4571ded72a", size = 5133784, upload-time = "2025-09-17T14:58:12.237Z" }, + 
{ url = "https://files.pythonhosted.org/packages/24/bb/cc6fccf75412fe01a3865d70e06965680eae1dcd73c52e82fa31bd46f478/valkey_glide-2.1.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:c54a1c3197784b16a57e5ed4b9850058e720de63272c39c864f61571fbea70a7", size = 5112022, upload-time = "2025-09-17T14:58:14.164Z" }, + { url = "https://files.pythonhosted.org/packages/56/38/bd3c7c220a24a085eb975d5239bc42134e58235a7a60b26bc20cc1ad7268/valkey_glide-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bf89b5cf15d484b0ef0b0bad97ca84bc7ce1e5899a6e7a5bb6f551d06df84c27", size = 4769683, upload-time = "2025-09-17T14:58:16.023Z" }, + { url = "https://files.pythonhosted.org/packages/ae/33/6bda0d49ca91cc75423e2c9f306e6543101f844678ef6ad1e0c4458993c5/valkey_glide-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3712f03213c31501eb07cb360e68ec55a3c8674375b4f1356d52ab990b3fdde5", size = 4803569, upload-time = "2025-09-17T14:58:18.137Z" }, + { url = "https://files.pythonhosted.org/packages/06/68/19c2f2e24fa5c3b849aa4c261a5d108d2725d19701fd9037312c1cf3242f/valkey_glide-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55833558e1fe3d058f61625618d6e4552ef1ddf9995c626a59f69a8751b32f4", size = 5136281, upload-time = "2025-09-17T14:58:20.05Z" }, + { url = "https://files.pythonhosted.org/packages/7e/2b/323d156220b435af33ca9ff476368ffe42c69ed8172365f83f4f6d5290d5/valkey_glide-2.1.0-cp313-cp313-macosx_10_7_x86_64.whl", hash = "sha256:88174f205e73853c183abfde262568da7b274fb760188d8058dd041edb18e490", size = 5111989, upload-time = "2025-09-17T14:58:22.225Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d9/c732c13d0651f88bcf70e0c1ae278c55dd3f09cc6ebf747041fb56002ca0/valkey_glide-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64ab3c9f23bf4af961fd8844b71d26a1889a5ee449e4a45dc71bac20334a4705", size = 4769732, upload-time = "2025-09-17T14:58:24.126Z" }, + { url = "https://files.pythonhosted.org/packages/a6/4d/dba077d1561faf00660ee6f887ca8c6b749bb0fac49790a027308bc33d5a/valkey_glide-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f2771471314e0e2c3f090018fc830bc62d36af2ded7c844a1c64c302f5568c", size = 4803617, upload-time = "2025-09-17T14:58:26.048Z" }, + { url = "https://files.pythonhosted.org/packages/bc/c5/a173c2abc241ed6242bc9c29ea74b28a08a32561f362e3b8e664aae0e1de/valkey_glide-2.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e4764c0df30973f1b85f3e666b36ed38bdfa5cfc78114a71b3a79601ca343f5", size = 5136350, upload-time = "2025-09-17T14:58:27.995Z" }, + { url = "https://files.pythonhosted.org/packages/8c/86/f8a5f5da6fdf35f85e1e379a2271e7f56cdac7389a71a6d4963dcc82efd3/valkey_glide-2.1.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:bf802d7def4ab04ff6c6a3b36d203f8ef5c77db7cefaa072cba7a5a7c0d529f3", size = 5102581, upload-time = "2025-09-17T14:58:38.82Z" }, + { url = "https://files.pythonhosted.org/packages/08/dd/723febad644d97a85befe1234cb2656c5f1892f712a7b1f911ef5f9538cf/valkey_glide-2.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a91bb7ed6707e6c1cfa062489a2d818ddbfc353af727ab640bc4c17734a573b", size = 4762103, upload-time = "2025-09-17T14:58:40.841Z" }, + { url = "https://files.pythonhosted.org/packages/9d/82/e99a59bbc63cc997fc06a6d1d4857f1bd0c2353aacdbd95bb9fc756c9ef0/valkey_glide-2.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d8770f641f722ce832e0b6f5c0cc6eda7f9c23cab158dbee5250e1fa8d61aef7", size = 4800986, upload-time = "2025-09-17T14:58:43.029Z" }, + { url = "https://files.pythonhosted.org/packages/86/3d/45506ff34984561af9159314a929cc51456badcae610a6db7885626fc7db/valkey_glide-2.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ca3653fde871563e90d6c07a4ac77d268aea952eeb468ceda9789f3f0564d6a", size = 5135384, upload-time = "2025-09-17T14:58:44.995Z" }, + { url = "https://files.pythonhosted.org/packages/db/fe/09617783c2f8ad1c0623b217d616ea70fdb985a4e5bc2fb91c4fae60377e/valkey_glide-2.1.0-pp311-pypy311_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f52119c4888599d435e81aee52f855f9d4da7e0198b32d74c96c9032ef015a52", size = 5102505, upload-time = "2025-09-17T14:58:47.231Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ba/2396769c522eec13c8246c19eeeef66d5e60fa43e7c966b473a577447f7e/valkey_glide-2.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0b57a7b6b0a7d70ffad7b121debc4910ea58d6891c9ba0915bc6ebe3359e4cb5", size = 4762596, upload-time = "2025-09-17T14:58:49.403Z" }, + { url = "https://files.pythonhosted.org/packages/6e/09/b314a3a172cc5532f7e95f096e54a4a3bbba0a280fd496b4614411648d7b/valkey_glide-2.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde7789666bf0da33c8fbbc1bc33d7717b4e17411cd81f286256340af12edf6d", size = 4800982, upload-time = "2025-09-17T14:58:51.344Z" }, + { url = "https://files.pythonhosted.org/packages/45/ea/9833c8e95b02dcd2ff31e0f688e34d5dc8416b8ffd08d8b7ca068a1983d8/valkey_glide-2.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717f6ae2bbc6aefe7c68a6961a24ac024ca790ba1e048e252bbe661e4d7efe98", size = 5135392, upload-time = "2025-09-17T14:58:53.606Z" }, +] + [[package]] name = "yarl" version = "1.20.1" From 729bdd4f577e72d7ccc2858a748ba5a1a855599b Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 13:03:41 -0500 Subject: [PATCH 15/31] Readme update and lint check --- .github/workflows/test_pull_request.yml | 33 ++++++++++++++----- README.md | 15 ++++++--- src/kv_store_adapter/errors.py | 2 -- .../stores/elasticsearch/store.py | 4 ++- 4 files changed, 37 insertions(+), 17 deletions(-) diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml index e896a8e7..c13bface 100644 --- a/.github/workflows/test_pull_request.yml +++ b/.github/workflows/test_pull_request.yml @@ -10,6 +10,27 @@ on: workflow_dispatch: jobs: + lint_and_type_check: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: "Install uv" + uses: astral-sh/setup-uv@v6 + + - name: "Install" + run: uv sync --locked --group dev + + - name: "Lint" + run: uv run ruff check --exit-non-zero-on-fix --fix + + - name: "Format" + run: uv run ruff format --check + + - name: "Type Check" + run: uv run basedpyright + test_small_subset_platforms: timeout-minutes: 10 strategy: @@ -28,15 +49,6 @@ jobs: - name: "Install" run: uv sync --locked --group dev --python ${{ matrix.python-version }} - - name: "Lint" - run: uv run ruff check --exit-non-zero-on-fix --fix - - - name: "Type Check" - run: uv run basedpyright - - - name: "Format" - run: uv run ruff format - - name: "Test" run: uv run pytest tests @@ -44,6 +56,9 @@ jobs: run: uv build test_all_platforms: + needs: + - test_small_subset_platforms + timeout-minutes: 10 strategy: matrix: diff --git a/README.md b/README.md index 54bfffa9..f88cc77e 100644 --- 
a/README.md
+++ b/README.md
@@ -2,9 +2,8 @@
 
 A pluggable, async-only key-value store interface for modern Python applications.
 
-## Features
+## Why use this library?
 
-- **Async-only**: Built from the ground up with `async`/`await` support
 - **Multiple backends**: Elasticsearch, Memcached, MongoDB, Redis, Valkey, In-memory, Disk, and more
 - **TTL support**: Automatic expiration handling across all store types
 - **Type-safe**: Full type hints with Protocol-based interfaces
@@ -13,19 +12,25 @@ A pluggable, async-only key-value store interface for modern Python applications
 - **Collection-based**: Organize keys into logical collections/namespaces
 - **Pluggable architecture**: Easy to add custom store implementations
 
+## Why not use this library?
+
+- **Async-only**: Built from the ground up with `async`/`await` support
+- **Managed Entries**: Raw values are not stored in backends; a wrapper object is stored instead. This wrapper object contains the value, sometimes metadata like the TTL, and the creation timestamp. Most often it is serialized to and from JSON.
+- **No Live Objects**: Even when using the in-memory store, "live" objects are never returned from the store. You get a dictionary or a Pydantic model, hopefully a copy of what you stored, but never the same instance in memory.
+
 ## Quick Start
 
 ```bash
 pip install kv-store-adapter
 
 # With specific backend support
-pip install kv-store-adapter[redis]
 pip install kv-store-adapter[elasticsearch]
+pip install kv-store-adapter[redis]
 pip install kv-store-adapter[memcached]
 pip install kv-store-adapter[mongodb]
 pip install kv-store-adapter[valkey]
+pip install kv-store-adapter[memory]
+pip install kv-store-adapter[disk]
 
 # With all backends
 pip install kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb,valkey]
diff --git a/src/kv_store_adapter/errors.py b/src/kv_store_adapter/errors.py
index 3eea56bf..b2a90ea0 100644
--- a/src/kv_store_adapter/errors.py
+++ b/src/kv_store_adapter/errors.py
@@ -1,5 +1,3 @@
-from typing import Any
-
 ExtraInfoType = dict[str, str | int | float | bool | None]
 
diff --git a/src/kv_store_adapter/stores/elasticsearch/store.py b/src/kv_store_adapter/stores/elasticsearch/store.py
index 9d791834..a47c28df 100644
--- a/src/kv_store_adapter/stores/elasticsearch/store.py
+++ b/src/kv_store_adapter/stores/elasticsearch/store.py
@@ -105,7 +105,9 @@ def __init__(
         if elasticsearch_client:
             self._client = elasticsearch_client
         elif url:
-            self._client = AsyncElasticsearch(hosts=[url], api_key=api_key, http_compress=True, request_timeout=10, retry_on_timeout=True, max_retries=3)
+            self._client = AsyncElasticsearch(
+                hosts=[url], api_key=api_key, http_compress=True, request_timeout=10, retry_on_timeout=True, max_retries=3
+            )
         else:
             msg = "Either elasticsearch_client or url must be provided"
             raise ValueError(msg)

From 9db52fb36ecd75871e44a33480f7a95831ea4a12 Mon Sep 17 00:00:00 2001
From: William Easton
Date: Fri, 26 Sep 2025 13:09:02 -0500
Subject: [PATCH 16/31] bump timeouts for distributed caches

---
 .github/workflows/test_pull_request.yml  | 10 ++++++----
 tests/stores/memcached/test_memcached.py |  1 +
 tests/stores/mongodb/test_mongodb.py     |  1 +
 tests/stores/redis/test_redis.py         |  1 +
 tests/stores/valkey/test_valkey.py       |  1 +
 5 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test_pull_request.yml
index c13bface..f2bc53a4 100644
--- a/.github/workflows/test_pull_request.yml
+++
b/.github/workflows/test_pull_request.yml @@ -10,7 +10,7 @@ on: workflow_dispatch: jobs: - lint_and_type_check: + static_analysis: runs-on: ubuntu-latest steps: - name: Checkout repository @@ -31,7 +31,9 @@ jobs: - name: "Type Check" run: uv run basedpyright - test_small_subset_platforms: + test_quick: + needs: + - static_analysis timeout-minutes: 10 strategy: matrix: @@ -55,9 +57,9 @@ jobs: - name: "Build" run: uv build - test_all_platforms: + test_all: needs: - - test_small_subset_platforms + - test_quick timeout-minutes: 10 strategy: diff --git a/tests/stores/memcached/test_memcached.py b/tests/stores/memcached/test_memcached.py index b2fbe558..a0c3f0dc 100644 --- a/tests/stores/memcached/test_memcached.py +++ b/tests/stores/memcached/test_memcached.py @@ -44,6 +44,7 @@ class MemcachedFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available") +@pytest.mark.timeout(15) class TestMemcachedStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_memcached(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/mongodb/test_mongodb.py b/tests/stores/mongodb/test_mongodb.py index 1e7cf493..0066b714 100644 --- a/tests/stores/mongodb/test_mongodb.py +++ b/tests/stores/mongodb/test_mongodb.py @@ -43,6 +43,7 @@ class MongoDBFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available") +@pytest.mark.timeout(15) class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_mongodb(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/redis/test_redis.py b/tests/stores/redis/test_redis.py index 59ed3959..78a1d253 100644 --- a/tests/stores/redis/test_redis.py +++ b/tests/stores/redis/test_redis.py @@ -40,6 +40,7 @@ class RedisFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") +@pytest.mark.timeout(15) class TestRedisStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_redis(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/valkey/test_valkey.py b/tests/stores/valkey/test_valkey.py index 852143c9..14d2c66e 100644 --- a/tests/stores/valkey/test_valkey.py +++ b/tests/stores/valkey/test_valkey.py @@ -48,6 +48,7 @@ class ValkeyFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") +@pytest.mark.timeout(15) class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_valkey(self) -> AsyncGenerator[None, None]: From 5e921aa40e78a2d6636a4c873b7a6584281c2cae Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 13:31:47 -0500 Subject: [PATCH 17/31] Dont test valkey on Windows --- pyproject.toml | 3 ++- tests/stores/valkey/test_valkey.py | 8 +++++++- uv.lock | 30 ++++++++++++++++++------------ 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 302a7162..5f6ea4f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,8 @@ env_files = [".env"] [dependency-groups] dev = [ - "kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb,valkey]", + "kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb]", + "kv-store-adapter[valkey]; platform_system != 'Windows'", "kv-store-adapter[pydantic]", "pytest", "pytest-mock", 
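The `platform_system != 'Windows'` marker above keeps the `valkey` extra out of Windows installs, so the test suite needs a matching guard. A minimal sketch of the module-level gating with pytest (the `platform.system()` body is an assumed stand-in for the real `detect_on_windows` helper in `tests/stores/conftest.py`):

```python
# Sketch: skip a whole test module on Windows, mirroring the pyproject
# marker `platform_system != 'Windows'`. The platform.system() check is an
# assumption; the real detect_on_windows lives in tests/stores/conftest.py.
import platform

import pytest


def detect_on_windows() -> bool:
    """Return True when the test session is running on Windows."""
    return platform.system() == "Windows"


# Module-scoped mark: pytest skips every test collected from this file on
# Windows. Note that the module is still imported during collection, so any
# top-level import of an uninstalled backend must be guarded separately.
pytestmark = pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows")
```

That import caveat is why a later patch wraps the glide imports in a `try_import` helper instead of relying on the skip mark alone.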
diff --git a/tests/stores/valkey/test_valkey.py b/tests/stores/valkey/test_valkey.py
index 14d2c66e..8484a699 100644
--- a/tests/stores/valkey/test_valkey.py
+++ b/tests/stores/valkey/test_valkey.py
@@ -2,13 +2,18 @@
 from collections.abc import AsyncGenerator
 
 import pytest
+
+from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests
+
+pytestmark = pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows")
+
+# ruff: noqa: E402 # ignore non-top-level imports
 from glide.glide_client import GlideClient
 from glide_shared.config import GlideClientConfiguration, NodeAddress
 from typing_extensions import override
 
 from kv_store_adapter.stores.base import BaseStore
 from kv_store_adapter.stores.valkey import ValkeyStore
-from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests
 
 # Valkey test configuration
 VALKEY_HOST = "localhost"
@@ -48,6 +53,7 @@ class ValkeyFailedToStartError(Exception):
 
 @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running")
+@pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows")
 @pytest.mark.timeout(15)
 class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests):
     @pytest.fixture(autouse=True, scope="session")
     async def setup_valkey(self) -> AsyncGenerator[None, None]:
diff --git a/uv.lock b/uv.lock
index 75d3e56a..e88bdb1e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,6 +1,10 @@
 version = 1
 revision = 2
 requires-python = ">=3.10"
+resolution-markers = [
+    "sys_platform != 'win32'",
+    "sys_platform == 'win32'",
+]
 
 [[package]]
 name = "aiohappyeyeballs"
@@ -475,7 +479,8 @@ dev = [
     { name = "dirty-equals" },
     { name = "diskcache-stubs" },
     { name = "inline-snapshot" },
-    { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis", "valkey"] },
+    { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] },
+    { name = "kv-store-adapter", extra = ["valkey"], marker = "sys_platform != 'win32'" },
     { name = "pytest" },
     { name = "pytest-asyncio" },
     { name = "pytest-dotenv" },
@@ -509,8 +514,9 @@ dev = [
     { name = "dirty-equals", specifier = ">=0.10.0" },
     { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" },
     { name = "inline-snapshot", specifier = ">=0.29.0" },
-    { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb", "valkey"] },
+    { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] },
     { name = "kv-store-adapter", extras = ["pydantic"] },
+    { name = "kv-store-adapter", extras = ["valkey"], marker = "sys_platform != 'win32'" },
     { name = "pytest" },
     { name = "pytest-asyncio" },
     { name = "pytest-dotenv", specifier = ">=0.5.2" },
@@ -658,18 +664,18 @@ wheels = [
 [[package]]
 name = "nodejs-wheel-binaries"
-version = "22.19.0"
+version = "22.20.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/bd/ca/6033f80b7aebc23cb31ed8b09608b6308c5273c3522aedd043e8a0644d83/nodejs_wheel_binaries-22.19.0.tar.gz", hash = "sha256:e69b97ef443d36a72602f7ed356c6a36323873230f894799f4270a853932fdb3", size = 8060, upload-time = "2025-09-12T10:33:46.935Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/54/02f58c8119e2f1984e2572cc77a7b469dbaf4f8d171ad376e305749ef48e/nodejs_wheel_binaries-22.20.0.tar.gz", hash = "sha256:a62d47c9fd9c32191dff65bbe60261504f26992a0a19fe8b4d523256a84bd351",
size = 8058, upload-time = "2025-09-26T09:48:00.906Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/a2/0d055fd1d8c9a7a971c4db10cf42f3bba57c964beb6cf383ca053f2cdd20/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:43eca1526455a1fb4cb777095198f7ebe5111a4444749c87f5c2b84645aaa72a", size = 50902454, upload-time = "2025-09-12T10:33:18.3Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f5/446f7b3c5be1d2f5145ffa3c9aac3496e06cdf0f436adeb21a1f95dd79a7/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:feb06709e1320790d34babdf71d841ec7f28e4c73217d733e7f5023060a86bfc", size = 51837860, upload-time = "2025-09-12T10:33:21.599Z" }, - { url = "https://files.pythonhosted.org/packages/1e/4e/d0a036f04fd0f5dc3ae505430657044b8d9853c33be6b2d122bb171aaca3/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9f5777292491430457c99228d3a267decf12a09d31246f0692391e3513285e", size = 57841528, upload-time = "2025-09-12T10:33:25.433Z" }, - { url = "https://files.pythonhosted.org/packages/e2/11/4811d27819f229cc129925c170db20c12d4f01ad366a0066f06d6eb833cf/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1392896f1a05a88a8a89b26e182d90fdf3020b4598a047807b91b65731e24c00", size = 58368815, upload-time = "2025-09-12T10:33:29.083Z" }, - { url = "https://files.pythonhosted.org/packages/6e/94/df41416856b980e38a7ff280cfb59f142a77955ccdbec7cc4260d8ab2e78/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9164c876644f949cad665e3ada00f75023e18f381e78a1d7b60ccbbfb4086e73", size = 59690937, upload-time = "2025-09-12T10:33:32.771Z" }, - { url = "https://files.pythonhosted.org/packages/d1/39/8d0d5f84b7616bdc4eca725f5d64a1cfcac3d90cf3f30cae17d12f8e987f/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6b4b75166134010bc9cfebd30dc57047796a27049fef3fc22316216d76bc0af7", size = 60751996, upload-time = "2025-09-12T10:33:36.962Z" }, - { url = "https://files.pythonhosted.org/packages/41/93/2d66b5b60055dd1de6e37e35bef563c15e4cafa5cfe3a6990e0ab358e515/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_amd64.whl", hash = "sha256:3f271f5abfc71b052a6b074225eca8c1223a0f7216863439b86feaca814f6e5a", size = 40026140, upload-time = "2025-09-12T10:33:40.33Z" }, - { url = "https://files.pythonhosted.org/packages/a3/46/c9cf7ff7e3c71f07ca8331c939afd09b6e59fc85a2944ea9411e8b29ce50/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_arm64.whl", hash = "sha256:666a355fe0c9bde44a9221cd543599b029045643c8196b8eedb44f28dc192e06", size = 38804500, upload-time = "2025-09-12T10:33:43.302Z" }, + { url = "https://files.pythonhosted.org/packages/24/6d/333e5458422f12318e3c3e6e7f194353aa68b0d633217c7e89833427ca01/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:455add5ac4f01c9c830ab6771dbfad0fdf373f9b040d3aabe8cca9b6c56654fb", size = 53246314, upload-time = "2025-09-26T09:47:32.536Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/dcd6879d286a35b3c4c8f9e5e0e1bcf4f9e25fe35310fc77ecf97f915a23/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:5d8c12f97eea7028b34a84446eb5ca81829d0c428dfb4e647e09ac617f4e21fa", size = 53644391, upload-time = "2025-09-26T09:47:36.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/be/c7b2e7aa3bb281d380a1c531f84d0ccfe225832dfc3bed1ca171753b9630/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a2b0989194148f66e9295d8f11bc463bde02cbe276517f4d20a310fb84780ae", size = 60282516, upload-time = "2025-09-26T09:47:39.88Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c5/8befacf4190e03babbae54cb0809fb1a76e1600ec3967ab8ee9f8fc85b65/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5c500aa4dc046333ecb0a80f183e069e5c30ce637f1c1a37166b2c0b642dc21", size = 60347290, upload-time = "2025-09-26T09:47:43.712Z" }, + { url = "https://files.pythonhosted.org/packages/c0/bd/cfffd1e334277afa0714962c6ec432b5fe339340a6bca2e5fa8e678e7590/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3279eb1b99521f0d20a850bbfc0159a658e0e85b843b3cf31b090d7da9f10dfc", size = 62178798, upload-time = "2025-09-26T09:47:47.752Z" }, + { url = "https://files.pythonhosted.org/packages/08/14/10b83a9c02faac985b3e9f5e65d63a34fc0f46b48d8a2c3e4caa3e1e7318/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d29705797b33bade62d79d8f106c2453c8a26442a9b2a5576610c0f7e7c351ed", size = 62772957, upload-time = "2025-09-26T09:47:51.266Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a9/c6a480259aa0d6b270aac2c6ba73a97444b9267adde983a5b7e34f17e45a/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_amd64.whl", hash = "sha256:4bd658962f24958503541963e5a6f2cc512a8cb301e48a69dc03c879f40a28ae", size = 40120431, upload-time = "2025-09-26T09:47:54.363Z" }, + { url = "https://files.pythonhosted.org/packages/42/b1/6a4eb2c6e9efa028074b0001b61008c9d202b6b46caee9e5d1b18c088216/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_arm64.whl", hash = "sha256:1fccac931faa210d22b6962bcdbc99269d16221d831b9a118bbb80fe434a60b8", size = 38844133, upload-time = "2025-09-26T09:47:57.357Z" }, ] [[package]] From 6698d9e0a13c97626336e822c51cc49ec5e5d30e Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 14:38:30 -0500 Subject: [PATCH 18/31] small fixes to valkey tests --- src/kv_store_adapter/adapters/pydantic.py | 19 ++++++- tests/conftest.py | 17 ++++++ tests/stores/valkey/test_valkey.py | 69 +++++++++++------------ 3 files changed, 69 insertions(+), 36 deletions(-) diff --git a/src/kv_store_adapter/adapters/pydantic.py b/src/kv_store_adapter/adapters/pydantic.py index 6651f352..94b16d7a 100644 --- a/src/kv_store_adapter/adapters/pydantic.py +++ b/src/kv_store_adapter/adapters/pydantic.py @@ -13,9 +13,10 @@ class PydanticAdapter(Generic[T]): """Adapter around a KVStore-compliant Store that allows type-safe persistence of Pydantic models.""" - def __init__(self, kv_store: KVStore, pydantic_model: type[T]) -> None: + def __init__(self, kv_store: KVStore, pydantic_model: type[T], default_collection: str | None = None) -> None: self.kv_store: KVStore = kv_store self.pydantic_model: type[T] = pydantic_model + self.default_collection: str | None = default_collection def _validate_model(self, value: dict[str, Any]) -> T: try: @@ -37,6 +38,8 @@ async def get(self, key: str, *, collection: str | None = None) -> T | None: Returns the parsed model instance, or None if not present. Raises DeserializationError if the stored data cannot be validated as the model. 
""" + collection = collection or self.default_collection + if value := await self.kv_store.get(key=key, collection=collection): return self._validate_model(value=value) @@ -47,6 +50,8 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) Each element is either a parsed model instance or None if missing. """ + collection = collection or self.default_collection + values: list[dict[str, Any] | None] = await self.kv_store.get_many(keys=keys, collection=collection) return [self._validate_model(value=value) if value else None for value in values] @@ -56,22 +61,30 @@ async def put(self, key: str, value: T, *, collection: str | None = None, ttl: f Propagates SerializationError if the model cannot be serialized. """ + collection = collection or self.default_collection + value_dict: dict[str, Any] = self._serialize_model(value=value) await self.kv_store.put(key=key, value=value_dict, collection=collection, ttl=ttl) async def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: """Serialize and store multiple models, preserving order alignment with keys.""" + collection = collection or self.default_collection + value_dicts: list[dict[str, Any]] = [self._serialize_model(value=value) for value in values] await self.kv_store.put_many(keys=keys, values=value_dicts, collection=collection, ttl=ttl) async def delete(self, key: str, *, collection: str | None = None) -> bool: """Delete a model by key. Returns True if a value was deleted, else False.""" + collection = collection or self.default_collection + return await self.kv_store.delete(key=key, collection=collection) async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: """Delete multiple models by key. Returns the count of deleted entries.""" + collection = collection or self.default_collection + return await self.kv_store.delete_many(keys=keys, collection=collection) async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | None, float | None]: @@ -79,6 +92,8 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | Non Returns (model, ttl_seconds) or (None, None) if missing. """ + collection = collection or self.default_collection + entry: dict[str, Any] | None ttl_info: float | None @@ -92,6 +107,8 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | Non async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: """Batch get models with TTLs. 
Each element is (model|None, ttl_seconds|None).""" + collection = collection or self.default_collection + entries: list[tuple[dict[str, Any] | None, float | None]] = await self.kv_store.ttl_many(keys=keys, collection=collection) return [(self._validate_model(value=entry) if entry else None, ttl_info) for entry, ttl_info in entries] diff --git a/tests/conftest.py b/tests/conftest.py index e69de29b..07cbb88a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -0,0 +1,17 @@ +from collections.abc import Callable, Iterator +from contextlib import contextmanager + + +@contextmanager +def try_import() -> Iterator[Callable[[], bool]]: + import_success = False + + def check_import() -> bool: + return import_success + + try: + yield check_import + except ImportError: + pass + else: + import_success = True diff --git a/tests/stores/valkey/test_valkey.py b/tests/stores/valkey/test_valkey.py index 8484a699..9b5b660f 100644 --- a/tests/stores/valkey/test_valkey.py +++ b/tests/stores/valkey/test_valkey.py @@ -2,18 +2,20 @@ from collections.abc import AsyncGenerator import pytest +from typing_extensions import override +from kv_store_adapter.stores.base import BaseStore +from tests.conftest import try_import from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests -pytestmark = pytest.mark.skipif(detect_on_windows(), reason="This test file requires Windows") +with try_import() as has_valkey: + from glide.glide_client import GlideClient + from glide_shared.config import GlideClientConfiguration, NodeAddress -# ruff: noqa: E402 # ignore non-top-level imports -from glide.glide_client import GlideClient -from glide_shared.config import GlideClientConfiguration, NodeAddress -from typing_extensions import override + from kv_store_adapter.stores.valkey import ValkeyStore -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.valkey import ValkeyStore +if not has_valkey(): + pytestmark = pytest.mark.skip(reason="GlideClient is not installed") # Valkey test configuration VALKEY_HOST = "localhost" @@ -23,31 +25,6 @@ WAIT_FOR_VALKEY_TIMEOUT = 30 -async def get_valkey_client() -> GlideClient: - client_config: GlideClientConfiguration = GlideClientConfiguration( - addresses=[NodeAddress(host=VALKEY_HOST, port=VALKEY_PORT)], database_id=VALKEY_DB - ) - return await GlideClient.create(config=client_config) - - -async def ping_valkey() -> bool: - try: - client = await get_valkey_client() - _ = await client.ping() - except Exception: - return False - - return True - - -async def wait_valkey() -> bool: - for _ in range(WAIT_FOR_VALKEY_TIMEOUT): - if await ping_valkey(): - return True - await asyncio.sleep(delay=1) - return False - - class ValkeyFailedToStartError(Exception): pass @@ -56,6 +33,28 @@ class ValkeyFailedToStartError(Exception): @pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows") @pytest.mark.timeout(15) class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests): + async def get_valkey_client(self): + client_config: GlideClientConfiguration = GlideClientConfiguration( + addresses=[NodeAddress(host=VALKEY_HOST, port=VALKEY_PORT)], database_id=VALKEY_DB + ) + return await GlideClient.create(config=client_config) + + async def ping_valkey(self) -> bool: + try: + client = await self.get_valkey_client() + _ = await client.ping() + except Exception: + return False + + return True + + async def wait_valkey(self) -> bool: + for _ in range(WAIT_FOR_VALKEY_TIMEOUT): + if await 
self.ping_valkey(): + return True + await asyncio.sleep(delay=1) + return False + @pytest.fixture(autouse=True, scope="session") async def setup_valkey(self) -> AsyncGenerator[None, None]: _ = await asyncio.create_subprocess_exec("docker", "stop", "valkey-test") @@ -65,7 +64,7 @@ async def setup_valkey(self) -> AsyncGenerator[None, None]: "docker", "run", "-d", "--name", "valkey-test", "-p", f"{VALKEY_PORT}:6379", "valkey/valkey:latest" ) _ = await process.wait() - if not await wait_valkey(): + if not await self.wait_valkey(): msg = "Valkey failed to start" raise ValkeyFailedToStartError(msg) try: @@ -75,10 +74,10 @@ async def setup_valkey(self) -> AsyncGenerator[None, None]: @override @pytest.fixture - async def store(self, setup_valkey: None) -> ValkeyStore: + async def store(self, setup_valkey: None): store: ValkeyStore = ValkeyStore(host=VALKEY_HOST, port=VALKEY_PORT, db=VALKEY_DB) - client: GlideClient = await get_valkey_client() + client: GlideClient = await self.get_valkey_client() _ = await client.flushdb() return store From 325f7d51a5471999594a5a8b67288bf39d88b87e Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 15:08:43 -0500 Subject: [PATCH 19/31] maybe tests work now --- tests/stores/conftest.py | 6 +++--- tests/stores/disk/test_disk.py | 2 +- tests/stores/disk/test_multi_disk.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index 5814f9d8..c74d95be 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -9,6 +9,7 @@ import pytest from pydantic import AnyHttpUrl +from kv_store_adapter.adapters.sync import SyncAdapter from kv_store_adapter.errors import InvalidTTLError, SerializationError from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore @@ -192,12 +193,12 @@ async def test_special_characters_in_key_name(self, store: BaseStore): async def test_not_unbounded(self, store: BaseStore): """Tests that the store is not unbounded.""" - for i in range(5000): + for i in range(1000): value = hashlib.sha256(f"test_{i}".encode()).hexdigest() await store.put(collection="test_collection", key=f"test_key_{i}", value={"test": value}) assert await store.get(collection="test_collection", key="test_key_0") is None - assert await store.get(collection="test_collection", key="test_key_4999") is not None + assert await store.get(collection="test_collection", key="test_key_999") is not None async def test_concurrent_operations(self, store: BaseStore): """Tests that the store can handle concurrent operations.""" @@ -217,7 +218,6 @@ async def worker(store: BaseStore, worker_id: int): _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(1)]) - class ContextManagerStoreTestMixin: @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) async def enter_exit_store( diff --git a/tests/stores/disk/test_disk.py b/tests/stores/disk/test_disk.py index b1e30a12..db812421 100644 --- a/tests/stores/disk/test_disk.py +++ b/tests/stores/disk/test_disk.py @@ -7,7 +7,7 @@ from kv_store_adapter.stores.disk import DiskStore from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin -TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB +TEST_SIZE_LIMIT = 100 * 1024 # 100KB class TestDiskStore(ContextManagerStoreTestMixin, BaseStoreTests): diff --git a/tests/stores/disk/test_multi_disk.py b/tests/stores/disk/test_multi_disk.py index eeb7ed5c..d9986712 100644 --- a/tests/stores/disk/test_multi_disk.py +++ 
b/tests/stores/disk/test_multi_disk.py @@ -8,7 +8,7 @@ from kv_store_adapter.stores.disk.multi_store import MultiDiskStore from tests.stores.conftest import BaseStoreTests -TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB +TEST_SIZE_LIMIT = 100 * 1024 # 100KB class TestMultiDiskStore(BaseStoreTests): From c108ca96c015931a07b378aaca8b4e94f1f76912 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 15:11:41 -0500 Subject: [PATCH 20/31] forgot to lint --- tests/stores/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index c74d95be..bcd6399f 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -9,7 +9,6 @@ import pytest from pydantic import AnyHttpUrl -from kv_store_adapter.adapters.sync import SyncAdapter from kv_store_adapter.errors import InvalidTTLError, SerializationError from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore @@ -218,6 +217,7 @@ async def worker(store: BaseStore, worker_id: int): _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(1)]) + class ContextManagerStoreTestMixin: @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) async def enter_exit_store( From 7b47ce57cd8be7a795125776baaffaf788b2dd03 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 15:17:15 -0500 Subject: [PATCH 21/31] Test clean-up --- src/kv_store_adapter/stores/memory/store.py | 9 +++++---- tests/stores/conftest.py | 6 ++++++ tests/stores/memory/test_memory.py | 2 +- tests/stores/wrappers/test_clamp_ttl.py | 4 ---- tests/stores/wrappers/test_passthrough_cache.py | 6 +++--- tests/stores/wrappers/test_prefix_collection.py | 3 +-- tests/stores/wrappers/test_prefix_key.py | 3 +-- tests/stores/wrappers/test_single_collection.py | 3 +-- tests/stores/wrappers/test_statistics.py | 3 +-- 9 files changed, 19 insertions(+), 20 deletions(-) diff --git a/src/kv_store_adapter/stores/memory/store.py b/src/kv_store_adapter/stores/memory/store.py index eb7d0b55..22726dd8 100644 --- a/src/kv_store_adapter/stores/memory/store.py +++ b/src/kv_store_adapter/stores/memory/store.py @@ -58,7 +58,7 @@ def _memory_cache_getsizeof(value: MemoryCacheEntry) -> int: # pyright: ignore[ return 1 -DEFAULT_MEMORY_CACHE_LIMIT = 1000 +DEFAULT_MAX_ENTRIES_PER_COLLECTION = 10000 DEFAULT_PAGE_SIZE = 10000 PAGE_LIMIT = 10000 @@ -67,7 +67,7 @@ def _memory_cache_getsizeof(value: MemoryCacheEntry) -> int: # pyright: ignore[ class MemoryCollection: _cache: TLRUCache[str, MemoryCacheEntry] - def __init__(self, max_entries: int = DEFAULT_MEMORY_CACHE_LIMIT): + def __init__(self, max_entries: int = DEFAULT_MAX_ENTRIES_PER_COLLECTION): self._cache = TLRUCache[str, MemoryCacheEntry]( maxsize=max_entries, ttu=_memory_cache_ttu, @@ -102,11 +102,12 @@ class MemoryStore(BaseDestroyStore, BaseDestroyCollectionStore, BaseEnumerateCol _cache: dict[str, MemoryCollection] - def __init__(self, *, max_entries_per_collection: int = DEFAULT_MEMORY_CACHE_LIMIT, default_collection: str | None = None): + def __init__(self, *, max_entries_per_collection: int = DEFAULT_MAX_ENTRIES_PER_COLLECTION, default_collection: str | None = None): """Initialize the in-memory cache. Args: - max_entries_per_collection: The maximum number of entries per collection. Defaults to 1000. + max_entries_per_collection: The maximum number of entries per collection. Defaults to 10000. + default_collection: The default collection to use if no collection is provided. 
""" self.max_entries_per_collection = max_entries_per_collection diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index bcd6399f..0cfcdfb7 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -11,6 +11,12 @@ from kv_store_adapter.errors import InvalidTTLError, SerializationError from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore +from kv_store_adapter.stores.memory.store import MemoryStore + + +@pytest.fixture +def memory_store() -> MemoryStore: + return MemoryStore(max_entries_per_collection=500) def now() -> datetime: diff --git a/tests/stores/memory/test_memory.py b/tests/stores/memory/test_memory.py index 63a7bcc5..15b524d8 100644 --- a/tests/stores/memory/test_memory.py +++ b/tests/stores/memory/test_memory.py @@ -9,4 +9,4 @@ class TestMemoryStore(BaseStoreTests): @override @pytest.fixture async def store(self) -> MemoryStore: - return MemoryStore() + return MemoryStore(max_entries_per_collection=500) diff --git a/tests/stores/wrappers/test_clamp_ttl.py b/tests/stores/wrappers/test_clamp_ttl.py index 1fc48695..7ef572e8 100644 --- a/tests/stores/wrappers/test_clamp_ttl.py +++ b/tests/stores/wrappers/test_clamp_ttl.py @@ -8,10 +8,6 @@ class TestTTLClampWrapper(BaseStoreTests): - @pytest.fixture - async def memory_store(self) -> MemoryStore: - return MemoryStore() - @override @pytest.fixture async def store(self, memory_store: MemoryStore) -> TTLClampWrapper: diff --git a/tests/stores/wrappers/test_passthrough_cache.py b/tests/stores/wrappers/test_passthrough_cache.py index 46dc4892..baf5a2e1 100644 --- a/tests/stores/wrappers/test_passthrough_cache.py +++ b/tests/stores/wrappers/test_passthrough_cache.py @@ -9,7 +9,7 @@ from kv_store_adapter.wrappers.passthrough_cache import PassthroughCacheWrapper from tests.stores.conftest import BaseStoreTests -DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB +DISK_STORE_SIZE_LIMIT = 100 * 1024 # 100KB class TestPassthroughCacheWrapper(BaseStoreTests): @@ -19,8 +19,8 @@ async def primary_store(self) -> AsyncGenerator[DiskStore, None]: yield DiskStore(directory=temp_dir, max_size=DISK_STORE_SIZE_LIMIT) @pytest.fixture - async def cache_store(self) -> MemoryStore: - return MemoryStore() + async def cache_store(self, memory_store: MemoryStore) -> MemoryStore: + return memory_store @override @pytest.fixture diff --git a/tests/stores/wrappers/test_prefix_collection.py b/tests/stores/wrappers/test_prefix_collection.py index df2dd07b..8b9f55cb 100644 --- a/tests/stores/wrappers/test_prefix_collection.py +++ b/tests/stores/wrappers/test_prefix_collection.py @@ -9,6 +9,5 @@ class TestPrefixCollectionWrapper(BaseStoreTests): @override @pytest.fixture - async def store(self) -> PrefixCollectionsWrapper: - memory_store: MemoryStore = MemoryStore() + async def store(self, memory_store: MemoryStore) -> PrefixCollectionsWrapper: return PrefixCollectionsWrapper(store=memory_store, prefix="collection_prefix") diff --git a/tests/stores/wrappers/test_prefix_key.py b/tests/stores/wrappers/test_prefix_key.py index 1e6ffa18..d949462b 100644 --- a/tests/stores/wrappers/test_prefix_key.py +++ b/tests/stores/wrappers/test_prefix_key.py @@ -9,6 +9,5 @@ class TestPrefixKeyWrapper(BaseStoreTests): @override @pytest.fixture - async def store(self) -> PrefixKeysWrapper: - memory_store: MemoryStore = MemoryStore() + async def store(self, memory_store: MemoryStore) -> PrefixKeysWrapper: return PrefixKeysWrapper(store=memory_store, prefix="key_prefix") diff --git a/tests/stores/wrappers/test_single_collection.py 
b/tests/stores/wrappers/test_single_collection.py index 1022cb54..0c33880e 100644 --- a/tests/stores/wrappers/test_single_collection.py +++ b/tests/stores/wrappers/test_single_collection.py @@ -9,6 +9,5 @@ class TestSingleCollectionWrapper(BaseStoreTests): @override @pytest.fixture - async def store(self) -> SingleCollectionWrapper: - memory_store: MemoryStore = MemoryStore() + async def store(self, memory_store: MemoryStore) -> SingleCollectionWrapper: return SingleCollectionWrapper(store=memory_store, single_collection="test") diff --git a/tests/stores/wrappers/test_statistics.py b/tests/stores/wrappers/test_statistics.py index 6b6983ef..9362b73d 100644 --- a/tests/stores/wrappers/test_statistics.py +++ b/tests/stores/wrappers/test_statistics.py @@ -9,6 +9,5 @@ class TestStatisticsWrapper(BaseStoreTests): @override @pytest.fixture - async def store(self) -> StatisticsWrapper: - memory_store: MemoryStore = MemoryStore() + async def store(self, memory_store: MemoryStore) -> StatisticsWrapper: return StatisticsWrapper(store=memory_store) From 31039f746deb9f0e51ec10988d1aacc9308614b0 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 15:23:22 -0500 Subject: [PATCH 22/31] More test updates --- src/kv_store_adapter/stores/simple/store.py | 12 +++++++++--- tests/stores/conftest.py | 20 ++++++++++++++++++-- tests/stores/simple/test_store.py | 2 +- 3 files changed, 28 insertions(+), 6 deletions(-) diff --git a/src/kv_store_adapter/stores/simple/store.py b/src/kv_store_adapter/stores/simple/store.py index 7cd559a7..2fdbe70a 100644 --- a/src/kv_store_adapter/stores/simple/store.py +++ b/src/kv_store_adapter/stores/simple/store.py @@ -14,8 +14,7 @@ from kv_store_adapter.utils.managed_entry import ManagedEntry, load_from_json from kv_store_adapter.utils.time_to_live import seconds_to -DEFAULT_SIMPLE_MANAGED_STORE_MAX_ENTRIES = 1000 -DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 1000 +DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 10000 @dataclass @@ -49,7 +48,14 @@ class SimpleStore(BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDes _data: dict[str, SimpleStoreEntry] - def __init__(self, max_entries: int = DEFAULT_SIMPLE_MANAGED_STORE_MAX_ENTRIES, default_collection: str | None = None): + def __init__(self, max_entries: int = DEFAULT_SIMPLE_STORE_MAX_ENTRIES, default_collection: str | None = None): + """Initialize the simple store. + + Args: + max_entries: The maximum number of entries to store. Defaults to 10000. + default_collection: The default collection to use if no collection is provided. 
+ """ + self.max_entries = max_entries self._data = defaultdict[str, SimpleStoreEntry]() diff --git a/tests/stores/conftest.py b/tests/stores/conftest.py index 0cfcdfb7..022709a9 100644 --- a/tests/stores/conftest.py +++ b/tests/stores/conftest.py @@ -209,7 +209,7 @@ async def test_concurrent_operations(self, store: BaseStore): """Tests that the store can handle concurrent operations.""" async def worker(store: BaseStore, worker_id: int): - for i in range(100): + for i in range(10): assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None await store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}"}) @@ -221,7 +221,23 @@ async def worker(store: BaseStore, worker_id: int): assert await store.delete(collection="test_collection", key=f"test_{worker_id}_{i}") assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None - _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(1)]) + _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(5)]) + + @pytest.mark.timeout(15) + async def test_minimum_put_many_get_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + await store.put_many(collection="test_collection", keys=keys, values=values) + assert await store.get_many(collection="test_collection", keys=keys) == values + + @pytest.mark.timeout(15) + async def test_minimum_put_many_delete_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + await store.put_many(collection="test_collection", keys=keys, values=values) + assert await store.delete_many(collection="test_collection", keys=keys) == 10 class ContextManagerStoreTestMixin: diff --git a/tests/stores/simple/test_store.py b/tests/stores/simple/test_store.py index f45165fc..85282f82 100644 --- a/tests/stores/simple/test_store.py +++ b/tests/stores/simple/test_store.py @@ -9,4 +9,4 @@ class TestSimpleStore(BaseStoreTests): @override @pytest.fixture async def store(self) -> SimpleStore: - return SimpleStore() + return SimpleStore(max_entries=500) From dd2e84845f50593df1084a221365948be3020f99 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 17:20:11 -0500 Subject: [PATCH 23/31] disk store fixes for windows --- src/kv_store_adapter/stores/base.py | 7 +++---- .../stores/disk/multi_store.py | 6 +++++- src/kv_store_adapter/stores/disk/store.py | 18 +++++++++++++++--- .../stores/wrappers/test_passthrough_cache.py | 4 +++- 4 files changed, 26 insertions(+), 9 deletions(-) diff --git a/src/kv_store_adapter/stores/base.py b/src/kv_store_adapter/stores/base.py index 1aa20fe5..6d28a8a7 100644 --- a/src/kv_store_adapter/stores/base.py +++ b/src/kv_store_adapter/stores/base.py @@ -2,7 +2,6 @@ Base abstract class for managed key-value store implementations. 
""" -import asyncio from abc import ABC, abstractmethod from asyncio.locks import Lock from collections import defaultdict @@ -56,7 +55,7 @@ class BaseStore(KeyValueProtocol, ABC): """ _setup_complete: bool - _setup_lock: asyncio.Lock + _setup_lock: Lock _setup_collection_locks: defaultdict[str, Lock] _setup_collection_complete: defaultdict[str, bool] @@ -72,8 +71,8 @@ def __init__(self, *, default_collection: str | None = None) -> None: """ self._setup_complete = False - self._setup_lock = asyncio.Lock() - self._setup_collection_locks = defaultdict[str, asyncio.Lock](asyncio.Lock) + self._setup_lock = Lock() + self._setup_collection_locks = defaultdict[str, Lock](Lock) self._setup_collection_complete = defaultdict[str, bool](bool) self.default_collection = default_collection or DEFAULT_COLLECTION_NAME diff --git a/src/kv_store_adapter/stores/disk/multi_store.py b/src/kv_store_adapter/stores/disk/multi_store.py index e4a06f1f..3f4f9c23 100644 --- a/src/kv_store_adapter/stores/disk/multi_store.py +++ b/src/kv_store_adapter/stores/disk/multi_store.py @@ -88,7 +88,11 @@ def __init__( def default_disk_cache_factory(collection: str) -> Cache: sanitized_collection: str = _sanitize_collection_for_filesystem(collection=collection) - return Cache(directory=self._base_directory / sanitized_collection, size_limit=self._max_size or DEFAULT_DISK_STORE_SIZE_LIMIT) + cache_directory: Path = self._base_directory / sanitized_collection + + cache_directory.mkdir(parents=True, exist_ok=True) + + return Cache(directory=cache_directory, size_limit=self._max_size or DEFAULT_DISK_STORE_SIZE_LIMIT) self._disk_cache_factory = disk_cache_factory or default_disk_cache_factory diff --git a/src/kv_store_adapter/stores/disk/store.py b/src/kv_store_adapter/stores/disk/store.py index 30fd4961..25dde250 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/src/kv_store_adapter/stores/disk/store.py @@ -57,10 +57,22 @@ def __init__( max_size: The maximum size of the disk cache. Defaults to 1GB. default_collection: The default collection to use if no collection is provided. 
""" - if isinstance(directory, Path): - directory = str(object=directory) + if disk_cache is not None and directory is not None: + msg = "Either disk_cache or directory must be provided" + raise ValueError(msg) - self._cache = disk_cache or Cache(directory=directory, size_limit=max_size or DEFAULT_DISK_STORE_MAX_SIZE) + if disk_cache is None and directory is None: + msg = "Either disk_cache or directory must be provided" + raise ValueError(msg) + + if disk_cache: + self._cache = disk_cache + elif directory: + directory = Path(directory) + + directory.mkdir(parents=True, exist_ok=True) + + self._cache = Cache(directory=directory, size_limit=max_size or DEFAULT_DISK_STORE_MAX_SIZE) super().__init__(default_collection=default_collection) diff --git a/tests/stores/wrappers/test_passthrough_cache.py b/tests/stores/wrappers/test_passthrough_cache.py index baf5a2e1..9cbd1111 100644 --- a/tests/stores/wrappers/test_passthrough_cache.py +++ b/tests/stores/wrappers/test_passthrough_cache.py @@ -16,7 +16,9 @@ class TestPassthroughCacheWrapper(BaseStoreTests): @pytest.fixture async def primary_store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: - yield DiskStore(directory=temp_dir, max_size=DISK_STORE_SIZE_LIMIT) + async with DiskStore(directory=temp_dir, max_size=DISK_STORE_SIZE_LIMIT) as disk_store: + yield disk_store + @pytest.fixture async def cache_store(self, memory_store: MemoryStore) -> MemoryStore: From 1bdb40042de632bfd1e5d53bad09e88af21bc9f3 Mon Sep 17 00:00:00 2001 From: William Easton Date: Fri, 26 Sep 2025 22:16:15 -0500 Subject: [PATCH 24/31] Reorganize project --- ...sh-py-kv-store-adapter.yml => publish.yml} | 16 +- .../{test_pull_request.yml => test.yml} | 47 +- .vscode/launch.json | 10 + .vscode/settings.json | 3 - DEVELOPING.md | 432 ------------------ README.md | 20 +- key-value/key-value-aio/README.md | 219 +++++++++ key-value/key-value-aio/pyproject.toml | 74 +++ .../src/key_value/aio/__init__.py | 3 + .../src/key_value/aio}/adapters/__init__.py | 0 .../src/key_value/aio}/adapters/pydantic.py | 8 +- .../aio}/adapters/raise_on_missing.py | 8 +- .../src/key_value/aio}/errors.py | 0 .../src/key_value/aio}/stores/__init__.py | 0 .../src/key_value/aio}/stores/base.py | 38 +- .../key_value/aio}/stores/disk/__init__.py | 0 .../key_value/aio}/stores/disk/multi_store.py | 6 +- .../src/key_value/aio}/stores/disk/store.py | 6 +- .../aio}/stores/elasticsearch/__init__.py | 0 .../aio}/stores/elasticsearch/store.py | 10 +- .../aio}/stores/elasticsearch/utils.py | 0 .../aio}/stores/memcached/__init__.py | 0 .../key_value/aio}/stores/memcached/store.py | 6 +- .../key_value/aio}/stores/memory/__init__.py | 0 .../src/key_value/aio}/stores/memory/store.py | 6 +- .../key_value/aio}/stores/mongodb/__init__.py | 0 .../key_value/aio}/stores/mongodb/store.py | 8 +- .../key_value/aio}/stores/null/__init__.py | 0 .../src/key_value/aio}/stores/null/store.py | 4 +- .../key_value/aio}/stores/redis/__init__.py | 0 .../src/key_value/aio}/stores/redis/store.py | 6 +- .../key_value/aio}/stores/simple/__init__.py | 0 .../src/key_value/aio}/stores/simple/store.py | 8 +- .../key_value/aio}/stores/valkey/__init__.py | 0 .../src/key_value/aio}/stores/valkey/store.py | 6 +- .../key-value-aio/src/key_value/aio}/types.py | 14 +- .../src/key_value/aio}/utils/compound.py | 0 .../src/key_value/aio}/utils/managed_entry.py | 4 +- .../src/key_value/aio}/utils/sanitize.py | 0 .../src/key_value/aio}/utils/time_to_live.py | 0 .../src/key_value/aio/wrappers/__init__.py | 17 + 
.../src/key_value/aio}/wrappers/base.py | 6 +- .../aio}/wrappers/passthrough_cache.py | 14 +- .../aio}/wrappers/prefix_collections.py | 12 +- .../key_value/aio}/wrappers/prefix_keys.py | 10 +- .../aio}/wrappers/single_collection.py | 14 +- .../src/key_value/aio}/wrappers/statistics.py | 8 +- .../src/key_value/aio/wrappers/ttl_clamp.py | 20 +- .../key-value-aio/tests}/__init__.py | 0 .../key-value-aio/tests}/adapters/__init__.py | 0 .../tests}/adapters/test_pydantic.py | 4 +- .../tests}/adapters/test_raise.py | 6 +- .../key-value-aio/tests}/cases.py | 0 .../key-value-aio/tests}/conftest.py | 0 .../key-value-aio/tests}/stores/__init__.py | 0 .../tests}/stores/base/__init__.py | 0 .../key-value-aio/tests}/stores/conftest.py | 14 +- .../tests}/stores/disk/__init__.py | 0 .../tests}/stores/disk/test_disk.py | 14 +- .../tests/stores/disk/test_multi_disk.py | 26 ++ .../tests}/stores/elasticsearch/__init__.py | 0 .../elasticsearch/test_elasticsearch.py | 4 +- .../tests}/stores/memcached/test_memcached.py | 5 +- .../tests}/stores/memory/__init__.py | 0 .../tests}/stores/memory/test_memory.py | 2 +- .../tests}/stores/mongodb/test_mongodb.py | 5 +- .../tests}/stores/redis/__init__.py | 0 .../tests}/stores/redis/test_redis.py | 5 +- .../tests}/stores/simple/__init__.py | 0 .../tests}/stores/simple/test_store.py | 2 +- .../tests}/stores/valkey/test_valkey.py | 5 +- .../tests}/stores/wrappers/__init__.py | 0 .../tests}/stores/wrappers/test_clamp_ttl.py | 4 +- .../stores/wrappers/test_passthrough_cache.py | 10 +- .../stores/wrappers/test_prefix_collection.py | 4 +- .../tests}/stores/wrappers/test_prefix_key.py | 4 +- .../stores/wrappers/test_single_collection.py | 4 +- .../tests}/stores/wrappers/test_statistics.py | 4 +- .../key-value-aio/tests}/test_types.py | 6 +- .../tests}/utils/test_managed_entry.py | 2 +- .../tests}/utils/test_sanitize.py | 2 +- pyproject.toml | 96 +--- src/kv_store_adapter/__init__.py | 3 - tests/stores/disk/test_multi_disk.py | 21 - uv.lock | 196 ++++---- 85 files changed, 702 insertions(+), 809 deletions(-) rename .github/workflows/{publish-py-kv-store-adapter.yml => publish.yml} (65%) rename .github/workflows/{test_pull_request.yml => test.yml} (61%) delete mode 100644 DEVELOPING.md create mode 100644 key-value/key-value-aio/README.md create mode 100644 key-value/key-value-aio/pyproject.toml create mode 100644 key-value/key-value-aio/src/key_value/aio/__init__.py rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/adapters/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/adapters/pydantic.py (94%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/adapters/raise_on_missing.py (97%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/errors.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/base.py (93%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/disk/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/disk/multi_store.py (96%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/disk/store.py (95%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/elasticsearch/__init__.py (100%) rename {src/kv_store_adapter => 
key-value/key-value-aio/src/key_value/aio}/stores/elasticsearch/store.py (96%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/elasticsearch/utils.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/memcached/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/memcached/store.py (93%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/memory/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/memory/store.py (96%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/mongodb/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/mongodb/store.py (95%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/null/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/null/store.py (83%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/redis/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/redis/store.py (94%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/simple/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/simple/store.py (91%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/valkey/__init__.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/stores/valkey/store.py (95%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/types.py (95%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/utils/compound.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/utils/managed_entry.py (95%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/utils/sanitize.py (100%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/utils/time_to_live.py (100%) create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/wrappers/base.py (94%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/wrappers/passthrough_cache.py (95%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/wrappers/prefix_collections.py (89%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/wrappers/prefix_keys.py (91%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/wrappers/single_collection.py (89%) rename {src/kv_store_adapter => key-value/key-value-aio/src/key_value/aio}/wrappers/statistics.py (97%) rename src/kv_store_adapter/wrappers/clamp_ttl.py => key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py (72%) rename {tests => key-value/key-value-aio/tests}/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/adapters/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/adapters/test_pydantic.py (95%) rename {tests => key-value/key-value-aio/tests}/adapters/test_raise.py (86%) rename {tests => key-value/key-value-aio/tests}/cases.py (100%) rename {tests => key-value/key-value-aio/tests}/conftest.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/__init__.py (100%) rename {tests => 
key-value/key-value-aio/tests}/stores/base/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/conftest.py (96%) rename {tests => key-value/key-value-aio/tests}/stores/disk/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/disk/test_disk.py (50%) create mode 100644 key-value/key-value-aio/tests/stores/disk/test_multi_disk.py rename {tests => key-value/key-value-aio/tests}/stores/elasticsearch/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/elasticsearch/test_elasticsearch.py (92%) rename {tests => key-value/key-value-aio/tests}/stores/memcached/test_memcached.py (94%) rename {tests => key-value/key-value-aio/tests}/stores/memory/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/memory/test_memory.py (82%) rename {tests => key-value/key-value-aio/tests}/stores/mongodb/test_mongodb.py (95%) rename {tests => key-value/key-value-aio/tests}/stores/redis/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/redis/test_redis.py (96%) rename {tests => key-value/key-value-aio/tests}/stores/simple/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/simple/test_store.py (81%) rename {tests => key-value/key-value-aio/tests}/stores/valkey/test_valkey.py (95%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/__init__.py (100%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/test_clamp_ttl.py (94%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/test_passthrough_cache.py (74%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/test_prefix_collection.py (71%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/test_prefix_key.py (72%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/test_single_collection.py (71%) rename {tests => key-value/key-value-aio/tests}/stores/wrappers/test_statistics.py (70%) rename {tests => key-value/key-value-aio/tests}/test_types.py (79%) rename {tests => key-value/key-value-aio/tests}/utils/test_managed_entry.py (93%) rename {tests => key-value/key-value-aio/tests}/utils/test_sanitize.py (98%) delete mode 100644 src/kv_store_adapter/__init__.py delete mode 100644 tests/stores/disk/test_multi_disk.py diff --git a/.github/workflows/publish-py-kv-store-adapter.yml b/.github/workflows/publish.yml similarity index 65% rename from .github/workflows/publish-py-kv-store-adapter.yml rename to .github/workflows/publish.yml index 5d95c69a..2ac94e65 100644 --- a/.github/workflows/publish-py-kv-store-adapter.yml +++ b/.github/workflows/publish.yml @@ -1,4 +1,4 @@ -name: Publish py-kv-store-adapter to PyPI +name: Publish to PyPI on: release: @@ -13,22 +13,30 @@ jobs: id-token: write environment: pypi + strategy: + matrix: + project: + - "key-value/key-value-aio" + # - "kv-store-sync" steps: - name: Checkout repository uses: actions/checkout@v4 - name: "Install uv" uses: astral-sh/setup-uv@v6 - + - name: "Install" run: uv sync --locked --group dev + working-directory: ${{ matrix.project }} - name: "Test" run: uv run pytest tests - + working-directory: ${{ matrix.project }} + - name: "Build" run: uv build + working-directory: ${{ matrix.project }} - name: "Publish to PyPi" if: github.event_name == 'release' && github.event.action == 'created' - run: uv publish -v dist/* \ No newline at end of file + run: uv publish -v dist/* diff --git a/.github/workflows/test_pull_request.yml b/.github/workflows/test.yml similarity index 61% rename from 
.github/workflows/test_pull_request.yml rename to .github/workflows/test.yml index f2bc53a4..c3076989 100644 --- a/.github/workflows/test_pull_request.yml +++ b/.github/workflows/test.yml @@ -1,4 +1,4 @@ -name: Run tests for pull requests and merges +name: Run Tests on: pull_request: @@ -12,24 +12,34 @@ on: jobs: static_analysis: runs-on: ubuntu-latest + + strategy: + matrix: + project: + - "key-value/key-value-aio" + # - "kv-store-sync" steps: - name: Checkout repository uses: actions/checkout@v4 - name: "Install uv" uses: astral-sh/setup-uv@v6 - + - name: "Install" run: uv sync --locked --group dev + working-directory: ${{ matrix.project }} - name: "Lint" run: uv run ruff check --exit-non-zero-on-fix --fix + working-directory: ${{ matrix.project }} - name: "Format" run: uv run ruff format --check + working-directory: ${{ matrix.project }} - name: "Type Check" run: uv run basedpyright + working-directory: ${{ matrix.project }} test_quick: needs: @@ -39,7 +49,10 @@ jobs: matrix: python-version: ["3.10"] platform: [ubuntu-latest, macos-latest, windows-latest] - + project: + - "key-value/key-value-aio" + # - "kv-store-sync" + runs-on: ${{ matrix.platform }} steps: - name: Checkout repository @@ -47,26 +60,39 @@ jobs: - name: "Install uv" uses: astral-sh/setup-uv@v6 - + - name: "Install" run: uv sync --locked --group dev --python ${{ matrix.python-version }} + working-directory: ${{ matrix.project }} - name: "Test" run: uv run pytest tests + working-directory: ${{ matrix.project }} - name: "Build" run: uv build + working-directory: ${{ matrix.project }} test_all: needs: - test_quick - - timeout-minutes: 10 + + timeout-minutes: 20 strategy: matrix: python-version: ["3.10", "3.11", "3.12", "3.13"] - platform: [ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, windows-2022, windows-latest] - + platform: + [ + ubuntu-22.04, + ubuntu-latest, + macos-14, + macos-latest, + windows-2022, + windows-latest, + ] + project: + - "key-value/key-value-aio" + # - "kv-store-sync" runs-on: ${{ matrix.platform }} @@ -76,12 +102,15 @@ jobs: - name: "Install uv" uses: astral-sh/setup-uv@v6 - + - name: "Install" run: uv sync --locked --group dev --python ${{ matrix.python-version }} + working-directory: ${{ matrix.project }} - name: "Test" run: uv run pytest tests + working-directory: ${{ matrix.project }} - name: "Build" run: uv build + working-directory: ${{ matrix.project }} diff --git a/.vscode/launch.json b/.vscode/launch.json index 1b0dc51a..a9591e2c 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -26,6 +26,16 @@ "console": "integratedTerminal", "justMyCode": false, "envFile": "${workspaceFolder}/.env" + }, + { + "name": "Python: Build Sync Library", + "type": "debugpy", + "request": "launch", + "program": "${workspaceFolder}/scripts/build_sync_library.py", + "console": "integratedTerminal", + "justMyCode": false, + "envFile": "${workspaceFolder}/.env", + "args": [] } ] } \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 9b388533..e137fadb 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,7 +1,4 @@ { - "python.testing.pytestArgs": [ - "tests" - ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true } \ No newline at end of file diff --git a/DEVELOPING.md b/DEVELOPING.md deleted file mode 100644 index 5c5982e1..00000000 --- a/DEVELOPING.md +++ /dev/null @@ -1,432 +0,0 @@ -# Development Guide - -This guide covers development setup, testing, and contribution guidelines for the KV Store Adapter project. 
- -## Development Setup - -### Prerequisites - -- Python 3.10 or higher -- [uv](https://docs.astral.sh/uv/) for dependency management -- Docker and Docker Compose (for integration tests) - -### Initial Setup - -1. **Clone the repository:** - ```bash - git clone - cd py-kv-store-adapter - ``` - -2. **Install dependencies:** - ```bash - uv sync --group dev - ``` - -3. **Activate the virtual environment:** - ```bash - source .venv/bin/activate # Linux/macOS - # or - .venv\Scripts\activate # Windows - ``` - -4. **Install pre-commit hooks (optional but recommended):** - ```bash - pre-commit install - ``` - -## Project Structure - -``` -src/kv_store_adapter/ -├── __init__.py # Main package exports -├── types.py # Core types and protocols -├── errors.py # Exception hierarchy -├── adapters/ # Protocol adapters -│ ├── __init__.py # Adapter exports -│ ├── pydantic.py # Pydantic model adapter -│ └── raise_on_missing.py # Raise-on-missing adapter -├── stores/ # Store implementations -│ ├── __init__.py # Store exports -│ ├── base.py # Abstract base classes -│ ├── redis/ # Redis implementation -│ │ ├── __init__.py # Redis exports -│ │ └── store.py # RedisStore implementation -│ ├── memory/ # In-memory TLRU cache -│ │ ├── __init__.py # Memory exports -│ │ └── store.py # MemoryStore implementation -│ ├── disk/ # Disk-based storage -│ │ ├── __init__.py # Disk exports -│ │ ├── store.py # DiskStore implementation -│ │ └── multi_store.py # Multi-disk store -│ ├── elasticsearch/ # Elasticsearch implementation -│ │ ├── __init__.py # Elasticsearch exports -│ │ ├── store.py # ElasticsearchStore implementation -│ │ └── utils.py # Elasticsearch utilities -│ ├── simple/ # Simple dict-based stores -│ │ ├── __init__.py # Simple store exports -│ │ └── store.py # SimpleStore implementation -│ ├── null/ # Null object pattern store -│ │ ├── __init__.py # Null store exports -│ │ └── store.py # NullStore implementation -│ └── utils/ # Utility functions -│ ├── compound.py # Key composition utilities -│ ├── managed_entry.py # ManagedEntry dataclass -│ └── time_to_live.py # TTL calculation -├── wrappers/ # Wrapper implementations -│ ├── __init__.py # Wrapper exports -│ ├── base.py # Base wrapper class -│ ├── statistics.py # Statistics tracking wrapper -│ ├── clamp_ttl.py # TTL clamping wrapper -│ ├── passthrough_cache.py # Passthrough cache wrapper -│ ├── prefix_collections.py # Collection prefix wrapper -│ ├── prefix_keys.py # Key prefix wrapper -│ └── single_collection.py # Single collection wrapper - -tests/ -├── conftest.py # Test configuration -├── cases.py # Common test cases -├── test_types.py # Type tests -└── stores/ # Store-specific tests -``` - -## Store Configuration - -All stores implement the `KVStore` interface. 
Here are detailed configuration options: - -### Redis Store -High-performance store with native TTL support: - -```python -from kv_store_adapter.stores.redis.store import RedisStore - -# Connection options -store = RedisStore(host="localhost", port=6379, db=0, password="secret") -store = RedisStore(url="redis://localhost:6379/0") -store = RedisStore(client=existing_redis_client) -``` - -### Memory Store -In-memory TLRU (Time-aware Least Recently Used) cache: - -```python -from kv_store_adapter.stores.memory.store import MemoryStore - -store = MemoryStore(max_entries_per_collection=1000) # Default: 1000 entries per collection -``` - -### Disk Store -Persistent disk-based storage using diskcache: - -```python -from kv_store_adapter.stores.disk.store import DiskStore - -store = DiskStore(directory="/path/to/cache", size_limit=1024*1024*1024) # 1GB -store = DiskStore(disk_cache=existing_cache_instance) -``` - -### Elasticsearch Store -Full-text searchable storage with Elasticsearch: - -```python -from kv_store_adapter.stores.elasticsearch.store import ElasticsearchStore - -store = ElasticsearchStore( - url="https://localhost:9200", - api_key="your-api-key", - index="kv-store" -) -store = ElasticsearchStore(client=existing_client, index="custom-index") -``` - -### Simple Store -Dictionary-based store for testing and development: - -```python -from kv_store_adapter.stores.simple.store import SimpleStore - -# Basic managed dictionary store -store = SimpleStore(max_entries=1000) -``` - -### Null Store -Null object pattern store for testing: - -```python -from kv_store_adapter.stores.null.store import NullStore - -store = NullStore() # Accepts all operations but stores nothing -``` - -## Architecture - -### Store Types - -All stores now inherit from the unified `BaseStore` class which uses `ManagedEntry` objects: - -1. **Managed Stores (`BaseStore`)** - - Use `ManagedEntry` wrapper objects for consistent TTL and metadata handling - - Automatic TTL handling and expiration checking - - Consistent behavior across all store implementations - - Examples: `RedisStore`, `MemoryStore`, `DiskStore`, `ElasticsearchStore`, `SimpleStore`, `NullStore` - -### Key Concepts - -- **Collections**: Logical namespaces for organizing keys -- **Compound Keys**: Internal key format `collection::key` for flat stores -- **TTL Management**: Automatic expiration handling with timezone-aware timestamps -- **Wrappers**: Wrapper pattern for adding functionality (statistics, TTL clamping, prefixing, etc.) -- **Adapters**: Transform data to/from stores (Pydantic models, raise-on-missing behavior, etc.) - -## Testing - -### Running Tests - -```bash -# Run all tests -uv run pytest - -# Run tests with coverage -uv run pytest --cov=src/kv_store_adapter --cov-report=html - -# Run specific test file -uv run pytest tests/stores/redis/test_redis.py - -# Run tests with specific markers -uv run pytest -m "not skip_on_ci" -``` - -### Test Environment Setup - -Some tests require external services. 
Use Docker Compose to start them: - -```bash -# Start all services -docker-compose up -d - -# Start specific services -docker-compose up -d redis elasticsearch - -# Stop services -docker-compose down -``` - -### Environment Variables - -Create a `.env` file for test configuration: - -```bash -# Redis -REDIS_URL=redis://localhost:6379/0 - -# Elasticsearch -ELASTICSEARCH_URL=https://localhost:9200 -ELASTICSEARCH_API_KEY=your-api-key-here -ELASTICSEARCH_INDEX=test-kv-store - -# Test settings -SKIP_INTEGRATION_TESTS=false -``` - -### Writing Tests - -#### Test Structure - -Tests are organized by store type and use common test cases: - -```python -# tests/stores/mystore/test_mystore.py -import pytest -from kv_store_adapter.stores.mystore.store import MyStore -from tests.stores.conftest import BaseStoreTests - -class TestMyStore(BaseStoreTests): - @pytest.fixture - async def store(self): - """Provide store instance for testing.""" - store = MyStore() - yield store - # Cleanup if needed - await store.destroy() -``` - -#### Common Test Cases - -Use the provided base test cases for consistency: - -```python -from tests.stores.conftest import BaseStoreTests - -class TestMyStore(BaseStoreTests): - # Inherits all standard KV store tests - pass -``` - -#### Custom Test Methods - -Add store-specific tests as needed: - -```python -class TestRedisStore(BaseStoreTests): - async def test_redis_specific_feature(self, store): - """Test Redis-specific functionality.""" - # Your test implementation - pass -``` - -### Test Markers - -- `skip_on_ci`: Skip tests that require external services on CI -- `slow`: Mark slow-running tests -- `integration`: Mark integration tests - -## Code Quality - -### Linting and Formatting - -The project uses Ruff for linting and formatting: - -```bash -# Check code style -uv run ruff check - -# Fix auto-fixable issues -uv run ruff check --fix - -# Format code -uv run ruff format -``` - -### Type Checking - -Use Pyright for type checking: - -```bash -# Check types -pyright - -# Check specific file -pyright src/kv_store_adapter/stores/redis/store.py -``` - -## Adding New Store Implementations - -### 1. Choose Base Class - -All stores inherit from the unified `BaseStore` class, which provides consistent TTL and metadata handling: - -```python -from kv_store_adapter.stores.base import BaseStore -``` - -You can also inherit from specialized base classes for additional functionality: -- `BaseEnumerateKeysStore` - Adds key enumeration support -- `BaseEnumerateCollectionsStore` - Adds collection enumeration support -- `BaseDestroyStore` - Adds store destruction support -- `BaseDestroyCollectionStore` - Adds collection destruction support -- `BaseCullStore` - Adds expired entry culling support - -### 2. Create Store Class - -```python -# src/kv_store_adapter/stores/mystore/store.py -from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.utils.managed_entry import ManagedEntry - -class MyStore(BaseStore): - """My custom key-value store implementation.""" - - def __init__(self, *, default_collection: str | None = None, **kwargs): - """Initialize store with custom parameters.""" - super().__init__(default_collection=default_collection) - # Your initialization code - - async def _setup(self) -> None: - """Initialize store (called once before first use).""" - # Setup code (connect to database, etc.) 
- pass - - @override - async def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None: - """Retrieve a managed entry by key from the specified collection. - - Returns: - ManagedEntry if found, None if not found or expired. - """ - # Your implementation - pass - - @override - async def _put_managed_entry( - self, - *, - collection: str, - key: str, - managed_entry: ManagedEntry, - ) -> None: - """Store a managed entry by key in the specified collection. - - Args: - collection: The collection to store in. - key: The key to store under. - managed_entry: The ManagedEntry containing value and metadata. - """ - # Your implementation - pass - - @override - async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: - """Delete a managed entry by key from the specified collection. - - Args: - key: The key to delete. - collection: The collection to delete from. - - Returns: - True if the key was deleted, False if it didn't exist. - """ - # Your implementation - pass - - # Implement other optional methods as needed... -``` - -### 3. Create Package Structure - -``` -src/kv_store_adapter/stores/mystore/ -├── __init__.py # Export store class -└── store.py # Store implementation -``` - -```python -# src/kv_store_adapter/stores/mystore/__init__.py -from .store import MyStore - -__all__ = ["MyStore"] -``` - -### 4. Add Tests - -```python -# tests/stores/mystore/test_mystore.py -import pytest -from kv_store_adapter.stores.mystore.store import MyStore -from tests.stores.conftest import BaseStoreTests - -class TestMyStore(BaseStoreTests): - @pytest.fixture - async def store(self): - store = MyStore() - yield store - # Cleanup -``` - -### 5. Add Optional Dependencies - -```toml -# pyproject.toml -[project.optional-dependencies] -mystore = ["my-store-dependency>=1.0.0"] -``` diff --git a/README.md b/README.md index f88cc77e..2bd4172e 100644 --- a/README.md +++ b/README.md @@ -46,9 +46,9 @@ The simplest way to get started is to use the `KVStore` interface, which allows ```python import asyncio -from kv_store_adapter.types import KVStore -from kv_store_adapter.stores.redis.store import RedisStore -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.types import AsyncKeyValue +from key_value.aio.stores.redis.store import RedisStore +from key_value.aio.stores.memory.store import MemoryStore async def example(): # In-memory store @@ -104,8 +104,8 @@ For example, the PydanticAdapter can be used to provide type-safe interactions w ```python from pydantic import BaseModel -from kv_store_adapter.adapters.pydantic import PydanticAdapter -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.adapters.pydantic import PydanticAdapter +from key_value.aio.stores.memory.store import MemoryStore class User(BaseModel): name: str @@ -133,8 +133,8 @@ Track operation statistics for any store: ```python import asyncio -from kv_store_adapter.wrappers.statistics import StatisticsWrapper -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.wrappers.statistics import StatisticsWrapper +from key_value.aio.stores.memory.store import MemoryStore memory_store = MemoryStore() store = StatisticsWrapper(store=memory_store) @@ -174,9 +174,9 @@ Imagine you have a service where you want to cache 3 pydantic models in a single ```python import asyncio -from kv_store_adapter.adapters.pydantic import PydanticAdapter -from kv_store_adapter.wrappers.single_collection import SingleCollectionWrapper -from 
kv_store_adapter.stores.memory.store import MemoryStore
+from key_value.aio.adapters.pydantic import PydanticAdapter
+from key_value.aio.wrappers.single_collection import SingleCollectionWrapper
+from key_value.aio.stores.memory.store import MemoryStore
 from pydantic import BaseModel
 
 class User(BaseModel):
diff --git a/key-value/key-value-aio/README.md b/key-value/key-value-aio/README.md
new file mode 100644
index 00000000..2bd4172e
--- /dev/null
+++ b/key-value/key-value-aio/README.md
@@ -0,0 +1,219 @@
+# KV Store Adapter
+
+A pluggable, async-only key-value store interface for modern Python applications.
+
+## Why use this library?
+
+- **Multiple backends**: Elasticsearch, Memcached, MongoDB, Redis, Valkey, plus In-memory and Disk stores, and more
+- **TTL support**: Automatic expiration handling across all store types
+- **Type-safe**: Full type hints with Protocol-based interfaces
+- **Adapters**: Pydantic model support, raise-on-missing behavior, etc
+- **Wrappers**: Statistics tracking and extensible wrapper system
+- **Collection-based**: Organize keys into logical collections/namespaces
+- **Pluggable architecture**: Easy to add custom store implementations
+
+## Why not use this library?
+
+- **Async-only**: There is no synchronous API; every operation requires `async`/`await`
+- **Managed Entries**: Raw values are not stored in backends; a wrapper object is stored instead. This wrapper object contains the value, sometimes metadata like the TTL, and the creation timestamp. Most often it is serialized to and from JSON.
+- **No Live Objects**: Even when using the in-memory store, "live" objects are never returned from the store. You get a dictionary or a Pydantic model, hopefully a copy of what you stored, but never the same instance in memory.
+
+## Quick Start
+
+```bash
+pip install py-key-value-aio
+
+# With specific backend support
+pip install py-key-value-aio[elasticsearch]
+pip install py-key-value-aio[redis]
+pip install py-key-value-aio[memcached]
+pip install py-key-value-aio[mongodb]
+pip install py-key-value-aio[valkey]
+pip install py-key-value-aio[memory]
+pip install py-key-value-aio[disk]
+
+# With all backends
+pip install py-key-value-aio[memory,disk,redis,elasticsearch,memcached,mongodb,valkey]
+
+# With Pydantic adapter support
+pip install py-key-value-aio[pydantic]
+```
+
+## The KV Store Protocol
+
+The simplest way to get started is to use the `AsyncKeyValue` protocol, which allows you to write code that works with any supported KV store:
+
+```python
+import asyncio
+
+from key_value.aio.types import AsyncKeyValue
+from key_value.aio.stores.redis.store import RedisStore
+from key_value.aio.stores.memory.store import MemoryStore
+
+async def example():
+    # In-memory store
+    memory_store = MemoryStore()
+    await memory_store.put(key="456", value={"name": "Bob"}, collection="users", ttl=3600)  # TTL is supported, but optional!
+    bob = await memory_store.get(key="456", collection="users")
+    await memory_store.delete(key="456", collection="users")
+
+    redis_store = RedisStore(url="redis://localhost:6379")
+    await redis_store.put(key="123", value={"name": "Alice"}, collection="products")
+    alice = await redis_store.get(key="123", collection="products")
+    await redis_store.delete(key="123", collection="products")
+
+asyncio.run(example())
+```
+
+## Store Implementations
+
+Choose the store that best fits your needs. All stores implement the same `AsyncKeyValue` protocol:
+
+### Production Stores
+
+- **ElasticsearchStore**: `ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key")`
+- **RedisStore**: `RedisStore(url="redis://localhost:6379/0")`
+- **MongoDBStore**: `MongoDBStore(url="mongodb://localhost:27017/test")`
+- **ValkeyStore**: `ValkeyStore(host="localhost", port=6379)`
+- **MemcachedStore**: `MemcachedStore(host="localhost", port=11211)`
+- **DiskStore**: A disk-based store using diskcache: `DiskStore(directory="./cache")`. Also see `MultiDiskStore` for a store that creates one disk store per collection.
+- **MemoryStore**: A fast in-memory TLRU cache: `MemoryStore()`
+
+### Development/Testing Stores
+
+- **SimpleStore**: An in-memory, inspectable store for testing: `SimpleStore()`
+- **NullStore**: A no-op store for testing: `NullStore()`
+
+For detailed configuration options and all available stores, see [DEVELOPING.md](DEVELOPING.md).
+
+## Atomicity / Consistency
+
+We strive to provide atomic and consistent basic key-value operations across all stores. That said, each store has its own consistency and atomicity guarantees, particularly the distributed stores (MongoDB, Redis, etc.) and particularly the bulk and management operations.
+
+## Protocol Adapters
+
+The library provides adapters that simplify working with a store. Adapters themselves do not implement the `AsyncKeyValue` protocol and cannot be nested; as a result, adapters form the "outer" layer of the stack. Adapters primarily provide improved type safety.
+
+The following adapters are available:
+
+- **PydanticAdapter**: Type-safe storage and retrieval using Pydantic models with automatic serialization/deserialization.
+- **RaiseOnMissingAdapter**: Provides optional raise-on-missing behavior for get, get_many, ttl, and ttl_many operations.
+
+For example, the PydanticAdapter can be used to provide type-safe interactions with a store:
+
+```python
+import asyncio
+
+from pydantic import BaseModel
+
+from key_value.aio.adapters.pydantic import PydanticAdapter
+from key_value.aio.stores.memory.store import MemoryStore
+
+class User(BaseModel):
+    name: str
+    email: str
+
+memory_store = MemoryStore()
+
+user_adapter = PydanticAdapter(kv_store=memory_store, pydantic_model=User)
+
+async def example():
+    await user_adapter.put(key="123", value=User(name="John Doe", email="john.doe@example.com"), collection="users")
+    user: User | None = await user_adapter.get(key="123", collection="users")
+
+asyncio.run(example())
+```
+
+## Wrappers
+
+The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the `AsyncKeyValue` protocol, meaning you can wrap any store with any wrapper and chain wrappers together as needed.
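+
+For instance, here is a minimal sketch that chains `TTLClampWrapper` and `PrefixKeysWrapper` around a `MemoryStore` (the `tenant-a` prefix and the TTL bounds are illustrative values, not defaults):
+
+```python
+import asyncio
+
+from key_value.aio.stores.memory.store import MemoryStore
+from key_value.aio.wrappers.prefix_keys import PrefixKeysWrapper
+from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper
+
+# Clamp every TTL into the 60s-3600s range; entries without a TTL get 300s.
+clamped = TTLClampWrapper(store=MemoryStore(), min_ttl=60, max_ttl=3600, missing_ttl=300)
+# Prefix every key before it reaches the clamped store, e.g. "123" -> "tenant-a__123".
+store = PrefixKeysWrapper(store=clamped, prefix="tenant-a")
+
+async def example():
+    await store.put(key="123", value={"name": "Alice"}, ttl=86400)  # TTL is clamped down to 3600
+    alice = await store.get(key="123")
+
+asyncio.run(example())
+```
+
+Because the composed `store` still satisfies `AsyncKeyValue`, it can be passed anywhere the protocol is expected.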
+
+### Statistics Tracking
+
+Track operation statistics for any store:
+
+```python
+import asyncio
+
+from key_value.aio.wrappers.statistics import StatisticsWrapper
+from key_value.aio.stores.memory.store import MemoryStore
+
+memory_store = MemoryStore()
+store = StatisticsWrapper(store=memory_store)
+
+async def example():
+    # Use store normally - statistics are tracked automatically
+    await store.put(key="123", value={"name": "Alice"}, collection="users")
+    await store.get(key="123", collection="users")
+    await store.get(key="456", collection="users")  # Cache miss
+
+    # Access statistics
+    stats = store.statistics
+    user_stats = stats.get_collection("users")
+    print(f"Total gets: {user_stats.get.count}")
+    print(f"Cache hits: {user_stats.get.hit}")
+    print(f"Cache misses: {user_stats.get.miss}")
+
+asyncio.run(example())
+```
+
+Other available wrappers include:
+
+- **TTLClampWrapper**: Wraps a store and clamps the TTL to a given range.
+- **PassthroughCacheWrapper**: Wraps two stores to provide a read-through cache. Reads go to the cache store first and fall back to the primary store, populating the cache with the entry from the primary; writes evict from the cache and then write to the primary. For example, use a RedisStore or a DiskStore as the primary and a MemoryStore as the cache store.
+- **PrefixCollectionsWrapper**: Wraps a store and prefixes all collections with a given prefix.
+- **PrefixKeysWrapper**: Wraps a store and prefixes all keys with a given prefix.
+- **SingleCollectionWrapper**: Wraps a store and forces all requests into a single collection.
+- **StatisticsWrapper**: Wraps a store and tracks hit/miss statistics for the store.
+
+See [DEVELOPING.md](DEVELOPING.md) for more information on how to create your own wrappers.
+
+## Chaining Wrappers, Adapters, and Stores
+
+Imagine you have a service where you want to cache three Pydantic models in a single collection. You can do this by wrapping the store in a PydanticAdapter and a SingleCollectionWrapper:
+
+```python
+import asyncio
+
+from key_value.aio.adapters.pydantic import PydanticAdapter
+from key_value.aio.wrappers.single_collection import SingleCollectionWrapper
+from key_value.aio.stores.memory.store import MemoryStore
+from pydantic import BaseModel
+
+class User(BaseModel):
+    name: str
+    email: str
+
+store = MemoryStore()
+
+users_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="users", default_collection="default"), pydantic_model=User)
+products_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="products", default_collection="default"), pydantic_model=Product)
+orders_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="orders", default_collection="default"), pydantic_model=Order)
+
+async def example():
+    new_user: User = User(name="John Doe", email="john.doe@example.com")
+    await users_store.put(key="123", value=new_user, collection="allowed_users")
+
+    john_doe: User | None = await users_store.get(key="123", collection="allowed_users")
+
+asyncio.run(example())
+```
+
+The SingleCollectionWrapper redirects writes to the `allowed_users` collection into the `users` collection, prefixing each key with the original collection name (`allowed_users__`).
So the key `123` will be stored as `allowed_users__123` in the `users` collection. + +Note: The above example shows the conceptual usage, but you would need to define `Product` and `Order` models as well for the complete example to work. + +## Development + +See [DEVELOPING.md](DEVELOPING.md) for development setup, testing, and contribution guidelines. + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. + +## Contributing + +Contributions are welcome! Please read [DEVELOPING.md](DEVELOPING.md) for development setup and contribution guidelines. + +## Changelog + +See [CHANGELOG.md](CHANGELOG.md) for version history and changes. diff --git a/key-value/key-value-aio/pyproject.toml b/key-value/key-value-aio/pyproject.toml new file mode 100644 index 00000000..b2d4243a --- /dev/null +++ b/key-value/key-value-aio/pyproject.toml @@ -0,0 +1,74 @@ +[project] +name = "py-key-value-aio" +version = "0.2.0" +description = "Async Key-Value" +readme = "README.md" +requires-python = ">=3.10" +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +dependencies = [ +] + + +[build-system] +requires = ["uv_build>=0.8.2,<0.9.0"] +build-backend = "uv_build" + +[tool.uv.build-backend] +module-name = "key_value.aio" + +[project.optional-dependencies] +memory = ["cachetools>=6.0.0"] +disk = ["diskcache>=5.6.0", "pathvalidate>=3.3.1",] +redis = ["redis>=6.0.0"] +mongodb = ["pymongo>=4.15.0"] +valkey = ["valkey-glide>=2.1.0"] +memcached = ["aiomcache>=0.8.0"] +elasticsearch = ["elasticsearch>=9.0.0", "aiohttp>=3.12"] +pydantic = ["pydantic>=2.11.9"] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +addopts = ["--inline-snapshot=disable","-vv","-s"] +markers = [ + "skip_on_ci: Skip running the test when running on CI", +] +timeout = 10 + +env_files = [".env"] + +[dependency-groups] +dev = [ + "py-key-value-aio[memory,disk,redis,elasticsearch,memcached,mongodb]", + "py-key-value-aio[valkey]; platform_system != 'Windows'", + "py-key-value-aio[pydantic]", + "pytest", + "pytest-mock", + "pytest-asyncio", + "ruff", + "diskcache-stubs>=5.6.3.6.20240818", + "pytest-dotenv>=0.5.2", + "dirty-equals>=0.10.0", + "inline-snapshot>=0.29.0", + "pytest-redis>=3.1.3", + "basedpyright>=1.31.5", + "pytest-timeout>=2.4.0", + "ast-comments>=1.2.3", +] +lint = [ + "ruff" +] + +[tool.ruff] +extend="../../pyproject.toml" + +[tool.pyright] +extends = "../../pyproject.toml" diff --git a/key-value/key-value-aio/src/key_value/aio/__init__.py b/key-value/key-value-aio/src/key_value/aio/__init__.py new file mode 100644 index 00000000..49064a64 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/__init__.py @@ -0,0 +1,3 @@ +from .types import AsyncKeyValue + +__all__ = ["AsyncKeyValue"] diff --git a/src/kv_store_adapter/adapters/__init__.py b/key-value/key-value-aio/src/key_value/aio/adapters/__init__.py similarity index 100% rename from src/kv_store_adapter/adapters/__init__.py rename to key-value/key-value-aio/src/key_value/aio/adapters/__init__.py diff --git a/src/kv_store_adapter/adapters/pydantic.py b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic.py similarity index 94% rename from src/kv_store_adapter/adapters/pydantic.py rename to 
key-value/key-value-aio/src/key_value/aio/adapters/pydantic.py index 94b16d7a..0c2fe8db 100644 --- a/src/kv_store_adapter/adapters/pydantic.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic.py @@ -4,8 +4,8 @@ from pydantic import BaseModel, ValidationError from pydantic_core import PydanticSerializationError -from kv_store_adapter.errors import DeserializationError, SerializationError -from kv_store_adapter.types import KVStore +from key_value.aio.errors import DeserializationError, SerializationError +from key_value.aio.types import AsyncKeyValue T = TypeVar("T", bound=BaseModel) @@ -13,8 +13,8 @@ class PydanticAdapter(Generic[T]): """Adapter around a KVStore-compliant Store that allows type-safe persistence of Pydantic models.""" - def __init__(self, kv_store: KVStore, pydantic_model: type[T], default_collection: str | None = None) -> None: - self.kv_store: KVStore = kv_store + def __init__(self, kv_store: AsyncKeyValue, pydantic_model: type[T], default_collection: str | None = None) -> None: + self.kv_store: AsyncKeyValue = kv_store self.pydantic_model: type[T] = pydantic_model self.default_collection: str | None = default_collection diff --git a/src/kv_store_adapter/adapters/raise_on_missing.py b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing.py similarity index 97% rename from src/kv_store_adapter/adapters/raise_on_missing.py rename to key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing.py index 82ac922f..30a2a18a 100644 --- a/src/kv_store_adapter/adapters/raise_on_missing.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing.py @@ -1,8 +1,8 @@ from collections.abc import Sequence from typing import Any, Literal, overload -from kv_store_adapter.errors import MissingKeyError -from kv_store_adapter.types import KVStore +from key_value.aio.errors import MissingKeyError +from key_value.aio.types import AsyncKeyValue class RaiseOnMissingAdapter: @@ -11,8 +11,8 @@ class RaiseOnMissingAdapter: When `raise_on_missing=True`, methods raise `MissingKeyError` instead of returning None. """ - def __init__(self, kv_store: KVStore) -> None: - self.kv_store: KVStore = kv_store + def __init__(self, kv_store: AsyncKeyValue) -> None: + self.kv_store: AsyncKeyValue = kv_store @overload async def get(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[False] = False) -> dict[str, Any] | None: ... 
diff --git a/src/kv_store_adapter/errors.py b/key-value/key-value-aio/src/key_value/aio/errors.py similarity index 100% rename from src/kv_store_adapter/errors.py rename to key-value/key-value-aio/src/key_value/aio/errors.py diff --git a/src/kv_store_adapter/stores/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/__init__.py diff --git a/src/kv_store_adapter/stores/base.py b/key-value/key-value-aio/src/key_value/aio/stores/base.py similarity index 93% rename from src/kv_store_adapter/stores/base.py rename to key-value/key-value-aio/src/key_value/aio/stores/base.py index 6d28a8a7..c6896933 100644 --- a/src/kv_store_adapter/stores/base.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/base.py @@ -11,17 +11,17 @@ from typing_extensions import Self, override -from kv_store_adapter.errors import InvalidTTLError, SetupError -from kv_store_adapter.types import ( - CullProtocol, - DestroyCollectionProtocol, - DestroyStoreProtocol, - EnumerateCollectionsProtocol, - EnumerateKeysProtocol, - KeyValueProtocol, +from key_value.aio.errors import InvalidTTLError, SetupError +from key_value.aio.types import ( + AsyncCullProtocol, + AsyncDestroyCollectionProtocol, + AsyncDestroyStoreProtocol, + AsyncEnumerateCollectionsProtocol, + AsyncEnumerateKeysProtocol, + AsyncKeyValueProtocol, ) -from kv_store_adapter.utils.managed_entry import ManagedEntry -from kv_store_adapter.utils.time_to_live import now +from key_value.aio.utils.managed_entry import ManagedEntry +from key_value.aio.utils.time_to_live import now DEFAULT_COLLECTION_NAME = "default_collection" @@ -37,12 +37,12 @@ def validate_one_ttl(t: float | None, raise_error: bool = False) -> bool: def validate_ttls(t: list[float | None] | float | None, raise_error: bool = False) -> bool: - if not isinstance(t, (Sequence)): + if not isinstance(t, Sequence): t = [t] return all(validate_one_ttl(t=ttl, raise_error=raise_error) for ttl in t) -class BaseStore(KeyValueProtocol, ABC): +class BaseStore(AsyncKeyValueProtocol, ABC): """An opinionated Abstract base class for managed key-value stores using ManagedEntry objects. This class implements all of the methods required for compliance with the KVStore protocol but @@ -72,8 +72,8 @@ def __init__(self, *, default_collection: str | None = None) -> None: self._setup_complete = False self._setup_lock = Lock() - self._setup_collection_locks = defaultdict[str, Lock](Lock) - self._setup_collection_complete = defaultdict[str, bool](bool) + self._setup_collection_locks = defaultdict(Lock) + self._setup_collection_complete = defaultdict(bool) self.default_collection = default_collection or DEFAULT_COLLECTION_NAME @@ -285,7 +285,7 @@ async def delete_many(self, keys: Sequence[str], *, collection: str | None = Non return await self._delete_managed_entries(keys=keys, collection=collection) -class BaseEnumerateKeysStore(BaseStore, EnumerateKeysProtocol, ABC): +class BaseEnumerateKeysStore(BaseStore, AsyncEnumerateKeysProtocol, ABC): """An abstract base class for enumerate key-value stores. Subclasses must implement the get_collection_keys and get_collection_names methods. @@ -326,7 +326,7 @@ async def _close(self) -> None: ... 
-class BaseEnumerateCollectionsStore(BaseStore, EnumerateCollectionsProtocol, ABC): +class BaseEnumerateCollectionsStore(BaseStore, AsyncEnumerateCollectionsProtocol, ABC): @override async def collections(self, *, limit: int | None = None) -> list[str]: """List all available collection names (may include empty collections).""" @@ -339,7 +339,7 @@ async def _get_collection_names(self, *, limit: int | None = None) -> list[str]: """List all available collection names (may include empty collections).""" -class BaseDestroyStore(BaseStore, DestroyStoreProtocol, ABC): +class BaseDestroyStore(BaseStore, AsyncDestroyStoreProtocol, ABC): """An abstract base class for destroyable stores. Subclasses must implement the delete_store method. @@ -358,7 +358,7 @@ async def _delete_store(self) -> bool: ... -class BaseDestroyCollectionStore(BaseStore, DestroyCollectionProtocol, ABC): +class BaseDestroyCollectionStore(BaseStore, AsyncDestroyCollectionProtocol, ABC): """An abstract base class for destroyable collections. Subclasses must implement the delete_collection method. @@ -377,7 +377,7 @@ async def _delete_collection(self, *, collection: str) -> bool: ... -class BaseCullStore(BaseStore, CullProtocol, ABC): +class BaseCullStore(BaseStore, AsyncCullProtocol, ABC): """An abstract base class for cullable stores. Subclasses must implement the cull method. diff --git a/src/kv_store_adapter/stores/disk/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/disk/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/disk/__init__.py diff --git a/src/kv_store_adapter/stores/disk/multi_store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py similarity index 96% rename from src/kv_store_adapter/stores/disk/multi_store.py rename to key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py index 3f4f9c23..e841a493 100644 --- a/src/kv_store_adapter/stores/disk/multi_store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py @@ -5,9 +5,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore -from kv_store_adapter.utils.compound import compound_key -from kv_store_adapter.utils.managed_entry import ManagedEntry +from key_value.aio.stores.base import BaseContextManagerStore, BaseStore +from key_value.aio.utils.compound import compound_key +from key_value.aio.utils.managed_entry import ManagedEntry try: from diskcache import Cache diff --git a/src/kv_store_adapter/stores/disk/store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py similarity index 95% rename from src/kv_store_adapter/stores/disk/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/disk/store.py index 25dde250..2b5fb5d0 100644 --- a/src/kv_store_adapter/stores/disk/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py @@ -4,9 +4,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore -from kv_store_adapter.utils.compound import compound_key -from kv_store_adapter.utils.managed_entry import ManagedEntry +from key_value.aio.stores.base import BaseContextManagerStore, BaseStore +from key_value.aio.utils.compound import compound_key +from key_value.aio.utils.managed_entry import ManagedEntry try: from diskcache import Cache diff --git a/src/kv_store_adapter/stores/elasticsearch/__init__.py 
b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/elasticsearch/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/__init__.py diff --git a/src/kv_store_adapter/stores/elasticsearch/store.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py similarity index 96% rename from src/kv_store_adapter/stores/elasticsearch/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py index a47c28df..bdf2f4f5 100644 --- a/src/kv_store_adapter/stores/elasticsearch/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py @@ -3,7 +3,7 @@ from typing_extensions import override -from kv_store_adapter.stores.base import ( +from key_value.aio.stores.base import ( BaseContextManagerStore, BaseCullStore, BaseDestroyCollectionStore, @@ -11,14 +11,14 @@ BaseEnumerateKeysStore, BaseStore, ) -from kv_store_adapter.utils.compound import compound_key -from kv_store_adapter.utils.managed_entry import ManagedEntry, load_from_json -from kv_store_adapter.utils.time_to_live import now_as_epoch, try_parse_datetime_str +from key_value.aio.utils.compound import compound_key +from key_value.aio.utils.managed_entry import ManagedEntry, load_from_json +from key_value.aio.utils.time_to_live import now_as_epoch, try_parse_datetime_str try: from elasticsearch import AsyncElasticsearch - from kv_store_adapter.stores.elasticsearch.utils import ( + from key_value.aio.stores.elasticsearch.utils import ( get_aggregations_from_body, get_body_from_response, get_first_value_from_field_in_hit, diff --git a/src/kv_store_adapter/stores/elasticsearch/utils.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/utils.py similarity index 100% rename from src/kv_store_adapter/stores/elasticsearch/utils.py rename to key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/utils.py diff --git a/src/kv_store_adapter/stores/memcached/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/memcached/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py diff --git a/src/kv_store_adapter/stores/memcached/store.py b/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py similarity index 93% rename from src/kv_store_adapter/stores/memcached/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py index 307f4a08..d9791665 100644 --- a/src/kv_store_adapter/stores/memcached/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py @@ -3,9 +3,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseStore -from kv_store_adapter.utils.compound import compound_key -from kv_store_adapter.utils.managed_entry import ManagedEntry +from key_value.aio.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseStore +from key_value.aio.utils.compound import compound_key +from key_value.aio.utils.managed_entry import ManagedEntry try: from aiomcache import Client diff --git a/src/kv_store_adapter/stores/memory/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/memory/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/memory/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/memory/__init__.py diff --git 
a/src/kv_store_adapter/stores/memory/store.py b/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py similarity index 96% rename from src/kv_store_adapter/stores/memory/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/memory/store.py index 22726dd8..fabed4b9 100644 --- a/src/kv_store_adapter/stores/memory/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py @@ -5,14 +5,14 @@ from typing_extensions import Self, override -from kv_store_adapter.stores.base import ( +from key_value.aio.stores.base import ( BaseDestroyCollectionStore, BaseDestroyStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, ) -from kv_store_adapter.utils.managed_entry import ManagedEntry -from kv_store_adapter.utils.time_to_live import epoch_to_datetime +from key_value.aio.utils.managed_entry import ManagedEntry +from key_value.aio.utils.time_to_live import epoch_to_datetime try: from cachetools import TLRUCache diff --git a/src/kv_store_adapter/stores/mongodb/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/mongodb/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/mongodb/__init__.py diff --git a/src/kv_store_adapter/stores/mongodb/store.py b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py similarity index 95% rename from src/kv_store_adapter/stores/mongodb/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py index 02cd743f..26a32171 100644 --- a/src/kv_store_adapter/stores/mongodb/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py @@ -5,10 +5,10 @@ from pymongo.asynchronous.database import AsyncDatabase from typing_extensions import Self, override -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseStore -from kv_store_adapter.utils.managed_entry import ManagedEntry -from kv_store_adapter.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string -from kv_store_adapter.utils.time_to_live import now +from key_value.aio.stores.base import BaseContextManagerStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseStore +from key_value.aio.utils.managed_entry import ManagedEntry +from key_value.aio.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string +from key_value.aio.utils.time_to_live import now if TYPE_CHECKING: from pymongo.results import DeleteResult diff --git a/src/kv_store_adapter/stores/null/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/null/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/null/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/null/__init__.py diff --git a/src/kv_store_adapter/stores/null/store.py b/key-value/key-value-aio/src/key_value/aio/stores/null/store.py similarity index 83% rename from src/kv_store_adapter/stores/null/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/null/store.py index 3652e518..2f7e1fc5 100644 --- a/src/kv_store_adapter/stores/null/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/null/store.py @@ -1,7 +1,7 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.utils.managed_entry import ManagedEntry +from key_value.aio.stores.base import BaseStore +from key_value.aio.utils.managed_entry import ManagedEntry class NullStore(BaseStore): diff --git 
a/src/kv_store_adapter/stores/redis/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/redis/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/redis/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/redis/__init__.py diff --git a/src/kv_store_adapter/stores/redis/store.py b/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py similarity index 94% rename from src/kv_store_adapter/stores/redis/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/redis/store.py index db3d7265..47c641f2 100644 --- a/src/kv_store_adapter/stores/redis/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py @@ -3,9 +3,9 @@ from typing_extensions import override -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseEnumerateKeysStore, BaseStore -from kv_store_adapter.utils.compound import compound_key, get_keys_from_compound_keys -from kv_store_adapter.utils.managed_entry import ManagedEntry +from key_value.aio.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseEnumerateKeysStore, BaseStore +from key_value.aio.utils.compound import compound_key, get_keys_from_compound_keys +from key_value.aio.utils.managed_entry import ManagedEntry try: from redis.asyncio import Redis diff --git a/src/kv_store_adapter/stores/simple/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/simple/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/simple/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/simple/__init__.py diff --git a/src/kv_store_adapter/stores/simple/store.py b/key-value/key-value-aio/src/key_value/aio/stores/simple/store.py similarity index 91% rename from src/kv_store_adapter/stores/simple/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/simple/store.py index 2fdbe70a..93a5bf7c 100644 --- a/src/kv_store_adapter/stores/simple/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/simple/store.py @@ -4,15 +4,15 @@ from typing_extensions import override -from kv_store_adapter.stores.base import ( +from key_value.aio.stores.base import ( BaseDestroyStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseStore, ) -from kv_store_adapter.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys -from kv_store_adapter.utils.managed_entry import ManagedEntry, load_from_json -from kv_store_adapter.utils.time_to_live import seconds_to +from key_value.aio.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys +from key_value.aio.utils.managed_entry import ManagedEntry, load_from_json +from key_value.aio.utils.time_to_live import seconds_to DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 10000 diff --git a/src/kv_store_adapter/stores/valkey/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/valkey/__init__.py similarity index 100% rename from src/kv_store_adapter/stores/valkey/__init__.py rename to key-value/key-value-aio/src/key_value/aio/stores/valkey/__init__.py diff --git a/src/kv_store_adapter/stores/valkey/store.py b/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py similarity index 95% rename from src/kv_store_adapter/stores/valkey/store.py rename to key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py index 572a8bd6..14671b24 100644 --- a/src/kv_store_adapter/stores/valkey/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py @@ -5,9 +5,9 @@ 
from glide_shared.config import GlideClientConfiguration, NodeAddress from typing_extensions import override -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore -from kv_store_adapter.utils.compound import compound_key -from kv_store_adapter.utils.managed_entry import ManagedEntry +from key_value.aio.stores.base import BaseContextManagerStore, BaseStore +from key_value.aio.utils.compound import compound_key +from key_value.aio.utils.managed_entry import ManagedEntry try: # Use redis-py asyncio client to communicate with a Valkey server (protocol compatible) diff --git a/src/kv_store_adapter/types.py b/key-value/key-value-aio/src/key_value/aio/types.py similarity index 95% rename from src/kv_store_adapter/types.py rename to key-value/key-value-aio/src/key_value/aio/types.py index ad3b9cf6..983b2ab1 100644 --- a/src/kv_store_adapter/types.py +++ b/key-value/key-value-aio/src/key_value/aio/types.py @@ -3,7 +3,7 @@ @runtime_checkable -class KeyValueProtocol(Protocol): +class AsyncKeyValueProtocol(Protocol): """A subset of KV operations: get/put/delete and TTL variants, including bulk calls.""" async def get( @@ -115,7 +115,7 @@ async def delete_many(self, keys: Sequence[str], *, collection: str | None = Non @runtime_checkable -class CullProtocol(Protocol): +class AsyncCullProtocol(Protocol): async def cull(self) -> None: """Cull the store. @@ -125,7 +125,7 @@ async def cull(self) -> None: @runtime_checkable -class EnumerateKeysProtocol(Protocol): +class AsyncEnumerateKeysProtocol(Protocol): """Protocol segment to enumerate keys in a collection.""" async def keys(self, collection: str | None = None, *, limit: int | None = None) -> list[str]: @@ -139,7 +139,7 @@ async def keys(self, collection: str | None = None, *, limit: int | None = None) @runtime_checkable -class EnumerateCollectionsProtocol(Protocol): +class AsyncEnumerateCollectionsProtocol(Protocol): async def collections(self, *, limit: int | None = None) -> list[str]: """List all available collection names (may include empty collections). @@ -150,7 +150,7 @@ async def collections(self, *, limit: int | None = None) -> list[str]: @runtime_checkable -class DestroyStoreProtocol(Protocol): +class AsyncDestroyStoreProtocol(Protocol): """Protocol segment for store-destruction semantics.""" async def destroy(self) -> bool: @@ -162,7 +162,7 @@ async def destroy(self) -> bool: @runtime_checkable -class DestroyCollectionProtocol(Protocol): +class AsyncDestroyCollectionProtocol(Protocol): async def destroy_collection(self, collection: str) -> bool: """Destroy the specified collection. @@ -172,7 +172,7 @@ async def destroy_collection(self, collection: str) -> bool: ... -class KVStore(KeyValueProtocol, Protocol): +class AsyncKeyValue(AsyncKeyValueProtocol, Protocol): """A protocol for key-value store operations. 
Includes basic operations: get, put, delete, ttl diff --git a/src/kv_store_adapter/utils/compound.py b/key-value/key-value-aio/src/key_value/aio/utils/compound.py similarity index 100% rename from src/kv_store_adapter/utils/compound.py rename to key-value/key-value-aio/src/key_value/aio/utils/compound.py diff --git a/src/kv_store_adapter/utils/managed_entry.py b/key-value/key-value-aio/src/key_value/aio/utils/managed_entry.py similarity index 95% rename from src/kv_store_adapter/utils/managed_entry.py rename to key-value/key-value-aio/src/key_value/aio/utils/managed_entry.py index 6f696fd2..714d76d9 100644 --- a/src/kv_store_adapter/utils/managed_entry.py +++ b/key-value/key-value-aio/src/key_value/aio/utils/managed_entry.py @@ -5,8 +5,8 @@ from typing_extensions import Self -from kv_store_adapter.errors import DeserializationError, SerializationError -from kv_store_adapter.utils.time_to_live import now, now_plus, try_parse_datetime_str +from key_value.aio.errors import DeserializationError, SerializationError +from key_value.aio.utils.time_to_live import now, now_plus, try_parse_datetime_str @dataclass(kw_only=True) diff --git a/src/kv_store_adapter/utils/sanitize.py b/key-value/key-value-aio/src/key_value/aio/utils/sanitize.py similarity index 100% rename from src/kv_store_adapter/utils/sanitize.py rename to key-value/key-value-aio/src/key_value/aio/utils/sanitize.py diff --git a/src/kv_store_adapter/utils/time_to_live.py b/key-value/key-value-aio/src/key_value/aio/utils/time_to_live.py similarity index 100% rename from src/kv_store_adapter/utils/time_to_live.py rename to key-value/key-value-aio/src/key_value/aio/utils/time_to_live.py diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py new file mode 100644 index 00000000..cc7aadda --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py @@ -0,0 +1,17 @@ +from .base import BaseWrapper +from .passthrough_cache import PassthroughCacheWrapper +from .prefix_collections import PrefixCollectionsWrapper +from .prefix_keys import PrefixKeysWrapper +from .single_collection import SingleCollectionWrapper +from .statistics import StatisticsWrapper +from .ttl_clamp import TTLClampWrapper + +__all__ = [ + "BaseWrapper", + "PassthroughCacheWrapper", + "PrefixCollectionsWrapper", + "PrefixKeysWrapper", + "SingleCollectionWrapper", + "StatisticsWrapper", + "TTLClampWrapper", +] diff --git a/src/kv_store_adapter/wrappers/base.py b/key-value/key-value-aio/src/key_value/aio/wrappers/base.py similarity index 94% rename from src/kv_store_adapter/wrappers/base.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/base.py index fcfbb6dc..f79f7bc1 100644 --- a/src/kv_store_adapter/wrappers/base.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/base.py @@ -3,13 +3,13 @@ from typing_extensions import override -from kv_store_adapter.types import KVStore +from key_value.aio.types import AsyncKeyValue -class BaseWrapper(KVStore): +class BaseWrapper(AsyncKeyValue): """A base wrapper for KVStore implementations that passes through to the underlying store.""" - store: KVStore + store: AsyncKeyValue @override async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: diff --git a/src/kv_store_adapter/wrappers/passthrough_cache.py b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache.py similarity index 95% rename from src/kv_store_adapter/wrappers/passthrough_cache.py rename to 
key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache.py index 3ec25b75..a5ad3fd6 100644 --- a/src/kv_store_adapter/wrappers/passthrough_cache.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache.py @@ -3,9 +3,9 @@ from typing_extensions import override -from kv_store_adapter.types import KVStore -from kv_store_adapter.wrappers.base import BaseWrapper -from kv_store_adapter.wrappers.clamp_ttl import TTLClampWrapper +from key_value.aio.types import AsyncKeyValue +from key_value.aio.wrappers.base import BaseWrapper +from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper DEFAULT_MAX_TTL: float = 30 * 60 DEFAULT_MISSING_TTL: float = 30 * 60 @@ -19,8 +19,8 @@ class PassthroughCacheWrapper(BaseWrapper): def __init__( self, - primary_store: KVStore, - cache_store: KVStore, + primary_store: AsyncKeyValue, + cache_store: AsyncKeyValue, maximum_ttl: float | None = None, missing_ttl: float | None = None, ) -> None: @@ -32,8 +32,8 @@ def __init__( maximum_ttl: The maximum TTL for puts into the cache store. Defaults to 30 minutes. missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to 30 minutes. """ - self.store: KVStore = primary_store - self.cache_store: KVStore = cache_store + self.store: AsyncKeyValue = primary_store + self.cache_store: AsyncKeyValue = cache_store self.cache_store = TTLClampWrapper( store=cache_store, diff --git a/src/kv_store_adapter/wrappers/prefix_collections.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections.py similarity index 89% rename from src/kv_store_adapter/wrappers/prefix_collections.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections.py index f5f79b48..9ff67b44 100644 --- a/src/kv_store_adapter/wrappers/prefix_collections.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections.py @@ -3,16 +3,16 @@ from typing_extensions import override -from kv_store_adapter.stores.base import DEFAULT_COLLECTION_NAME -from kv_store_adapter.types import KVStore -from kv_store_adapter.utils.compound import prefix_collection, unprefix_collection -from kv_store_adapter.wrappers.base import BaseWrapper +from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME +from key_value.aio.types import AsyncKeyValue +from key_value.aio.utils.compound import prefix_collection, unprefix_collection +from key_value.aio.wrappers.base import BaseWrapper class PrefixCollectionsWrapper(BaseWrapper): """A wrapper that prefixes collection names before delegating to the underlying store.""" - def __init__(self, store: KVStore, prefix: str, default_collection: str | None = None) -> None: + def __init__(self, store: AsyncKeyValue, prefix: str, default_collection: str | None = None) -> None: """Initialize the prefix collections wrapper. Args: @@ -20,7 +20,7 @@ def __init__(self, store: KVStore, prefix: str, default_collection: str | None = prefix: The prefix to add to the collections. default_collection: The default collection to use if no collection is provided. 
Will be automatically prefixed with the `prefix` """ - self.store: KVStore = store + self.store: AsyncKeyValue = store self.prefix: str = prefix self.default_collection: str = default_collection or DEFAULT_COLLECTION_NAME super().__init__() diff --git a/src/kv_store_adapter/wrappers/prefix_keys.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys.py similarity index 91% rename from src/kv_store_adapter/wrappers/prefix_keys.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys.py index 8db1f391..348b7588 100644 --- a/src/kv_store_adapter/wrappers/prefix_keys.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys.py @@ -3,22 +3,22 @@ from typing_extensions import override -from kv_store_adapter.types import KVStore -from kv_store_adapter.utils.compound import prefix_key, unprefix_key -from kv_store_adapter.wrappers.base import BaseWrapper +from key_value.aio.types import AsyncKeyValue +from key_value.aio.utils.compound import prefix_key, unprefix_key +from key_value.aio.wrappers.base import BaseWrapper class PrefixKeysWrapper(BaseWrapper): """A wrapper that prefixes key names before delegating to the underlying store.""" - def __init__(self, store: KVStore, prefix: str) -> None: + def __init__(self, store: AsyncKeyValue, prefix: str) -> None: """Initialize the prefix keys wrapper. Args: store: The store to wrap. prefix: The prefix to add to the keys. """ - self.store: KVStore = store + self.store: AsyncKeyValue = store self.prefix: str = prefix super().__init__() diff --git a/src/kv_store_adapter/wrappers/single_collection.py b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection.py similarity index 89% rename from src/kv_store_adapter/wrappers/single_collection.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/single_collection.py index 627c30c0..d5282c69 100644 --- a/src/kv_store_adapter/wrappers/single_collection.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection.py @@ -3,16 +3,18 @@ from typing_extensions import override -from kv_store_adapter.stores.base import DEFAULT_COLLECTION_NAME -from kv_store_adapter.types import KVStore -from kv_store_adapter.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key -from kv_store_adapter.wrappers.base import BaseWrapper +from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME +from key_value.aio.types import AsyncKeyValue +from key_value.aio.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key +from key_value.aio.wrappers.base import BaseWrapper class SingleCollectionWrapper(BaseWrapper): """A wrapper that stores all collections within a single backing collection via key prefixing.""" - def __init__(self, store: KVStore, single_collection: str, default_collection: str | None = None, separator: str | None = None) -> None: + def __init__( + self, store: AsyncKeyValue, single_collection: str, default_collection: str | None = None, separator: str | None = None + ) -> None: """Initialize the prefix collections wrapper. Args: @@ -20,7 +22,7 @@ def __init__(self, store: KVStore, single_collection: str, default_collection: s single_collection: The single collection to use to store all collections. default_collection: The default collection to use if no collection is provided. 
""" - self.store: KVStore = store + self.store: AsyncKeyValue = store self.single_collection: str = single_collection self.default_collection: str = default_collection or DEFAULT_COLLECTION_NAME self.separator: str = separator or DEFAULT_PREFIX_SEPARATOR diff --git a/src/kv_store_adapter/wrappers/statistics.py b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics.py similarity index 97% rename from src/kv_store_adapter/wrappers/statistics.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/statistics.py index 1d6f1558..e2411fb9 100644 --- a/src/kv_store_adapter/wrappers/statistics.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics.py @@ -4,8 +4,8 @@ from typing_extensions import override -from kv_store_adapter.types import KVStore -from kv_store_adapter.wrappers.base import BaseWrapper +from key_value.aio.types import AsyncKeyValue +from key_value.aio.wrappers.base import BaseWrapper @dataclass @@ -103,8 +103,8 @@ class StatisticsWrapper(BaseWrapper): Note: enumeration and destroy operations are not tracked by this wrapper. """ - def __init__(self, store: KVStore) -> None: - self.store: KVStore = store + def __init__(self, store: AsyncKeyValue) -> None: + self.store: AsyncKeyValue = store self._statistics: KVStoreStatistics = KVStoreStatistics() @property diff --git a/src/kv_store_adapter/wrappers/clamp_ttl.py b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py similarity index 72% rename from src/kv_store_adapter/wrappers/clamp_ttl.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py index c19cffc7..054b1de2 100644 --- a/src/kv_store_adapter/wrappers/clamp_ttl.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py @@ -3,14 +3,14 @@ from typing_extensions import override -from kv_store_adapter.types import KVStore -from kv_store_adapter.wrappers.base import BaseWrapper +from key_value.aio.types import AsyncKeyValue +from key_value.aio.wrappers.base import BaseWrapper class TTLClampWrapper(BaseWrapper): """Wrapper that enforces a maximum TTL for puts into the store.""" - def __init__(self, store: KVStore, min_ttl: float, max_ttl: float, missing_ttl: float | None = None) -> None: + def __init__(self, store: AsyncKeyValue, min_ttl: float, max_ttl: float, missing_ttl: float | None = None) -> None: """Initialize the TTL clamp wrapper. Args: @@ -19,7 +19,7 @@ def __init__(self, store: KVStore, min_ttl: float, max_ttl: float, missing_ttl: max_ttl: The maximum TTL for puts into the store. missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to None. """ - self.store: KVStore = store + self.store: AsyncKeyValue = store self.min_ttl: float = min_ttl self.max_ttl: float = max_ttl self.missing_ttl: float | None = missing_ttl @@ -27,12 +27,12 @@ def __init__(self, store: KVStore, min_ttl: float, max_ttl: float, missing_ttl: super().__init__() @overload - def _clamp_ttl(self, ttl: float) -> float: ... + def _ttl_clamp(self, ttl: float) -> float: ... @overload - def _clamp_ttl(self, ttl: float | None) -> float | None: ... + def _ttl_clamp(self, ttl: float | None) -> float | None: ... 
- def _clamp_ttl(self, ttl: float | None) -> float | None: + def _ttl_clamp(self, ttl: float | None) -> float | None: if ttl is None: return self.missing_ttl @@ -40,7 +40,7 @@ def _clamp_ttl(self, ttl: float | None) -> float | None: @override async def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: - await self.store.put(collection=collection, key=key, value=value, ttl=self._clamp_ttl(ttl=ttl)) + await self.store.put(collection=collection, key=key, value=value, ttl=self._ttl_clamp(ttl=ttl)) @override async def put_many( @@ -54,8 +54,8 @@ async def put_many( clamped_ttl: Sequence[float | None] | float | None = None if isinstance(ttl, Sequence): - clamped_ttl = [self._clamp_ttl(ttl=t) for t in ttl] + clamped_ttl = [self._ttl_clamp(ttl=t) for t in ttl] elif isinstance(ttl, float): - clamped_ttl = self._clamp_ttl(ttl=ttl) + clamped_ttl = self._ttl_clamp(ttl=ttl) await self.store.put_many(keys=keys, values=values, collection=collection, ttl=clamped_ttl) diff --git a/tests/__init__.py b/key-value/key-value-aio/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to key-value/key-value-aio/tests/__init__.py diff --git a/tests/adapters/__init__.py b/key-value/key-value-aio/tests/adapters/__init__.py similarity index 100% rename from tests/adapters/__init__.py rename to key-value/key-value-aio/tests/adapters/__init__.py diff --git a/tests/adapters/test_pydantic.py b/key-value/key-value-aio/tests/adapters/test_pydantic.py similarity index 95% rename from tests/adapters/test_pydantic.py rename to key-value/key-value-aio/tests/adapters/test_pydantic.py index 19c20bc9..a2cc6056 100644 --- a/tests/adapters/test_pydantic.py +++ b/key-value/key-value-aio/tests/adapters/test_pydantic.py @@ -3,8 +3,8 @@ import pytest from pydantic import AnyHttpUrl, BaseModel -from kv_store_adapter.adapters.pydantic import PydanticAdapter -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.adapters.pydantic import PydanticAdapter +from key_value.aio.stores.memory.store import MemoryStore class User(BaseModel): diff --git a/tests/adapters/test_raise.py b/key-value/key-value-aio/tests/adapters/test_raise.py similarity index 86% rename from tests/adapters/test_raise.py rename to key-value/key-value-aio/tests/adapters/test_raise.py index ed2a1f1f..60509757 100644 --- a/tests/adapters/test_raise.py +++ b/key-value/key-value-aio/tests/adapters/test_raise.py @@ -1,8 +1,8 @@ import pytest -from kv_store_adapter.adapters.raise_on_missing import RaiseOnMissingAdapter -from kv_store_adapter.errors import MissingKeyError -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.adapters.raise_on_missing import RaiseOnMissingAdapter +from key_value.aio.errors import MissingKeyError +from key_value.aio.stores.memory.store import MemoryStore @pytest.fixture diff --git a/tests/cases.py b/key-value/key-value-aio/tests/cases.py similarity index 100% rename from tests/cases.py rename to key-value/key-value-aio/tests/cases.py diff --git a/tests/conftest.py b/key-value/key-value-aio/tests/conftest.py similarity index 100% rename from tests/conftest.py rename to key-value/key-value-aio/tests/conftest.py diff --git a/tests/stores/__init__.py b/key-value/key-value-aio/tests/stores/__init__.py similarity index 100% rename from tests/stores/__init__.py rename to key-value/key-value-aio/tests/stores/__init__.py diff --git a/tests/stores/base/__init__.py b/key-value/key-value-aio/tests/stores/base/__init__.py 
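For illustration, a minimal sketch of the renamed TTLClampWrapper in use — assuming the async API visible in the hunks above (MemoryStore importable from key_value.aio.stores.memory.store, keyword collection/ttl on put/get); the clamp-to-max behavior noted in the comments follows the wrapper's stated purpose rather than code shown in this hunk:

import asyncio

from key_value.aio.stores.memory.store import MemoryStore
from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper


async def main() -> None:
    # Every put into the backing store is clamped to the 60s..300s window;
    # entries written without a TTL fall back to missing_ttl.
    store = TTLClampWrapper(store=MemoryStore(), min_ttl=60, max_ttl=300, missing_ttl=300)

    # A day-long TTL should be clamped down to max_ttl (300s) on the way in.
    await store.put(key="abc", value={"user": "jane"}, collection="sessions", ttl=86400)
    # No TTL supplied: stored with missing_ttl instead of living forever.
    await store.put(key="def", value={"user": "joe"}, collection="sessions")

    print(await store.get(key="abc", collection="sessions"))


asyncio.run(main())

PassthroughCacheWrapper relies on the same mechanism: as shown in its diff above, it wraps its cache_store in a TTLClampWrapper so cached entries never outlive maximum_ttl.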
similarity index 100% rename from tests/stores/base/__init__.py rename to key-value/key-value-aio/tests/stores/base/__init__.py diff --git a/tests/stores/conftest.py b/key-value/key-value-aio/tests/stores/conftest.py similarity index 96% rename from tests/stores/conftest.py rename to key-value/key-value-aio/tests/stores/conftest.py index 022709a9..686ecf3f 100644 --- a/tests/stores/conftest.py +++ b/key-value/key-value-aio/tests/stores/conftest.py @@ -7,11 +7,12 @@ from datetime import datetime, timedelta, timezone import pytest +from dirty_equals import IsFloat from pydantic import AnyHttpUrl -from kv_store_adapter.errors import InvalidTTLError, SerializationError -from kv_store_adapter.stores.base import BaseContextManagerStore, BaseStore -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.errors import InvalidTTLError, SerializationError +from key_value.aio.stores.base import BaseContextManagerStore, BaseStore +from key_value.aio.stores.memory.store import MemoryStore @pytest.fixture @@ -70,6 +71,8 @@ async def eventually_consistent(self) -> None: # noqa: B027 @abstractmethod async def store(self) -> BaseStore | AsyncGenerator[BaseStore, None]: ... + # The first test requires a docker pull, so we only time the actual test + @pytest.mark.timeout(5, func_only=True) async def test_empty_get(self, store: BaseStore): """Tests that the get method returns None from an empty store.""" assert await store.get(collection="test", key="test") is None @@ -161,14 +164,14 @@ async def test_put_ttl_get_ttl(self, store: BaseStore): assert value == {"test": "test"} assert ttl is not None - assert ttl < 100 - assert ttl > 90 + assert ttl == IsFloat(approx=100) async def test_negative_ttl(self, store: BaseStore): """Tests that a negative ttl will return None when getting the key.""" with pytest.raises(InvalidTTLError): await store.put(collection="test", key="test", value={"test": "test"}, ttl=-100) + @pytest.mark.timeout(10) async def test_put_expired_get_none(self, store: BaseStore): """Tests that a put call with a negative ttl will return None when getting the key.""" await store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1) @@ -195,6 +198,7 @@ async def test_special_characters_in_key_name(self, store: BaseStore): await store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) assert await store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} + @pytest.mark.timeout(20) async def test_not_unbounded(self, store: BaseStore): """Tests that the store is not unbounded.""" diff --git a/tests/stores/disk/__init__.py b/key-value/key-value-aio/tests/stores/disk/__init__.py similarity index 100% rename from tests/stores/disk/__init__.py rename to key-value/key-value-aio/tests/stores/disk/__init__.py diff --git a/tests/stores/disk/test_disk.py b/key-value/key-value-aio/tests/stores/disk/test_disk.py similarity index 50% rename from tests/stores/disk/test_disk.py rename to key-value/key-value-aio/tests/stores/disk/test_disk.py index db812421..b8471bf1 100644 --- a/tests/stores/disk/test_disk.py +++ b/key-value/key-value-aio/tests/stores/disk/test_disk.py @@ -4,17 +4,21 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.disk import DiskStore +from key_value.aio.stores.disk import DiskStore from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 100 * 1024 # 100KB class TestDiskStore(ContextManagerStoreTestMixin, 
BaseStoreTests): + @pytest.fixture(scope="session") + async def disk_store(self) -> AsyncGenerator[DiskStore, None]: + with tempfile.TemporaryDirectory() as temp_dir: + yield DiskStore(directory=temp_dir, max_size=TEST_SIZE_LIMIT) + @override @pytest.fixture - async def store(self) -> AsyncGenerator[DiskStore, None]: - with tempfile.TemporaryDirectory() as temp_dir: - store = DiskStore(directory=temp_dir, max_size=TEST_SIZE_LIMIT) + async def store(self, disk_store: DiskStore) -> DiskStore: + disk_store._cache.clear() # pyright: ignore[reportPrivateUsage] - yield store + return disk_store diff --git a/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py b/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py new file mode 100644 index 00000000..d838100f --- /dev/null +++ b/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py @@ -0,0 +1,26 @@ +import tempfile +from collections.abc import AsyncGenerator +from pathlib import Path + +import pytest +from typing_extensions import override + +from key_value.aio.stores.disk.multi_store import MultiDiskStore +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin + +TEST_SIZE_LIMIT = 100 * 1024 # 100KB + + +class TestMultiDiskStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(scope="session") + async def multi_disk_store(self) -> AsyncGenerator[MultiDiskStore, None]: + with tempfile.TemporaryDirectory() as temp_dir: + yield MultiDiskStore(base_directory=Path(temp_dir), max_size=TEST_SIZE_LIMIT) + + @override + @pytest.fixture + async def store(self, multi_disk_store: MultiDiskStore) -> MultiDiskStore: + for collection in multi_disk_store._cache: # pyright: ignore[reportPrivateUsage] + multi_disk_store._cache[collection].clear() # pyright: ignore[reportPrivateUsage] + + return multi_disk_store diff --git a/tests/stores/elasticsearch/__init__.py b/key-value/key-value-aio/tests/stores/elasticsearch/__init__.py similarity index 100% rename from tests/stores/elasticsearch/__init__.py rename to key-value/key-value-aio/tests/stores/elasticsearch/__init__.py diff --git a/tests/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py similarity index 92% rename from tests/stores/elasticsearch/test_elasticsearch.py rename to key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py index 8f7908d3..6813d901 100644 --- a/tests/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py @@ -5,8 +5,8 @@ from elasticsearch import AsyncElasticsearch from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.elasticsearch import ElasticsearchStore +from key_value.aio.stores.base import BaseStore +from key_value.aio.stores.elasticsearch import ElasticsearchStore from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB diff --git a/tests/stores/memcached/test_memcached.py b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py similarity index 94% rename from tests/stores/memcached/test_memcached.py rename to key-value/key-value-aio/tests/stores/memcached/test_memcached.py index a0c3f0dc..014e9eab 100644 --- a/tests/stores/memcached/test_memcached.py +++ b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py @@ -6,8 +6,8 @@ from aiomcache import Client from typing_extensions import override -from 
kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.memcached import MemcachedStore +from key_value.aio.stores.base import BaseStore +from key_value.aio.stores.memcached import MemcachedStore from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # Memcached test configuration @@ -44,7 +44,6 @@ class MemcachedFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available") -@pytest.mark.timeout(15) class TestMemcachedStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_memcached(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/memory/__init__.py b/key-value/key-value-aio/tests/stores/memory/__init__.py similarity index 100% rename from tests/stores/memory/__init__.py rename to key-value/key-value-aio/tests/stores/memory/__init__.py diff --git a/tests/stores/memory/test_memory.py b/key-value/key-value-aio/tests/stores/memory/test_memory.py similarity index 82% rename from tests/stores/memory/test_memory.py rename to key-value/key-value-aio/tests/stores/memory/test_memory.py index 15b524d8..810c3a25 100644 --- a/tests/stores/memory/test_memory.py +++ b/key-value/key-value-aio/tests/stores/memory/test_memory.py @@ -1,7 +1,7 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.memory.store import MemoryStore +from key_value.aio.stores.memory.store import MemoryStore from tests.stores.conftest import BaseStoreTests diff --git a/tests/stores/mongodb/test_mongodb.py b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py similarity index 95% rename from tests/stores/mongodb/test_mongodb.py rename to key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py index 0066b714..a4697a7f 100644 --- a/tests/stores/mongodb/test_mongodb.py +++ b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py @@ -8,8 +8,8 @@ from pymongo import AsyncMongoClient from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.mongodb import MongoDBStore +from key_value.aio.stores.base import BaseStore +from key_value.aio.stores.mongodb import MongoDBStore from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # MongoDB test configuration @@ -43,7 +43,6 @@ class MongoDBFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available") -@pytest.mark.timeout(15) class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_mongodb(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/redis/__init__.py b/key-value/key-value-aio/tests/stores/redis/__init__.py similarity index 100% rename from tests/stores/redis/__init__.py rename to key-value/key-value-aio/tests/stores/redis/__init__.py diff --git a/tests/stores/redis/test_redis.py b/key-value/key-value-aio/tests/stores/redis/test_redis.py similarity index 96% rename from tests/stores/redis/test_redis.py rename to key-value/key-value-aio/tests/stores/redis/test_redis.py index 78a1d253..f7224385 100644 --- a/tests/stores/redis/test_redis.py +++ b/key-value/key-value-aio/tests/stores/redis/test_redis.py @@ -5,8 +5,8 @@ from redis.asyncio import Redis from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore -from kv_store_adapter.stores.redis import RedisStore +from 
key_value.aio.stores.base import BaseStore +from key_value.aio.stores.redis import RedisStore from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # Redis test configuration @@ -40,7 +40,6 @@ class RedisFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") -@pytest.mark.timeout(15) class TestRedisStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_redis(self) -> AsyncGenerator[None, None]: diff --git a/tests/stores/simple/__init__.py b/key-value/key-value-aio/tests/stores/simple/__init__.py similarity index 100% rename from tests/stores/simple/__init__.py rename to key-value/key-value-aio/tests/stores/simple/__init__.py diff --git a/tests/stores/simple/test_store.py b/key-value/key-value-aio/tests/stores/simple/test_store.py similarity index 81% rename from tests/stores/simple/test_store.py rename to key-value/key-value-aio/tests/stores/simple/test_store.py index 85282f82..e4104986 100644 --- a/tests/stores/simple/test_store.py +++ b/key-value/key-value-aio/tests/stores/simple/test_store.py @@ -1,7 +1,7 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.simple.store import SimpleStore +from key_value.aio.stores.simple.store import SimpleStore from tests.stores.conftest import BaseStoreTests diff --git a/tests/stores/valkey/test_valkey.py b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py similarity index 95% rename from tests/stores/valkey/test_valkey.py rename to key-value/key-value-aio/tests/stores/valkey/test_valkey.py index 9b5b660f..c1d5ebb3 100644 --- a/tests/stores/valkey/test_valkey.py +++ b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py @@ -4,7 +4,7 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.base import BaseStore +from key_value.aio.stores.base import BaseStore from tests.conftest import try_import from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests @@ -12,7 +12,7 @@ from glide.glide_client import GlideClient from glide_shared.config import GlideClientConfiguration, NodeAddress - from kv_store_adapter.stores.valkey import ValkeyStore + from key_value.aio.stores.valkey import ValkeyStore if not has_valkey(): pytestmark = pytest.mark.skip(reason="GlideClient is not installed") @@ -31,7 +31,6 @@ class ValkeyFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") @pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows") -@pytest.mark.timeout(15) class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests): async def get_valkey_client(self): client_config: GlideClientConfiguration = GlideClientConfiguration( diff --git a/tests/stores/wrappers/__init__.py b/key-value/key-value-aio/tests/stores/wrappers/__init__.py similarity index 100% rename from tests/stores/wrappers/__init__.py rename to key-value/key-value-aio/tests/stores/wrappers/__init__.py diff --git a/tests/stores/wrappers/test_clamp_ttl.py b/key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py similarity index 94% rename from tests/stores/wrappers/test_clamp_ttl.py rename to key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py index 7ef572e8..374f8dd2 100644 --- a/tests/stores/wrappers/test_clamp_ttl.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py @@ 
-2,8 +2,8 @@ from dirty_equals import IsFloat from typing_extensions import override -from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.wrappers.clamp_ttl import TTLClampWrapper +from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper from tests.stores.conftest import BaseStoreTests diff --git a/tests/stores/wrappers/test_passthrough_cache.py b/key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py similarity index 74% rename from tests/stores/wrappers/test_passthrough_cache.py rename to key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py index 9cbd1111..6ebb259c 100644 --- a/tests/stores/wrappers/test_passthrough_cache.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py @@ -4,22 +4,21 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.disk.store import DiskStore -from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.wrappers.passthrough_cache import PassthroughCacheWrapper +from key_value.aio.stores.disk.store import DiskStore +from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.wrappers.passthrough_cache import PassthroughCacheWrapper from tests.stores.conftest import BaseStoreTests DISK_STORE_SIZE_LIMIT = 100 * 1024 # 100KB class TestPassthroughCacheWrapper(BaseStoreTests): - @pytest.fixture + @pytest.fixture(scope="session") async def primary_store(self) -> AsyncGenerator[DiskStore, None]: with tempfile.TemporaryDirectory() as temp_dir: async with DiskStore(directory=temp_dir, max_size=DISK_STORE_SIZE_LIMIT) as disk_store: yield disk_store - @pytest.fixture async def cache_store(self, memory_store: MemoryStore) -> MemoryStore: return memory_store @@ -27,4 +26,5 @@ async def cache_store(self, memory_store: MemoryStore) -> MemoryStore: @override @pytest.fixture async def store(self, primary_store: DiskStore, cache_store: MemoryStore) -> PassthroughCacheWrapper: + primary_store._cache.clear() # pyright: ignore[reportPrivateUsage] return PassthroughCacheWrapper(primary_store=primary_store, cache_store=cache_store) diff --git a/tests/stores/wrappers/test_prefix_collection.py b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py similarity index 71% rename from tests/stores/wrappers/test_prefix_collection.py rename to key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py index 8b9f55cb..379f2ba3 100644 --- a/tests/stores/wrappers/test_prefix_collection.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py @@ -1,8 +1,8 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.wrappers.prefix_collections import PrefixCollectionsWrapper +from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.wrappers.prefix_collections import PrefixCollectionsWrapper from tests.stores.conftest import BaseStoreTests diff --git a/tests/stores/wrappers/test_prefix_key.py b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py similarity index 72% rename from tests/stores/wrappers/test_prefix_key.py rename to key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py index d949462b..d96a572e 100644 --- a/tests/stores/wrappers/test_prefix_key.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py @@ -1,8 +1,8 @@ import pytest from typing_extensions import 
override -from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.wrappers.prefix_keys import PrefixKeysWrapper +from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.wrappers.prefix_keys import PrefixKeysWrapper from tests.stores.conftest import BaseStoreTests diff --git a/tests/stores/wrappers/test_single_collection.py b/key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py similarity index 71% rename from tests/stores/wrappers/test_single_collection.py rename to key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py index 0c33880e..f579a1a1 100644 --- a/tests/stores/wrappers/test_single_collection.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py @@ -1,8 +1,8 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.wrappers.single_collection import SingleCollectionWrapper +from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.wrappers.single_collection import SingleCollectionWrapper from tests.stores.conftest import BaseStoreTests diff --git a/tests/stores/wrappers/test_statistics.py b/key-value/key-value-aio/tests/stores/wrappers/test_statistics.py similarity index 70% rename from tests/stores/wrappers/test_statistics.py rename to key-value/key-value-aio/tests/stores/wrappers/test_statistics.py index 9362b73d..818ec182 100644 --- a/tests/stores/wrappers/test_statistics.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_statistics.py @@ -1,8 +1,8 @@ import pytest from typing_extensions import override -from kv_store_adapter.stores.memory.store import MemoryStore -from kv_store_adapter.wrappers.statistics import StatisticsWrapper +from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.wrappers.statistics import StatisticsWrapper from tests.stores.conftest import BaseStoreTests diff --git a/tests/test_types.py b/key-value/key-value-aio/tests/test_types.py similarity index 79% rename from tests/test_types.py rename to key-value/key-value-aio/tests/test_types.py index c213c049..a4add53a 100644 --- a/tests/test_types.py +++ b/key-value/key-value-aio/tests/test_types.py @@ -1,9 +1,9 @@ -from kv_store_adapter.stores.memory import MemoryStore -from kv_store_adapter.types import KVStore +from key_value.aio.stores.memory import MemoryStore +from key_value.aio.types import AsyncKeyValue async def test_kv_store_protocol(): - async def test_protocol(kv_store: KVStore): + async def test_protocol(kv_store: AsyncKeyValue): assert await kv_store.get(collection="test", key="test") is None await kv_store.put(collection="test", key="test", value={"test": "test"}) assert await kv_store.delete(collection="test", key="test") diff --git a/tests/utils/test_managed_entry.py b/key-value/key-value-aio/tests/utils/test_managed_entry.py similarity index 93% rename from tests/utils/test_managed_entry.py rename to key-value/key-value-aio/tests/utils/test_managed_entry.py index b6eb8a63..5de129c2 100644 --- a/tests/utils/test_managed_entry.py +++ b/key-value/key-value-aio/tests/utils/test_managed_entry.py @@ -3,7 +3,7 @@ import pytest -from kv_store_adapter.utils.managed_entry import dump_to_json, load_from_json +from key_value.aio.utils.managed_entry import dump_to_json, load_from_json from tests.cases import DICTIONARY_TO_JSON_TEST_CASES, DICTIONARY_TO_JSON_TEST_CASES_NAMES FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) diff --git 
a/tests/utils/test_sanitize.py b/key-value/key-value-aio/tests/utils/test_sanitize.py similarity index 98% rename from tests/utils/test_sanitize.py rename to key-value/key-value-aio/tests/utils/test_sanitize.py index 11784aa1..86f425e3 100644 --- a/tests/utils/test_sanitize.py +++ b/key-value/key-value-aio/tests/utils/test_sanitize.py @@ -1,7 +1,7 @@ import pytest from inline_snapshot import snapshot -from kv_store_adapter.utils.sanitize import ( +from key_value.aio.utils.sanitize import ( ALPHANUMERIC_CHARACTERS, LOWERCASE_ALPHABET, NUMBERS, diff --git a/pyproject.toml b/pyproject.toml index 5f6ea4f7..456d56c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,66 +1,28 @@ [project] -name = "kv-store-adapter" +name = "py-key-value" version = "0.2.0" -description = "A pluggable interface for KV Stores" +description = "Key-Value Store Project" readme = "README.md" requires-python = ">=3.10" -classifiers = [ - "Development Status :: 3 - Alpha", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", -] -dependencies = [ -] - -[build-system] -requires = ["hatchling", "uv-dynamic-versioning>=0.7.0"] -build-backend = "hatchling.build" -[project.optional-dependencies] -memory = ["cachetools>=6.0.0"] -disk = ["diskcache>=5.6.0", "pathvalidate>=3.3.1",] -redis = ["redis>=6.0.0"] -mongodb = ["pymongo>=4.15.0"] -valkey = ["valkey-glide>=2.1.0"] -memcached = ["aiomcache>=0.8.0"] -elasticsearch = ["elasticsearch>=9.0.0", "aiohttp>=3.12"] -pydantic = ["pydantic>=2.11.9"] -[tool.pytest.ini_options] -asyncio_mode = "auto" -addopts = ["--inline-snapshot=create,fix","-vv","-s"] -markers = [ - "skip_on_ci: Skip running the test when running on CI", +[tool.uv.workspace] +members = [ + "key-value/key-value-aio", ] -timeout = 5 -env_files = [".env"] - -[dependency-groups] -dev = [ - "kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb]", - "kv-store-adapter[valkey]; platform_system != 'Windows'", - "kv-store-adapter[pydantic]", - "pytest", - "pytest-mock", - "pytest-asyncio", - "ruff", - "diskcache-stubs>=5.6.3.6.20240818", - "pytest-dotenv>=0.5.2", - "dirty-equals>=0.10.0", - "inline-snapshot>=0.29.0", - "pytest-redis>=3.1.3", - "basedpyright>=1.31.5", - "pytest-timeout>=2.4.0", -] -lint = [ - "ruff" +[tool.pyright] +pythonVersion = "3.10" +typeCheckingMode = "strict" +reportExplicitAny = false +reportMissingTypeStubs = false +include = ["**/tests/**", "**/src/**"] +exclude = [ + "**/playground/**", + "**/examples/**", + "**/references/**", + "**/docs/**", + "**/.venv/**", ] [tool.ruff] @@ -111,28 +73,4 @@ line-length = 140 "DTZ005", # Ignore datetime.UTC "PLR2004", # Ignore magic values "E501", # Ignore line length - ] -"**/references/*" = ["ALL"] -"template/*" = ["ALL"] -"**/vendored/**" = ["ALL"] - -[tool.pyright] -pythonVersion = "3.10" -typeCheckingMode = "recommended" -extraPaths = ["src/"] -include = ["src/"] -exclude = [ - "**/archive/**", - "**/node_modules/**", - "**/__pycache__/**", - "**/.venv/**", - ".venv", - "**/.pytest_cache/**", - "**/.ruff_cache/**", - "**/uv/python/**", - "**/clients/graphql/**", -] -reportMissingTypeStubs = false -reportExplicitAny = false -reportMissingModuleSource = false diff --git a/src/kv_store_adapter/__init__.py b/src/kv_store_adapter/__init__.py deleted file mode 
100644 index 084ec8ae..00000000 --- a/src/kv_store_adapter/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .types import KVStore - -__all__ = ["KVStore"] diff --git a/tests/stores/disk/test_multi_disk.py b/tests/stores/disk/test_multi_disk.py deleted file mode 100644 index d9986712..00000000 --- a/tests/stores/disk/test_multi_disk.py +++ /dev/null @@ -1,21 +0,0 @@ -import tempfile -from collections.abc import AsyncGenerator -from pathlib import Path - -import pytest -from typing_extensions import override - -from kv_store_adapter.stores.disk.multi_store import MultiDiskStore -from tests.stores.conftest import BaseStoreTests - -TEST_SIZE_LIMIT = 100 * 1024 # 100KB - - -class TestMultiDiskStore(BaseStoreTests): - @override - @pytest.fixture - async def store(self) -> AsyncGenerator[MultiDiskStore, None]: - with tempfile.TemporaryDirectory() as temp_dir: - disk_store = MultiDiskStore(base_directory=Path(temp_dir), max_size=TEST_SIZE_LIMIT) - - yield disk_store diff --git a/uv.lock b/uv.lock index e88bdb1e..75e3cec1 100644 --- a/uv.lock +++ b/uv.lock @@ -6,6 +6,12 @@ resolution-markers = [ "sys_platform == 'win32'", ] +[manifest] +members = [ + "py-key-value", + "py-key-value-aio", +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -150,6 +156,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] +[[package]] +name = "ast-comments" +version = "1.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/9f/be024de83c52e7aecb7d3871c3e4c24bc0df377fb752fa65745c36b8f35f/ast_comments-1.2.3.tar.gz", hash = "sha256:f9d003e9608b54195bd822b1d359a18e24573d2d8beff0307d7dc6f9d0cd0ba6", size = 5360, upload-time = "2025-06-29T09:53:20.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/c3/85f81c3fe2f7a2bd634239056bffac4bfb4324761c787845f78dc264082c/ast_comments-1.2.3-py3-none-any.whl", hash = "sha256:2b2319312027a523dd4b30381a7662da8fdd16210786db3402d8e1b1df12a886", size = 5840, upload-time = "2025-06-29T09:53:19.211Z" }, +] + [[package]] name = "asttokens" version = "3.0.0" @@ -440,93 +455,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f2/46/1938d92fca179c0c81268c68073bef6339054be5779cf3f7de00bad6bf91/inline_snapshot-0.29.1-py3-none-any.whl", hash = "sha256:3fd02adb25be551a6245c9787c90fea33a578e051524804ef92fab5017cf4f16", size = 70763, upload-time = "2025-09-24T19:47:14.589Z" }, ] -[[package]] -name = "kv-store-adapter" -version = "0.2.0" -source = { editable = "." 
} - -[package.optional-dependencies] -disk = [ - { name = "diskcache" }, - { name = "pathvalidate" }, -] -elasticsearch = [ - { name = "aiohttp" }, - { name = "elasticsearch" }, -] -memcached = [ - { name = "aiomcache" }, -] -memory = [ - { name = "cachetools" }, -] -mongodb = [ - { name = "pymongo" }, -] -pydantic = [ - { name = "pydantic" }, -] -redis = [ - { name = "redis" }, -] -valkey = [ - { name = "valkey-glide" }, -] - -[package.dev-dependencies] -dev = [ - { name = "basedpyright" }, - { name = "dirty-equals" }, - { name = "diskcache-stubs" }, - { name = "inline-snapshot" }, - { name = "kv-store-adapter", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] }, - { name = "kv-store-adapter", extra = ["valkey"], marker = "sys_platform != 'win32'" }, - { name = "pytest" }, - { name = "pytest-asyncio" }, - { name = "pytest-dotenv" }, - { name = "pytest-mock" }, - { name = "pytest-redis" }, - { name = "pytest-timeout" }, - { name = "ruff" }, -] -lint = [ - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "aiohttp", marker = "extra == 'elasticsearch'", specifier = ">=3.12" }, - { name = "aiomcache", marker = "extra == 'memcached'", specifier = ">=0.8.0" }, - { name = "cachetools", marker = "extra == 'memory'", specifier = ">=6.0.0" }, - { name = "diskcache", marker = "extra == 'disk'", specifier = ">=5.6.0" }, - { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, - { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, - { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, - { name = "pymongo", marker = "extra == 'mongodb'", specifier = ">=4.15.0" }, - { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, - { name = "valkey-glide", marker = "extra == 'valkey'", specifier = ">=2.1.0" }, -] -provides-extras = ["memory", "disk", "redis", "mongodb", "valkey", "memcached", "elasticsearch", "pydantic"] - -[package.metadata.requires-dev] -dev = [ - { name = "basedpyright", specifier = ">=1.31.5" }, - { name = "dirty-equals", specifier = ">=0.10.0" }, - { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, - { name = "inline-snapshot", specifier = ">=0.29.0" }, - { name = "kv-store-adapter", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] }, - { name = "kv-store-adapter", extras = ["pydantic"] }, - { name = "kv-store-adapter", extras = ["valkey"], marker = "sys_platform != 'win32'" }, - { name = "pytest" }, - { name = "pytest-asyncio" }, - { name = "pytest-dotenv", specifier = ">=0.5.2" }, - { name = "pytest-mock" }, - { name = "pytest-redis", specifier = ">=3.1.3" }, - { name = "pytest-timeout", specifier = ">=2.4.0" }, - { name = "ruff" }, -] -lint = [{ name = "ruff" }] - [[package]] name = "markdown-it-py" version = "4.0.0" @@ -833,6 +761,100 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] +[[package]] +name = "py-key-value" +version = "0.2.0" +source = { virtual = "." 
} + +[[package]] +name = "py-key-value-aio" +version = "0.2.0" +source = { editable = "key-value/key-value-aio" } + +[package.optional-dependencies] +disk = [ + { name = "diskcache" }, + { name = "pathvalidate" }, +] +elasticsearch = [ + { name = "aiohttp" }, + { name = "elasticsearch" }, +] +memcached = [ + { name = "aiomcache" }, +] +memory = [ + { name = "cachetools" }, +] +mongodb = [ + { name = "pymongo" }, +] +pydantic = [ + { name = "pydantic" }, +] +redis = [ + { name = "redis" }, +] +valkey = [ + { name = "valkey-glide" }, +] + +[package.dev-dependencies] +dev = [ + { name = "ast-comments" }, + { name = "basedpyright" }, + { name = "dirty-equals" }, + { name = "diskcache-stubs" }, + { name = "inline-snapshot" }, + { name = "py-key-value-aio", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] }, + { name = "py-key-value-aio", extra = ["valkey"], marker = "sys_platform != 'win32'" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-dotenv" }, + { name = "pytest-mock" }, + { name = "pytest-redis" }, + { name = "pytest-timeout" }, + { name = "ruff" }, +] +lint = [ + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", marker = "extra == 'elasticsearch'", specifier = ">=3.12" }, + { name = "aiomcache", marker = "extra == 'memcached'", specifier = ">=0.8.0" }, + { name = "cachetools", marker = "extra == 'memory'", specifier = ">=6.0.0" }, + { name = "diskcache", marker = "extra == 'disk'", specifier = ">=5.6.0" }, + { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, + { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, + { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, + { name = "pymongo", marker = "extra == 'mongodb'", specifier = ">=4.15.0" }, + { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, + { name = "valkey-glide", marker = "extra == 'valkey'", specifier = ">=2.1.0" }, +] +provides-extras = ["memory", "disk", "redis", "mongodb", "valkey", "memcached", "elasticsearch", "pydantic"] + +[package.metadata.requires-dev] +dev = [ + { name = "ast-comments", specifier = ">=1.2.3" }, + { name = "basedpyright", specifier = ">=1.31.5" }, + { name = "dirty-equals", specifier = ">=0.10.0" }, + { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, + { name = "inline-snapshot", specifier = ">=0.29.0" }, + { name = "py-key-value-aio", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] }, + { name = "py-key-value-aio", extras = ["pydantic"] }, + { name = "py-key-value-aio", extras = ["valkey"], marker = "sys_platform != 'win32'" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-dotenv", specifier = ">=0.5.2" }, + { name = "pytest-mock" }, + { name = "pytest-redis", specifier = ">=3.1.3" }, + { name = "pytest-timeout", specifier = ">=2.4.0" }, + { name = "ruff" }, +] +lint = [{ name = "ruff" }] + [[package]] name = "pydantic" version = "2.11.9" From 71c4731e473b518f32309a5ab1959c11e1d3b63e Mon Sep 17 00:00:00 2001 From: William Easton Date: Sat, 27 Sep 2025 08:31:57 -0500 Subject: [PATCH 25/31] switch to docker lib for test fixtures --- .github/workflows/test.yml | 2 +- key-value/key-value-aio/pyproject.toml | 1 + key-value/key-value-aio/tests/conftest.py | 94 +++++++++++++ .../tests/stores/memcached/test_memcached.py | 21 ++- .../tests/stores/mongodb/test_mongodb.py | 19 +-- .../tests/stores/redis/test_redis.py | 23 ++-- 
.../tests/stores/valkey/test_valkey.py | 26 ++-- pyproject.toml | 5 + uv.lock | 127 ++++++++++++++++++ 9 files changed, 264 insertions(+), 54 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c3076989..d42097c1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -83,7 +83,7 @@ jobs: python-version: ["3.10", "3.11", "3.12", "3.13"] platform: [ - ubuntu-22.04, + #ubuntu-22.04, ubuntu-latest, macos-14, macos-latest, diff --git a/key-value/key-value-aio/pyproject.toml b/key-value/key-value-aio/pyproject.toml index b2d4243a..6659456c 100644 --- a/key-value/key-value-aio/pyproject.toml +++ b/key-value/key-value-aio/pyproject.toml @@ -62,6 +62,7 @@ dev = [ "basedpyright>=1.31.5", "pytest-timeout>=2.4.0", "ast-comments>=1.2.3", + "docker>=7.1.0", ] lint = [ "ruff" diff --git a/key-value/key-value-aio/tests/conftest.py b/key-value/key-value-aio/tests/conftest.py index 07cbb88a..8979894a 100644 --- a/key-value/key-value-aio/tests/conftest.py +++ b/key-value/key-value-aio/tests/conftest.py @@ -1,6 +1,14 @@ +import logging from collections.abc import Callable, Iterator from contextlib import contextmanager +import pytest +from docker import DockerClient + +logger = logging.getLogger(__name__) + +logging.basicConfig(level=logging.INFO) + @contextmanager def try_import() -> Iterator[Callable[[], bool]]: @@ -15,3 +23,89 @@ def check_import() -> bool: pass else: import_success = True + + +def get_docker_client() -> DockerClient: + return DockerClient.from_env() + + +@pytest.fixture(scope="session") +def docker_client() -> DockerClient: + return get_docker_client() + + +def docker_pull(image: str, raise_on_error: bool = False) -> bool: + logger.info(f"Pulling image {image}") + client = get_docker_client() + try: + client.images.pull(image) + except Exception: + logger.info(f"Image {image} failed to pull") + if raise_on_error: + raise + return False + return True + + +def docker_stop(name: str, raise_on_error: bool = False) -> bool: + logger.info(f"Stopping container {name}") + client = get_docker_client() + try: + client.containers.get(name).stop() + except Exception: + logger.info(f"Container {name} failed to stop") + if raise_on_error: + raise + return False + logger.info(f"Container {name} stopped") + return True + + +def docker_rm(name: str, raise_on_error: bool = False) -> bool: + logger.info(f"Removing container {name}") + client = get_docker_client() + try: + client.containers.get(container_id=name).remove() + except Exception: + logger.info(f"Container {name} failed to remove") + if raise_on_error: + raise + return False + logger.info(f"Container {name} removed") + return True + + +def docker_run(name: str, image: str, ports: dict[str, int], raise_on_error: bool = False) -> bool: + logger.info(f"Running container {name} with image {image} and ports {ports}") + client = get_docker_client() + try: + client.containers.run(name=name, image=image, ports=ports, detach=True) + except Exception: + logger.info(f"Container {name} failed to run") + if raise_on_error: + raise + return False + logger.info(f"Container {name} running") + return True + + +@contextmanager +def docker_container(name: str, image: str, ports: dict[str, int], raise_on_error: bool = True) -> Iterator[None]: + logger.info(f"Creating container {name} with image {image} and ports {ports}") + try: + docker_pull(image, raise_on_error=True) + docker_stop(name, raise_on_error=False) + docker_rm(name, raise_on_error=False) + docker_run(name, image, ports, raise_on_error=True) + 
logger.info(f"Container {name} created") + yield + except Exception: + logger.info(f"Container {name} failed to create") + if raise_on_error: + raise + return + finally: + docker_stop(name, raise_on_error=False) + docker_rm(name, raise_on_error=False) + logger.info(f"Container {name} stopped and removed") + return diff --git a/key-value/key-value-aio/tests/stores/memcached/test_memcached.py b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py index 014e9eab..27199faf 100644 --- a/key-value/key-value-aio/tests/stores/memcached/test_memcached.py +++ b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py @@ -8,6 +8,7 @@ from key_value.aio.stores.base import BaseStore from key_value.aio.stores.memcached import MemcachedStore +from tests.conftest import docker_container from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # Memcached test configuration @@ -33,7 +34,8 @@ async def ping_memcached() -> bool: async def wait_memcached() -> bool: for _ in range(WAIT_FOR_MEMCACHED_TIMEOUT): - if await ping_memcached(): + result = await asyncio.wait_for(ping_memcached(), timeout=1) + if result: return True await asyncio.sleep(delay=1) return False @@ -47,20 +49,13 @@ class MemcachedFailedToStartError(Exception): class TestMemcachedStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_memcached(self) -> AsyncGenerator[None, None]: - _ = await asyncio.create_subprocess_exec("docker", "stop", "memcached-test") - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "memcached-test") + await wait_memcached() + with docker_container("memcached-test", "memcached:1.6-alpine", {"11211": 11211}): + if not await wait_memcached(): + msg = "Memcached failed to start" + raise MemcachedFailedToStartError(msg) - process = await asyncio.create_subprocess_exec( - "docker", "run", "-d", "--name", "memcached-test", "-p", "11211:11211", "memcached:1.6-alpine" - ) - _ = await process.wait() - if not await wait_memcached(): - msg = "Memcached failed to start" - raise MemcachedFailedToStartError(msg) - try: yield - finally: - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "memcached-test") @override @pytest.fixture diff --git a/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py index a4697a7f..ee6eccd5 100644 --- a/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py +++ b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py @@ -10,6 +10,7 @@ from key_value.aio.stores.base import BaseStore from key_value.aio.stores.mongodb import MongoDBStore +from tests.conftest import docker_container from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # MongoDB test configuration @@ -46,20 +47,12 @@ class MongoDBFailedToStartError(Exception): class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_mongodb(self) -> AsyncGenerator[None, None]: - _ = await asyncio.create_subprocess_exec("docker", "stop", "mongodb-test") - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "mongodb-test") - - process = await asyncio.create_subprocess_exec( - "docker", "run", "-d", "--name", "mongodb-test", "-p", f"{MONGODB_HOST_PORT}:27017", "mongo:7" - ) - _ = await process.wait() - if not await wait_mongodb(): - msg = "MongoDB failed to start" - raise 
MongoDBFailedToStartError(msg) - try: + with docker_container("mongodb-test", "mongo:7", {"27017": 27017}): + if not await wait_mongodb(): + msg = "MongoDB failed to start" + raise MongoDBFailedToStartError(msg) + yield - finally: - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "mongodb-test") @override @pytest.fixture diff --git a/key-value/key-value-aio/tests/stores/redis/test_redis.py b/key-value/key-value-aio/tests/stores/redis/test_redis.py index f7224385..39352a27 100644 --- a/key-value/key-value-aio/tests/stores/redis/test_redis.py +++ b/key-value/key-value-aio/tests/stores/redis/test_redis.py @@ -7,6 +7,7 @@ from key_value.aio.stores.base import BaseStore from key_value.aio.stores.redis import RedisStore +from tests.conftest import docker_container, docker_stop from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests # Redis test configuration @@ -28,7 +29,8 @@ async def ping_redis() -> bool: async def wait_redis() -> bool: # with a timeout of 10 seconds for _ in range(WAIT_FOR_REDIS_TIMEOUT): - if await ping_redis(): + result = await asyncio.wait_for(ping_redis(), timeout=1) + if result: return True await asyncio.sleep(delay=1) @@ -43,18 +45,15 @@ class RedisFailedToStartError(Exception): class TestRedisStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_redis(self) -> AsyncGenerator[None, None]: - _ = await asyncio.create_subprocess_exec("docker", "stop", "redis-test") - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "redis-test") - - process = await asyncio.create_subprocess_exec("docker", "run", "-d", "--name", "redis-test", "-p", "6379:6379", "redis") - _ = await process.wait() - if not await wait_redis(): - msg = "Redis failed to start" - raise RedisFailedToStartError(msg) - try: + # Double-check that the Valkey test container is stopped + docker_stop("valkey-test", raise_on_error=False) + + with docker_container("redis-test", "redis", {"6379": 6379}): + if not await wait_redis(): + msg = "Redis failed to start" + raise RedisFailedToStartError(msg) + yield - finally: - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "redis-test") @override @pytest.fixture diff --git a/key-value/key-value-aio/tests/stores/valkey/test_valkey.py b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py index c1d5ebb3..8bdeda06 100644 --- a/key-value/key-value-aio/tests/stores/valkey/test_valkey.py +++ b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py @@ -5,7 +5,7 @@ from typing_extensions import override from key_value.aio.stores.base import BaseStore -from tests.conftest import try_import +from tests.conftest import docker_container, docker_stop, try_import from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests with try_import() as has_valkey: @@ -49,27 +49,23 @@ async def ping_valkey(self) -> bool: async def wait_valkey(self) -> bool: for _ in range(WAIT_FOR_VALKEY_TIMEOUT): - if await self.ping_valkey(): + result = await asyncio.wait_for(self.ping_valkey(), timeout=1) + if result: return True await asyncio.sleep(delay=1) return False - @pytest.fixture(autouse=True, scope="session") + @pytest.fixture(scope="session") async def setup_valkey(self) -> AsyncGenerator[None, None]: - _ = await asyncio.create_subprocess_exec("docker", "stop", "valkey-test") - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "valkey-test") + # Double-check that the 
Redis test container is stopped + docker_stop("redis-test", raise_on_error=False) + + with docker_container("valkey-test", "valkey/valkey:latest", {"6379": 6379}): + if not await self.wait_valkey(): + msg = "Valkey failed to start" + raise ValkeyFailedToStartError(msg) - process = await asyncio.create_subprocess_exec( - "docker", "run", "-d", "--name", "valkey-test", "-p", f"{VALKEY_PORT}:6379", "valkey/valkey:latest" - ) - _ = await process.wait() - if not await self.wait_valkey(): - msg = "Valkey failed to start" - raise ValkeyFailedToStartError(msg) - try: yield - finally: - _ = await asyncio.create_subprocess_exec("docker", "rm", "-f", "valkey-test") @override @pytest.fixture diff --git a/pyproject.toml b/pyproject.toml index 456d56c6..40fcdd9e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,3 +74,8 @@ line-length = 140 "PLR2004", # Ignore magic values "E501", # Ignore line length ] + +[dependency-groups] +dev = [ + "docker>=7.1.0", +] diff --git a/uv.lock b/uv.lock index 75e3cec1..07569bda 100644 --- a/uv.lock +++ b/uv.lock @@ -231,6 +231,70 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, 
upload-time = "2025-08-09T07:55:43.262Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = 
"sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + [[package]] name = "colorama" version = "0.4.6" @@ -279,6 +343,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + [[package]] name = "elastic-transport" version = "9.1.0" @@ -766,6 +844,16 @@ name = "py-key-value" version = "0.2.0" source = { virtual = "." } +[package.dev-dependencies] +dev = [ + { name = "docker" }, +] + +[package.metadata] + +[package.metadata.requires-dev] +dev = [{ name = "docker", specifier = ">=7.1.0" }] + [[package]] name = "py-key-value-aio" version = "0.2.0" @@ -805,6 +893,7 @@ dev = [ { name = "basedpyright" }, { name = "dirty-equals" }, { name = "diskcache-stubs" }, + { name = "docker" }, { name = "inline-snapshot" }, { name = "py-key-value-aio", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] }, { name = "py-key-value-aio", extra = ["valkey"], marker = "sys_platform != 'win32'" }, @@ -841,6 +930,7 @@ dev = [ { name = "basedpyright", specifier = ">=1.31.5" }, { name = "dirty-equals", specifier = ">=0.10.0" }, { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, + { name = "docker", specifier = ">=7.1.0" }, { name = "inline-snapshot", specifier = ">=0.29.0" }, { name = "py-key-value-aio", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] }, { name = "py-key-value-aio", extras = ["pydantic"] }, @@ -1132,6 +1222,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = 
"2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", 
size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + [[package]] name = "redis" version = "6.4.0" @@ -1144,6 +1256,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" }, ] +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + [[package]] name = "rich" version = "14.1.0" From 6c4b9ef7c69157561ccda234bcb0a00ffee9b214 Mon Sep 17 00:00:00 2001 From: William Easton Date: Sun, 28 Sep 2025 09:31:44 -0500 Subject: [PATCH 26/31] Checkpoint progress --- .github/copilot-instructions.md | 73 -- .github/workflows/test.yml | 14 +- .vscode/launch.json | 12 +- .vscode/settings.json | 4 - DEVELOPING.md | 60 ++ README.md | 260 ++++---- key-value/key-value-aio/.vscode/settings.json | 7 + key-value/key-value-aio/README.md | 220 +----- .../src/key_value/aio/__init__.py | 3 - .../aio/adapters/pydantic/__init__.py | 3 + .../{pydantic.py => pydantic/adapter.py} | 22 +- .../aio/adapters/raise_on_missing/__init__.py | 3 + .../adapter.py} | 22 +- .../key-value-aio/src/key_value/aio/errors.py | 68 -- .../src/key_value/aio/errors/__init__.py | 21 + .../src/key_value/aio/errors/base.py | 20 + .../src/key_value/aio/errors/key_value.py | 33 + .../src/key_value/aio/errors/store.py | 13 + .../src/key_value/aio/protocols/__init__.py | 1 + .../aio/{types.py => protocols/key_value.py} | 0 .../src/key_value/aio/stores/__init__.py | 1 - .../src/key_value/aio/stores/base.py | 8 +- .../src/key_value/aio/stores/disk/__init__.py | 4 +- .../key_value/aio/stores/disk/multi_store.py | 2 +- .../src/key_value/aio/stores/disk/store.py | 2 +- .../aio/stores/elasticsearch/__init__.py | 2 +- .../aio/stores/elasticsearch/store.py | 9 +- .../aio/stores/memcached/__init__.py | 2 +- .../key_value/aio/stores/memcached/store.py | 2 +- .../key_value/aio/stores/memory/__init__.py | 2 +- .../src/key_value/aio/stores/memory/store.py | 2 +- .../key_value/aio/stores/mongodb/__init__.py | 2 +- .../src/key_value/aio/stores/mongodb/store.py | 2 +- .../src/key_value/aio/stores/null/__init__.py | 2 +- .../key_value/aio/stores/redis/__init__.py | 2 +- .../src/key_value/aio/stores/redis/store.py | 2 +- .../key_value/aio/stores/simple/__init__.py | 2 +- .../key_value/aio/stores/valkey/__init__.py | 2 +- .../src/key_value/aio/stores/valkey/store.py | 6 
+- .../src/key_value/aio/utils/acompat.py | 18 + .../src/key_value/aio/wrappers/__init__.py | 17 - .../src/key_value/aio/wrappers/base.py | 2 +- .../wrappers/passthrough_cache/__init__.py | 3 + .../wrapper.py} | 2 +- .../wrappers/prefix_collections/__init__.py | 3 + .../wrapper.py} | 2 +- .../aio/wrappers/prefix_keys/__init__.py | 3 + .../wrapper.py} | 2 +- .../wrappers/single_collection/__init__.py | 3 + .../wrapper.py} | 2 +- .../aio/wrappers/statistics/__init__.py | 3 + .../{statistics.py => statistics/wrapper.py} | 2 +- .../aio/wrappers/ttl_clamp/__init__.py | 3 + .../{ttl_clamp.py => ttl_clamp/wrapper.py} | 2 +- .../tests/adapters/test_pydantic.py | 6 +- .../tests/adapters/test_raise.py | 2 +- key-value/key-value-aio/tests/cases.py | 23 +- key-value/key-value-aio/tests/conftest.py | 29 +- .../key-value-aio/tests/protocols/__init__.py | 0 .../tests/protocols/test_types.py | 17 + .../key-value-aio/tests/stores/conftest.py | 28 +- .../elasticsearch/test_elasticsearch.py | 45 +- .../tests/stores/memcached/test_memcached.py | 3 +- .../tests/stores/mongodb/test_mongodb.py | 21 +- .../tests/stores/redis/test_redis.py | 25 +- .../tests/stores/valkey/test_valkey.py | 44 +- key-value/key-value-aio/tests/test_types.py | 17 - .../key-value-aio/tests/utils/__init__.py | 0 .../key-value-sync/.vscode/settings.json | 7 + key-value/key-value-sync/README.md | 1 + key-value/key-value-sync/pyproject.toml | 80 +++ .../src/key_value/sync/__init__.py | 0 .../src/key_value/sync/adapters/__init__.py | 4 + .../sync/adapters/pydantic/__init__.py | 6 + .../adapters/raise_on_missing/__init__.py | 6 + .../sync/code_gen/adapters/__init__.py | 4 + .../code_gen/adapters/pydantic/__init__.py | 6 + .../code_gen/adapters/pydantic/adapter.py | 117 ++++ .../adapters/raise_on_missing/__init__.py | 6 + .../adapters/raise_on_missing/adapter.py | 181 +++++ .../sync/code_gen/errors/__init__.py | 24 + .../key_value/sync/code_gen/errors/base.py | 23 + .../sync/code_gen/errors/key_value.py | 33 + .../key_value/sync/code_gen/errors/store.py | 16 + .../sync/code_gen/protocols/__init__.py | 4 + .../sync/code_gen/protocols/key_value.py | 178 +++++ .../key_value/sync/code_gen/stores/base.py | 379 +++++++++++ .../sync/code_gen/stores/disk/__init__.py | 7 + .../sync/code_gen/stores/disk/multi_store.py | 148 +++++ .../sync/code_gen/stores/disk/store.py | 116 ++++ .../code_gen/stores/elasticsearch/__init__.py | 6 + .../code_gen/stores/elasticsearch/store.py | 232 +++++++ .../code_gen/stores/elasticsearch/utils.py | 110 +++ .../sync/code_gen/stores/memory/__init__.py | 6 + .../sync/code_gen/stores/memory/store.py | 160 +++++ .../sync/code_gen/stores/mongodb/__init__.py | 6 + .../sync/code_gen/stores/mongodb/store.py | 194 ++++++ .../sync/code_gen/stores/null/__init__.py | 6 + .../sync/code_gen/stores/null/store.py | 23 + .../sync/code_gen/stores/redis/__init__.py | 6 + .../sync/code_gen/stores/redis/store.py | 129 ++++ .../sync/code_gen/stores/simple/__init__.py | 6 + .../sync/code_gen/stores/simple/store.py | 100 +++ .../sync/code_gen/stores/valkey/__init__.py | 6 + .../sync/code_gen/stores/valkey/store.py | 124 ++++ .../key_value/sync/code_gen/utils/acompat.py | 21 + .../key_value/sync/code_gen/utils/compound.py | 78 +++ .../sync/code_gen/utils/managed_entry.py | 102 +++ .../key_value/sync/code_gen/utils/sanitize.py | 159 +++++ .../sync/code_gen/utils/time_to_live.py | 41 ++ .../sync/code_gen/wrappers/__init__.py | 4 + .../key_value/sync/code_gen/wrappers/base.py | 54 ++ .../wrappers/passthrough_cache/__init__.py | 6 + 
.../wrappers/passthrough_cache/wrapper.py | 166 +++++ .../wrappers/prefix_collections/__init__.py | 6 + .../wrappers/prefix_collections/wrapper.py | 82 +++ .../code_gen/wrappers/prefix_keys/__init__.py | 6 + .../code_gen/wrappers/prefix_keys/wrapper.py | 79 +++ .../wrappers/single_collection/__init__.py | 6 + .../wrappers/single_collection/wrapper.py | 86 +++ .../code_gen/wrappers/statistics/__init__.py | 6 + .../code_gen/wrappers/statistics/wrapper.py | 217 ++++++ .../code_gen/wrappers/ttl_clamp/__init__.py | 6 + .../code_gen/wrappers/ttl_clamp/wrapper.py | 64 ++ .../src/key_value/sync/errors/__init__.py | 24 + .../src/key_value/sync/protocols/__init__.py | 4 + .../key_value/sync/stores/disk/__init__.py | 7 + .../sync/stores/elasticsearch/__init__.py | 6 + .../key_value/sync/stores/memory/__init__.py | 6 + .../key_value/sync/stores/mongodb/__init__.py | 6 + .../key_value/sync/stores/null/__init__.py | 6 + .../key_value/sync/stores/redis/__init__.py | 6 + .../key_value/sync/stores/simple/__init__.py | 6 + .../key_value/sync/stores/valkey/__init__.py | 6 + .../src/key_value/sync/wrappers/__init__.py | 4 + .../wrappers/passthrough_cache/__init__.py | 6 + .../wrappers/prefix_collections/__init__.py | 6 + .../sync/wrappers/prefix_keys/__init__.py | 6 + .../wrappers/single_collection/__init__.py | 6 + .../sync/wrappers/statistics/__init__.py | 6 + .../sync/wrappers/ttl_clamp/__init__.py | 6 + .../key-value-sync/tests/code_gen/__init__.py | 4 + .../tests/code_gen/adapters/__init__.py | 4 + .../tests/code_gen/adapters/test_pydantic.py | 76 +++ .../tests/code_gen/adapters/test_raise.py | 40 ++ .../key-value-sync/tests/code_gen/cases.py | 64 ++ .../key-value-sync/tests/code_gen/conftest.py | 129 ++++ .../tests/code_gen/protocols/__init__.py | 4 + .../tests/code_gen/protocols/test_types.py | 20 + .../tests/code_gen/stores/__init__.py | 4 + .../tests/code_gen/stores/base/__init__.py | 4 + .../tests/code_gen/stores/conftest.py | 287 ++++++++ .../tests/code_gen/stores/disk/__init__.py | 4 + .../tests/code_gen/stores/disk/test_disk.py | 27 + .../code_gen/stores/disk/test_multi_disk.py | 29 + .../code_gen/stores/elasticsearch/__init__.py | 4 + .../elasticsearch/test_elasticsearch.py | 64 ++ .../tests/code_gen/stores/memory/__init__.py | 4 + .../code_gen/stores/memory/test_memory.py | 15 + .../code_gen/stores/mongodb/test_mongodb.py | 75 +++ .../tests/code_gen/stores/redis/__init__.py | 4 + .../tests/code_gen/stores/redis/test_redis.py | 82 +++ .../tests/code_gen/stores/simple/__init__.py | 4 + .../code_gen/stores/simple/test_store.py | 15 + .../code_gen/stores/valkey/test_valkey.py | 79 +++ .../code_gen/stores/wrappers/__init__.py | 4 + .../stores/wrappers/test_clamp_ttl.py | 51 ++ .../stores/wrappers/test_passthrough_cache.py | 32 + .../stores/wrappers/test_prefix_collection.py | 16 + .../stores/wrappers/test_prefix_key.py | 16 + .../stores/wrappers/test_single_collection.py | 16 + .../stores/wrappers/test_statistics.py | 16 + .../tests/code_gen/utils/__init__.py | 4 + .../code_gen/utils/test_managed_entry.py | 30 + .../tests/code_gen/utils/test_sanitize.py | 82 +++ py-key-value.code-workspace | 17 + pyproject.toml | 4 + scripts/build_sync_library.py | 626 ++++++++++++++++++ uv.lock | 222 +++++++ 179 files changed, 6501 insertions(+), 685 deletions(-) delete mode 100644 .github/copilot-instructions.md delete mode 100644 .vscode/settings.json create mode 100644 DEVELOPING.md create mode 100644 key-value/key-value-aio/.vscode/settings.json create mode 100644 
key-value/key-value-aio/src/key_value/aio/adapters/pydantic/__init__.py rename key-value/key-value-aio/src/key_value/aio/adapters/{pydantic.py => pydantic/adapter.py} (82%) create mode 100644 key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/__init__.py rename key-value/key-value-aio/src/key_value/aio/adapters/{raise_on_missing.py => raise_on_missing/adapter.py} (89%) delete mode 100644 key-value/key-value-aio/src/key_value/aio/errors.py create mode 100644 key-value/key-value-aio/src/key_value/aio/errors/__init__.py create mode 100644 key-value/key-value-aio/src/key_value/aio/errors/base.py create mode 100644 key-value/key-value-aio/src/key_value/aio/errors/key_value.py create mode 100644 key-value/key-value-aio/src/key_value/aio/errors/store.py create mode 100644 key-value/key-value-aio/src/key_value/aio/protocols/__init__.py rename key-value/key-value-aio/src/key_value/aio/{types.py => protocols/key_value.py} (100%) delete mode 100644 key-value/key-value-aio/src/key_value/aio/stores/__init__.py create mode 100644 key-value/key-value-aio/src/key_value/aio/utils/acompat.py create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/__init__.py rename key-value/key-value-aio/src/key_value/aio/wrappers/{passthrough_cache.py => passthrough_cache/wrapper.py} (99%) create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/__init__.py rename key-value/key-value-aio/src/key_value/aio/wrappers/{prefix_collections.py => prefix_collections/wrapper.py} (98%) create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/__init__.py rename key-value/key-value-aio/src/key_value/aio/wrappers/{prefix_keys.py => prefix_keys/wrapper.py} (98%) create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/__init__.py rename key-value/key-value-aio/src/key_value/aio/wrappers/{single_collection.py => single_collection/wrapper.py} (98%) create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/statistics/__init__.py rename key-value/key-value-aio/src/key_value/aio/wrappers/{statistics.py => statistics/wrapper.py} (99%) create mode 100644 key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/__init__.py rename key-value/key-value-aio/src/key_value/aio/wrappers/{ttl_clamp.py => ttl_clamp/wrapper.py} (97%) create mode 100644 key-value/key-value-aio/tests/protocols/__init__.py create mode 100644 key-value/key-value-aio/tests/protocols/test_types.py delete mode 100644 key-value/key-value-aio/tests/test_types.py create mode 100644 key-value/key-value-aio/tests/utils/__init__.py create mode 100644 key-value/key-value-sync/.vscode/settings.json create mode 100644 key-value/key-value-sync/README.md create mode 100644 key-value/key-value-sync/pyproject.toml create mode 100644 key-value/key-value-sync/src/key_value/sync/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/adapters/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/adapters/pydantic/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/adapters/raise_on_missing/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/adapters/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py create mode 100644 
key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/protocols/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py create mode 100644 
key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py create mode 100644 key-value/key-value-sync/src/key_value/sync/errors/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/protocols/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/disk/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/elasticsearch/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/memory/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/mongodb/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/null/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/redis/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/simple/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/stores/valkey/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/passthrough_cache/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/prefix_collections/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/prefix_keys/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/single_collection/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/statistics/__init__.py create mode 100644 key-value/key-value-sync/src/key_value/sync/wrappers/ttl_clamp/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/adapters/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/adapters/test_pydantic.py create mode 100644 key-value/key-value-sync/tests/code_gen/adapters/test_raise.py create mode 100644 key-value/key-value-sync/tests/code_gen/cases.py create mode 100644 key-value/key-value-sync/tests/code_gen/conftest.py create mode 100644 key-value/key-value-sync/tests/code_gen/protocols/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/protocols/test_types.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/base/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/conftest.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/disk/__init__.py create 
mode 100644 key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/elasticsearch/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/memory/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/redis/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/simple/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py create mode 100644 key-value/key-value-sync/tests/code_gen/utils/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py create mode 100644 key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py create mode 100644 py-key-value.code-workspace create mode 100644 scripts/build_sync_library.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md deleted file mode 100644 index 7def6705..00000000 --- a/.github/copilot-instructions.md +++ /dev/null @@ -1,73 +0,0 @@ -## Architecture Overview -The `py-kv-store-adapter` project provides a pluggable, async-first interface for various key-value (KV) store backends in Python. Its core purpose is to abstract away the underlying KV store implementation, offering a consistent `KVStore` protocol for interacting with different storage solutions like Redis, Elasticsearch, in-memory caches, and disk-based stores. The architecture uses a unified `BaseStore` class that automatically manages `ManagedEntry` objects for consistent TTL and expiration handling across all store implementations. The system supports `Adapters` for transforming data (e.g., Pydantic models, raise-on-missing behavior) and `Wrappers` for adding cross-cutting concerns like statistics tracking, TTL clamping, key/collection prefixing, or single-collection mapping, which can be chained together. Key concepts include collections for namespacing, compound keys for internal storage in flat stores, automatic TTL management with timezone-aware timestamps, and a separation between adapters (which don't implement KVStore) and wrappers (which do implement KVStore and can be chained). - -## Code Style & Conventions -- **Python Version**: Requires Python 3.10 or higher (`pyproject.toml:project.requires-python`). 
-- **Dependency Management**: Uses `uv` for dependency management (`DEVELOPING.md:L10`). Development dependencies are managed via `uv sync --group dev` (`DEVELOPING.md:L24`). -- **Linting & Formatting**: Enforced by Ruff (`pyproject.toml:[tool.ruff]`). - - Line length: 140 characters (`pyproject.toml:line-length`). - - Fixable issues: All auto-fixable issues are configured to be fixed (`pyproject.toml:lint.fixable`). - - Ignored rules: `COM812`, `PLR0913` (too many arguments) (`pyproject.toml:lint.ignore`). - - Extended select rules: A broad range of linting rules are enabled, including `A`, `ARG`, `B`, `C4`, `COM`, `DTZ`, `E`, `EM`, `F`, `FURB`, `I`, `LOG`, `N`, `PERF`, `PIE`, `PLR`, `PLW`, `PT`, `PTH`, `Q`, `RET`, `RSE`, `RUF`, `S`, `SIM`, `TC`, `TID`, `TRY`, `UP`, `W` (`pyproject.toml:lint.extend-select`). - - Per-file ignores: Test files (`**/tests/*.py`) ignore `S101` (asserts), `DTZ005` (datetime.UTC), `PLR2004` (magic values), `E501` (line length) (`pyproject.toml:[tool.ruff.lint.extend-per-file-ignores]`). -- **Type Checking**: Uses Pyright (`pyproject.toml:[tool.pyright]`). - - Python version: 3.10 (`pyproject.toml:pythonVersion`). - - Type checking mode: `recommended` (`pyproject.toml:typeCheckingMode`). - - `src/` directory is included for type checking (`pyproject.toml:include`). - - Missing type stubs, explicit `Any`, and missing module sources are not reported (`pyproject.toml:reportMissingTypeStubs`, `reportExplicitAny`, `reportMissingModuleSource`). - -## Quick Recipes -| Command | Description | -|---|---| -| Install dependencies | `uv sync --group dev` (`DEVELOPING.md:L24`) | -| Run all tests | `uv run pytest` (`DEVELOPING.md:L169`) | -| Run tests with coverage | `uv run pytest --cov=src/kv_store_adapter --cov-report=html` (`DEVELOPING.md:L172`) | -| Run specific test file | `uv run pytest tests/stores/redis/test_redis.py` (`DEVELOPING.md:L175`) | -| Check code style (lint) | `uv run ruff check` (`DEVELOPING.md:L277`) | -| Fix auto-fixable lint issues | `uv run ruff check --fix` (`DEVELOPING.md:L280`) | -| Format code | `uv run ruff format` (`DEVELOPING.md:L283`) | -| Type check | `pyright` (`DEVELOPING.md:L292`) | -| Start external services for integration tests | `docker-compose up -d` (`DEVELOPING.md:L187`) | -| Stop external services | `docker-compose down` (`DEVELOPING.md:L193`) | - -## Dependencies & Compatibility -- **Critical Runtime Dependencies**: - - `cachetools>=6.0.0` for `MemoryStore` (`pyproject.toml:L26`). - - `diskcache>=5.6.0`, `pathvalidate>=3.3.1` for `DiskStore` (`pyproject.toml:L27`). - - `redis>=6.0.0` for `RedisStore` (`pyproject.toml:L28`). - - `elasticsearch>=9.0.0`, `aiohttp>=3.12` for `ElasticsearchStore` (`pyproject.toml:L29`). - - `pydantic>=2.11.9` for `PydanticAdapter` (`pyproject.toml:L30`). -- **Toolchain & Versions**: - - Python: `>=3.10` (`pyproject.toml:L6`). - - `uv`: Used for dependency management and running commands (`DEVELOPING.md:L10`). - - `pytest`: Test runner (`pyproject.toml:L45`). `asyncio_mode = \"auto\"` is configured for async tests (`pyproject.toml:L33`). - - `ruff`: Linter and formatter (`pyproject.toml:L48`). - - `basedpyright`: Type checker (`pyproject.toml:L54`). -- **Observability**: - - The `StatisticsWrapper` (`src/kv_store_adapter/wrappers/statistics.py`) provides in-memory tracking of operation counts, hits, and misses for `get`, `put`, `delete`, and `ttl` operations per collection. It can be enabled during initialization. 
- -## Unique Workflows -- **Adding New Store Implementations**: Developers can extend the system by creating new store classes that inherit from the unified `BaseStore` class, implementing abstract methods `_get_managed_entry`, `_put_managed_entry`, and `_delete_managed_entry` (`DEVELOPING.md:L312-L399`). -- **Wrapper/Adapter Chaining**: The design allows for chaining multiple wrappers and adapters to compose complex behaviors, such as `PydanticAdapter(SingleCollectionWrapper(store, \"users\"), User)` (`README.md:L174`). -- **CI/CD**: GitHub Actions workflows (`.github/workflows/`) are configured to run tests, linting, type checking, and formatting on pull requests and pushes to `main`. - -## API Surface Map -The primary API surface is defined by the `KVStore` protocol (`src/kv_store_adapter/types.py:L175-L180`) and implemented by the unified `BaseStore` class (`src/kv_store_adapter/stores/base.py:L29-L353`). -- **Core KV Operations**: `get(key, *, collection=None)`, `put(key, value, *, collection=None, ttl=None)`, `delete(key, *, collection=None)`, `ttl(key, *, collection=None)`. -- **Bulk Operations**: `get_many(keys, *, collection=None)`, `put_many(keys, values, *, collection=None, ttl=None)`, `delete_many(keys, *, collection=None)`, `ttl_many(keys, *, collection=None)`. -- **Management Operations (Extended Stores)**: `keys(collection=None, *, limit=None)`, `collections(*, limit=None)`, `destroy()`, `destroy_collection(collection)`, `cull()`. -- **Adapters**: `PydanticAdapter` for type-safe Pydantic model handling, `RaiseOnMissingAdapter` for optional exception-based missing key handling. -- **Wrappers**: `StatisticsWrapper`, `ClampTTLWrapper`, `PassthroughCacheWrapper`, `PrefixKeysWrapper`, `PrefixCollectionsWrapper`, `SingleCollectionWrapper`. - - -## Onboarding Steps -- **Understand Core Concepts**: Familiarize yourself with `KVStore`, `BaseStore`, `ManagedEntry`, `Collections`, `Compound Keys`, `TTL Management`, `Wrappers`, and `Adapters` by reading `README.md` and `DEVELOPING.md`. -- **Development Setup**: Follow the \"Development Setup\" in `DEVELOPING.md` to clone the repository, install `uv`, sync dependencies (`uv sync --group dev`), activate the virtual environment, and install pre-commit hooks. -- **Testing**: Review `DEVELOPING.md`'s \"Testing\" section for how to run tests, set up test environments using Docker Compose, and write new tests using `BaseStoreTests` from `tests/stores/conftest.py`. -- **Code Quality**: Understand the `ruff` and `pyright` configurations in `pyproject.toml` and how to run them (`uv run ruff check`, `uv run ruff format`, `pyright`). -- **Adding New Stores**: If extending the project, follow the \"Adding New Store Implementations\" guide in `DEVELOPING.md` for detailed steps on creating stores that inherit from the unified `BaseStore` class. - -## Getting Unstuck -- **General Development Issues**: Refer to the \"Development Guide\" in [`DEVELOPING.md`](DEVELOPING.md) for setup, testing, and contribution guidelines. -- **Integration Tests with External Services**: If integration tests fail, ensure Docker and Docker Compose are running and the necessary services (Redis, Elasticsearch) are started via `docker-compose up -d` as described in [`DEVELOPING.md:L181-L194`](DEVELOPING.md:L181-L194). Check `.env` file configuration for external services (`DEVELOPING.md:L197-L211`). -- **Redis Test Failures**: The `tests/stores/redis/test_redis.py` fixture `setup_redis` attempts to manage a Dockerized Redis instance. 
If Redis fails to start, check Docker logs or manually ensure the `redis-test` container is running and accessible.
\ No newline at end of file
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index d42097c1..bc94a51e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -30,15 +30,15 @@ jobs:
        working-directory: ${{ matrix.project }}

      - name: "Lint"
-        run: uv run ruff check --exit-non-zero-on-fix --fix
+        run: uv run ruff check --exit-non-zero-on-fix --fix .
        working-directory: ${{ matrix.project }}

      - name: "Format"
-        run: uv run ruff format --check
+        run: uv run ruff format --check .
        working-directory: ${{ matrix.project }}

      - name: "Type Check"
-        run: uv run basedpyright
+        run: uv run basedpyright .
        working-directory: ${{ matrix.project }}

  test_quick:
@@ -66,11 +66,11 @@ jobs:
        working-directory: ${{ matrix.project }}

      - name: "Test"
-        run: uv run pytest tests
+        run: uv run pytest tests .
        working-directory: ${{ matrix.project }}

      - name: "Build"
-        run: uv build
+        run: uv build .
        working-directory: ${{ matrix.project }}

  test_all:
@@ -108,9 +108,9 @@ jobs:
        working-directory: ${{ matrix.project }}

      - name: "Test"
-        run: uv run pytest tests
+        run: uv run pytest tests .
        working-directory: ${{ matrix.project }}

      - name: "Build"
-        run: uv build
+        run: uv build .
        working-directory: ${{ matrix.project }}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index a9591e2c..7d8a2adb 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -28,7 +28,7 @@
            "envFile": "${workspaceFolder}/.env"
        },
        {
-            "name": "Python: Build Sync Library",
+            "name": "Compile Sync Library",
            "type": "debugpy",
            "request": "launch",
            "program": "${workspaceFolder}/scripts/build_sync_library.py",
@@ -36,6 +36,16 @@
            "justMyCode": false,
            "envFile": "${workspaceFolder}/.env",
            "args": []
+        },
+        {
+            "name": "Compile Sync Library - Single File",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "${workspaceFolder}/scripts/build_sync_library.py",
+            "console": "integratedTerminal",
+            "justMyCode": false,
+            "envFile": "${workspaceFolder}/.env",
+            "args": ["${workspaceFolder}/key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py"]
+        }
    ]
}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
deleted file mode 100644
index e137fadb..00000000
--- a/.vscode/settings.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-    "python.testing.unittestEnabled": false,
-    "python.testing.pytestEnabled": true
-}
\ No newline at end of file
diff --git a/DEVELOPING.md b/DEVELOPING.md
new file mode 100644
index 00000000..379afee0
--- /dev/null
+++ b/DEVELOPING.md
@@ -0,0 +1,60 @@
+# Developing
+
+This monorepo contains two Python packages:
+
+- `py-key-value-aio` (async; supported)
+- `py-key-value-sync` (sync; generated from async)
+
+## Prerequisites
+
+- Python 3.10 (the sync codegen targets 3.10)
+- `uv` for dependency management and running tools
+
+## Setup
+
+```bash
+# From repo root
+uv sync --all-extras
+```
+
+## Lint and format
+
+```bash
+# From repo root
+uv run ruff format .
+uv run ruff check --fix .
+```
+
+## Test
+
+```bash
+# Async package tests
+uv run pytest key-value/key-value-aio/tests -q
+
+# Sync package tests (generated tests live under tests/code_gen)
+uv run pytest key-value/key-value-sync/tests -q
+```
+
+## Generate/update sync package
+
+The sync package is generated from the async package. After changes to the async code, regenerate the sync package:
+
+```bash
+uv run python scripts/build_sync_library.py
+```
+
+Notes:
+- The codegen script lints the generated code automatically.
+- Some extras differ between async and sync (e.g., valkey). Refer to each package’s README for current extras.
+
+## Project layout
+
+- Async package: `key-value/key-value-aio/`
+- Sync package: `key-value/key-value-sync/`
+- Codegen script: `scripts/build_sync_library.py`
+
+## Releasing
+
+TBD
diff --git a/README.md b/README.md
index 2bd4172e..b343b52a 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,9 @@
-# KV Store Adapter
+# Python Key-Value Libraries

-A pluggable, async-only key-value store interface for modern Python applications.
+This monorepo contains two libraries:
+
+- `py-key-value-aio`: Async key-value store library (supported).
+- `py-key-value-sync`: Sync key-value store library (under development; generated from the async API).

## Why use this library?

@@ -14,206 +17,205 @@ A pluggable, async-only key-value store interface for modern Python applications.

## Why not use this library?

-- **Async-only**: Built from the ground up with `async`/`await` support
+- **Async-only**: While a code-gen'd synchronous library is under development, the async library is the primary focus at the moment.
- **Managed Entries**: Raw values are not stored in backends, a wrapper object is stored instead. This wrapper object contains the value, sometimes metadata like the TTL, and the creation timestamp. Most often it is serialized to and from JSON.
- **No Live Objects**: Even when using the in-memory store, "live" objects are never returned from the store. You get a dictionary or a Pydantic model, hopefully a copy of what you stored, but never the same instance in memory.

-## Quick Start
+## Installation

-```bash
-pip install kv-store-adapter
-
-# With specific backend support
-pip install kv-store-adapter[elasticsearch]
-pip install kv-store-adapter[redis]
-pip install kv-store-adapter[memcached]
-pip install kv-store-adapter[mongodb]
-pip install kv-store-adapter[valkey]
-pip install kv-store-adapter[memory]
-pip install kv-store-adapter[disk]
-
-# With all backends
-pip install kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb,valkey]
-
-# With Pydantic adapter support
-pip install kv-store-adapter[pydantic]
-```

-# The KV Store Protocol
+## Quick start for Async library

-The simplest way to get started is to use the `KVStore` interface, which allows you to write code that works with any supported KV Store:
+Install the library with the backends you need.

+```bash
+# Async library
+pip install py-key-value-aio
+
+# With specific backend extras
+pip install py-key-value-aio[memory]
+pip install py-key-value-aio[disk]
+pip install py-key-value-aio[elasticsearch]
+# or: redis, mongodb, memcached, valkey, see below for all options
+```

```python
import asyncio

-from key_value.aio.types import AsyncKeyValue
-from key_value.aio.stores.redis.store import RedisStore
-from key_value.aio.stores.memory.store import MemoryStore
+from key_value.aio.protocols.key_value import AsyncKeyValue
+from key_value.aio.stores.memory import MemoryStore

-async def example():
-    # In-memory store
-    memory_store = MemoryStore()
-    await memory_store.put(key="456", value={"name": "Bob"}, collection="users", ttl=3600) # TTL is supported, but optional!
- bob = await memory_store.get(key="456", collection="users") - await memory_store.delete(key="456", collection="users") - redis_store = RedisStore(url="redis://localhost:6379") - await redis_store.put(key="123", value={"name": "Alice"}, collection="products") - alice = await redis_store.get(key="123", collection="products") - await redis_store.delete(key="123", collection="products") +async def example(store: AsyncKeyValue) -> None: + await store.put(key="123", value={"name": "Alice"}, collection="users", ttl=3600) + value = await store.get(key="123", collection="users") + await store.delete(key="123", collection="users") -asyncio.run(example()) + +async def main(): + memory_store = MemoryStore() + await example(memory_store) + +asyncio.run(main()) ``` -## Store Implementations +## Introduction to py-key-value -Choose the store that best fits your needs. All stores implement the same `KVStore` interface: +### Protocols -### Production Stores +- **Async**: `key_value.aio.protocols.AsyncKeyValue` — async `get/put/delete/ttl` and bulk variants; optional protocol segments for culling, destroying stores/collections, and enumerating keys/collections implemented by capable stores. +- **Sync**: `key_value.sync.protocols.KeyValue` — sync mirror of the async protocol, generated from the async library. -- **ElasticsearchStore**: `ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key")` -- **RedisStore**: `RedisStore(url="redis://localhost:6379/0")` -- **MongoDBStore**: `MongoDBStore(url="mongodb://localhost:27017/test")` -- **ValkeyStore**: `ValkeyStore(host="localhost", port=6379)` -- **MemcachedStore**: `MemcachedStore(host="localhost", port=11211)` -- **DiskStore**: A disk-based store using diskcache `DiskStore(directory="./cache")`. Also see `MultiDiskStore` for a store that creates one disk store per collection. -- **MemoryStore**: A fast in-memory TLRU cache `MemoryStore()` +The protocols offer a simple interface for your application to interact with the store: -### Development/Testing Stores +```python +get(key: str, collection: str | None = None) -> dict[str, Any] | None: +get_many(keys: Sequence[str], collection: str | None = None) -> list[dict[str, Any] | None]: + +put(key: str, value: dict[str, Any], collection: str | None = None, ttl: float | None = None) -> None: +put_many(keys: Sequence[str], values: Sequence[dict[str, Any]], collection: str | None = None, ttl: Sequence[float | None] | float | None = None) -> None: + +delete(key: str, collection: str | None = None) -> bool: +delete_many(keys: Sequence[str], collection: str | None = None) -> int: + +ttl(key: str, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: +ttl_many(keys: Sequence[str], collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: +``` -- **SimpleStore**: In-memory and inspectable for testing `SimpleStore()` -- **NullStore**: No-op store for testing `NullStore()` +### Stores -For detailed configuration options and all available stores, see [DEVELOPING.md](DEVELOPING.md). 
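+Every store below accepts the same protocol calls, including the bulk and TTL variants listed above. The following is a minimal sketch of those variants (illustrative only, using the in-memory store so the snippet is self-contained; behavior follows the protocol docstrings in this repo):
+
+```python
+import asyncio
+
+from key_value.aio.protocols.key_value import AsyncKeyValue
+from key_value.aio.stores.memory import MemoryStore
+
+
+async def bulk_example(store: AsyncKeyValue) -> None:
+    # One TTL can cover every pair; a per-key sequence of TTLs is also accepted.
+    await store.put_many(keys=["a", "b"], values=[{"n": 1}, {"n": 2}], collection="demo", ttl=60)
+
+    # Results come back in the same order as `keys`; missing keys are None.
+    values = await store.get_many(keys=["a", "b", "missing"], collection="demo")
+    assert values[2] is None
+
+    # ttl() returns the value and the remaining TTL in seconds (None if no TTL was set).
+    value, seconds_remaining = await store.ttl(key="a", collection="demo")
+
+    # delete_many() reports how many keys were actually removed.
+    deleted = await store.delete_many(keys=["a", "b"], collection="demo")
+    assert deleted == 2
+
+
+asyncio.run(bulk_example(MemoryStore()))
+```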
+The library provides a variety of stores that implement the protocol:

-## Atomicity / Consistency

+| Local Stores | Async | Sync | Example |
+|------------------|:-----:|:----:|:-------|
+| Memory | ✅ | ✅ | `MemoryStore()` |
+| Disk | ✅ | ✅ | `DiskStore(directory="./cache")` |
+| Disk (Per-Collection) | ✅ | ✅ | `MultiDiskStore(directory="./cache")` |
+| Simple (test) | ✅ | ✅ | `SimpleStore()` |
+| Null (test) | ✅ | ✅ | `NullStore()` |

-We strive to support atomicity and consistency across basic key-value operations across all stores and operations in the KVStore. That being said, each store may have different guarantees for consistency and atomicity. Especially with distributed stores like MongoDB, Redis, etc and especially with bulk/management operations.

+| Distributed Stores | Async | Sync | Example |
+|------------------|:-----:|:----:|:-------|
+| Elasticsearch | ✅ | ✅ | `ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key", index="kv-store")` |
+| Memcached | ✅ | | `MemcachedStore(host="127.0.0.1", port=11211)` |
+| MongoDB | ✅ | ✅ | `MongoDBStore(url="mongodb://localhost:27017/test")` |
+| Redis | ✅ | ✅ | `RedisStore(url="redis://localhost:6379/0")` |
+| Valkey | ✅ | ✅ | `ValkeyStore(host="localhost", port=6379)` |

-## Protocol Adapters

+### Adapters

-The library provides an adapter pattern simplifying the use of the protocol/store. Adapters themselves do not implement the `KVStore` interface and cannot be nested. As a result, Adapters are the "outer" layer of the store. Adapters are primarily for improved type-safe operations.
+Adapters "wrap" any protocol-compliant store but do not themselves implement the protocol.

-The following adapters are available:
+They simplify your application's interactions with stores and provide additional functionality. While your application will accept an instance that implements the protocol, your application code might be simplified by using an adapter.

-- **PydanticAdapter**: Type-safe storage and retrieval using Pydantic models with automatic serialization/deserialization.
-- **RaiseOnMissingAdapter**: Provides optional raise-on-missing behavior for get, get_many, ttl, and ttl_many operations.

+| Adapter | Description | Example |
+|---------|-------------|---------|
+| PydanticAdapter | Type-safe storage/retrieval of Pydantic models with transparent serialization/deserialization. | `PydanticAdapter(key_value=memory_store, pydantic_model=User)` |
+| RaiseOnMissingAdapter | Optional raise-on-missing behavior for `get`, `get_many`, `ttl`, and `ttl_many`. | `RaiseOnMissingAdapter(key_value=memory_store)` |
-For example, the PydanticAdapter can be used to provide type-safe interactions with a store:
+For example, the PydanticAdapter allows you to store and retrieve Pydantic models with transparent serialization/deserialization:

+```python
+import asyncio
+
+from pydantic import BaseModel
+
+from key_value.aio.adapters.pydantic import PydanticAdapter
+from key_value.aio.stores.memory import MemoryStore
+
+
+class User(BaseModel):
+    name: str
+    email: str
+
+
+async def example():
+    memory_store: MemoryStore = MemoryStore()
+
+    user_adapter: PydanticAdapter[User] = PydanticAdapter(
+        key_value=memory_store,
+        pydantic_model=User,
+        default_collection="users",
+    )
+
+    new_user: User = User(name="John Doe", email="john.doe@example.com")
+
+    # Directly store the User model
+    await user_adapter.put(
+        key="john-doe",
+        value=new_user,
+    )
+
+    # Retrieve the User model
+    existing_user: User | None = await user_adapter.get(
+        key="john-doe",
+    )
+
+asyncio.run(example())
+```

+### Wrappers

+The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the protocol, meaning that you can wrap any store with any wrapper and chain wrappers together as needed (a short chaining sketch follows the consistency note below).

-### Statistics Tracking

+The following wrappers are available:

-Track operation statistics for any store:

+| Wrapper | Description | Example |
+|---------|---------------|-----|
+| StatisticsWrapper | Track operation statistics for the store. | `StatisticsWrapper(store=memory_store)` |
+| TTLClampWrapper | Clamp the TTL to a given range. | `TTLClampWrapper(store=memory_store, min_ttl=60, max_ttl=3600)` |
+| PassthroughCacheWrapper | Wrap two stores to provide a read-through cache. | `PassthroughCacheWrapper(store=memory_store, cache_store=memory_store)` |
+| PrefixCollectionsWrapper | Prefix all collections with a given prefix. | `PrefixCollectionsWrapper(store=memory_store, prefix="users")` |
+| PrefixKeysWrapper | Prefix all keys with a given prefix. | `PrefixKeysWrapper(store=memory_store, prefix="users")` |

+### Atomicity / Consistency

+We aim for consistent semantics across basic key-value operations. Guarantees may vary by backend (especially distributed systems) and for bulk or management operations.
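+Because wrappers accept any protocol-compliant store, they compose. Here is a minimal chaining sketch (a sketch only, assuming the constructor shapes shown in the wrapper table above; the in-memory store keeps it self-contained):
+
+```python
+import asyncio
+
+from key_value.aio.stores.memory import MemoryStore
+from key_value.aio.wrappers.statistics import StatisticsWrapper
+from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper
+
+
+async def example() -> None:
+    # Wrappers implement the protocol, so they nest freely: statistics are
+    # tracked on the outside and TTLs are clamped before reaching the store.
+    store = StatisticsWrapper(store=TTLClampWrapper(store=MemoryStore(), min_ttl=60, max_ttl=3600))
+
+    # The requested one-week TTL is clamped down to the one-hour maximum.
+    await store.put(key="k", value={"v": 1}, collection="demo", ttl=7 * 24 * 3600)
+
+
+asyncio.run(example())
+```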
-memory_store = MemoryStore() -store = StatisticsWrapper(store=memory_store) -async def example(): - # Use store normally - statistics are tracked automatically - await store.put(key="123", value={"name": "Alice"}, collection="users") - await store.get(key="123", collection="users") - await store.get(key="456", collection="users") # Cache miss - - # Access statistics - stats = store.statistics - user_stats = stats.get_collection("users") - print(f"Total gets: {user_stats.get.count}") - print(f"Cache hits: {user_stats.get.hit}") - print(f"Cache misses: {user_stats.get.miss}") +## Advanced Patterns -asyncio.run(example()) -``` +Adapters, stores, and wrappers can be combined in a variety of ways as needed. -Other wrappers that are available include: - -- **ClampTTLWrapper**: Wraps a store and clamps the TTL to a given range. -- **TTLClampWrapper**: Wraps a store and clamps the TTL to a given range. -- **PassthroughCacheWrapper**: Wraps two stores to provide a read-through cache. Reads go to the cache store first and fall back to the primary store, populating the cache with the entry from the primary; writes evict from the cache and then write to the primary. For example, use a RedisStore as the primary and a MemoryStore as the cache store. Or a DiskStore as the primary and a MemoryStore as the cache store. -- **PrefixCollectionsWrapper**: Wraps a store and prefixes all collections with a given prefix. -- **PrefixKeysWrapper**: Wraps a store and prefixes all keys with a given prefix. -- **SingleCollectionWrapper**: Wraps a store and forces all requests into a single collection. -- **StatisticsWrapper**: Wraps a store and tracks hit/miss statistics for the store. - -See [DEVELOPING.md](DEVELOPING.md) for more information on how to create your own wrappers. - -## Chaining Wrappers, Adapters, and Stores - -Imagine you have a service where you want to cache 3 pydantic models in a single collection. You can do this by wrapping the store in a PydanticAdapter and a SingleCollectionWrapper: +The following example simulates a consumer of your service providing an Elasticsearch store and forcing all data into a single collection. They pass this wrapped store to your service and you further wrap it in a statistics wrapper (for metrics/monitoring) and a pydantic adapter, to simplify the application's usage. 
+```python
+import asyncio
+
+from pydantic import BaseModel
+
+from key_value.aio.adapters.pydantic import PydanticAdapter
+from key_value.aio.protocols.key_value import AsyncKeyValue
+from key_value.aio.wrappers.single_collection import SingleCollectionWrapper
+from key_value.aio.wrappers.statistics import StatisticsWrapper
+from key_value.aio.stores.elasticsearch import ElasticsearchStore

-from key_value.aio.stores.memory.store import MemoryStore
-from pydantic import BaseModel
-
-store = MemoryStore()
-
-users_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="users", default_collection="default"), pydantic_model=User)
-products_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="products", default_collection="default"), pydantic_model=Product)
-orders_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="orders", default_collection="default"), pydantic_model=Order)
-
-async def example():
-    new_user: User = User(name="John Doe", email="john.doe@example.com")
-    await users_store.put(key="123", value=new_user, collection="allowed_users")
-
-    john_doe: User | None = await users_store.get(key="123", collection="allowed_users")
-
-asyncio.run(example())
-```

+
+class User(BaseModel):
+    name: str
+    email: str
+
+
+elasticsearch_store: ElasticsearchStore = ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key", index="kv-store")
+
+single_collection: SingleCollectionWrapper = SingleCollectionWrapper(store=elasticsearch_store, single_collection="users", default_collection="one-collection")
+
+
+async def main(store: AsyncKeyValue):
+    statistics_wrapper = StatisticsWrapper(store=store)
+    users = PydanticAdapter(key_value=statistics_wrapper, pydantic_model=User)
+
+    await users.put(key="u1", value=User(name="Jane", email="j@example.com"), collection="ignored")
+    user = await users.get(key="u1", collection="ignored")
+    _ = statistics_wrapper.statistics  # access metrics
+
+
+asyncio.run(main(store=single_collection))
+```

-The SingleCollectionWrapper will result in writes to the `allowed_users` collection being redirected to the `users` collection but the keys will be prefixed with the original collection `allowed_users__` name. So the key `123` will be stored as `allowed_users__123` in the `users` collection.
-
-Note: The above example shows the conceptual usage, but you would need to define `Product` and `Order` models as well for the complete example to work.

-## Development
-
-See [DEVELOPING.md](DEVELOPING.md) for development setup, testing, and contribution guidelines.

+## Sync library status
+
+The sync library is under development and mirrors the async library. The goal is to code-generate the vast majority of the synchronous library from the async library.

-## License
-
-This project is licensed under the MIT License - see the LICENSE file for details.

+## Project links
+
+- Async README: `key-value/key-value-aio/README.md`
+- Sync README: `key-value/key-value-sync/README.md`

-## Contributing
-
-Contributions are welcome! Please read [DEVELOPING.md](DEVELOPING.md) for development setup and contribution guidelines.

+Contributions are welcome but may not be accepted. File an issue before submitting a pull request. If you do not get agreement on your proposal before opening a pull request, you may have a bad time.

-## Changelog
-
-See [CHANGELOG.md](CHANGELOG.md) for version history and changes.

+MIT licensed.
\ No newline at end of file diff --git a/key-value/key-value-aio/.vscode/settings.json b/key-value/key-value-aio/.vscode/settings.json new file mode 100644 index 00000000..d7338ad7 --- /dev/null +++ b/key-value/key-value-aio/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, +} \ No newline at end of file diff --git a/key-value/key-value-aio/README.md b/key-value/key-value-aio/README.md index 2bd4172e..fa89b3bc 100644 --- a/key-value/key-value-aio/README.md +++ b/key-value/key-value-aio/README.md @@ -1,219 +1 @@ -# KV Store Adapter - -A pluggable, async-only key-value store interface for modern Python applications. - -## Why use this library? - -- **Multiple backends**: Elasticsearch, Memcached, MongoDB, Redis, Valkey, and In-memory, Disk, etc -- **TTL support**: Automatic expiration handling across all store types -- **Type-safe**: Full type hints with Protocol-based interfaces -- **Adapters**: Pydantic model support, raise-on-missing behavior, etc -- **Wrappers**: Statistics tracking and extensible wrapper system -- **Collection-based**: Organize keys into logical collections/namespaces -- **Pluggable architecture**: Easy to add custom store implementations - -## Why not use this library? - -- **Async-only**: Built from the ground up with `async`/`await` support -- **Managed Entries**: Raw values are not stored in backends, a wrapper object is stored instead. This wrapper object contains the value, sometimes metadata like the TTL, and the creation timestamp. Most often it is serialized to and from JSON. -- **No Live Objects**: Even when using the in-memory store, "live" objects are never returned from the store. You get a dictionary or a Pydantic model, hopefully a copy of what you stored, but never the same instance in memory. - -## Quick Start - -```bash -pip install kv-store-adapter - -# With specific backend support -pip install kv-store-adapter[elasticsearch] -pip install kv-store-adapter[redis] -pip install kv-store-adapter[memcached] -pip install kv-store-adapter[mongodb] -pip install kv-store-adapter[valkey] -pip install kv-store-adapter[memory] -pip install kv-store-adapter[disk] - -# With all backends -pip install kv-store-adapter[memory,disk,redis,elasticsearch,memcached,mongodb,valkey] - -# With Pydantic adapter support -pip install kv-store-adapter[pydantic] -``` - -# The KV Store Protocol - -The simplest way to get started is to use the `KVStore` interface, which allows you to write code that works with any supported KV Store: - -```python -import asyncio - -from key_value.aio.types import AsyncKeyValue -from key_value.aio.stores.redis.store import RedisStore -from key_value.aio.stores.memory.store import MemoryStore - -async def example(): - # In-memory store - memory_store = MemoryStore() - await memory_store.put(key="456", value={"name": "Bob"}, collection="users", ttl=3600) # TTL is supported, but optional! - bob = await memory_store.get(key="456", collection="users") - await memory_store.delete(key="456", collection="users") - - redis_store = RedisStore(url="redis://localhost:6379") - await redis_store.put(key="123", value={"name": "Alice"}, collection="products") - alice = await redis_store.get(key="123", collection="products") - await redis_store.delete(key="123", collection="products") - -asyncio.run(example()) -``` - -## Store Implementations - -Choose the store that best fits your needs. 
All stores implement the same `KVStore` interface: - -### Production Stores - -- **ElasticsearchStore**: `ElasticsearchStore(url="https://localhost:9200", api_key="your-api-key")` -- **RedisStore**: `RedisStore(url="redis://localhost:6379/0")` -- **MongoDBStore**: `MongoDBStore(url="mongodb://localhost:27017/test")` -- **ValkeyStore**: `ValkeyStore(host="localhost", port=6379)` -- **MemcachedStore**: `MemcachedStore(host="localhost", port=11211)` -- **DiskStore**: A disk-based store using diskcache `DiskStore(directory="./cache")`. Also see `MultiDiskStore` for a store that creates one disk store per collection. -- **MemoryStore**: A fast in-memory TLRU cache `MemoryStore()` - -### Development/Testing Stores - -- **SimpleStore**: In-memory and inspectable for testing `SimpleStore()` -- **NullStore**: No-op store for testing `NullStore()` - -For detailed configuration options and all available stores, see [DEVELOPING.md](DEVELOPING.md). - -## Atomicity / Consistency - -We strive to support atomicity and consistency across basic key-value operations across all stores and operations in the KVStore. That being said, each store may have different guarantees for consistency and atomicity. Especially with distributed stores like MongoDB, Redis, etc and especially with bulk/management operations. - -## Protocol Adapters - -The library provides an adapter pattern simplifying the use of the protocol/store. Adapters themselves do not implement the `KVStore` interface and cannot be nested. As a result, Adapters are the "outer" layer of the store. Adapters are primarily for improved type-safe operations. - -The following adapters are available: - -- **PydanticAdapter**: Type-safe storage and retrieval using Pydantic models with automatic serialization/deserialization. -- **RaiseOnMissingAdapter**: Provides optional raise-on-missing behavior for get, get_many, ttl, and ttl_many operations. - -For example, the PydanticAdapter can be used to provide type-safe interactions with a store: - -```python -from pydantic import BaseModel - -from key_value.aio.adapters.pydantic import PydanticAdapter -from key_value.aio.stores.memory.store import MemoryStore - -class User(BaseModel): - name: str - email: str - -memory_store = MemoryStore() - -user_adapter = PydanticAdapter(kv_store=memory_store, pydantic_model=User) - -async def example(): - await user_adapter.put(key="123", value=User(name="John Doe", email="john.doe@example.com"), collection="users") - user: User | None = await user_adapter.get(key="123", collection="users") - -asyncio.run(example()) -``` - -## Wrappers - -The library provides a wrapper pattern for adding functionality to a store. Wrappers themselves implement the `KVStore` interface meaning that you can wrap any store with any wrapper, and chain wrappers together as needed. 
- -### Statistics Tracking - -Track operation statistics for any store: - -```python -import asyncio - -from key_value.aio.wrappers.statistics import StatisticsWrapper -from key_value.aio.stores.memory.store import MemoryStore - -memory_store = MemoryStore() -store = StatisticsWrapper(store=memory_store) - -async def example(): - # Use store normally - statistics are tracked automatically - await store.put(key="123", value={"name": "Alice"}, collection="users") - await store.get(key="123", collection="users") - await store.get(key="456", collection="users") # Cache miss - - # Access statistics - stats = store.statistics - user_stats = stats.get_collection("users") - print(f"Total gets: {user_stats.get.count}") - print(f"Cache hits: {user_stats.get.hit}") - print(f"Cache misses: {user_stats.get.miss}") - -asyncio.run(example()) -``` - -Other wrappers that are available include: - -- **ClampTTLWrapper**: Wraps a store and clamps the TTL to a given range. -- **TTLClampWrapper**: Wraps a store and clamps the TTL to a given range. -- **PassthroughCacheWrapper**: Wraps two stores to provide a read-through cache. Reads go to the cache store first and fall back to the primary store, populating the cache with the entry from the primary; writes evict from the cache and then write to the primary. For example, use a RedisStore as the primary and a MemoryStore as the cache store. Or a DiskStore as the primary and a MemoryStore as the cache store. -- **PrefixCollectionsWrapper**: Wraps a store and prefixes all collections with a given prefix. -- **PrefixKeysWrapper**: Wraps a store and prefixes all keys with a given prefix. -- **SingleCollectionWrapper**: Wraps a store and forces all requests into a single collection. -- **StatisticsWrapper**: Wraps a store and tracks hit/miss statistics for the store. - -See [DEVELOPING.md](DEVELOPING.md) for more information on how to create your own wrappers. - -## Chaining Wrappers, Adapters, and Stores - -Imagine you have a service where you want to cache 3 pydantic models in a single collection. You can do this by wrapping the store in a PydanticAdapter and a SingleCollectionWrapper: - -```python -import asyncio - -from key_value.aio.adapters.pydantic import PydanticAdapter -from key_value.aio.wrappers.single_collection import SingleCollectionWrapper -from key_value.aio.stores.memory.store import MemoryStore -from pydantic import BaseModel - -class User(BaseModel): - name: str - email: str - -store = MemoryStore() - -users_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="users", default_collection="default"), pydantic_model=User) -products_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="products", default_collection="default"), pydantic_model=Product) -orders_store = PydanticAdapter(kv_store=SingleCollectionWrapper(store=store, single_collection="orders", default_collection="default"), pydantic_model=Order) - -async def example(): - new_user: User = User(name="John Doe", email="john.doe@example.com") - await users_store.put(key="123", value=new_user, collection="allowed_users") - - john_doe: User | None = await users_store.get(key="123", collection="allowed_users") - -asyncio.run(example()) -``` - -The SingleCollectionWrapper will result in writes to the `allowed_users` collection being redirected to the `users` collection but the keys will be prefixed with the original collection `allowed_users__` name. 
So the key `123` will be stored as `allowed_users__123` in the `users` collection. - -Note: The above example shows the conceptual usage, but you would need to define `Product` and `Order` models as well for the complete example to work. - -## Development - -See [DEVELOPING.md](DEVELOPING.md) for development setup, testing, and contribution guidelines. - -## License - -This project is licensed under the MIT License - see the LICENSE file for details. - -## Contributing - -Contributions are welcome! Please read [DEVELOPING.md](DEVELOPING.md) for development setup and contribution guidelines. - -## Changelog - -See [CHANGELOG.md](CHANGELOG.md) for version history and changes. +See the root [README.md](../README.md) for more information. \ No newline at end of file diff --git a/key-value/key-value-aio/src/key_value/aio/__init__.py b/key-value/key-value-aio/src/key_value/aio/__init__.py index 49064a64..e69de29b 100644 --- a/key-value/key-value-aio/src/key_value/aio/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/__init__.py @@ -1,3 +0,0 @@ -from .types import AsyncKeyValue - -__all__ = ["AsyncKeyValue"] diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/__init__.py b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/__init__.py new file mode 100644 index 00000000..f9a9daf7 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.adapters.pydantic.adapter import PydanticAdapter + +__all__ = ["PydanticAdapter"] diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic.py b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py similarity index 82% rename from key-value/key-value-aio/src/key_value/aio/adapters/pydantic.py rename to key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py index 0c2fe8db..1661d657 100644 --- a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py @@ -5,7 +5,7 @@ from pydantic_core import PydanticSerializationError from key_value.aio.errors import DeserializationError, SerializationError -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue T = TypeVar("T", bound=BaseModel) @@ -13,8 +13,8 @@ class PydanticAdapter(Generic[T]): """Adapter around a KVStore-compliant Store that allows type-safe persistence of Pydantic models.""" - def __init__(self, kv_store: AsyncKeyValue, pydantic_model: type[T], default_collection: str | None = None) -> None: - self.kv_store: AsyncKeyValue = kv_store + def __init__(self, key_value: AsyncKeyValue, pydantic_model: type[T], default_collection: str | None = None) -> None: + self.key_value: AsyncKeyValue = key_value self.pydantic_model: type[T] = pydantic_model self.default_collection: str | None = default_collection @@ -40,7 +40,7 @@ async def get(self, key: str, *, collection: str | None = None) -> T | None: """ collection = collection or self.default_collection - if value := await self.kv_store.get(key=key, collection=collection): + if value := await self.key_value.get(key=key, collection=collection): return self._validate_model(value=value) return None @@ -52,7 +52,7 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) """ collection = collection or self.default_collection - values: list[dict[str, Any] | None] = await self.kv_store.get_many(keys=keys, collection=collection) + values: list[dict[str, Any] | None] 
= await self.key_value.get_many(keys=keys, collection=collection) return [self._validate_model(value=value) if value else None for value in values] @@ -65,7 +65,7 @@ async def put(self, key: str, value: T, *, collection: str | None = None, ttl: f value_dict: dict[str, Any] = self._serialize_model(value=value) - await self.kv_store.put(key=key, value=value_dict, collection=collection, ttl=ttl) + await self.key_value.put(key=key, value=value_dict, collection=collection, ttl=ttl) async def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: """Serialize and store multiple models, preserving order alignment with keys.""" @@ -73,19 +73,19 @@ async def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection value_dicts: list[dict[str, Any]] = [self._serialize_model(value=value) for value in values] - await self.kv_store.put_many(keys=keys, values=value_dicts, collection=collection, ttl=ttl) + await self.key_value.put_many(keys=keys, values=value_dicts, collection=collection, ttl=ttl) async def delete(self, key: str, *, collection: str | None = None) -> bool: """Delete a model by key. Returns True if a value was deleted, else False.""" collection = collection or self.default_collection - return await self.kv_store.delete(key=key, collection=collection) + return await self.key_value.delete(key=key, collection=collection) async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: """Delete multiple models by key. Returns the count of deleted entries.""" collection = collection or self.default_collection - return await self.kv_store.delete_many(keys=keys, collection=collection) + return await self.key_value.delete_many(keys=keys, collection=collection) async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | None, float | None]: """Get a model and its TTL seconds if present. @@ -97,7 +97,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | Non entry: dict[str, Any] | None ttl_info: float | None - entry, ttl_info = await self.kv_store.ttl(key=key, collection=collection) + entry, ttl_info = await self.key_value.ttl(key=key, collection=collection) if entry is not None: model_validate: T = self._validate_model(value=entry) @@ -109,6 +109,6 @@ async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) """Batch get models with TTLs. 
Each element is (model|None, ttl_seconds|None).""" collection = collection or self.default_collection - entries: list[tuple[dict[str, Any] | None, float | None]] = await self.kv_store.ttl_many(keys=keys, collection=collection) + entries: list[tuple[dict[str, Any] | None, float | None]] = await self.key_value.ttl_many(keys=keys, collection=collection) return [(self._validate_model(value=entry) if entry else None, ttl_info) for entry, ttl_info in entries] diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/__init__.py b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/__init__.py new file mode 100644 index 00000000..fcda813b --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.adapters.raise_on_missing.adapter import RaiseOnMissingAdapter + +__all__ = ["RaiseOnMissingAdapter"] diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing.py b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py similarity index 89% rename from key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing.py rename to key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py index 30a2a18a..448e7c7a 100644 --- a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py @@ -2,7 +2,7 @@ from typing import Any, Literal, overload from key_value.aio.errors import MissingKeyError -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue class RaiseOnMissingAdapter: @@ -11,8 +11,8 @@ class RaiseOnMissingAdapter: When `raise_on_missing=True`, methods raise `MissingKeyError` instead of returning None. """ - def __init__(self, kv_store: AsyncKeyValue) -> None: - self.kv_store: AsyncKeyValue = kv_store + def __init__(self, key_value: AsyncKeyValue) -> None: + self.key_value: AsyncKeyValue = key_value @overload async def get(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[False] = False) -> dict[str, Any] | None: ... @@ -37,7 +37,7 @@ async def get( Returns: The value associated with the key. If the key is not found, None will be returned. """ - result = await self.kv_store.get(key=key, collection=collection) + result = await self.key_value.get(key=key, collection=collection) if result is not None: return result @@ -69,7 +69,7 @@ async def get_many( Returns: The values for the keys, or [] if the key is not found. """ - results: list[dict[str, Any] | None] = await self.kv_store.get_many(collection=collection, keys=keys) + results: list[dict[str, Any] | None] = await self.key_value.get_many(collection=collection, keys=keys) for i, key in enumerate(keys): if results[i] is None and raise_on_missing: @@ -100,7 +100,7 @@ async def ttl( Returns: The value and TTL information for the key. If the key is not found, (None, None) will be returned. """ - value, ttl = await self.kv_store.ttl(key=key, collection=collection) + value, ttl = await self.key_value.ttl(key=key, collection=collection) if value is not None: return value, ttl @@ -129,7 +129,7 @@ async def ttl_many( keys: The keys to retrieve the values and TTL information from. collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. 
""" - results: list[tuple[dict[str, Any] | None, float | None]] = await self.kv_store.ttl_many(collection=collection, keys=keys) + results: list[tuple[dict[str, Any] | None, float | None]] = await self.key_value.ttl_many(collection=collection, keys=keys) for i, key in enumerate(keys): if results[i][0] is None and raise_on_missing: @@ -147,7 +147,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = ttl: The optional time-to-live (expiry duration) for the key-value pair. Defaults to no TTL. Note: The backend store will convert the provided format to its own internal format. """ - return await self.kv_store.put(key=key, value=value, collection=collection, ttl=ttl) + return await self.key_value.put(key=key, value=value, collection=collection, ttl=ttl) async def put_many( self, @@ -166,7 +166,7 @@ async def put_many( ttl: The optional time-to-live (expiry duration) for the key-value pairs. Defaults to no TTL. Note: The backend store will convert the provided format to its own internal format. """ - return await self.kv_store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + return await self.key_value.put_many(keys=keys, values=values, collection=collection, ttl=ttl) async def delete(self, key: str, *, collection: str | None = None) -> bool: """Delete a key-value pair from the specified collection. @@ -175,7 +175,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: key: The key to delete the value from. collection: The collection to delete the value from. If no collection is provided, it will use the default collection. """ - return await self.kv_store.delete(key=key, collection=collection) + return await self.key_value.delete(key=key, collection=collection) async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: """Delete multiple key-value pairs from the specified collection. @@ -187,4 +187,4 @@ async def delete_many(self, keys: Sequence[str], *, collection: str | None = Non Returns: The number of keys deleted. 
""" - return await self.kv_store.delete_many(keys=keys, collection=collection) + return await self.key_value.delete_many(keys=keys, collection=collection) diff --git a/key-value/key-value-aio/src/key_value/aio/errors.py b/key-value/key-value-aio/src/key_value/aio/errors.py deleted file mode 100644 index b2a90ea0..00000000 --- a/key-value/key-value-aio/src/key_value/aio/errors.py +++ /dev/null @@ -1,68 +0,0 @@ -ExtraInfoType = dict[str, str | int | float | bool | None] - - -class KVStoreAdapterError(Exception): - """Base exception for all KV Store Adapter errors.""" - - def __init__(self, message: str | None = None, extra_info: ExtraInfoType | None = None): - message_parts: list[str] = [] - - if message: - message_parts.append(message) - - if extra_info: - extra_info_str = ";".join(f"{k}: {v}" for k, v in extra_info.items()) - if message: - extra_info_str = "(" + extra_info_str + ")" - - message_parts.append(extra_info_str) - - super().__init__(": ".join(message_parts)) - - -class MissingKeyError(KVStoreAdapterError): - """Raised when a key is missing from the store.""" - - def __init__(self, operation: str, collection: str | None = None, key: str | None = None): - super().__init__( - message="A key was requested that was required but not found in the store.", - extra_info={"operation": operation, "collection": collection or "default", "key": key}, - ) - - -class InvalidTTLError(KVStoreAdapterError): - """Raised when a TTL is invalid.""" - - def __init__(self, ttl: float): - super().__init__( - message="A TTL is invalid.", - extra_info={"ttl": ttl}, - ) - - -class SetupError(KVStoreAdapterError): - """Raised when a store setup fails.""" - - -class UnknownError(KVStoreAdapterError): - """Raised when an unexpected or unidentifiable error occurs.""" - - -class StoreConnectionError(KVStoreAdapterError): - """Raised when unable to connect to or communicate with the underlying store.""" - - -class KVStoreAdapterOperationError(KVStoreAdapterError): - """Raised when a store operation fails due to operational issues.""" - - -class SerializationError(KVStoreAdapterOperationError): - """Raised when data cannot be serialized for storage.""" - - -class DeserializationError(KVStoreAdapterOperationError): - """Raised when stored data cannot be deserialized back to its original form.""" - - -class ConfigurationError(KVStoreAdapterError): - """Raised when store configuration is invalid or incomplete.""" diff --git a/key-value/key-value-aio/src/key_value/aio/errors/__init__.py b/key-value/key-value-aio/src/key_value/aio/errors/__init__.py new file mode 100644 index 00000000..eab3e7ef --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/errors/__init__.py @@ -0,0 +1,21 @@ +from key_value.aio.errors.base import BaseKeyValueError +from key_value.aio.errors.key_value import ( + DeserializationError, + InvalidTTLError, + KeyValueOperationError, + MissingKeyError, + SerializationError, +) +from key_value.aio.errors.store import KeyValueStoreError, StoreConnectionError, StoreSetupError + +__all__ = [ + "BaseKeyValueError", + "DeserializationError", + "InvalidTTLError", + "KeyValueOperationError", + "KeyValueStoreError", + "MissingKeyError", + "SerializationError", + "StoreConnectionError", + "StoreSetupError", +] diff --git a/key-value/key-value-aio/src/key_value/aio/errors/base.py b/key-value/key-value-aio/src/key_value/aio/errors/base.py new file mode 100644 index 00000000..f8fc081b --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/errors/base.py @@ -0,0 +1,20 @@ +ExtraInfoType = dict[str, 
str | int | float | bool | None] + + +class BaseKeyValueError(Exception): + """Base exception for all KV Store Adapter errors.""" + + def __init__(self, message: str | None = None, extra_info: ExtraInfoType | None = None): + message_parts: list[str] = [] + + if message: + message_parts.append(message) + + if extra_info: + extra_info_str = ";".join(f"{k}: {v}" for k, v in extra_info.items()) + if message: + extra_info_str = "(" + extra_info_str + ")" + + message_parts.append(extra_info_str) + + super().__init__(": ".join(message_parts)) diff --git a/key-value/key-value-aio/src/key_value/aio/errors/key_value.py b/key-value/key-value-aio/src/key_value/aio/errors/key_value.py new file mode 100644 index 00000000..e439f47b --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/errors/key_value.py @@ -0,0 +1,33 @@ +from key_value.aio.errors.base import BaseKeyValueError + + +class KeyValueOperationError(BaseKeyValueError): + """Base exception for all Key-Value operation errors.""" + + +class SerializationError(KeyValueOperationError): + """Raised when data cannot be serialized for storage.""" + + +class DeserializationError(KeyValueOperationError): + """Raised when stored data cannot be deserialized back to its original form.""" + + +class MissingKeyError(KeyValueOperationError): + """Raised when a key is missing from the store.""" + + def __init__(self, operation: str, collection: str | None = None, key: str | None = None): + super().__init__( + message="A key was requested that was required but not found in the store.", + extra_info={"operation": operation, "collection": collection or "default", "key": key}, + ) + + +class InvalidTTLError(KeyValueOperationError): + """Raised when a TTL is invalid.""" + + def __init__(self, ttl: float): + super().__init__( + message="A TTL is invalid.", + extra_info={"ttl": ttl}, + ) diff --git a/key-value/key-value-aio/src/key_value/aio/errors/store.py b/key-value/key-value-aio/src/key_value/aio/errors/store.py new file mode 100644 index 00000000..1772ab05 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/errors/store.py @@ -0,0 +1,13 @@ +from key_value.aio.errors.base import BaseKeyValueError + + +class KeyValueStoreError(BaseKeyValueError): + """Base exception for all Key-Value store errors.""" + + +class StoreSetupError(KeyValueStoreError): + """Raised when a store setup fails.""" + + +class StoreConnectionError(KeyValueStoreError): + """Raised when unable to connect to or communicate with the underlying store.""" diff --git a/key-value/key-value-aio/src/key_value/aio/protocols/__init__.py b/key-value/key-value-aio/src/key_value/aio/protocols/__init__.py new file mode 100644 index 00000000..1314fc2c --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/protocols/__init__.py @@ -0,0 +1 @@ +from key_value.aio.protocols.key_value import AsyncKeyValue as AsyncKeyValue diff --git a/key-value/key-value-aio/src/key_value/aio/types.py b/key-value/key-value-aio/src/key_value/aio/protocols/key_value.py similarity index 100% rename from key-value/key-value-aio/src/key_value/aio/types.py rename to key-value/key-value-aio/src/key_value/aio/protocols/key_value.py diff --git a/key-value/key-value-aio/src/key_value/aio/stores/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/key-value/key-value-aio/src/key_value/aio/stores/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/key-value/key-value-aio/src/key_value/aio/stores/base.py 
b/key-value/key-value-aio/src/key_value/aio/stores/base.py index c6896933..6e93e9b9 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/base.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/base.py @@ -11,8 +11,8 @@ from typing_extensions import Self, override -from key_value.aio.errors import InvalidTTLError, SetupError -from key_value.aio.types import ( +from key_value.aio.errors import InvalidTTLError, StoreSetupError +from key_value.aio.protocols.key_value import ( AsyncCullProtocol, AsyncDestroyCollectionProtocol, AsyncDestroyStoreProtocol, @@ -92,7 +92,7 @@ async def setup(self) -> None: try: await self._setup() except Exception as e: - raise SetupError(message=f"Failed to setup store: {e}", extra_info={"store": self.__class__.__name__}) from e + raise StoreSetupError(message=f"Failed to setup store: {e}", extra_info={"store": self.__class__.__name__}) from e self._setup_complete = True async def setup_collection(self, *, collection: str) -> None: @@ -104,7 +104,7 @@ async def setup_collection(self, *, collection: str) -> None: try: await self._setup_collection(collection=collection) except Exception as e: - raise SetupError(message=f"Failed to setup collection: {e}", extra_info={"collection": collection}) from e + raise StoreSetupError(message=f"Failed to setup collection: {e}", extra_info={"collection": collection}) from e self._setup_collection_complete[collection] = True @abstractmethod diff --git a/key-value/key-value-aio/src/key_value/aio/stores/disk/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/__init__.py index 54d2e329..d6bedf9f 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/disk/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/__init__.py @@ -1,4 +1,4 @@ -from .multi_store import MultiDiskStore -from .store import DiskStore +from key_value.aio.stores.disk.multi_store import MultiDiskStore +from key_value.aio.stores.disk.store import DiskStore __all__ = ["DiskStore", "MultiDiskStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py index e841a493..1f6522b9 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py @@ -13,7 +13,7 @@ from diskcache import Cache from pathvalidate import sanitize_filename except ImportError as e: - msg = "DiskStore requires py-kv-store-adapter[disk]" + msg = "DiskStore requires py-key-value-aio[disk]" raise ImportError(msg) from e DEFAULT_DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 * 1024 # 1GB diff --git a/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py index 2b5fb5d0..be3a84b6 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py @@ -11,7 +11,7 @@ try: from diskcache import Cache except ImportError as e: - msg = "DiskStore requires py-kv-store-adapter[disk]" + msg = "DiskStore requires py-key-value-aio[disk]" raise ImportError(msg) from e DEFAULT_DISK_STORE_MAX_SIZE = 1 * 1024 * 1024 * 1024 # 1GB diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/__init__.py index 3e9dbcb2..593dd460 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/__init__.py +++ 
b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/__init__.py @@ -1,3 +1,3 @@ -from .store import ElasticsearchStore +from key_value.aio.stores.elasticsearch.store import ElasticsearchStore __all__ = ["ElasticsearchStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py index bdf2f4f5..115ad586 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py @@ -17,7 +17,6 @@ try: from elasticsearch import AsyncElasticsearch - from key_value.aio.stores.elasticsearch.utils import ( get_aggregations_from_body, get_body_from_response, @@ -26,7 +25,7 @@ get_source_from_body, ) except ImportError as e: - msg = "ElasticsearchStore requires py-kv-store-adapter[elasticsearch]" + msg = "ElasticsearchStore requires py-key-value-aio[elasticsearch]" raise ImportError(msg) from e if TYPE_CHECKING: @@ -78,7 +77,7 @@ class ElasticsearchStore( def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index: str, default_collection: str | None = None) -> None: ... @overload - def __init__(self, *, url: str, api_key: str, index: str, default_collection: str | None = None) -> None: ... + def __init__(self, *, url: str, api_key: str | None = None, index: str, default_collection: str | None = None) -> None: ... def __init__( self, @@ -116,7 +115,7 @@ def __init__( super().__init__(default_collection=default_collection) @override - async def setup(self) -> None: + async def _setup(self) -> None: if await self._client.options(ignore_status=404).indices.exists(index=self._index): return @@ -210,7 +209,7 @@ async def _get_collection_keys(self, *, collection: str, limit: int | None = Non result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( index=self._index, - fields=["key"], # pyright: ignore[reportArgumentType] + fields=[{"key": None}], body={ "query": { "term": { diff --git a/key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py index a70097ac..427df973 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memcached/__init__.py @@ -1,3 +1,3 @@ -from .store import MemcachedStore +from key_value.aio.stores.memcached.store import MemcachedStore __all__ = ["MemcachedStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py b/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py index d9791665..6700acae 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py @@ -10,7 +10,7 @@ try: from aiomcache import Client except ImportError as e: - msg = "MemcachedStore requires py-kv-store-adapter[memcached]" + msg = "MemcachedStore requires py-key-value-aio[memcached]" raise ImportError(msg) from e MAX_KEY_LENGTH = 240 diff --git a/key-value/key-value-aio/src/key_value/aio/stores/memory/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/memory/__init__.py index 7bcd5ca4..e0b2ef0c 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/memory/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memory/__init__.py @@ -1,3 +1,3 @@ -from .store import MemoryStore +from key_value.aio.stores.memory.store import MemoryStore __all__ 
= ["MemoryStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py b/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py index fabed4b9..b9746a37 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py @@ -17,7 +17,7 @@ try: from cachetools import TLRUCache except ImportError as e: - msg = "MemoryStore requires py-kv-store-adapter[memory]" + msg = "MemoryStore requires py-key-value-aio[memory]" raise ImportError(msg) from e diff --git a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/__init__.py index 3941e70d..fdc1bced 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/__init__.py @@ -1,3 +1,3 @@ -from .store import MongoDBStore +from key_value.aio.stores.mongodb.store import MongoDBStore __all__ = ["MongoDBStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py index 26a32171..4b67db1c 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py @@ -16,7 +16,7 @@ try: from pymongo import AsyncMongoClient except ImportError as e: - msg = "MongoDBStore requires py-kv-store-adapter[mongodb]" + msg = "MongoDBStore requires py-key-value-aio[mongodb]" raise ImportError(msg) from e diff --git a/key-value/key-value-aio/src/key_value/aio/stores/null/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/null/__init__.py index 7cbca0b5..1c571cf1 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/null/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/null/__init__.py @@ -1,3 +1,3 @@ -from .store import NullStore +from key_value.aio.stores.null.store import NullStore __all__ = ["NullStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/redis/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/redis/__init__.py index 600d165b..bb8e1875 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/redis/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/redis/__init__.py @@ -1,3 +1,3 @@ -from .store import RedisStore +from key_value.aio.stores.redis.store import RedisStore __all__ = ["RedisStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py b/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py index 47c641f2..d9758097 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py @@ -10,7 +10,7 @@ try: from redis.asyncio import Redis except ImportError as e: - msg = "RedisStore requires py-kv-store-adapter[redis]" + msg = "RedisStore requires py-key-value-aio[redis]" raise ImportError(msg) from e DEFAULT_PAGE_SIZE = 10000 diff --git a/key-value/key-value-aio/src/key_value/aio/stores/simple/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/simple/__init__.py index a0c59924..68d8964b 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/simple/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/simple/__init__.py @@ -1,3 +1,3 @@ -from .store import SimpleStore +from key_value.aio.stores.simple.store import SimpleStore __all__ = ["SimpleStore"] diff --git 
a/key-value/key-value-aio/src/key_value/aio/stores/valkey/__init__.py b/key-value/key-value-aio/src/key_value/aio/stores/valkey/__init__.py index 281d9a58..ce2cd4f2 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/valkey/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/valkey/__init__.py @@ -1,3 +1,3 @@ -from .store import ValkeyStore +from key_value.aio.stores.valkey.store import ValkeyStore __all__ = ["ValkeyStore"] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py b/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py index 14671b24..0cc76641 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py @@ -1,8 +1,8 @@ from typing import overload -from glide.glide_client import BaseClient, ServerCredentials +from glide.glide_client import BaseClient from glide_shared.commands.core_options import ExpirySet, ExpiryType -from glide_shared.config import GlideClientConfiguration, NodeAddress +from glide_shared.config import GlideClientConfiguration, NodeAddress, ServerCredentials from typing_extensions import override from key_value.aio.stores.base import BaseContextManagerStore, BaseStore @@ -13,7 +13,7 @@ # Use redis-py asyncio client to communicate with a Valkey server (protocol compatible) from glide.glide_client import GlideClient except ImportError as e: - msg = "ValkeyStore requires py-kv-store-adapter[valkey]" + msg = "ValkeyStore requires py-key-value-aio[valkey]" raise ImportError(msg) from e diff --git a/key-value/key-value-aio/src/key_value/aio/utils/acompat.py b/key-value/key-value-aio/src/key_value/aio/utils/acompat.py new file mode 100644 index 00000000..72fc2eb5 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/utils/acompat.py @@ -0,0 +1,18 @@ +import asyncio +import time +from collections.abc import Coroutine +from typing import Any + + +def asleep(seconds: float) -> Coroutine[Any, Any, None]: + """ + Equivalent to asyncio.sleep(), converted to time.sleep() by async_to_sync. + """ + return asyncio.sleep(seconds) + + +def sleep(seconds: float) -> None: + """ + Equivalent to time.sleep(), converted to asyncio.sleep() by async_to_sync. 
+ """ + time.sleep(seconds) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py index cc7aadda..e69de29b 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/__init__.py @@ -1,17 +0,0 @@ -from .base import BaseWrapper -from .passthrough_cache import PassthroughCacheWrapper -from .prefix_collections import PrefixCollectionsWrapper -from .prefix_keys import PrefixKeysWrapper -from .single_collection import SingleCollectionWrapper -from .statistics import StatisticsWrapper -from .ttl_clamp import TTLClampWrapper - -__all__ = [ - "BaseWrapper", - "PassthroughCacheWrapper", - "PrefixCollectionsWrapper", - "PrefixKeysWrapper", - "SingleCollectionWrapper", - "StatisticsWrapper", - "TTLClampWrapper", -] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/base.py b/key-value/key-value-aio/src/key_value/aio/wrappers/base.py index f79f7bc1..b8385f49 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/base.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/base.py @@ -3,7 +3,7 @@ from typing_extensions import override -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue class BaseWrapper(AsyncKeyValue): diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/__init__.py new file mode 100644 index 00000000..105c2f86 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.wrappers.passthrough_cache.wrapper import PassthroughCacheWrapper + +__all__ = ["PassthroughCacheWrapper"] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache.py b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py similarity index 99% rename from key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py index a5ad3fd6..d6bdd4b4 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py @@ -3,7 +3,7 @@ from typing_extensions import override -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.wrappers.base import BaseWrapper from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/__init__.py new file mode 100644 index 00000000..0ed199ff --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.wrappers.prefix_collections.wrapper import PrefixCollectionsWrapper + +__all__ = ["PrefixCollectionsWrapper"] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py similarity index 98% rename from key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py index 9ff67b44..cc2d155f 100644 --- 
a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py @@ -3,8 +3,8 @@ from typing_extensions import override +from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME -from key_value.aio.types import AsyncKeyValue from key_value.aio.utils.compound import prefix_collection, unprefix_collection from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/__init__.py new file mode 100644 index 00000000..4bcca2cd --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.wrappers.prefix_keys.wrapper import PrefixKeysWrapper + +__all__ = ["PrefixKeysWrapper"] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py similarity index 98% rename from key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py index 348b7588..a656642c 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py @@ -3,7 +3,7 @@ from typing_extensions import override -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.utils.compound import prefix_key, unprefix_key from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/__init__.py new file mode 100644 index 00000000..b6036258 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.wrappers.single_collection.wrapper import SingleCollectionWrapper + +__all__ = ["SingleCollectionWrapper"] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection.py b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py similarity index 98% rename from key-value/key-value-aio/src/key_value/aio/wrappers/single_collection.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py index d5282c69..5f351546 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py @@ -3,8 +3,8 @@ from typing_extensions import override +from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME -from key_value.aio.types import AsyncKeyValue from key_value.aio.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/__init__.py new file mode 100644 index 00000000..c80db4d3 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.wrappers.statistics.wrapper import StatisticsWrapper + +__all__ = 
["StatisticsWrapper"] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/statistics.py b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py similarity index 99% rename from key-value/key-value-aio/src/key_value/aio/wrappers/statistics.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py index e2411fb9..80eb4adc 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/statistics.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py @@ -4,7 +4,7 @@ from typing_extensions import override -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/__init__.py b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/__init__.py new file mode 100644 index 00000000..8187c560 --- /dev/null +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/__init__.py @@ -0,0 +1,3 @@ +from key_value.aio.wrappers.ttl_clamp.wrapper import TTLClampWrapper + +__all__ = ["TTLClampWrapper"] diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py similarity index 97% rename from key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py rename to key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py index 054b1de2..ff4b2af9 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py @@ -3,7 +3,7 @@ from typing_extensions import override -from key_value.aio.types import AsyncKeyValue +from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/tests/adapters/test_pydantic.py b/key-value/key-value-aio/tests/adapters/test_pydantic.py index a2cc6056..ce5427e8 100644 --- a/key-value/key-value-aio/tests/adapters/test_pydantic.py +++ b/key-value/key-value-aio/tests/adapters/test_pydantic.py @@ -43,15 +43,15 @@ async def store(self) -> MemoryStore: @pytest.fixture async def user_adapter(self, store: MemoryStore) -> PydanticAdapter[User]: - return PydanticAdapter[User](kv_store=store, pydantic_model=User) + return PydanticAdapter[User](key_value=store, pydantic_model=User) @pytest.fixture async def product_adapter(self, store: MemoryStore) -> PydanticAdapter[Product]: - return PydanticAdapter[Product](kv_store=store, pydantic_model=Product) + return PydanticAdapter[Product](key_value=store, pydantic_model=Product) @pytest.fixture async def order_adapter(self, store: MemoryStore) -> PydanticAdapter[Order]: - return PydanticAdapter[Order](kv_store=store, pydantic_model=Order) + return PydanticAdapter[Order](key_value=store, pydantic_model=Order) async def test_simple_adapter(self, user_adapter: PydanticAdapter[User]): await user_adapter.put(collection="test", key="test", value=SAMPLE_USER) diff --git a/key-value/key-value-aio/tests/adapters/test_raise.py b/key-value/key-value-aio/tests/adapters/test_raise.py index 60509757..72110b67 100644 --- a/key-value/key-value-aio/tests/adapters/test_raise.py +++ b/key-value/key-value-aio/tests/adapters/test_raise.py @@ -12,7 +12,7 @@ async def store() -> MemoryStore: @pytest.fixture async def adapter(store: MemoryStore) -> RaiseOnMissingAdapter: - return RaiseOnMissingAdapter(kv_store=store) + return 
RaiseOnMissingAdapter(key_value=store) async def test_get(adapter: RaiseOnMissingAdapter): diff --git a/key-value/key-value-aio/tests/cases.py b/key-value/key-value-aio/tests/cases.py index 54c041a0..d4c87025 100644 --- a/key-value/key-value-aio/tests/cases.py +++ b/key-value/key-value-aio/tests/cases.py @@ -1,5 +1,13 @@ +from datetime import datetime, timezone from typing import Any +FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) +FIXED_TIME = FIXED_DATETIME.time() + +LARGE_STRING: str = "a" * 10000 # 10KB +LARGE_INT: int = 1 * 10**18 # 19 digits +LARGE_FLOAT: float = 1.0 * 10**63 # 1e63 + SIMPLE_CASE: dict[str, Any] = { "key_1": "value_1", "key_2": 1, @@ -18,10 +26,18 @@ ({"key": 1}, '{"key": 1}'), ({"key": 1.0}, '{"key": 1.0}'), ({"key": [1, 2, 3]}, '{"key": [1, 2, 3]}'), + # ({"key": (1, 2, 3)}, '{"key": [1, 2, 3]}'), ({"key": {"nested": "value"}}, '{"key": {"nested": "value"}}'), ({"key": True}, '{"key": true}'), ({"key": False}, '{"key": false}'), ({"key": None}, '{"key": null}'), + ( + {"key": {"int": 1, "float": 1.0, "list": [1, 2, 3], "dict": {"nested": "value"}, "bool": True, "null": None}}, + '{"key": {"int": 1, "float": 1.0, "list": [1, 2, 3], "dict": {"nested": "value"}, "bool": true, "null": null}}', + ), + ({"key": LARGE_STRING}, f'{{"key": "{LARGE_STRING}"}}'), + ({"key": LARGE_INT}, f'{{"key": {LARGE_INT}}}'), + ({"key": LARGE_FLOAT}, f'{{"key": {LARGE_FLOAT}}}'), ] DICTIONARY_TO_JSON_TEST_CASES_NAMES: list[str] = [ @@ -29,10 +45,15 @@ "int", "float", "list", + # "tuple", "dict", - "bool-false", "bool-true", + "bool-false", "null", + "dict-nested", + "large-string", + "large-int", + "large-float", ] OBJECT_TEST_CASES: list[dict[str, Any]] = [test_case[0] for test_case in DICTIONARY_TO_JSON_TEST_CASES] diff --git a/key-value/key-value-aio/tests/conftest.py b/key-value/key-value-aio/tests/conftest.py index 8979894a..fd6f77f1 100644 --- a/key-value/key-value-aio/tests/conftest.py +++ b/key-value/key-value-aio/tests/conftest.py @@ -1,3 +1,4 @@ +import asyncio import logging from collections.abc import Callable, Iterator from contextlib import contextmanager @@ -75,11 +76,11 @@ def docker_rm(name: str, raise_on_error: bool = False) -> bool: return True -def docker_run(name: str, image: str, ports: dict[str, int], raise_on_error: bool = False) -> bool: +def docker_run(name: str, image: str, ports: dict[str, int], environment: dict[str, str], raise_on_error: bool = False) -> bool: logger.info(f"Running container {name} with image {image} and ports {ports}") client = get_docker_client() try: - client.containers.run(name=name, image=image, ports=ports, detach=True) + client.containers.run(name=name, image=image, ports=ports, environment=environment, detach=True) except Exception: logger.info(f"Container {name} failed to run") if raise_on_error: @@ -90,13 +91,15 @@ def docker_run(name: str, image: str, ports: dict[str, int], raise_on_error: boo @contextmanager -def docker_container(name: str, image: str, ports: dict[str, int], raise_on_error: bool = True) -> Iterator[None]: +def docker_container( + name: str, image: str, ports: dict[str, int], environment: dict[str, str] | None = None, raise_on_error: bool = True +) -> Iterator[None]: logger.info(f"Creating container {name} with image {image} and ports {ports}") try: - docker_pull(image, raise_on_error=True) - docker_stop(name, raise_on_error=False) - docker_rm(name, raise_on_error=False) - docker_run(name, image, ports, raise_on_error=True) + docker_pull(image=image, raise_on_error=True) +
docker_stop(name=name, raise_on_error=False) + docker_rm(name=name, raise_on_error=False) + docker_run(name=name, image=image, ports=ports, environment=environment or {}, raise_on_error=True) logger.info(f"Container {name} created") yield except Exception: @@ -109,3 +112,15 @@ def docker_container(name: str, image: str, ports: dict[str, int], raise_on_erro docker_rm(name, raise_on_error=False) logger.info(f"Container {name} stopped and removed") return + + +def async_running_in_event_loop() -> bool: + try: + asyncio.get_running_loop() + except RuntimeError: + return False + return True + + +def running_in_event_loop() -> bool: + return False diff --git a/key-value/key-value-aio/tests/protocols/__init__.py b/key-value/key-value-aio/tests/protocols/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-aio/tests/protocols/test_types.py b/key-value/key-value-aio/tests/protocols/test_types.py new file mode 100644 index 00000000..9df69a27 --- /dev/null +++ b/key-value/key-value-aio/tests/protocols/test_types.py @@ -0,0 +1,17 @@ +from key_value.aio.protocols.key_value import AsyncKeyValue +from key_value.aio.stores.memory import MemoryStore + + +async def test_key_value_protocol(): + async def test_protocol(key_value: AsyncKeyValue): + assert await key_value.get(collection="test", key="test") is None + await key_value.put(collection="test", key="test", value={"test": "test"}) + assert await key_value.delete(collection="test", key="test") + await key_value.put(collection="test", key="test_2", value={"test": "test"}) + + memory_store = MemoryStore() + + await test_protocol(key_value=memory_store) + + assert await memory_store.get(collection="test", key="test") is None + assert await memory_store.get(collection="test", key="test_2") == {"test": "test"} diff --git a/key-value/key-value-aio/tests/stores/conftest.py b/key-value/key-value-aio/tests/stores/conftest.py index 686ecf3f..9b2faaae 100644 --- a/key-value/key-value-aio/tests/stores/conftest.py +++ b/key-value/key-value-aio/tests/stores/conftest.py @@ -3,8 +3,9 @@ import os import subprocess from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Callable from datetime import datetime, timedelta, timezone +from typing import Any import pytest from dirty_equals import IsFloat @@ -13,6 +14,9 @@ from key_value.aio.errors import InvalidTTLError, SerializationError from key_value.aio.stores.base import BaseContextManagerStore, BaseStore from key_value.aio.stores.memory.store import MemoryStore +from key_value.aio.utils.acompat import asleep, sleep +from tests.cases import DICTIONARY_TO_JSON_TEST_CASES_NAMES, OBJECT_TEST_CASES +from tests.conftest import async_running_in_event_loop @pytest.fixture @@ -63,6 +67,14 @@ def should_skip_docker_tests() -> bool: return not should_run_docker_tests() +def wait_for_store(wait_fn: Callable[[], bool], max_time: int = 10) -> bool: + for _ in range(max_time): + if wait_fn(): + return True + sleep(seconds=1) + return False + + class BaseStoreTests(ABC): async def eventually_consistent(self) -> None: # noqa: B027 """Subclasses can override this to wait for eventually consistent operations.""" @@ -95,6 +107,11 @@ async def test_get_put_get(self, store: BaseStore): await store.put(collection="test", key="test", value={"test": "test"}) assert await store.get(collection="test", key="test") == {"test": "test"} + @pytest.mark.parametrize(argnames="value", argvalues=OBJECT_TEST_CASES,
ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) + async def test_get_complex_put_get(self, store: BaseStore, value: dict[str, Any]): + await store.put(collection="test", key="test", value=value) + assert await store.get(collection="test", key="test") == value + async def test_put_many_get(self, store: BaseStore): await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) assert await store.get(collection="test", key="test") == {"test": "test"} @@ -120,6 +137,12 @@ async def test_put_get_delete_get(self, store: BaseStore): assert await store.delete(collection="test", key="test") assert await store.get(collection="test", key="test") is None + async def test_put_many_get_get_delete_many_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert await store.delete_many(collection="test", keys=["test", "test_2"]) == 2 + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + async def test_put_many_get_many_delete_many_get_many(self, store: BaseStore): await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] @@ -175,7 +198,7 @@ async def test_negative_ttl(self, store: BaseStore): async def test_put_expired_get_none(self, store: BaseStore): """Tests that a key stored with a short TTL returns None once the TTL has elapsed.""" await store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1) - await asyncio.sleep(3) + await asleep(seconds=3) assert await store.get(collection="test_collection", key="test_key") is None async def test_long_collection_name(self, store: BaseStore): @@ -209,6 +232,7 @@ async def test_not_unbounded(self, store: BaseStore): assert await store.get(collection="test_collection", key="test_key_0") is None assert await store.get(collection="test_collection", key="test_key_999") is not None + @pytest.mark.skipif(condition=not async_running_in_event_loop(), reason="Concurrent operations require a running event loop") async def test_concurrent_operations(self, store: BaseStore): """Tests that the store can handle concurrent operations.""" diff --git a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py index 6813d901..7e5052b2 100644 --- a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py @@ -2,38 +2,55 @@ from collections.abc import AsyncGenerator import pytest -from elasticsearch import AsyncElasticsearch +from elasticsearch import Elasticsearch from typing_extensions import override from key_value.aio.stores.base import BaseStore from key_value.aio.stores.elasticsearch import ElasticsearchStore -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin +from tests.conftest import docker_container +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB +ES_HOST = "localhost" +ES_PORT = 9200 +ES_URL = f"http://{ES_HOST}:{ES_PORT}" +ES_VERSION = "9.1.4" +ES_IMAGE =
f"docker.elastic.co/elasticsearch/elasticsearch:{ES_VERSION}" -@pytest.fixture -async def elasticsearch_client() -> AsyncGenerator[AsyncElasticsearch, None]: - es_url = os.getenv("ES_URL") - es_api_key = os.getenv("ES_API_KEY") +def get_elasticsearch_client() -> Elasticsearch: + return Elasticsearch(hosts=[ES_URL]) - assert isinstance(es_url, str) - assert isinstance(es_api_key, str) +def ping_elasticsearch() -> bool: + es_client: Elasticsearch = get_elasticsearch_client() - client = AsyncElasticsearch(hosts=[es_url], api_key=es_api_key) + return es_client.ping() - async with client: - yield client + +class ElasticsearchFailedToStartError(Exception): + pass @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") class TestElasticsearchStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(autouse=True, scope="session") + async def setup_elasticsearch(self) -> AsyncGenerator[None, None]: + with docker_container( + "elasticsearch-test", ES_IMAGE, {"9200": 9200}, {"discovery.type": "single-node", "xpack.security.enabled": "false"} + ): + if not wait_for_store(wait_fn=ping_elasticsearch, max_time=30): + msg = "Elasticsearch failed to start" + raise ElasticsearchFailedToStartError(msg) + + yield + @override @pytest.fixture - async def store(self, elasticsearch_client: AsyncElasticsearch) -> ElasticsearchStore: - _ = await elasticsearch_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") - return ElasticsearchStore(elasticsearch_client=elasticsearch_client, index="kv-store-e2e-test") + async def store(self) -> ElasticsearchStore: + es_client = get_elasticsearch_client() + _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") + return ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") @pytest.mark.skip(reason="Distributed Caches are unbounded") @override diff --git a/key-value/key-value-aio/tests/stores/memcached/test_memcached.py b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py index 27199faf..38e7358b 100644 --- a/key-value/key-value-aio/tests/stores/memcached/test_memcached.py +++ b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py @@ -8,6 +8,7 @@ from key_value.aio.stores.base import BaseStore from key_value.aio.stores.memcached import MemcachedStore +from key_value.aio.utils.acompat import asleep from tests.conftest import docker_container from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests @@ -37,7 +38,7 @@ async def wait_memcached() -> bool: result = await asyncio.wait_for(ping_memcached(), timeout=1) if result: return True - await asyncio.sleep(delay=1) + await asleep(1) return False diff --git a/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py index ee6eccd5..a9e0b5cd 100644 --- a/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py +++ b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py @@ -1,17 +1,16 @@ -import asyncio import contextlib from collections.abc import AsyncGenerator from typing import Any import pytest from inline_snapshot import snapshot -from pymongo import AsyncMongoClient +from pymongo import MongoClient from typing_extensions import override from key_value.aio.stores.base import BaseStore from key_value.aio.stores.mongodb import MongoDBStore from tests.conftest import docker_container -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin,
should_skip_docker_tests +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store # MongoDB test configuration MONGODB_HOST = "localhost" @@ -21,24 +20,16 @@ WAIT_FOR_MONGODB_TIMEOUT = 30 -async def ping_mongodb() -> bool: +def ping_mongodb() -> bool: try: - client: AsyncMongoClient[Any] = AsyncMongoClient[Any](host=MONGODB_HOST, port=MONGODB_HOST_PORT) - _ = await client.list_database_names() + client: MongoClient[Any] = MongoClient[Any](host=MONGODB_HOST, port=MONGODB_HOST_PORT) + _ = client.list_database_names() except Exception: return False return True -async def wait_mongodb() -> bool: - for _ in range(WAIT_FOR_MONGODB_TIMEOUT): - if await ping_mongodb(): - return True - await asyncio.sleep(delay=1) - return False - - class MongoDBFailedToStartError(Exception): pass @@ -48,7 +39,7 @@ class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_mongodb(self) -> AsyncGenerator[None, None]: with docker_container("mongodb-test", "mongo:7", {"27017": 27017}): - if not await wait_mongodb(): + if not wait_for_store(wait_fn=ping_mongodb, max_time=WAIT_FOR_MONGODB_TIMEOUT): msg = "MongoDB failed to start" raise MongoDBFailedToStartError(msg) diff --git a/key-value/key-value-aio/tests/stores/redis/test_redis.py b/key-value/key-value-aio/tests/stores/redis/test_redis.py index 39352a27..dce86313 100644 --- a/key-value/key-value-aio/tests/stores/redis/test_redis.py +++ b/key-value/key-value-aio/tests/stores/redis/test_redis.py @@ -1,14 +1,12 @@ -import asyncio from collections.abc import AsyncGenerator import pytest -from redis.asyncio import Redis from typing_extensions import override from key_value.aio.stores.base import BaseStore from key_value.aio.stores.redis import RedisStore from tests.conftest import docker_container, docker_stop -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store # Redis test configuration REDIS_HOST = "localhost" @@ -18,25 +16,16 @@ WAIT_FOR_REDIS_TIMEOUT = 30 -async def ping_redis() -> bool: +def ping_redis() -> bool: + from redis import Redis + client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) try: - return await client.ping() # pyright: ignore[reportUnknownMemberType, reportAny] + return client.ping() # pyright: ignore[reportUnknownMemberType, reportAny, reportReturnType] except Exception: return False -async def wait_redis() -> bool: - # with a timeout of 10 seconds - for _ in range(WAIT_FOR_REDIS_TIMEOUT): - result = await asyncio.wait_for(ping_redis(), timeout=1) - if result: - return True - await asyncio.sleep(delay=1) - - return False - - class RedisFailedToStartError(Exception): pass @@ -49,7 +38,7 @@ async def setup_redis(self) -> AsyncGenerator[None, None]: docker_stop("valkey-test", raise_on_error=False) with docker_container("redis-test", "redis", {"6379": 6379}): - if not await wait_redis(): + if not wait_for_store(wait_fn=ping_redis, max_time=WAIT_FOR_REDIS_TIMEOUT): msg = "Redis failed to start" raise RedisFailedToStartError(msg) @@ -75,6 +64,8 @@ async def test_redis_url_connection(self): async def test_redis_client_connection(self): """Test Redis store creation with existing client.""" + from redis.asyncio import Redis + client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) store = RedisStore(client=client) diff --git
a/key-value/key-value-aio/tests/stores/valkey/test_valkey.py b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py index 8bdeda06..3102bc49 100644 --- a/key-value/key-value-aio/tests/stores/valkey/test_valkey.py +++ b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py @@ -1,21 +1,11 @@ -import asyncio from collections.abc import AsyncGenerator import pytest from typing_extensions import override from key_value.aio.stores.base import BaseStore -from tests.conftest import docker_container, docker_stop, try_import -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests - -with try_import() as has_valkey: - from glide.glide_client import GlideClient - from glide_shared.config import GlideClientConfiguration, NodeAddress - - from key_value.aio.stores.valkey import ValkeyStore - -if not has_valkey(): - pytestmark = pytest.mark.skip(reason="GlideClient is not installed") +from tests.conftest import docker_container, docker_stop +from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests, wait_for_store # Valkey test configuration VALKEY_HOST = "localhost" @@ -32,36 +22,31 @@ class ValkeyFailedToStartError(Exception): @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") @pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows") class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests): - async def get_valkey_client(self): + def get_valkey_client(self): + from glide_shared.config import GlideClientConfiguration, NodeAddress + from glide_sync.glide_client import GlideClient + client_config: GlideClientConfiguration = GlideClientConfiguration( addresses=[NodeAddress(host=VALKEY_HOST, port=VALKEY_PORT)], database_id=VALKEY_DB ) - return await GlideClient.create(config=client_config) + return GlideClient.create(config=client_config) - async def ping_valkey(self) -> bool: + def ping_valkey(self) -> bool: try: - client = await self.get_valkey_client() - _ = await client.ping() + client = self.get_valkey_client() + _ = client.ping() except Exception: return False return True - async def wait_valkey(self) -> bool: - for _ in range(WAIT_FOR_VALKEY_TIMEOUT): - result = await asyncio.wait_for(self.ping_valkey(), timeout=1) - if result: - return True - await asyncio.sleep(delay=1) - return False - @pytest.fixture(scope="session") async def setup_valkey(self) -> AsyncGenerator[None, None]: # Double-check that the Redis test container is stopped docker_stop("redis-test", raise_on_error=False) with docker_container("valkey-test", "valkey/valkey:latest", {"6379": 6379}): - if not await self.wait_valkey(): + if not wait_for_store(wait_fn=self.ping_valkey, max_time=WAIT_FOR_VALKEY_TIMEOUT): msg = "Valkey failed to start" raise ValkeyFailedToStartError(msg) @@ -70,10 +55,13 @@ async def setup_valkey(self) -> AsyncGenerator[None, None]: @override @pytest.fixture async def store(self, setup_valkey: None): + from key_value.aio.stores.valkey import ValkeyStore + store: ValkeyStore = ValkeyStore(host=VALKEY_HOST, port=VALKEY_PORT, db=VALKEY_DB) - client: GlideClient = await self.get_valkey_client() - _ = await client.flushdb() + # This is a synchronous client + client = self.get_valkey_client() + _ = client.flushdb() return store diff --git a/key-value/key-value-aio/tests/test_types.py b/key-value/key-value-aio/tests/test_types.py deleted file mode 100644 index a4add53a..00000000 --- a/key-value/key-value-aio/tests/test_types.py +++ /dev/null @@
-1,17 +0,0 @@ -from key_value.aio.stores.memory import MemoryStore -from key_value.aio.types import AsyncKeyValue - - -async def test_kv_store_protocol(): - async def test_protocol(kv_store: AsyncKeyValue): - assert await kv_store.get(collection="test", key="test") is None - await kv_store.put(collection="test", key="test", value={"test": "test"}) - assert await kv_store.delete(collection="test", key="test") - await kv_store.put(collection="test", key="test_2", value={"test": "test"}) - - memory_store = MemoryStore() - - await test_protocol(kv_store=memory_store) - - assert await memory_store.get(collection="test", key="test") is None - assert await memory_store.get(collection="test", key="test_2") == {"test": "test"} diff --git a/key-value/key-value-aio/tests/utils/__init__.py b/key-value/key-value-aio/tests/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-sync/.vscode/settings.json b/key-value/key-value-sync/.vscode/settings.json new file mode 100644 index 00000000..d7338ad7 --- /dev/null +++ b/key-value/key-value-sync/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, +} \ No newline at end of file diff --git a/key-value/key-value-sync/README.md b/key-value/key-value-sync/README.md new file mode 100644 index 00000000..fa89b3bc --- /dev/null +++ b/key-value/key-value-sync/README.md @@ -0,0 +1 @@ +See the root [README.md](../README.md) for more information. \ No newline at end of file diff --git a/key-value/key-value-sync/pyproject.toml b/key-value/key-value-sync/pyproject.toml new file mode 100644 index 00000000..209113a9 --- /dev/null +++ b/key-value/key-value-sync/pyproject.toml @@ -0,0 +1,80 @@ +[project] +name = "py-key-value-sync" +version = "0.2.0" +description = "Sync Key-Value" +readme = "README.md" +requires-python = ">=3.10" +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +dependencies = [ + "py-key-value-aio>=0.2.0", +] + +[tool.uv.sources] +py-key-value-aio = { workspace = true } + + + +[build-system] +requires = ["uv_build>=0.8.2,<0.9.0"] +build-backend = "uv_build" + +[tool.uv.build-backend] +module-name = "key_value.sync" + +[project.optional-dependencies] +memory = ["cachetools>=6.0.0"] +disk = ["diskcache>=5.6.0", "pathvalidate>=3.3.1",] +redis = ["redis>=6.0.0"] +mongodb = ["pymongo>=4.15.0"] +valkey = ["valkey-glide-sync>=2.1.0"] +memcached = ["aiomcache>=0.8.0"] +elasticsearch = ["elasticsearch>=9.0.0", "aiohttp>=3.12"] +pydantic = ["pydantic>=2.11.9"] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +addopts = ["--inline-snapshot=disable","-vv","-s"] +markers = [ + "skip_on_ci: Skip running the test when running on CI", +] +timeout = 10 + + +env_files = [".env"] + +[dependency-groups] +dev = [ + "py-key-value-sync[memory,disk,redis,elasticsearch,memcached,mongodb]", + "py-key-value-sync[valkey]; platform_system != 'Windows'", + "py-key-value-sync[pydantic]", + "pytest", + "pytest-mock", + "pytest-asyncio", + "ruff", + "diskcache-stubs>=5.6.3.6.20240818", + "pytest-dotenv>=0.5.2", + "dirty-equals>=0.10.0", + "inline-snapshot>=0.29.0", + "pytest-redis>=3.1.3", + "basedpyright>=1.31.5", + 
"pytest-timeout>=2.4.0", + "ast-comments>=1.2.3", +] +lint = [ + "ruff" +] + +[tool.ruff] +extend="../../pyproject.toml" + +[tool.pyright] +extends = "../../pyproject.toml" diff --git a/key-value/key-value-sync/src/key_value/sync/__init__.py b/key-value/key-value-sync/src/key_value/sync/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-sync/src/key_value/sync/adapters/__init__.py b/key-value/key-value-sync/src/key_value/sync/adapters/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/adapters/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/src/key_value/sync/adapters/pydantic/__init__.py b/key-value/key-value-sync/src/key_value/sync/adapters/pydantic/__init__.py new file mode 100644 index 00000000..ec149155 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/adapters/pydantic/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.adapters.pydantic.adapter import PydanticAdapter + +__all__ = ["PydanticAdapter"] diff --git a/key-value/key-value-sync/src/key_value/sync/adapters/raise_on_missing/__init__.py b/key-value/key-value-sync/src/key_value/sync/adapters/raise_on_missing/__init__.py new file mode 100644 index 00000000..40cf8b3a --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/adapters/raise_on_missing/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.adapters.raise_on_missing.adapter import RaiseOnMissingAdapter + +__all__ = ["RaiseOnMissingAdapter"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/__init__.py new file mode 100644 index 00000000..ec149155 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. 
+from key_value.sync.code_gen.adapters.pydantic.adapter import PydanticAdapter + +__all__ = ["PydanticAdapter"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py new file mode 100644 index 00000000..4b1810a0 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py @@ -0,0 +1,117 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'adapter.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Sequence +from typing import Any, Generic, TypeVar + +from pydantic import BaseModel, ValidationError +from pydantic_core import PydanticSerializationError + +from key_value.sync.code_gen.errors import DeserializationError, SerializationError +from key_value.sync.code_gen.protocols.key_value import KeyValue + +T = TypeVar("T", bound=BaseModel) + + +class PydanticAdapter(Generic[T]): + """Adapter around a KeyValue-compliant store that allows type-safe persistence of Pydantic models.""" + + def __init__(self, key_value: KeyValue, pydantic_model: type[T], default_collection: str | None = None) -> None: + self.key_value: KeyValue = key_value + self.pydantic_model: type[T] = pydantic_model + self.default_collection: str | None = default_collection + + def _validate_model(self, value: dict[str, Any]) -> T: + try: + return self.pydantic_model.model_validate(obj=value) + except ValidationError as e: + msg = f"Invalid Pydantic model: {e}" + raise DeserializationError(msg) from e + + def _serialize_model(self, value: T) -> dict[str, Any]: + try: + return value.model_dump(mode="json") + except PydanticSerializationError as e: + msg = f"Invalid Pydantic model: {e}" + raise SerializationError(msg) from e + + def get(self, key: str, *, collection: str | None = None) -> T | None: + """Get and validate a model by key. + + Returns the parsed model instance, or None if not present. + Raises DeserializationError if the stored data cannot be validated as the model. + """ + collection = collection or self.default_collection + + if (value := self.key_value.get(key=key, collection=collection)) is not None: + return self._validate_model(value=value) + + return None + + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[T | None]: + """Batch get and validate models by keys, preserving order. + + Each element is either a parsed model instance or None if missing. + """ + collection = collection or self.default_collection + + values: list[dict[str, Any] | None] = self.key_value.get_many(keys=keys, collection=collection) + + return [self._validate_model(value=value) if value is not None else None for value in values] + + def put(self, key: str, value: T, *, collection: str | None = None, ttl: float | None = None) -> None: + """Serialize and store a model. + + Propagates SerializationError if the model cannot be serialized.
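+ + Example (illustrative sketch, not generated code; assumes an adapter built as PydanticAdapter[User](key_value=store, pydantic_model=User) over any KeyValue-compliant store): + + adapter.put(key="u1", value=User(name="Ada"), collection="users", ttl=60.0)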
+ """ + collection = collection or self.default_collection + + value_dict: dict[str, Any] = self._serialize_model(value=value) + + self.key_value.put(key=key, value=value_dict, collection=collection, ttl=ttl) + + def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: + """Serialize and store multiple models, preserving order alignment with keys.""" + collection = collection or self.default_collection + + value_dicts: list[dict[str, Any]] = [self._serialize_model(value=value) for value in values] + + self.key_value.put_many(keys=keys, values=value_dicts, collection=collection, ttl=ttl) + + def delete(self, key: str, *, collection: str | None = None) -> bool: + """Delete a model by key. Returns True if a value was deleted, else False.""" + collection = collection or self.default_collection + + return self.key_value.delete(key=key, collection=collection) + + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + """Delete multiple models by key. Returns the count of deleted entries.""" + collection = collection or self.default_collection + + return self.key_value.delete_many(keys=keys, collection=collection) + + def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | None, float | None]: + """Get a model and its TTL seconds if present. + + Returns (model, ttl_seconds) or (None, None) if missing. + """ + collection = collection or self.default_collection + + entry: dict[str, Any] | None + ttl_info: float | None + + (entry, ttl_info) = self.key_value.ttl(key=key, collection=collection) + + if entry is not None: + model_validate: T = self._validate_model(value=entry) + return (model_validate, ttl_info) + + return (None, None) + + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: + """Batch get models with TTLs. Each element is (model|None, ttl_seconds|None).""" + collection = collection or self.default_collection + + entries: list[tuple[dict[str, Any] | None, float | None]] = self.key_value.ttl_many(keys=keys, collection=collection) + + return [(self._validate_model(value=entry) if entry is not None else None, ttl_info) for (entry, ttl_info) in entries] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/__init__.py new file mode 100644 index 00000000..40cf8b3a --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.adapters.raise_on_missing.adapter import RaiseOnMissingAdapter + +__all__ = ["RaiseOnMissingAdapter"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py new file mode 100644 index 00000000..c1424fb8 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py @@ -0,0 +1,181 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'adapter.py' +# DO NOT CHANGE! Change the original file instead.
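+# A minimal usage sketch (illustrative only; `store` stands in for any +# KeyValue-compliant store instance, e.g. an in-memory store): +# +# adapter = RaiseOnMissingAdapter(key_value=store) +# adapter.put(key="k", value={"a": 1}, collection="test") +# adapter.get(key="k", collection="test", raise_on_missing=True) # -> {"a": 1} +# adapter.get(key="gone", collection="test", raise_on_missing=True) # raises MissingKeyError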
+from collections.abc import Sequence +from typing import Any, Literal, overload + +from key_value.sync.code_gen.errors import MissingKeyError +from key_value.sync.code_gen.protocols.key_value import KeyValue + + +class RaiseOnMissingAdapter: + """Adapter around a KeyValue store that raises on missing values for get/get_many/ttl/ttl_many. + + When `raise_on_missing=True`, methods raise `MissingKeyError` instead of returning None. + """ + + def __init__(self, key_value: KeyValue) -> None: + self.key_value: KeyValue = key_value + + @overload + def get(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[False] = False) -> dict[str, Any] | None: ... + + @overload + def get(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[True]) -> dict[str, Any]: ... + + def get(self, key: str, *, collection: str | None = None, raise_on_missing: bool = False) -> dict[str, Any] | None: + """Retrieve a value by key from the specified collection. + + Args: + key: The key to retrieve the value from. + collection: The collection to retrieve the value from. If no collection is provided, it will use the default collection. + raise_on_missing: Whether to raise a MissingKeyError if the key is not found. + + Returns: + The value associated with the key. If the key is not found, None will be returned. + """ + result = self.key_value.get(key=key, collection=collection) + + if result is not None: + return result + + if raise_on_missing: + raise MissingKeyError(operation="get", collection=collection, key=key) + + return None + + @overload + def get_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + ) -> list[dict[str, Any] | None]: ... + + @overload + def get_many(self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True]) -> list[dict[str, Any]]: ... + + def get_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + ) -> list[dict[str, Any]] | list[dict[str, Any] | None]: + """Retrieve multiple values by key from the specified collection. + + Args: + keys: The keys to retrieve the values from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + raise_on_missing: Whether to raise a MissingKeyError if any key is not found. + + Returns: + A list of values aligned with the keys; missing keys are None (or raise MissingKeyError when raise_on_missing=True). + """ + results: list[dict[str, Any] | None] = self.key_value.get_many(collection=collection, keys=keys) + + for i, key in enumerate(keys): + if results[i] is None and raise_on_missing: + raise MissingKeyError(operation="get_many", collection=collection, key=key) + + return results + + @overload + def ttl( + self, key: str, *, collection: str | None = None, raise_on_missing: Literal[False] = False + ) -> tuple[dict[str, Any] | None, float | None]: ... + + @overload + def ttl(self, key: str, *, collection: str | None = None, raise_on_missing: Literal[True]) -> tuple[dict[str, Any], float | None]: ... + + def ttl(self, key: str, *, collection: str | None = None, raise_on_missing: bool = False) -> tuple[dict[str, Any] | None, float | None]: + """Retrieve the value and TTL information for a key-value pair from the specified collection. + + Args: + key: The key to retrieve the TTL information from. + collection: The collection to retrieve the TTL information from. If no collection is provided, + it will use the default collection. + raise_on_missing: Whether to raise a MissingKeyError if the key is not found. + + Returns: + The value and TTL information for the key. If the key is not found, (None, None) will be returned.
+ """ + (value, ttl) = self.key_value.ttl(key=key, collection=collection) + + if value is not None: + return (value, ttl) + + if raise_on_missing: + raise MissingKeyError(operation="ttl", collection=collection, key=key) + + return (None, None) + + @overload + def ttl_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + ) -> list[tuple[dict[str, Any] | None, float | None]]: ... + + @overload + def ttl_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True] + ) -> list[tuple[dict[str, Any], float | None]]: ... + + def ttl_many( + self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + ) -> list[tuple[dict[str, Any], float | None]] | list[tuple[dict[str, Any] | None, float | None]]: + """Retrieve multiple values and TTL information by key from the specified collection. + + Args: + keys: The keys to retrieve the values and TTL information from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + raise_on_missing: Whether to raise a MissingKeyError if any key is not found. + + Returns: + A list of (value, ttl) tuples aligned with the keys; missing keys yield (None, None) (or raise MissingKeyError when raise_on_missing=True). + """ + results: list[tuple[dict[str, Any] | None, float | None]] = self.key_value.ttl_many(collection=collection, keys=keys) + + for i, key in enumerate(keys): + if results[i][0] is None and raise_on_missing: + raise MissingKeyError(operation="ttl_many", collection=collection, key=key) + + return results + + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + """Store a key-value pair in the specified collection with optional TTL. + + Args: + key: The key to store the value in. + value: The value to store. + collection: The collection to store the value in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pair. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ + return self.key_value.put(key=key, value=value, collection=collection, ttl=ttl) + + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + """Store multiple key-value pairs in the specified collection. + + Args: + keys: The keys to store the values in. + values: The values to store. + collection: The collection to store keys in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pairs. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ + return self.key_value.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + def delete(self, key: str, *, collection: str | None = None) -> bool: + """Delete a key-value pair from the specified collection. + + Args: + key: The key to delete the value from. + collection: The collection to delete the value from. If no collection is provided, it will use the default collection. + """ + return self.key_value.delete(key=key, collection=collection) + + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + """Delete multiple key-value pairs from the specified collection. + + Args: + keys: The keys to delete the values from. + collection: The collection to delete keys from. If no collection is provided, it will use the default collection. + + Returns: + The number of keys deleted.
+ """ + return self.key_value.delete_many(keys=keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py new file mode 100644 index 00000000..3ca01799 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py @@ -0,0 +1,24 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.errors.base import BaseKeyValueError +from key_value.sync.code_gen.errors.key_value import ( + DeserializationError, + InvalidTTLError, + KeyValueOperationError, + MissingKeyError, + SerializationError, +) +from key_value.sync.code_gen.errors.store import KeyValueStoreError, StoreConnectionError, StoreSetupError + +__all__ = [ + "BaseKeyValueError", + "DeserializationError", + "InvalidTTLError", + "KeyValueOperationError", + "KeyValueStoreError", + "MissingKeyError", + "SerializationError", + "StoreConnectionError", + "StoreSetupError", +] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py new file mode 100644 index 00000000..7c7d4ba6 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py @@ -0,0 +1,23 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'base.py' +# DO NOT CHANGE! Change the original file instead. +ExtraInfoType = dict[str, str | int | float | bool | None] + + +class BaseKeyValueError(Exception): + """Base exception for all Key-Value errors.""" + + def __init__(self, message: str | None = None, extra_info: ExtraInfoType | None = None): + message_parts: list[str] = [] + + if message: + message_parts.append(message) + + if extra_info: + extra_info_str = ";".join(f"{k}: {v}" for (k, v) in extra_info.items()) + if message: + extra_info_str = "(" + extra_info_str + ")" + + message_parts.append(extra_info_str) + + super().__init__(": ".join(message_parts)) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py new file mode 100644 index 00000000..7d14831c --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py @@ -0,0 +1,33 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'key_value.py' +# DO NOT CHANGE! Change the original file instead.
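+# Rendering sketch (illustrative only): BaseKeyValueError in errors/base.py joins +# `message` with a parenthesized "key: value" dump of `extra_info`, so a hypothetical +# MissingKeyError(operation="get", key="user:1") stringifies roughly as: +# "A key was requested that was required but not found in the store.: (operation: get;collection: default;key: user:1)"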
+from key_value.sync.code_gen.errors.base import BaseKeyValueError + + +class KeyValueOperationError(BaseKeyValueError): + """Base exception for all Key-Value operation errors.""" + + +class SerializationError(KeyValueOperationError): + """Raised when data cannot be serialized for storage.""" + + +class DeserializationError(KeyValueOperationError): + """Raised when stored data cannot be deserialized back to its original form.""" + + +class MissingKeyError(KeyValueOperationError): + """Raised when a key is missing from the store.""" + + def __init__(self, operation: str, collection: str | None = None, key: str | None = None): + super().__init__( + message="A key was requested that was required but not found in the store.", + extra_info={"operation": operation, "collection": collection or "default", "key": key}, + ) + + +class InvalidTTLError(KeyValueOperationError): + """Raised when a TTL is invalid.""" + + def __init__(self, ttl: float): + super().__init__(message="A TTL is invalid.", extra_info={"ttl": ttl}) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py new file mode 100644 index 00000000..df55b09a --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py @@ -0,0 +1,16 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.errors.base import BaseKeyValueError + + +class KeyValueStoreError(BaseKeyValueError): + """Base exception for all Key-Value store errors.""" + + +class StoreSetupError(KeyValueStoreError): + """Raised when a store setup fails.""" + + +class StoreConnectionError(KeyValueStoreError): + """Raised when unable to connect to or communicate with the underlying store.""" diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/__init__.py new file mode 100644 index 00000000..1a152476 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.protocols.key_value import KeyValue as KeyValue diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py b/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py new file mode 100644 index 00000000..33c7d548 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py @@ -0,0 +1,178 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'key_value.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Sequence +from typing import Any, Protocol, runtime_checkable + + +@runtime_checkable +class KeyValueProtocol(Protocol): + """A subset of KV operations: get/put/delete and TTL variants, including bulk calls.""" + + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + """Retrieve a value by key from the specified collection. + + Args: + key: The key to retrieve the value from. + collection: The collection to retrieve the value from. If no collection is provided, it will use the default collection. + + Returns: + The value associated with the key. 
If the key is not found, None will be returned. + """ + ... + + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + """Retrieve the value and TTL information for a key-value pair from the specified collection. + + Args: + key: The key to retrieve the TTL information from. + collection: The collection to retrieve the TTL information from. If no collection is provided, + it will use the default collection. + + Returns: + The value and TTL information for the key. If the key is not found, (None, None) will be returned. + """ + ... + + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + """Store a key-value pair in the specified collection with optional TTL. + + Args: + key: The key to store the value in. + value: The value to store. + collection: The collection to store the value in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pair. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ + ... + + def delete(self, key: str, *, collection: str | None = None) -> bool: + """Delete a key-value pair from the specified collection. + + Args: + key: The key to delete the value from. + collection: The collection to delete the value from. If no collection is provided, it will use the default collection. + """ + ... + + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + """Retrieve multiple values by key from the specified collection. + + Args: + keys: The keys to retrieve the values from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + + Returns: + A list of values for the keys. Each value is either a dict or None if the key is not found. + """ + ... + + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + """Retrieve multiple values and TTL information by key from the specified collection. + + Args: + keys: The keys to retrieve the values and TTL information from. + collection: The collection to retrieve keys from. If no collection is provided, it will use the default collection. + + Returns: + A list of tuples containing (value, ttl) for each key. Each tuple contains either (dict, float) or (None, None) if the + key is not found. + """ + ... + + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + """Store multiple key-value pairs in the specified collection. + + Args: + keys: The keys to store the values in. + values: The values to store. + collection: The collection to store keys in. If no collection is provided, it will use the default collection. + ttl: The optional time-to-live (expiry duration) for the key-value pairs. Defaults to no TTL. Note: The + backend store will convert the provided format to its own internal format. + """ + ... + + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + """Delete multiple key-value pairs from the specified collection. + + Args: + keys: The keys to delete the values from. + collection: The collection to delete keys from. If no collection is provided, it will use the default collection. + + Returns: + The number of keys deleted. 
+ """ + ... + + +@runtime_checkable +class CullProtocol(Protocol): + def cull(self) -> None: + """Cull the store. + + This will remove all expired keys from the store. + """ + ... + + +@runtime_checkable +class EnumerateKeysProtocol(Protocol): + """Protocol segment to enumerate keys in a collection.""" + + def keys(self, collection: str | None = None, *, limit: int | None = None) -> list[str]: + """List all keys in the specified collection. + + Args: + collection: The collection to list the keys from. If no collection is provided, it will use the default collection. + limit: The maximum number of keys to list. The behavior when no limit is provided is store-dependent. + """ + ... + + +@runtime_checkable +class EnumerateCollectionsProtocol(Protocol): + def collections(self, *, limit: int | None = None) -> list[str]: + """List all available collection names (may include empty collections). + + Args: + limit: The maximum number of collections to list. The behavior when no limit is provided is store-dependent. + """ + ... + + +@runtime_checkable +class DestroyStoreProtocol(Protocol): + """Protocol segment for store-destruction semantics.""" + + def destroy(self) -> bool: + """Destroy the keystore. + + This will clear all collections and keys from the store. + """ + ... + + +@runtime_checkable +class DestroyCollectionProtocol(Protocol): + def destroy_collection(self, collection: str) -> bool: + """Destroy the specified collection. + + Args: + collection: The collection to destroy. + """ + ... + + +class KeyValue(KeyValueProtocol, Protocol): + """A protocol for key-value store operations. + + Includes basic operations: get, put, delete, ttl + Includes bulk operations: get_many, put_many, delete_many, ttl_many. + """ diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py new file mode 100644 index 00000000..29b59fdd --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py @@ -0,0 +1,379 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'base.py' +# DO NOT CHANGE! Change the original file instead. +""" +Base abstract class for managed key-value store implementations. +""" + +from abc import ABC, abstractmethod +from collections import defaultdict +from collections.abc import Sequence +from threading import Lock +from types import TracebackType +from typing import Any + +from typing_extensions import Self, override + +from key_value.sync.code_gen.errors import InvalidTTLError, StoreSetupError +from key_value.sync.code_gen.protocols.key_value import ( + CullProtocol, + DestroyCollectionProtocol, + DestroyStoreProtocol, + EnumerateCollectionsProtocol, + EnumerateKeysProtocol, + KeyValueProtocol, +) +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry +from key_value.sync.code_gen.utils.time_to_live import now + +DEFAULT_COLLECTION_NAME = "default_collection" + + +def validate_one_ttl(t: float | None, raise_error: bool = False) -> bool: + if t is None: + return True + if t <= 0: + if raise_error: + raise InvalidTTLError(ttl=t) + return False + return True + + +def validate_ttls(t: list[float | None] | float | None, raise_error: bool = False) -> bool: + if not isinstance(t, Sequence): + t = [t] + return all(validate_one_ttl(t=ttl, raise_error=raise_error) for ttl in t) + + +class BaseStore(KeyValueProtocol, ABC): + """An opinionated Abstract base class for managed key-value stores using ManagedEntry objects. 
+ + This class implements all of the methods required for compliance with the KVStore protocol but + requires subclasses to implement the _get_managed_entry, _put_managed_entry, and _delete_managed_entry methods. + + Subclasses can also override the _get_managed_entries, _put_managed_entries, and _delete_managed_entries methods if desired. + + Subclasses can implement the _setup, which will be called once before the first use of the store, and _setup_collection, which will + be called once per collection before the first use of a collection. + """ + + _setup_complete: bool + _setup_lock: Lock + + _setup_collection_locks: defaultdict[str, Lock] + _setup_collection_complete: defaultdict[str, bool] + + default_collection: str + + def __init__(self, *, default_collection: str | None = None) -> None: + """Initialize the managed key-value store. + + Args: + default_collection: The default collection to use if no collection is provided. + Defaults to "default_collection". + """ + + self._setup_complete = False + self._setup_lock = Lock() + self._setup_collection_locks = defaultdict(Lock) + self._setup_collection_complete = defaultdict(bool) + + self.default_collection = default_collection or DEFAULT_COLLECTION_NAME + + super().__init__() + + def _setup(self) -> None: + """Initialize the store (called once before first use).""" + + def _setup_collection(self, *, collection: str) -> None: # pyright: ignore[reportUnusedParameter] + "Initialize the collection (called once before first use of the collection)." + + def setup(self) -> None: + if not self._setup_complete: + with self._setup_lock: + if not self._setup_complete: + try: + self._setup() + except Exception as e: + raise StoreSetupError(message=f"Failed to setup store: {e}", extra_info={"store": self.__class__.__name__}) from e + self._setup_complete = True + + def setup_collection(self, *, collection: str) -> None: + self.setup() + + if not self._setup_collection_complete[collection]: + with self._setup_collection_locks[collection]: + if not self._setup_collection_complete[collection]: + try: + self._setup_collection(collection=collection) + except Exception as e: + raise StoreSetupError(message=f"Failed to setup collection: {e}", extra_info={"collection": collection}) from e + self._setup_collection_complete[collection] = True + + @abstractmethod + def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None: + """Retrieve a cache entry by key from the specified collection.""" + + def _get_managed_entries(self, *, collection: str, keys: Sequence[str]) -> list[ManagedEntry | None]: + """Retrieve multiple managed entries by key from the specified collection.""" + + return [self._get_managed_entry(collection=collection, key=key) for key in keys] + + @override + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + """Retrieve a value by key from the specified collection. + + Args: + collection: The collection to retrieve the value from. If no collection is provided, it will use the default collection. + key: The key to retrieve the value from. + + Returns: + The value associated with the key, or None if not found or expired. 
+ """ + collection = collection or self.default_collection + self.setup_collection(collection=collection) + + managed_entry: ManagedEntry | None = self._get_managed_entry(collection=collection, key=key) + + if not managed_entry: + return None + + if managed_entry.is_expired: + return None + + return managed_entry.value + + @override + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + collection = collection or self.default_collection + self.setup_collection(collection=collection) + + entries = self._get_managed_entries(keys=keys, collection=collection) + return [entry.value if entry and (not entry.is_expired) else None for entry in entries] + + @override + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + collection = collection or self.default_collection + self.setup_collection(collection=collection) + + managed_entry: ManagedEntry | None = self._get_managed_entry(collection=collection, key=key) + + if not managed_entry or managed_entry.is_expired: + return (None, None) + + return (managed_entry.value, managed_entry.ttl) + + @override + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + """Retrieve multiple values and TTLs by key from the specified collection. + + Returns a list of tuples of the form (value, ttl_seconds). Missing or expired + entries are represented as (None, None). + """ + collection = collection or self.default_collection + self.setup_collection(collection=collection) + + entries = self._get_managed_entries(keys=keys, collection=collection) + return [(entry.value, entry.ttl) if entry and (not entry.is_expired) else (None, None) for entry in entries] + + @abstractmethod + def _put_managed_entry(self, *, collection: str, key: str, managed_entry: ManagedEntry) -> None: + """Store a managed entry by key in the specified collection.""" + ... 
+
+    def _put_managed_entries(self, *, collection: str, keys: Sequence[str], managed_entries: Sequence[ManagedEntry]) -> None:
+        """Store multiple managed entries by key in the specified collection."""
+
+        for key, managed_entry in zip(keys, managed_entries, strict=True):
+            self._put_managed_entry(collection=collection, key=key, managed_entry=managed_entry)
+
+    @override
+    def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        """Store a key-value pair in the specified collection with optional TTL."""
+        collection = collection or self.default_collection
+        self.setup_collection(collection=collection)
+
+        _ = validate_ttls(t=ttl, raise_error=True)
+
+        managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=ttl, created_at=now())
+
+        self._put_managed_entry(collection=collection, key=key, managed_entry=managed_entry)
+
+    @override
+    def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        """Store multiple key-value pairs in the specified collection."""
+        if len(keys) != len(values):
+            msg = "put_many called but a different number of keys and values were provided"
+            raise ValueError(msg) from None
+
+        if isinstance(ttl, Sequence) and (len(ttl) != len(keys)):
+            msg = "put_many called but a different number of keys and ttl values were provided"
+            raise ValueError(msg) from None
+
+        collection = collection or self.default_collection
+        self.setup_collection(collection=collection)
+
+        ttl_for_entries: list[float | None] = []
+
+        if ttl is None:
+            ttl_for_entries = [None] * len(keys)
+        elif isinstance(ttl, Sequence):
+            ttl_for_entries = list(ttl)
+        else:
+            # A single scalar TTL (int or float) applies to every key.
+            ttl_for_entries = [ttl] * len(keys)
+
+        _ = validate_ttls(t=ttl_for_entries, raise_error=True)
+
+        managed_entries: list[ManagedEntry] = []
+
+        for value, value_ttl in zip(values, ttl_for_entries, strict=True):
+            managed_entries.append(ManagedEntry(value=value, ttl=value_ttl, created_at=now()))
+
+        self._put_managed_entries(collection=collection, keys=keys, managed_entries=managed_entries)
+
+    @abstractmethod
+    def _delete_managed_entry(self, *, key: str, collection: str) -> bool:
+        """Delete a managed entry by key from the specified collection."""
+        ...
+
+    def _delete_managed_entries(self, *, keys: Sequence[str], collection: str) -> int:
+        """Delete multiple managed entries by key from the specified collection."""
+
+        deleted_count: int = 0
+
+        for key in keys:
+            if self._delete_managed_entry(key=key, collection=collection):
+                deleted_count += 1
+
+        return deleted_count
+
+    @override
+    def delete(self, key: str, *, collection: str | None = None) -> bool:
+        collection = collection or self.default_collection
+        self.setup_collection(collection=collection)
+
+        return self._delete_managed_entry(key=key, collection=collection)
+
+    @override
+    def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int:
+        """Delete multiple managed entries by key from the specified collection."""
+        collection = collection or self.default_collection
+        self.setup_collection(collection=collection)
+
+        return self._delete_managed_entries(keys=keys, collection=collection)
+
+
+class BaseEnumerateKeysStore(BaseStore, EnumerateKeysProtocol, ABC):
+    """An abstract base class for key-value stores that can enumerate keys.
+
+    Subclasses must implement the _get_collection_keys method.
+ """ + + @override + def keys(self, collection: str | None = None, *, limit: int | None = None) -> list[str]: + """List all keys in the specified collection.""" + + collection = collection or self.default_collection + self.setup_collection(collection=collection) + + return self._get_collection_keys(collection=collection, limit=limit) + + @abstractmethod + def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + """List all keys in the specified collection.""" + + +class BaseContextManagerStore(BaseStore, ABC): + """An abstract base class for context manager stores.""" + + def __enter__(self) -> Self: + self.setup() + return self + + def __exit__(self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None) -> None: + self._close() + + def close(self) -> None: + self._close() + + @abstractmethod + def _close(self) -> None: + """Close the store.""" + ... + + +class BaseEnumerateCollectionsStore(BaseStore, EnumerateCollectionsProtocol, ABC): + @override + def collections(self, *, limit: int | None = None) -> list[str]: + """List all available collection names (may include empty collections).""" + self.setup() + + return self._get_collection_names(limit=limit) + + @abstractmethod + def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + """List all available collection names (may include empty collections).""" + + +class BaseDestroyStore(BaseStore, DestroyStoreProtocol, ABC): + """An abstract base class for destroyable stores. + + Subclasses must implement the delete_store method. + """ + + @override + def destroy(self) -> bool: + """Destroy the store.""" + self.setup() + + return self._delete_store() + + @abstractmethod + def _delete_store(self) -> bool: + """Delete the store.""" + ... + + +class BaseDestroyCollectionStore(BaseStore, DestroyCollectionProtocol, ABC): + """An abstract base class for destroyable collections. + + Subclasses must implement the delete_collection method. + """ + + @override + def destroy_collection(self, collection: str) -> bool: + """Destroy the collection.""" + self.setup() + + return self._delete_collection(collection=collection) + + @abstractmethod + def _delete_collection(self, *, collection: str) -> bool: + """Delete the collection.""" + ... + + +class BaseCullStore(BaseStore, CullProtocol, ABC): + """An abstract base class for cullable stores. + + Subclasses must implement the cull method. + """ + + @override + def cull(self) -> None: + """Cull the store.""" + self.setup() + + return self._cull() + + @abstractmethod + def _cull(self) -> None: + """Cull the store.""" + ... diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/__init__.py new file mode 100644 index 00000000..68263221 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/__init__.py @@ -0,0 +1,7 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. 
+from key_value.sync.code_gen.stores.disk.multi_store import MultiDiskStore +from key_value.sync.code_gen.stores.disk.store import DiskStore + +__all__ = ["DiskStore", "MultiDiskStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py new file mode 100644 index 00000000..6571d60d --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py @@ -0,0 +1,148 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'multi_store.py' +# DO NOT CHANGE! Change the original file instead. +import time +from collections.abc import Callable +from pathlib import Path +from typing import overload + +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore +from key_value.sync.code_gen.utils.compound import compound_key +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry + +try: + from diskcache import Cache + from pathvalidate import sanitize_filename +except ImportError as e: + msg = "DiskStore requires py-key-value-aio[disk]" + raise ImportError(msg) from e + +DEFAULT_DISK_STORE_SIZE_LIMIT = 1 * 1024 * 1024 * 1024 # 1GB + +CacheFactory = Callable[[str], Cache] + + +def _sanitize_collection_for_filesystem(collection: str) -> str: + """Sanitize the collection name so that it can be used as a directory name on the filesystem.""" + + return sanitize_filename(filename=collection) + + +class MultiDiskStore(BaseContextManagerStore, BaseStore): + """A disk-based store that uses the diskcache library to store data. The MultiDiskStore creates one diskcache Cache + instance per collection.""" + + _cache: dict[str, Cache] + + _disk_cache_factory: CacheFactory + + _base_directory: Path + + _max_size: int | None + + @overload + def __init__(self, *, disk_cache_factory: CacheFactory, default_collection: str | None = None) -> None: + """Initialize the disk caches. + + Args: + disk_cache_factory: A factory function that creates a diskcache Cache instance for a given collection. + default_collection: The default collection to use if no collection is provided. + """ + + @overload + def __init__(self, *, base_directory: Path, max_size: int | None = None, default_collection: str | None = None) -> None: + """Initialize the disk caches. + + Args: + base_directory: The directory to use for the disk caches. + max_size: The maximum size of the disk caches. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. + """ + + def __init__( + self, + *, + disk_cache_factory: CacheFactory | None = None, + base_directory: Path | None = None, + max_size: int | None = None, + default_collection: str | None = None, + ) -> None: + """Initialize the disk caches. + + Args: + disk_cache_factory: A factory function that creates a diskcache Cache instance for a given collection. + base_directory: The directory to use for the disk caches. + max_size: The maximum size of the disk caches. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. 
+ """ + if disk_cache_factory is None and base_directory is None: + msg = "Either disk_cache_factory or base_directory must be provided" + raise ValueError(msg) + + if base_directory is None: + base_directory = Path.cwd() + + self._max_size = max_size + + self._base_directory = base_directory.resolve() + + def default_disk_cache_factory(collection: str) -> Cache: + sanitized_collection: str = _sanitize_collection_for_filesystem(collection=collection) + + cache_directory: Path = self._base_directory / sanitized_collection + + cache_directory.mkdir(parents=True, exist_ok=True) + + return Cache(directory=cache_directory, size_limit=self._max_size or DEFAULT_DISK_STORE_SIZE_LIMIT) + + self._disk_cache_factory = disk_cache_factory or default_disk_cache_factory + + self._cache = {} + + super().__init__(default_collection=default_collection) + + @override + def _setup_collection(self, *, collection: str) -> None: + self._cache[collection] = self._disk_cache_factory(collection) + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = compound_key(collection=collection, key=key) + + expire_epoch: float + + (managed_entry_str, expire_epoch) = self._cache[collection].get(key=combo_key, expire_time=True) # pyright: ignore[reportAny] + + if not isinstance(managed_entry_str, str): + return None + + ttl = expire_epoch - time.time() if expire_epoch else None + + managed_entry: ManagedEntry = ManagedEntry.from_json(json_str=managed_entry_str, ttl=ttl) + + return managed_entry + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + combo_key: str = compound_key(collection=collection, key=key) + + _ = self._cache[collection].set(key=combo_key, value=managed_entry.to_json(include_expiration=False), expire=managed_entry.ttl) + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = compound_key(collection=collection, key=key) + + return self._cache[collection].delete(key=combo_key, retry=True) + + def _sync_close(self) -> None: + for cache in self._cache.values(): + cache.close() + + @override + def _close(self) -> None: + self._sync_close() + + def __del__(self) -> None: + self._sync_close() diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py new file mode 100644 index 00000000..47e5ecbc --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py @@ -0,0 +1,116 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. 
+import time +from pathlib import Path +from typing import overload + +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore +from key_value.sync.code_gen.utils.compound import compound_key +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry + +try: + from diskcache import Cache +except ImportError as e: + msg = "DiskStore requires py-key-value-aio[disk]" + raise ImportError(msg) from e + +DEFAULT_DISK_STORE_MAX_SIZE = 1 * 1024 * 1024 * 1024 # 1GB + + +class DiskStore(BaseContextManagerStore, BaseStore): + """A disk-based store that uses the diskcache library to store data.""" + + _cache: Cache + + @overload + def __init__(self, *, disk_cache: Cache, default_collection: str | None = None) -> None: + """Initialize the disk cache. + + Args: + disk_cache: An existing diskcache Cache instance to use. + default_collection: The default collection to use if no collection is provided. + """ + + @overload + def __init__(self, *, directory: Path | str, max_size: int | None = None, default_collection: str | None = None) -> None: + """Initialize the disk cache. + + Args: + directory: The directory to use for the disk cache. + max_size: The maximum size of the disk cache. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. + """ + + def __init__( + self, + *, + disk_cache: Cache | None = None, + directory: Path | str | None = None, + max_size: int | None = None, + default_collection: str | None = None, + ) -> None: + """Initialize the disk cache. + + Args: + disk_cache: An existing diskcache Cache instance to use. + directory: The directory to use for the disk cache. + max_size: The maximum size of the disk cache. Defaults to 1GB. + default_collection: The default collection to use if no collection is provided. 
+ """ + if disk_cache is not None and directory is not None: + msg = "Either disk_cache or directory must be provided" + raise ValueError(msg) + + if disk_cache is None and directory is None: + msg = "Either disk_cache or directory must be provided" + raise ValueError(msg) + + if disk_cache: + self._cache = disk_cache + elif directory: + directory = Path(directory) + + directory.mkdir(parents=True, exist_ok=True) + + self._cache = Cache(directory=directory, size_limit=max_size or DEFAULT_DISK_STORE_MAX_SIZE) + + super().__init__(default_collection=default_collection) + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = compound_key(collection=collection, key=key) + + expire_epoch: float | None + + (managed_entry_str, expire_epoch) = self._cache.get(key=combo_key, expire_time=True) # pyright: ignore[reportAny] + + if not isinstance(managed_entry_str, str): + return None + + ttl = expire_epoch - time.time() if expire_epoch else None + + managed_entry: ManagedEntry = ManagedEntry.from_json(json_str=managed_entry_str, ttl=ttl) + + return managed_entry + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + combo_key: str = compound_key(collection=collection, key=key) + + _ = self._cache.set(key=combo_key, value=managed_entry.to_json(), expire=managed_entry.ttl) + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = compound_key(collection=collection, key=key) + + return self._cache.delete(key=combo_key, retry=True) + + @override + def _close(self) -> None: + self._cache.close() + + def __del__(self) -> None: + self._cache.close() diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/__init__.py new file mode 100644 index 00000000..714d442f --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.elasticsearch.store import ElasticsearchStore + +__all__ = ["ElasticsearchStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py new file mode 100644 index 00000000..62d6e8f2 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py @@ -0,0 +1,232 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. 
+import hashlib
+from typing import TYPE_CHECKING, Any, overload
+
+from typing_extensions import override
+
+from key_value.sync.code_gen.stores.base import (
+    BaseContextManagerStore,
+    BaseCullStore,
+    BaseDestroyCollectionStore,
+    BaseEnumerateCollectionsStore,
+    BaseEnumerateKeysStore,
+    BaseStore,
+)
+from key_value.sync.code_gen.utils.compound import compound_key
+from key_value.sync.code_gen.utils.managed_entry import ManagedEntry, load_from_json
+from key_value.sync.code_gen.utils.time_to_live import now_as_epoch, try_parse_datetime_str
+
+try:
+    from elasticsearch import Elasticsearch
+    from key_value.sync.code_gen.stores.elasticsearch.utils import (
+        get_aggregations_from_body,
+        get_body_from_response,
+        get_first_value_from_field_in_hit,
+        get_hits_from_response,
+        get_source_from_body,
+    )
+except ImportError as e:
+    msg = "ElasticsearchStore requires py-key-value-aio[elasticsearch]"
+    raise ImportError(msg) from e
+
+if TYPE_CHECKING:
+    from datetime import datetime
+
+    from elastic_transport import ObjectApiResponse
+
+DEFAULT_INDEX = "kv-store"
+
+DEFAULT_MAPPING = {
+    "properties": {
+        "created_at": {"type": "date"},
+        "expires_at": {"type": "date"},
+        "collection": {"type": "keyword"},
+        "key": {"type": "keyword"},
+        "value": {"type": "keyword", "index": False, "doc_values": False, "ignore_above": 256},
+    }
+}
+
+DEFAULT_PAGE_SIZE = 10000
+PAGE_LIMIT = 10000
+
+MAX_KEY_LENGTH = 256
+
+
+class ElasticsearchStore(
+    BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDestroyCollectionStore, BaseCullStore, BaseContextManagerStore, BaseStore
+):
+    """An Elasticsearch-based store."""
+
+    _client: Elasticsearch
+
+    _index: str
+
+    @overload
+    def __init__(self, *, elasticsearch_client: Elasticsearch, index: str, default_collection: str | None = None) -> None: ...
+
+    @overload
+    def __init__(self, *, url: str, api_key: str | None = None, index: str, default_collection: str | None = None) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        elasticsearch_client: Elasticsearch | None = None,
+        url: str | None = None,
+        api_key: str | None = None,
+        index: str,
+        default_collection: str | None = None,
+    ) -> None:
+        """Initialize the Elasticsearch store.
+
+        Args:
+            elasticsearch_client: The Elasticsearch client to use.
+            url: The URL of the Elasticsearch cluster.
+            api_key: The API key to use.
+            index: The index to use; an empty value falls back to "kv-store".
+            default_collection: The default collection to use if no collection is provided.
+ """ + if elasticsearch_client is None and url is None: + msg = "Either elasticsearch_client or url must be provided" + raise ValueError(msg) + + if elasticsearch_client: + self._client = elasticsearch_client + elif url: + self._client = Elasticsearch( + hosts=[url], api_key=api_key, http_compress=True, request_timeout=10, retry_on_timeout=True, max_retries=3 + ) + else: + msg = "Either elasticsearch_client or url must be provided" + raise ValueError(msg) + + self._index = index or DEFAULT_INDEX + super().__init__(default_collection=default_collection) + + @override + def _setup(self) -> None: + if self._client.options(ignore_status=404).indices.exists(index=self._index): + return + + _ = self._client.options(ignore_status=404).indices.create(index=self._index, mappings=DEFAULT_MAPPING) + + @override + def _setup_collection(self, *, collection: str) -> None: + pass + + def sanitize_document_id(self, key: str) -> str: + if len(key) > MAX_KEY_LENGTH: + sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() + return sha256_hash[:64] + return key + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = compound_key(collection=collection, key=key) + + elasticsearch_response = self._client.options(ignore_status=404).get(index=self._index, id=self.sanitize_document_id(key=combo_key)) + + body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) + + if not (source := get_source_from_body(body=body)): + return None + + if not (value_str := source.get("value")) or not isinstance(value_str, str): + return None + + created_at: datetime | None = try_parse_datetime_str(value=source.get("created_at")) + expires_at: datetime | None = try_parse_datetime_str(value=source.get("expires_at")) + + return ManagedEntry(value=load_from_json(value_str), created_at=created_at, expires_at=expires_at) + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + combo_key: str = compound_key(collection=collection, key=key) + + document: dict[str, Any] = {"collection": collection, "key": key, "value": managed_entry.to_json(include_metadata=False)} + + if managed_entry.created_at: + document["created_at"] = managed_entry.created_at.isoformat() + if managed_entry.expires_at: + document["expires_at"] = managed_entry.expires_at.isoformat() + + _ = self._client.index(index=self._index, id=self.sanitize_document_id(key=combo_key), body=document) + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = compound_key(collection=collection, key=key) + + elasticsearch_response: ObjectApiResponse[Any] = self._client.options(ignore_status=404).delete( + index=self._index, id=self.sanitize_document_id(key=combo_key) + ) + + body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) + + if not (result := body.get("result")) or not isinstance(result, str): + return False + + return result == "deleted" + + @override + def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + """Get up to 10,000 keys in the specified collection (eventually consistent).""" + + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + + result: ObjectApiResponse[Any] = self._client.options(ignore_status=404).search( + index=self._index, fields=[{"key": None}], body={"query": {"term": {"collection": collection}}}, source_includes=[], size=limit + ) + + if not (hits := get_hits_from_response(response=result)): + return 
[] + + all_keys: list[str] = [] + + for hit in hits: + if not (key := get_first_value_from_field_in_hit(hit=hit, field="key", value_type=str)): + continue + + all_keys.append(key) + + return all_keys + + @override + def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + """List up to 10,000 collections in the elasticsearch store (eventually consistent).""" + + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + + search_response: ObjectApiResponse[Any] = self._client.options(ignore_status=404).search( + index=self._index, aggregations={"collections": {"terms": {"field": "collection"}}}, size=limit + ) + + body: dict[str, Any] = get_body_from_response(response=search_response) + aggregations: dict[str, Any] = get_aggregations_from_body(body=body) + + buckets: list[Any] = aggregations["collections"]["buckets"] # pyright: ignore[reportAny] + + return [bucket["key"] for bucket in buckets] # pyright: ignore[reportAny] + + @override + def _delete_collection(self, *, collection: str) -> bool: + result: ObjectApiResponse[Any] = self._client.options(ignore_status=404).delete_by_query( + index=self._index, body={"query": {"term": {"collection": collection}}} + ) + + body: dict[str, Any] = get_body_from_response(response=result) + + if not (deleted := body.get("deleted")) or not isinstance(deleted, int): + return False + + return deleted > 0 + + @override + def _cull(self) -> None: + _ = self._client.options(ignore_status=404).delete_by_query( + index=self._index, body={"query": {"range": {"expires_at": {"lt": now_as_epoch()}}}} + ) + + @override + def _close(self) -> None: + self._client.close() diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py new file mode 100644 index 00000000..d4c95a3f --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py @@ -0,0 +1,110 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'utils.py' +# DO NOT CHANGE! Change the original file instead. 
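+# These helpers defensively unwrap elastic_transport responses instead of trusting their shape.
+# For example, a search hit using the "fields" projection looks roughly like the sketch below
+# (illustrative data), and get_first_value_from_field_in_hit(hit=hit, field="key", value_type=str)
+# would return "users::alice":
+#
+#     hit = {"_index": "kv-store", "fields": {"key": ["users::alice"]}}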
+from typing import Any, TypeVar, cast + +from elastic_transport import ObjectApiResponse + + +def get_body_from_response(response: ObjectApiResponse[Any]) -> dict[str, Any]: + if not (body := response.body): # pyright: ignore[reportAny] + return {} + + if not isinstance(body, dict) or not all(isinstance(key, str) for key in body): # pyright: ignore[reportUnknownVariableType] + return {} + + return cast(typ="dict[str, Any]", val=body) + + +def get_source_from_body(body: dict[str, Any]) -> dict[str, Any]: + if not (source := body.get("_source")): + return {} + + if not isinstance(source, dict) or not all(isinstance(key, str) for key in source): # pyright: ignore[reportUnknownVariableType] + return {} + + return cast(typ="dict[str, Any]", val=source) + + +def get_aggregations_from_body(body: dict[str, Any]) -> dict[str, Any]: + if not (aggregations := body.get("aggregations")): + return {} + + if not isinstance(aggregations, dict) or not all(isinstance(key, str) for key in aggregations): # pyright: ignore[reportUnknownVariableType] + return {} + + return cast(typ="dict[str, Any]", val=aggregations) + + +def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, Any]]: + if not (body := response.body): # pyright: ignore[reportAny] + return [] + + if not isinstance(body, dict) or not all(isinstance(key, str) for key in body): # pyright: ignore[reportUnknownVariableType] + return [] + + body_dict: dict[str, Any] = cast(typ="dict[str, Any]", val=body) + + if not (hits := body_dict.get("hits")): + return [] + + hits_dict: dict[str, Any] = cast(typ="dict[str, Any]", val=hits) + + if not (hits_list := hits_dict.get("hits")): + return [] + + if not all(isinstance(hit, dict) for hit in hits_list): # pyright: ignore[reportAny] + return [] + + hits_list_dict: list[dict[str, Any]] = cast(typ="list[dict[str, Any]]", val=hits_list) + + return hits_list_dict + + +T = TypeVar("T") + + +def get_fields_from_hit(hit: dict[str, Any]) -> dict[str, list[Any]]: + if not (fields := hit.get("fields")): + return {} + + if not isinstance(fields, dict) or not all(isinstance(key, str) for key in fields): # pyright: ignore[reportUnknownVariableType] + msg = f"Fields in hit {hit} is not a dict" + raise TypeError(msg) + + if not all(isinstance(value, list) for value in fields.values()): # pyright: ignore[reportUnknownVariableType] + msg = f"Fields in hit {hit} is not a dict of lists" + raise TypeError(msg) + + return cast(typ="dict[str, list[Any]]", val=fields) + + +def get_field_from_hit(hit: dict[str, Any], field: str) -> list[Any]: + if not (fields := get_fields_from_hit(hit=hit)): + return [] + + if not (value := fields.get(field)): + msg = f"Field {field} is not in hit {hit}" + raise TypeError(msg) + + return value + + +def get_values_from_field_in_hit(hit: dict[str, Any], field: str, value_type: type[T]) -> list[T]: + if not (value := get_field_from_hit(hit=hit, field=field)): + msg = f"Field {field} is not in hit {hit}" + raise TypeError(msg) + + if not all(isinstance(item, value_type) for item in value): # pyright: ignore[reportAny] + msg = f"Field {field} in hit {hit} is not a list of {value_type}" + raise TypeError(msg) + + return cast(typ="list[T]", val=value) + + +def get_first_value_from_field_in_hit(hit: dict[str, Any], field: str, value_type: type[T]) -> T: + values: list[T] = get_values_from_field_in_hit(hit=hit, field=field, value_type=value_type) + if len(values) != 1: + msg: str = f"Field {field} in hit {hit} is not a single value" + raise TypeError(msg) + return values[0] diff --git 
a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/__init__.py new file mode 100644 index 00000000..a0028fd2 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.memory.store import MemoryStore + +__all__ = ["MemoryStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py new file mode 100644 index 00000000..7b8c71d3 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py @@ -0,0 +1,160 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. +import sys +from dataclasses import dataclass, field +from datetime import datetime +from typing import Any + +from typing_extensions import Self, override + +from key_value.sync.code_gen.stores.base import ( + BaseDestroyCollectionStore, + BaseDestroyStore, + BaseEnumerateCollectionsStore, + BaseEnumerateKeysStore, +) +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry +from key_value.sync.code_gen.utils.time_to_live import epoch_to_datetime + +try: + from cachetools import TLRUCache +except ImportError as e: + msg = "MemoryStore requires py-key-value-aio[memory]" + raise ImportError(msg) from e + + +@dataclass +class MemoryCacheEntry: + json_str: str + + expires_at: datetime | None + + ttl_at_insert: float | None = field(default=None) + + @classmethod + def from_managed_entry(cls, managed_entry: ManagedEntry, ttl: float | None = None) -> Self: + return cls(json_str=managed_entry.to_json(), expires_at=managed_entry.expires_at, ttl_at_insert=ttl) + + def to_managed_entry(self) -> ManagedEntry: + return ManagedEntry.from_json(json_str=self.json_str) + + +def _memory_cache_ttu(_key: Any, value: MemoryCacheEntry, now: float) -> float: # pyright: ignore[reportAny] + "Calculate time-to-use for cache entries based on their TTL." + if value.ttl_at_insert is None: + return sys.maxsize + + expiration_epoch: float = now + value.ttl_at_insert + + value.expires_at = epoch_to_datetime(epoch=expiration_epoch) + + return expiration_epoch + + +def _memory_cache_getsizeof(value: MemoryCacheEntry) -> int: # pyright: ignore[reportUnusedParameter] # noqa: ARG001 + "Return size of cache entry (always 1 for entry counting)." 
+ return 1 + + +DEFAULT_MAX_ENTRIES_PER_COLLECTION = 10000 + +DEFAULT_PAGE_SIZE = 10000 +PAGE_LIMIT = 10000 + + +class MemoryCollection: + _cache: TLRUCache[str, MemoryCacheEntry] + + def __init__(self, max_entries: int = DEFAULT_MAX_ENTRIES_PER_COLLECTION): + self._cache = TLRUCache[str, MemoryCacheEntry](maxsize=max_entries, ttu=_memory_cache_ttu, getsizeof=_memory_cache_getsizeof) + + def get(self, key: str) -> ManagedEntry | None: + managed_entry_str: MemoryCacheEntry | None = self._cache.get(key) + + if managed_entry_str is None: + return None + + managed_entry: ManagedEntry = managed_entry_str.to_managed_entry() + + return managed_entry + + def put(self, key: str, value: ManagedEntry) -> None: + self._cache[key] = MemoryCacheEntry.from_managed_entry(managed_entry=value, ttl=value.ttl) + + def delete(self, key: str) -> bool: + return self._cache.pop(key, None) is not None + + def keys(self, *, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + return list(self._cache.keys())[:limit] + + +class MemoryStore(BaseDestroyStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore): + """In-memory key-value store using TLRU (Time-aware Least Recently Used) cache.""" + + max_entries_per_collection: int + + _cache: dict[str, MemoryCollection] + + def __init__(self, *, max_entries_per_collection: int = DEFAULT_MAX_ENTRIES_PER_COLLECTION, default_collection: str | None = None): + """Initialize the in-memory cache. + + Args: + max_entries_per_collection: The maximum number of entries per collection. Defaults to 10000. + default_collection: The default collection to use if no collection is provided. + """ + + self.max_entries_per_collection = max_entries_per_collection + + self._cache = {} + + super().__init__(default_collection=default_collection) + + @override + def _setup_collection(self, *, collection: str) -> None: + self._cache[collection] = MemoryCollection(max_entries=self.max_entries_per_collection) + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + collection_cache: MemoryCollection = self._cache[collection] + + return collection_cache.get(key=key) + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + collection_cache: MemoryCollection = self._cache[collection] + + collection_cache.put(key=key, value=managed_entry) + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + collection_cache: MemoryCollection = self._cache[collection] + + return collection_cache.delete(key=key) + + @override + def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + collection_cache: MemoryCollection = self._cache[collection] + + return collection_cache.keys(limit=limit) + + @override + def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + return list(self._cache.keys())[:limit] + + @override + def _delete_collection(self, *, collection: str) -> bool: + if collection not in self._cache: + return False + + del self._cache[collection] + + return True + + @override + def _delete_store(self) -> bool: + self._cache.clear() + + return True diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/__init__.py new file mode 100644 index 00000000..6f0139ff --- /dev/null +++ 
b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/__init__.py
@@ -0,0 +1,6 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file '__init__.py'
+# DO NOT CHANGE! Change the original file instead.
+from key_value.sync.code_gen.stores.mongodb.store import MongoDBStore
+
+__all__ = ["MongoDBStore"]
diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py
new file mode 100644
index 00000000..c175277f
--- /dev/null
+++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py
@@ -0,0 +1,194 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file 'store.py'
+# DO NOT CHANGE! Change the original file instead.
+from datetime import datetime
+from typing import TYPE_CHECKING, Any, TypedDict, overload
+
+from pymongo.collection import Collection
+from pymongo.database import Database
+from typing_extensions import Self, override
+
+from key_value.sync.code_gen.stores.base import (
+    BaseContextManagerStore,
+    BaseDestroyCollectionStore,
+    BaseEnumerateCollectionsStore,
+    BaseStore,
+)
+from key_value.sync.code_gen.utils.managed_entry import ManagedEntry
+from key_value.sync.code_gen.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string
+from key_value.sync.code_gen.utils.time_to_live import now
+
+if TYPE_CHECKING:
+    from pymongo.results import DeleteResult
+
+try:
+    from pymongo import MongoClient
+except ImportError as e:
+    msg = "MongoDBStore requires py-key-value-aio[mongodb]"
+    raise ImportError(msg) from e
+
+DEFAULT_DB = "kv-store-adapter"
+DEFAULT_COLLECTION = "kv"
+
+DEFAULT_PAGE_SIZE = 10000
+PAGE_LIMIT = 10000
+
+# MongoDB collection name length limit
+# https://www.mongodb.com/docs/manual/reference/limits/
+# For unsharded collections and views, the namespace length limit is 255 bytes.
+# For sharded collections, the namespace length limit is 235 bytes.
+# So limit the collection name to 200 bytes
+MAX_COLLECTION_LENGTH = 200
+COLLECTION_ALLOWED_CHARACTERS = ALPHANUMERIC_CHARACTERS + "_"
+
+
+class MongoDBStoreDocument(TypedDict):
+    value: dict[str, Any]
+
+    created_at: datetime | None
+    expires_at: datetime | None
+
+
+class MongoDBStore(BaseEnumerateCollectionsStore, BaseDestroyCollectionStore, BaseContextManagerStore, BaseStore):
+    """MongoDB-based key-value store using PyMongo (sync MongoDB driver)."""
+
+    _client: MongoClient[dict[str, Any]]
+    _db: Database[dict[str, Any]]
+    _collections_by_name: dict[str, Collection[dict[str, Any]]]
+
+    @overload
+    def __init__(
+        self,
+        *,
+        client: MongoClient[dict[str, Any]],
+        db_name: str | None = None,
+        coll_name: str | None = None,
+        default_collection: str | None = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(
+        self, *, url: str, db_name: str | None = None, coll_name: str | None = None, default_collection: str | None = None
+    ) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        client: MongoClient[dict[str, Any]] | None = None,
+        url: str | None = None,
+        db_name: str | None = None,
+        coll_name: str | None = None,
+        default_collection: str | None = None,
+    ) -> None:
+        """Initialize the MongoDB store.
+
+        Each adapter collection is persisted to its own MongoDB collection (with a sanitized name),
+        and each entry is stored as a document keyed by a "key" field with a JSON string payload.
+        Optional TTL is persisted as ISO timestamps alongside the payload to maintain consistent semantics across backends.
+ """ + + if client: + self._client = client + elif url: + self._client = MongoClient(url) + else: + # Defaults to localhost + self._client = MongoClient() + + db_name = db_name or DEFAULT_DB + coll_name = coll_name or DEFAULT_COLLECTION + + self._db = self._client[db_name] + self._collections_by_name = {} + + super().__init__(default_collection=default_collection) + + @override + def __enter__(self) -> Self: + _ = self._client.__enter__() + return self + + @override + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: # pyright: ignore[reportAny] + self._client.__exit__(exc_type, exc_val, exc_tb) + + def _sanitize_collection_name(self, collection: str) -> str: + return sanitize_string(value=collection, max_length=MAX_COLLECTION_LENGTH, allowed_characters=ALPHANUMERIC_CHARACTERS) + + @override + def _setup_collection(self, *, collection: str) -> None: + # Ensure index on the unique combo key and supporting queries + collection = self._sanitize_collection_name(collection=collection) + + collection_filter: dict[str, str] = {"name": collection} + matching_collections: list[str] = self._db.list_collection_names(filter=collection_filter) + + if matching_collections: + return + + new_collection: Collection[dict[str, Any]] = self._db.create_collection(name=collection) + + _ = new_collection.create_index(keys="key") + + self._collections_by_name[collection] = new_collection + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + collection = self._sanitize_collection_name(collection=collection) + + doc: dict[str, Any] | None = self._collections_by_name[collection].find_one(filter={"key": key}) + + if not doc: + return None + + json_value: str | None = doc.get("value") + + if not isinstance(json_value, str): + return None + + return ManagedEntry.from_json(json_str=json_value) + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + json_value: str = managed_entry.to_json() + + collection = self._sanitize_collection_name(collection=collection) + + _ = self._collections_by_name[collection].update_one( + filter={"key": key}, + update={ + "$set": { + "collection": collection, + "key": key, + "value": json_value, + "created_at": managed_entry.created_at.isoformat() if managed_entry.created_at else None, + "expires_at": managed_entry.expires_at.isoformat() if managed_entry.expires_at else None, + "updated_at": now().isoformat(), + } + }, + upsert=True, + ) + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + collection = self._sanitize_collection_name(collection=collection) + + result: DeleteResult = self._collections_by_name[collection].delete_one(filter={"key": key}) + return bool(result.deleted_count) + + @override + def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + + return list(self._collections_by_name.keys())[:limit] + + @override + def _delete_collection(self, *, collection: str) -> bool: + collection = self._sanitize_collection_name(collection=collection) + + _ = self._db.drop_collection(name_or_collection=collection) + return True + + @override + def _close(self) -> None: + pass diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/__init__.py new file mode 100644 index 00000000..f97eeae3 --- /dev/null +++ 
b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.null.store import NullStore + +__all__ = ["NullStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py new file mode 100644 index 00000000..b6f23ea4 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py @@ -0,0 +1,23 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseStore +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry + + +class NullStore(BaseStore): + """Null object pattern store that accepts all operations but stores nothing.""" + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + return None + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + pass + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + return False diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/__init__.py new file mode 100644 index 00000000..c0f25a2a --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.redis.store import RedisStore + +__all__ = ["RedisStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py new file mode 100644 index 00000000..f788209c --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py @@ -0,0 +1,129 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. +from typing import Any, overload +from urllib.parse import urlparse + +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseEnumerateKeysStore, BaseStore +from key_value.sync.code_gen.utils.compound import compound_key, get_keys_from_compound_keys +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry + +try: + from redis import Redis +except ImportError as e: + msg = "RedisStore requires py-key-value-aio[redis]" + raise ImportError(msg) from e + +DEFAULT_PAGE_SIZE = 10000 +PAGE_LIMIT = 10000 + + +class RedisStore(BaseDestroyStore, BaseEnumerateKeysStore, BaseContextManagerStore, BaseStore): + """Redis-based key-value store.""" + + _client: Redis + + @overload + def __init__(self, *, client: Redis, default_collection: str | None = None) -> None: ... + + @overload + def __init__(self, *, url: str, default_collection: str | None = None) -> None: ... 
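+    # The overloads cover three construction modes: an existing client, a redis:// URL, or
+    # discrete host/port/db settings. A hedged usage sketch (connection details are placeholders):
+    #
+    #     store = RedisStore(url="redis://localhost:6379/0")
+    #     store.put(key="greeting", value={"message": "hello"}, ttl=30.0)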
+ + @overload + def __init__( + self, *, host: str = "localhost", port: int = 6379, db: int = 0, password: str | None = None, default_collection: str | None = None + ) -> None: ... + + def __init__( + self, + *, + client: Redis | None = None, + default_collection: str | None = None, + url: str | None = None, + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + ) -> None: + """Initialize the Redis store. + + Args: + client: An existing Redis client to use. + url: Redis URL (e.g., redis://localhost:6379/0). + host: Redis host. Defaults to localhost. + port: Redis port. Defaults to 6379. + db: Redis database number. Defaults to 0. + password: Redis password. Defaults to None. + default_collection: The default collection to use if no collection is provided. + """ + if client: + self._client = client + elif url: + parsed_url = urlparse(url) + self._client = Redis( + host=parsed_url.hostname or "localhost", + port=parsed_url.port or 6379, + db=int(parsed_url.path.lstrip("/")) if parsed_url.path and parsed_url.path != "/" else 0, + password=parsed_url.password or password, + decode_responses=True, + ) + else: + self._client = Redis(host=host, port=port, db=db, password=password, decode_responses=True) + + super().__init__(default_collection=default_collection) + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = compound_key(collection=collection, key=key) + + redis_response: Any = self._client.get(name=combo_key) # pyright: ignore[reportAny] + + if not isinstance(redis_response, str): + return None + + managed_entry: ManagedEntry = ManagedEntry.from_json(json_str=redis_response) + + return managed_entry + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + combo_key: str = compound_key(collection=collection, key=key) + + json_value: str = managed_entry.to_json() + + if managed_entry.ttl is not None: + # Redis does not support <= 0 TTLs + ttl = max(int(managed_entry.ttl), 1) + + _ = self._client.setex(name=combo_key, time=ttl, value=json_value) # pyright: ignore[reportAny] + else: + _ = self._client.set(name=combo_key, value=json_value) # pyright: ignore[reportAny] + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = compound_key(collection=collection, key=key) + + return self._client.delete(combo_key) != 0 # pyright: ignore[reportAny] + + @override + def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) + + pattern = compound_key(collection=collection, key="*") + + # redis.asyncio scan returns tuple(cursor, keys) + _cursor: int + keys: list[str] + (_cursor, keys) = self._client.scan(cursor=0, match=pattern, count=limit) # pyright: ignore[reportUnknownMemberType, reportAny] + + return get_keys_from_compound_keys(compound_keys=keys, collection=collection) + + @override + def _delete_store(self) -> bool: + return self._client.flushdb() # pyright: ignore[reportUnknownMemberType, reportAny] + + @override + def _close(self) -> None: + self._client.close() diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/__init__.py new file mode 100644 index 00000000..f93da560 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/__init__.py @@ -0,0 +1,6 @@ +# WARNING: 
this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.simple.store import SimpleStore + +__all__ = ["SimpleStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py new file mode 100644 index 00000000..af820ae4 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py @@ -0,0 +1,100 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. +from collections import defaultdict +from dataclasses import dataclass +from datetime import datetime + +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseDestroyStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseStore +from key_value.sync.code_gen.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys +from key_value.sync.code_gen.utils.managed_entry import ManagedEntry, load_from_json +from key_value.sync.code_gen.utils.time_to_live import seconds_to + +DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 10000 + + +@dataclass +class SimpleStoreEntry: + json_str: str + + created_at: datetime | None + expires_at: datetime | None + + @property + def current_ttl(self) -> float | None: + if self.expires_at is None: + return None + + return seconds_to(datetime=self.expires_at) + + def to_managed_entry(self) -> ManagedEntry: + managed_entry: ManagedEntry = ManagedEntry( + value=load_from_json(json_str=self.json_str), expires_at=self.expires_at, created_at=self.created_at + ) + + return managed_entry + + +class SimpleStore(BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseDestroyStore, BaseStore): + """Simple managed dictionary-based key-value store for testing and development.""" + + max_entries: int + + _data: dict[str, SimpleStoreEntry] + + def __init__(self, max_entries: int = DEFAULT_SIMPLE_STORE_MAX_ENTRIES, default_collection: str | None = None): + """Initialize the simple store. + + Args: + max_entries: The maximum number of entries to store. Defaults to 10000. + default_collection: The default collection to use if no collection is provided. 
+ """ + + self.max_entries = max_entries + + self._data = defaultdict[str, SimpleStoreEntry]() + + super().__init__(default_collection=default_collection) + + @override + def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: + combo_key: str = compound_key(collection=collection, key=key) + + store_entry: SimpleStoreEntry | None = self._data.get(combo_key) + + if store_entry is None: + return None + + return store_entry.to_managed_entry() + + @override + def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: + combo_key: str = compound_key(collection=collection, key=key) + + if len(self._data) >= self.max_entries: + _ = self._data.pop(next(iter(self._data))) + + self._data[combo_key] = SimpleStoreEntry( + json_str=managed_entry.to_json(include_metadata=False), expires_at=managed_entry.expires_at, created_at=managed_entry.created_at + ) + + @override + def _delete_managed_entry(self, *, key: str, collection: str) -> bool: + combo_key: str = compound_key(collection=collection, key=key) + + return self._data.pop(combo_key, None) is not None + + @override + def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: + return get_keys_from_compound_keys(compound_keys=list(self._data.keys()), collection=collection) + + @override + def _get_collection_names(self, *, limit: int | None = None) -> list[str]: + return get_collections_from_compound_keys(compound_keys=list(self._data.keys())) + + @override + def _delete_store(self) -> bool: + self._data.clear() + return True diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/__init__.py new file mode 100644 index 00000000..d993f122 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.valkey.store import ValkeyStore + +__all__ = ["ValkeyStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py new file mode 100644 index 00000000..486725c6 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py @@ -0,0 +1,124 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'store.py' +# DO NOT CHANGE! Change the original file instead. 
+from typing import overload
+
+from typing_extensions import override
+
+from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore
+from key_value.sync.code_gen.utils.compound import compound_key
+from key_value.sync.code_gen.utils.managed_entry import ManagedEntry
+
+try:
+    # Use the valkey-glide client to communicate with a Valkey server (Redis protocol compatible)
+    from glide_shared.commands.core_options import ExpirySet, ExpiryType
+    from glide_shared.config import GlideClientConfiguration, NodeAddress, ServerCredentials
+    from glide_sync.glide_client import BaseClient, GlideClient
+except ImportError as e:
+    msg = "ValkeyStore requires py-key-value-aio[valkey]"
+    raise ImportError(msg) from e
+
+DEFAULT_PAGE_SIZE = 10000
+PAGE_LIMIT = 10000
+
+
+class ValkeyStore(BaseContextManagerStore, BaseStore):
+    """Valkey-based key-value store (Redis protocol compatible)."""
+
+    _connected_client: BaseClient | None
+    _client_config: GlideClientConfiguration | None
+
+    @overload
+    def __init__(self, *, client: BaseClient, default_collection: str | None = None) -> None: ...
+
+    @overload
+    def __init__(
+        self,
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        db: int = 0,
+        username: str | None = None,
+        password: str | None = None,
+        default_collection: str | None = None,
+    ) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        client: BaseClient | None = None,
+        default_collection: str | None = None,
+        host: str = "localhost",
+        port: int = 6379,
+        db: int = 0,
+        username: str | None = None,
+        password: str | None = None,
+    ) -> None:
+        if client is not None:
+            self._connected_client = client
+        else:
+            # Build the Glide client configuration from the discrete connection parameters
+            addresses: list[NodeAddress] = [NodeAddress(host=host, port=port)]
+            credentials: ServerCredentials | None = ServerCredentials(password=password, username=username) if password else None
+            self._client_config = GlideClientConfiguration(addresses=addresses, database_id=db, credentials=credentials)
+            self._connected_client = None
+
+        super().__init__(default_collection=default_collection)
+
+    @override
+    def _setup(self) -> None:
+        if self._connected_client is None:
+            if self._client_config is None:
+                # This should never happen, makes the type checker happy though
+                msg = "Client configuration is not set"
+                raise ValueError(msg)
+
+            self._connected_client = GlideClient.create(config=self._client_config)
+
+    @property
+    def _client(self) -> BaseClient:
+        if self._connected_client is None:
+            # This should never happen, makes the type checker happy though
+            msg = "Client is not connected"
+            raise ValueError(msg)
+        return self._connected_client
+
+    @override
+    def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None:
+        combo_key: str = compound_key(collection=collection, key=key)
+
+        response: bytes | None = self._client.get(key=combo_key)
+        if not isinstance(response, bytes):
+            return None
+        decoded_response: str = response.decode("utf-8")
+        return ManagedEntry.from_json(json_str=decoded_response)
+
+    @override
+    def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None:
+        combo_key: str = compound_key(collection=collection, key=key)
+
+        json_value: str = managed_entry.to_json()
+
+        expiry: ExpirySet | None = ExpirySet(expiry_type=ExpiryType.SEC, value=int(managed_entry.ttl)) if managed_entry.ttl else None
+
+        _ = self._client.set(key=combo_key, value=json_value, expiry=expiry)
+
+    @override
+    def _delete_managed_entry(self, *, key: str, collection: str) ->
bool: + combo_key: str = compound_key(collection=collection, key=key) + return self._client.delete(keys=[combo_key]) != 0 + + @override + def _close(self) -> None: + self._client.close() + + +# @override +# async def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> list[str]: +# limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) +# pattern = compound_key(collection=collection, key="*") +# _cursor: int +# keys: list[str] +# _cursor, keys = await self._client.scan(cursor=0, match=pattern, count=limit) +# return get_keys_from_compound_keys(compound_keys=keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py new file mode 100644 index 00000000..fcaaefd9 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py @@ -0,0 +1,21 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'acompat.py' +# DO NOT CHANGE! Change the original file instead. +import asyncio +import time +from collections.abc import Coroutine +from typing import Any + + +def asleep(seconds: float) -> Coroutine[Any, Any, None]: + """ + Equivalent to asyncio.sleep(), converted to time.sleep() by async_to_sync. + """ + return asyncio.sleep(seconds) + + +def sleep(seconds: float) -> None: + """ + Equivalent to time.sleep(), converted to asyncio.sleep() by async_to_sync. + """ + time.sleep(seconds) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py new file mode 100644 index 00000000..709b00f8 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py @@ -0,0 +1,78 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'compound.py' +# DO NOT CHANGE! Change the original file instead. 
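The compound-key helpers defined below determine how every store lays out collection/key pairs. A quick illustration using the default separators ("::" for compound keys, "__" for prefixes); the values are examples only:

    from key_value.sync.code_gen.utils.compound import compound_key, prefix_key, uncompound_key, unprefix_key

    compound_key(collection="users", key="42")         # 'users::42'
    uncompound_key(key="users::42")                    # ('users', '42')
    prefix_key(key="42", prefix="tenant1")             # 'tenant1__42'
    unprefix_key(key="tenant1__42", prefix="tenant1")  # '42'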
+DEFAULT_COMPOUND_SEPARATOR = "::" +DEFAULT_PREFIX_SEPARATOR = "__" + + +def compound_string(first: str, second: str, separator: str | None = None) -> str: + separator = separator or DEFAULT_COMPOUND_SEPARATOR + return f"{first}{separator}{second}" + + +def uncompound_string(string: str, separator: str | None = None) -> tuple[str, str]: + separator = separator or DEFAULT_COMPOUND_SEPARATOR + if separator not in string: + msg: str = f"String {string} is not a compound identifier" + raise TypeError(msg) from None + + split_key: list[str] = string.split(separator, 1) + + if len(split_key) != 2: # noqa: PLR2004 + msg = f"String {string} is not a compound identifier" + raise TypeError(msg) from None + + return (split_key[0], split_key[1]) + + +def uncompound_strings(strings: list[str], separator: str | None = None) -> list[tuple[str, str]]: + separator = separator or DEFAULT_COMPOUND_SEPARATOR + return [uncompound_string(string=string, separator=separator) for string in strings] + + +def compound_key(collection: str, key: str, separator: str | None = None) -> str: + separator = separator or DEFAULT_COMPOUND_SEPARATOR + return compound_string(first=collection, second=key, separator=separator) + + +def uncompound_key(key: str, separator: str | None = None) -> tuple[str, str]: + separator = separator or DEFAULT_COMPOUND_SEPARATOR + return uncompound_string(string=key, separator=separator) + + +def prefix_key(key: str, prefix: str, separator: str | None = None) -> str: + separator = separator or DEFAULT_PREFIX_SEPARATOR + return compound_string(first=prefix, second=key, separator=separator) + + +def unprefix_key(key: str, prefix: str, separator: str | None = None) -> str: + separator = separator or DEFAULT_PREFIX_SEPARATOR + if not key.startswith(prefix + separator): + msg = f"Key {key} is not prefixed with {prefix}{separator}" + raise ValueError(msg) + return key[len(prefix + separator) :] + + +def prefix_collection(collection: str, prefix: str, separator: str | None = None) -> str: + separator = separator or DEFAULT_PREFIX_SEPARATOR + return compound_string(first=prefix, second=collection, separator=separator) + + +def unprefix_collection(collection: str, prefix: str, separator: str | None = None) -> str: + separator = separator or DEFAULT_PREFIX_SEPARATOR + if not collection.startswith(prefix + separator): + msg = f"Collection {collection} is not prefixed with {prefix}{separator}" + raise ValueError(msg) + return collection[len(prefix + separator) :] + + +def get_collections_from_compound_keys(compound_keys: list[str], separator: str | None = None) -> list[str]: + """Return a unique list of collections from a list of compound keys.""" + separator = separator or DEFAULT_COMPOUND_SEPARATOR + return list({key_collection for (key_collection, _) in uncompound_strings(strings=compound_keys, separator=separator)}) + + +def get_keys_from_compound_keys(compound_keys: list[str], collection: str, separator: str | None = None) -> list[str]: + """Return all keys from a list of compound keys for a given collection.""" + separator = separator or DEFAULT_COMPOUND_SEPARATOR + return [key for (key_collection, key) in uncompound_strings(strings=compound_keys, separator=separator) if key_collection == collection] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py new file mode 100644 index 00000000..ea38dc97 --- /dev/null +++ 
b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py
@@ -0,0 +1,102 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file 'managed_entry.py'
+# DO NOT CHANGE! Change the original file instead.
+import json
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Any, cast
+
+from typing_extensions import Self
+
+from key_value.sync.code_gen.errors import DeserializationError, SerializationError
+from key_value.sync.code_gen.utils.time_to_live import now, now_plus, try_parse_datetime_str
+
+
+@dataclass(kw_only=True)
+class ManagedEntry:
+    """A managed cache entry containing value data and TTL metadata.
+
+    The entry supports either TTL seconds or an absolute expiration datetime. On init:
+    - If `ttl` is provided but `expires_at` is not, an `expires_at` will be computed.
+    - If `expires_at` is provided but `ttl` is not, the TTL is computed at initialization
+      (and can be refreshed later via `recalculate_ttl`).
+    """
+
+    value: dict[str, Any]
+
+    created_at: datetime | None = field(default=None)
+    ttl: float | None = field(default=None)
+    expires_at: datetime | None = field(default=None)
+
+    def __post_init__(self) -> None:
+        if self.ttl is not None and self.expires_at is None:
+            self.expires_at = now_plus(seconds=self.ttl)
+        elif self.expires_at is not None and self.ttl is None:
+            self.recalculate_ttl()
+
+    @property
+    def is_expired(self) -> bool:
+        if self.expires_at is None:
+            return False
+        return self.expires_at <= now()
+
+    def recalculate_ttl(self) -> None:
+        if self.expires_at is not None and self.ttl is None:
+            self.ttl = (self.expires_at - now()).total_seconds()
+
+    def to_json(self, include_metadata: bool = True, include_expiration: bool = True, include_creation: bool = True) -> str:
+        data: dict[str, Any] = {}
+
+        if include_metadata:
+            data["value"] = self.value
+            if include_creation and self.created_at:
+                data["created_at"] = self.created_at.isoformat()
+            if include_expiration and self.expires_at:
+                data["expires_at"] = self.expires_at.isoformat()
+        else:
+            data = self.value
+
+        return dump_to_json(obj=data)
+
+    @classmethod
+    def from_json(cls, json_str: str, includes_metadata: bool = True, ttl: float | None = None) -> Self:
+        data: dict[str, Any] = load_from_json(json_str=json_str)
+
+        if not includes_metadata:
+            return cls(value=data)
+
+        created_at: datetime | None = try_parse_datetime_str(value=data.get("created_at"))
+        expires_at: datetime | None = try_parse_datetime_str(value=data.get("expires_at"))
+
+        value: dict[str, Any] | None = data.get("value")
+
+        if value is None:
+            msg = "Value is None"
+            raise DeserializationError(msg)
+
+        return cls(created_at=created_at, expires_at=expires_at, ttl=ttl, value=value)
+
+
+def dump_to_json(obj: dict[str, Any]) -> str:
+    try:
+        return json.dumps(obj)
+    except (TypeError, ValueError) as e:
+        msg: str = f"Failed to serialize object to JSON: {e}"
+        raise SerializationError(msg) from e
+
+
+def load_from_json(json_str: str) -> dict[str, Any]:
+    try:
+        deserialized_obj: Any = json.loads(json_str)  # pyright: ignore[reportAny]
+    except (json.JSONDecodeError, TypeError) as e:
+        msg: str = f"Failed to deserialize JSON string: {e}"
+        raise DeserializationError(msg) from e
+
+    if not isinstance(deserialized_obj, dict):
+        msg = "Deserialized object is not a dictionary"
+        raise DeserializationError(msg)
+
+    if not all(isinstance(key, str) for key in deserialized_obj):  # pyright: ignore[reportUnknownVariableType]
+        msg = "Deserialized object contains non-string keys"
+        raise DeserializationError(msg)
+
+    return cast(typ="dict[str, Any]", val=deserialized_obj)
diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py
new file mode 100644
index 00000000..d43a3655
--- /dev/null
+++ b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py
@@ -0,0 +1,159 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file 'sanitize.py'
+# DO NOT CHANGE! Change the original file instead.
+import hashlib
+from enum import Enum
+
+MINIMUM_MAX_LENGTH = 16
+
+DEFAULT_HASH_FRAGMENT_SIZE = 8
+
+DEFAULT_HASH_FRAGMENT_SEPARATOR = "-"
+DEFAULT_REPLACEMENT_CHARACTER = "_"
+
+LOWERCASE_ALPHABET = "abcdefghijklmnopqrstuvwxyz"
+UPPERCASE_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+NUMBERS = "0123456789"
+ALPHANUMERIC_CHARACTERS = LOWERCASE_ALPHABET + UPPERCASE_ALPHABET + NUMBERS
+
+
+def generate_hash_fragment(value: str, size: int = DEFAULT_HASH_FRAGMENT_SIZE) -> str:
+    """Generate a hash fragment of the value."""
+
+    return hashlib.sha256(value.encode()).hexdigest()[:size]
+
+
+class HashFragmentMode(str, Enum):
+    ALWAYS = "always"
+    NEVER = "never"
+    ONLY_IF_CHANGED = "only_if_changed"
+
+
+def sanitize_characters_in_string(value: str, allowed_characters: str, replace_with: str) -> str:
+    """Replace disallowed characters in a string. When several disallowed characters appear in a row,
+    only the first is replaced; the rest are removed. If every character is disallowed, an empty
+    string is returned.
+
+    Args:
+        value: The value to replace characters in.
+        allowed_characters: The characters to keep.
+        replace_with: The replacement for disallowed characters.
+    """
+    new_value = ""
+    last_char_was_replaced = False
+
+    for char in value:
+        if char in allowed_characters:
+            new_value += char
+            last_char_was_replaced = False
+        else:
+            if last_char_was_replaced:
+                continue
+            new_value += replace_with
+            last_char_was_replaced = True
+
+    if len(new_value) == 1 and last_char_was_replaced:
+        return ""
+
+    return new_value
+
+
+def sanitize_string(
+    value: str,
+    max_length: int,
+    allowed_characters: str | None = None,
+    replacement_character: str = DEFAULT_REPLACEMENT_CHARACTER,
+    hash_fragment_separator: str = DEFAULT_HASH_FRAGMENT_SEPARATOR,
+    hash_fragment_mode: HashFragmentMode = HashFragmentMode.ONLY_IF_CHANGED,
+    hash_fragment_length: int = DEFAULT_HASH_FRAGMENT_SIZE,
+) -> str:
+    """Sanitize the value, replacing disallowed characters and optionally appending a hash fragment of the original value.
+
+    If the entire value is sanitized away and hash_fragment_mode is HashFragmentMode.ALWAYS or HashFragmentMode.ONLY_IF_CHANGED,
+    only the hash fragment is returned.
+
+    If the entire value is sanitized away and hash_fragment_mode is HashFragmentMode.NEVER, an error is raised.
+
+    Args:
+        value: The value to sanitize.
+        max_length: The maximum length of the value (with the hash fragment added).
+        allowed_characters: The allowed characters in the value.
+        replacement_character: The character used to replace disallowed characters.
+        hash_fragment_separator: The separator to add between the value and the hash fragment.
+        hash_fragment_mode: The mode to add the hash fragment.
+        hash_fragment_length: The length of the hash fragment to append.
+ """ + if max_length < MINIMUM_MAX_LENGTH: + msg = f"max_length must be greater than or equal to {MINIMUM_MAX_LENGTH}" + raise ValueError(msg) + + if hash_fragment_length > max_length // 2: + msg = "hash_fragment_length must be less than or equal to half of max_length" + raise ValueError(msg) + + hash_fragment: str = generate_hash_fragment(value=value, size=hash_fragment_length) + hash_fragment_size_required: int = len(hash_fragment_separator) + len(hash_fragment) + + sanitized_value: str = ( + sanitize_characters_in_string(value=value, allowed_characters=allowed_characters, replace_with=replacement_character) + if allowed_characters + else value + ) + + actual_max_length: int + + if hash_fragment_mode == HashFragmentMode.ALWAYS: + actual_max_length = max_length - hash_fragment_size_required + + sanitized_value = sanitized_value[:actual_max_length] + + if not sanitized_value: + return hash_fragment + + return sanitized_value + hash_fragment_separator + hash_fragment + + if hash_fragment_mode == HashFragmentMode.ONLY_IF_CHANGED: + sanitized_value = sanitized_value[:max_length] + + if value == sanitized_value: + return value + + actual_max_length = max_length - hash_fragment_size_required + + sanitized_value = sanitized_value[:actual_max_length] + + if not sanitized_value: + return hash_fragment + + return sanitized_value + hash_fragment_separator + hash_fragment + + if not sanitized_value: + msg = "Entire value was sanitized and hash_fragment_mode is HashFragmentMode.NEVER" + raise ValueError(msg) + + return sanitized_value + + +def hash_excess_length(value: str, max_length: int) -> str: + """Hash part of the value if it exceeds the maximum length. This operation + will truncate the value to the maximum length minus 8 characters and will swap + the last 8 characters with the first 8 characters of the generated hash. + + Args: + value: The value to hash. + max_length: The maximum length of the value. Must be greater than 32. + + Returns: + The hashed value if the value exceeds the maximum length, otherwise the original value. + """ + if max_length <= MINIMUM_MAX_LENGTH: + msg = f"max_length must be greater than {MINIMUM_MAX_LENGTH}" + raise ValueError(msg) + + if len(value) <= max_length: + return value + + truncated_value = value[: max_length - 8] + + hash_of_value = hashlib.sha256(value.encode()).hexdigest() + first_eight_of_hash = hash_of_value[:8] + + return truncated_value + first_eight_of_hash diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py new file mode 100644 index 00000000..c8c46dca --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py @@ -0,0 +1,41 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'time_to_live.py' +# DO NOT CHANGE! Change the original file instead. 
+import time +from datetime import datetime, timedelta, timezone +from typing import Any + + +def epoch_to_datetime(epoch: float) -> datetime: + """Convert an epoch timestamp to a datetime object.""" + return datetime.fromtimestamp(epoch, tz=timezone.utc) + + +def now_as_epoch() -> float: + """Get the current time as epoch seconds.""" + return time.time() + + +def now() -> datetime: + """Get the current time as a datetime object.""" + return datetime.now(tz=timezone.utc) + + +def seconds_to(datetime: datetime) -> float: + """Get the number of seconds between the current time and a datetime object.""" + return (datetime - now()).total_seconds() + + +def now_plus(seconds: float) -> datetime: + """Get the current time plus a number of seconds as a datetime object.""" + return datetime.now(tz=timezone.utc) + timedelta(seconds=seconds) + + +def try_parse_datetime_str(value: Any) -> datetime | None: # pyright: ignore[reportAny] + try: + if isinstance(value, str): + return datetime.fromisoformat(value) + except ValueError: + return None + + return None diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py new file mode 100644 index 00000000..f301d3d8 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py @@ -0,0 +1,54 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'base.py' +# DO NOT CHANGE! Change the original file instead. 
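BaseWrapper below forwards every protocol method to the wrapped store, so a custom wrapper only overrides the calls it changes. A hypothetical sketch (LoggingWrapper is not part of this patch):

    from typing import Any

    from key_value.sync.code_gen.protocols.key_value import KeyValue
    from key_value.sync.code_gen.wrappers.base import BaseWrapper

    class LoggingWrapper(BaseWrapper):
        def __init__(self, store: KeyValue) -> None:
            self.store = store

        def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None:
            print(f"get {collection}/{key}")  # observe, then delegate
            return self.store.get(key=key, collection=collection)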
+from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from key_value.sync.code_gen.protocols.key_value import KeyValue + + +class BaseWrapper(KeyValue): + """A base wrapper for KVStore implementations that passes through to the underlying store.""" + + store: KeyValue + + @override + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + return self.store.get(collection=collection, key=key) + + @override + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + return self.store.get_many(collection=collection, keys=keys) + + @override + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + return self.store.ttl(collection=collection, key=key) + + @override + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + return self.store.ttl_many(collection=collection, keys=keys) + + @override + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + return self.store.put(collection=collection, key=key, value=value, ttl=ttl) + + @override + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + return self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + @override + def delete(self, key: str, *, collection: str | None = None) -> bool: + return self.store.delete(collection=collection, key=key) + + @override + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + return self.store.delete_many(keys=keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/__init__.py new file mode 100644 index 00000000..7312b07c --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.passthrough_cache.wrapper import PassthroughCacheWrapper + +__all__ = ["PassthroughCacheWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py new file mode 100644 index 00000000..d7c9eddc --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py @@ -0,0 +1,166 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'wrapper.py' +# DO NOT CHANGE! Change the original file instead. 
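A sketch of composing the PassthroughCacheWrapper defined below, assuming a reachable local Redis for the primary store and that MemoryStore takes no required arguments:

    from key_value.sync.code_gen.wrappers.passthrough_cache import PassthroughCacheWrapper
    from key_value.sync.stores.memory import MemoryStore
    from key_value.sync.stores.redis import RedisStore

    primary = RedisStore(host="localhost", port=6379)
    cached = PassthroughCacheWrapper(primary_store=primary, cache_store=MemoryStore(), maximum_ttl=300)
    cached.get(key="user:1", collection="users")  # a miss falls through to Redis and populates the cache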
+from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.wrappers.base import BaseWrapper +from key_value.sync.code_gen.wrappers.ttl_clamp import TTLClampWrapper + +DEFAULT_MAX_TTL: float = 30 * 60 +DEFAULT_MISSING_TTL: float = 30 * 60 + + +class PassthroughCacheWrapper(BaseWrapper): + """Two-tier wrapper: reads from cache store, falls back to primary and populates cache. + + TTLs from the primary are respected when writing into the cache using a clamped TTL policy. + """ + + def __init__( + self, primary_store: KeyValue, cache_store: KeyValue, maximum_ttl: float | None = None, missing_ttl: float | None = None + ) -> None: + """Initialize the passthrough cache wrapper. + + Args: + primary_store: The primary store to wrap. + cache_store: The cache store to wrap. + maximum_ttl: The maximum TTL for puts into the cache store. Defaults to 30 minutes. + missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to 30 minutes. + """ + self.store: KeyValue = primary_store + self.cache_store: KeyValue = cache_store + + self.cache_store = TTLClampWrapper( + store=cache_store, min_ttl=0, max_ttl=maximum_ttl or DEFAULT_MAX_TTL, missing_ttl=missing_ttl or DEFAULT_MISSING_TTL + ) + + super().__init__() + + @override + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + if managed_entry := self.cache_store.get(collection=collection, key=key): + return managed_entry + + (uncached_entry, ttl) = self.store.ttl(collection=collection, key=key) + + if not uncached_entry: + return None + + self.cache_store.put(collection=collection, key=key, value=uncached_entry, ttl=ttl) + + return uncached_entry + + @override + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + key_to_value: dict[str, dict[str, Any] | None] = dict.fromkeys(keys, None) + + # First check the cache store for the entries + cached_entries: list[dict[str, Any] | None] = self.cache_store.get_many(collection=collection, keys=keys) + + for i, key in enumerate(iterable=keys): + key_to_value[key] = cached_entries[i] + + uncached_keys = [key for (key, value) in key_to_value.items() if value is None] + + uncached_entries: list[tuple[dict[str, Any] | None, float | None]] = self.store.ttl_many(collection=collection, keys=uncached_keys) + + entries_to_cache: list[dict[str, Any]] = [] + entries_to_cache_keys: list[str] = [] + entries_to_cache_ttls: list[float | None] = [] + + for i, key in enumerate(iterable=uncached_keys): + (entry, ttl) = uncached_entries[i] + if entry is not None: + entries_to_cache_keys.append(key) + entries_to_cache.append(entry) + entries_to_cache_ttls.append(ttl) + + key_to_value[key] = entry + + if entries_to_cache: + self.cache_store.put_many(collection=collection, keys=entries_to_cache_keys, values=entries_to_cache, ttl=entries_to_cache_ttls) + + return [key_to_value[key] for key in keys] + + @override + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + (cached_entry, ttl) = self.cache_store.ttl(collection=collection, key=key) + + if cached_entry: + return (cached_entry, ttl) + + (uncached_entry, ttl) = self.store.ttl(collection=collection, key=key) + + if not uncached_entry: + return (None, None) + + self.cache_store.put(collection=collection, key=key, value=uncached_entry, ttl=ttl) + + return (uncached_entry, ttl) + + @override + 
def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + key_to_value: dict[str, tuple[dict[str, Any] | None, float | None]] = dict.fromkeys(keys, (None, None)) # type: ignore + + # First check the cache store for the entries + cached_entries: list[tuple[dict[str, Any] | None, float | None]] = self.cache_store.ttl_many(collection=collection, keys=keys) + + for i, key in enumerate(iterable=keys): + key_to_value[key] = (cached_entries[i][0], cached_entries[i][1]) + + uncached_keys = [key for (key, value) in key_to_value.items() if value == (None, None)] + + uncached_entries: list[tuple[dict[str, Any] | None, float | None]] = self.store.ttl_many(collection=collection, keys=uncached_keys) + + entries_to_cache: list[dict[str, Any]] = [] + entries_to_cache_keys: list[str] = [] + entries_to_cache_ttls: list[float | None] = [] + + for i, key in enumerate(iterable=uncached_keys): + (entry, ttl) = uncached_entries[i] + if entry is not None: + entries_to_cache_keys.append(key) + entries_to_cache.append(entry) + entries_to_cache_ttls.append(ttl) + + key_to_value[key] = (entry, ttl) + + if entries_to_cache: + self.cache_store.put_many(collection=collection, keys=entries_to_cache_keys, values=entries_to_cache, ttl=entries_to_cache_ttls) + + return [key_to_value[key] for key in keys] + + @override + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + _ = self.cache_store.delete(collection=collection, key=key) + + self.store.put(collection=collection, key=key, value=value, ttl=ttl) + + @override + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + _ = self.cache_store.delete_many(collection=collection, keys=keys) + + self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + @override + def delete(self, key: str, *, collection: str | None = None) -> bool: + _ = self.cache_store.delete(collection=collection, key=key) + + return self.store.delete(collection=collection, key=key) + + @override + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + _ = self.cache_store.delete_many(collection=collection, keys=keys) + + return self.store.delete_many(collection=collection, keys=keys) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/__init__.py new file mode 100644 index 00000000..32d0c5e1 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. 
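PrefixCollectionsWrapper (defined next) namespaces collection names, which suits multi-tenant setups. With the default "__" prefix separator, the put in this sketch lands in collection "tenant42__sessions":

    from key_value.sync.code_gen.wrappers.prefix_collections import PrefixCollectionsWrapper
    from key_value.sync.stores.memory import MemoryStore

    store = PrefixCollectionsWrapper(store=MemoryStore(), prefix="tenant42")
    store.put(key="sid", value={"active": True}, collection="sessions")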
+from key_value.sync.code_gen.wrappers.prefix_collections.wrapper import PrefixCollectionsWrapper + +__all__ = ["PrefixCollectionsWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py new file mode 100644 index 00000000..fe370eda --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py @@ -0,0 +1,82 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'wrapper.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.stores.base import DEFAULT_COLLECTION_NAME +from key_value.sync.code_gen.utils.compound import prefix_collection, unprefix_collection +from key_value.sync.code_gen.wrappers.base import BaseWrapper + + +class PrefixCollectionsWrapper(BaseWrapper): + """A wrapper that prefixes collection names before delegating to the underlying store.""" + + def __init__(self, store: KeyValue, prefix: str, default_collection: str | None = None) -> None: + """Initialize the prefix collections wrapper. + + Args: + store: The store to wrap. + prefix: The prefix to add to the collections. + default_collection: The default collection to use if no collection is provided. Will be automatically prefixed with the `prefix` + """ + self.store: KeyValue = store + self.prefix: str = prefix + self.default_collection: str = default_collection or DEFAULT_COLLECTION_NAME + super().__init__() + + def _prefix_collection(self, collection: str | None) -> str: + return prefix_collection(prefix=self.prefix, collection=collection or self.default_collection) + + def _unprefix_collection(self, collection: str) -> str: + return unprefix_collection(prefix=self.prefix, collection=collection) + + @override + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.get(key=key, collection=new_collection) + + @override + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.get_many(keys=keys, collection=new_collection) + + @override + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.ttl(key=key, collection=new_collection) + + @override + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.ttl_many(keys=keys, collection=new_collection) + + @override + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.put(key=key, value=value, collection=new_collection, ttl=ttl) + + @override + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + 
new_collection: str = self._prefix_collection(collection=collection) + return self.store.put_many(keys=keys, values=values, collection=new_collection, ttl=ttl) + + @override + def delete(self, key: str, *, collection: str | None = None) -> bool: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.delete(key=key, collection=new_collection) + + @override + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + new_collection: str = self._prefix_collection(collection=collection) + return self.store.delete_many(keys=keys, collection=new_collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/__init__.py new file mode 100644 index 00000000..e7153a57 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.prefix_keys.wrapper import PrefixKeysWrapper + +__all__ = ["PrefixKeysWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py new file mode 100644 index 00000000..751f2710 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py @@ -0,0 +1,79 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'wrapper.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Sequence +from typing import Any + +from typing_extensions import override + +from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.utils.compound import prefix_key, unprefix_key +from key_value.sync.code_gen.wrappers.base import BaseWrapper + + +class PrefixKeysWrapper(BaseWrapper): + """A wrapper that prefixes key names before delegating to the underlying store.""" + + def __init__(self, store: KeyValue, prefix: str) -> None: + """Initialize the prefix keys wrapper. + + Args: + store: The store to wrap. + prefix: The prefix to add to the keys. 
+ """ + self.store: KeyValue = store + self.prefix: str = prefix + super().__init__() + + def _prefix_key(self, key: str) -> str: + return prefix_key(prefix=self.prefix, key=key) + + def _unprefix_key(self, key: str) -> str: + return unprefix_key(prefix=self.prefix, key=key) + + @override + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + new_key: str = self._prefix_key(key=key) + return self.store.get(key=new_key, collection=collection) + + @override + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + new_keys: list[str] = [self._prefix_key(key=key) for key in keys] + return self.store.get_many(keys=new_keys, collection=collection) + + @override + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + new_key: str = self._prefix_key(key=key) + return self.store.ttl(key=new_key, collection=collection) + + @override + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + new_keys: list[str] = [self._prefix_key(key=key) for key in keys] + return self.store.ttl_many(keys=new_keys, collection=collection) + + @override + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + new_key: str = self._prefix_key(key=key) + return self.store.put(key=new_key, value=value, collection=collection, ttl=ttl) + + @override + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + new_keys: list[str] = [self._prefix_key(key=key) for key in keys] + return self.store.put_many(keys=new_keys, values=values, collection=collection, ttl=ttl) + + @override + def delete(self, key: str, *, collection: str | None = None) -> bool: + new_key: str = self._prefix_key(key=key) + return self.store.delete(key=new_key, collection=collection) + + @override + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + new_keys: list[str] = [self._prefix_key(key=key) for key in keys] + return self.store.delete_many(keys=new_keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/__init__.py new file mode 100644 index 00000000..81f319e1 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.single_collection.wrapper import SingleCollectionWrapper + +__all__ = ["SingleCollectionWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py new file mode 100644 index 00000000..b67a8d22 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py @@ -0,0 +1,86 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'wrapper.py' +# DO NOT CHANGE! Change the original file instead. 
+from collections.abc import Sequence
+from typing import Any
+
+from typing_extensions import override
+
+from key_value.sync.code_gen.protocols.key_value import KeyValue
+from key_value.sync.code_gen.stores.base import DEFAULT_COLLECTION_NAME
+from key_value.sync.code_gen.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key
+from key_value.sync.code_gen.wrappers.base import BaseWrapper
+
+
+class SingleCollectionWrapper(BaseWrapper):
+    """A wrapper that stores all collections within a single backing collection via key prefixing."""
+
+    def __init__(
+        self, store: KeyValue, single_collection: str, default_collection: str | None = None, separator: str | None = None
+    ) -> None:
+        """Initialize the single collection wrapper.
+
+        Args:
+            store: The store to wrap.
+            single_collection: The single backing collection used to store all collections.
+            default_collection: The default collection to use if no collection is provided.
+            separator: The separator placed between the collection prefix and the key. Defaults to "__".
+        """
+        self.store: KeyValue = store
+        self.single_collection: str = single_collection
+        self.default_collection: str = default_collection or DEFAULT_COLLECTION_NAME
+        self.separator: str = separator or DEFAULT_PREFIX_SEPARATOR
+        super().__init__()
+
+    def _prefix_key(self, key: str, collection: str | None = None) -> str:
+        collection_to_use = collection or self.default_collection
+        return prefix_key(prefix=collection_to_use, key=key, separator=self.separator)
+
+    def _unprefix_key(self, key: str) -> str:
+        return unprefix_key(prefix=self.single_collection, key=key, separator=self.separator)
+
+    @override
+    def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return self.store.get(key=new_key, collection=self.single_collection)
+
+    @override
+    def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return self.store.get_many(keys=new_keys, collection=self.single_collection)
+
+    @override
+    def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return self.store.ttl(key=new_key, collection=self.single_collection)
+
+    @override
+    def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return self.store.ttl_many(keys=new_keys, collection=self.single_collection)
+
+    @override
+    def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return self.store.put(key=new_key, value=value, collection=self.single_collection, ttl=ttl)
+
+    @override
+    def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys]
+        return self.store.put_many(keys=new_keys, values=values, collection=self.single_collection, ttl=ttl)
+
+    @override
+    def delete(self, key: str, *, collection: str | None = None) -> bool:
+        new_key: str = self._prefix_key(key=key, collection=collection)
+        return self.store.delete(key=new_key,
collection=self.single_collection) + + @override + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] + return self.store.delete_many(keys=new_keys, collection=self.single_collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/__init__.py new file mode 100644 index 00000000..242bae1b --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.statistics.wrapper import StatisticsWrapper + +__all__ = ["StatisticsWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py new file mode 100644 index 00000000..bdab60a8 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py @@ -0,0 +1,217 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'wrapper.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Sequence +from dataclasses import dataclass, field +from typing import Any + +from typing_extensions import override + +from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.wrappers.base import BaseWrapper + + +@dataclass +class BaseStatistics: + """Base statistics container with operation counting.""" + + count: int = field(default=0) + "The number of operations." + + def increment(self, *, increment: int = 1) -> None: + self.count += increment + + +@dataclass +class BaseHitMissStatistics(BaseStatistics): + """Statistics container with hit/miss tracking for cache-like operations.""" + + hit: int = field(default=0) + "The number of hits." + miss: int = field(default=0) + "The number of misses." + + def increment_hit(self, *, increment: int = 1) -> None: + self.increment(increment=increment) + self.hit += increment + + def increment_miss(self, *, increment: int = 1) -> None: + self.increment(increment=increment) + self.miss += increment + + +@dataclass +class GetStatistics(BaseHitMissStatistics): + """A class for statistics about a KV Store collection.""" + + +@dataclass +class PutStatistics(BaseStatistics): + """A class for statistics about a KV Store collection.""" + + +@dataclass +class DeleteStatistics(BaseHitMissStatistics): + """A class for statistics about a KV Store collection.""" + + +@dataclass +class ExistsStatistics(BaseHitMissStatistics): + """A class for statistics about a KV Store collection.""" + + +@dataclass +class TTLStatistics(BaseHitMissStatistics): + """A class for statistics about a KV Store collection.""" + + +@dataclass +class KVStoreCollectionStatistics(BaseStatistics): + """A class for statistics about a KV Store collection.""" + + get: GetStatistics = field(default_factory=GetStatistics) + "The statistics for the get operation." + + ttl: TTLStatistics = field(default_factory=TTLStatistics) + "The statistics for the ttl operation." + + put: PutStatistics = field(default_factory=PutStatistics) + "The statistics for the put operation." 
+ + delete: DeleteStatistics = field(default_factory=DeleteStatistics) + "The statistics for the delete operation." + + exists: ExistsStatistics = field(default_factory=ExistsStatistics) + "The statistics for the exists operation." + + +@dataclass +class KVStoreStatistics: + """Statistics container for a KV Store.""" + + collections: dict[str, KVStoreCollectionStatistics] = field(default_factory=dict) + + def get_collection(self, collection: str) -> KVStoreCollectionStatistics: + if collection not in self.collections: + self.collections[collection] = KVStoreCollectionStatistics() + return self.collections[collection] + + +DEFAULT_COLLECTION_NAME = "__no_collection__" + + +class StatisticsWrapper(BaseWrapper): + """Statistics wrapper around a KV Store that tracks operation statistics. + + Note: enumeration and destroy operations are not tracked by this wrapper. + """ + + def __init__(self, store: KeyValue) -> None: + self.store: KeyValue = store + self._statistics: KVStoreStatistics = KVStoreStatistics() + + @property + def statistics(self) -> KVStoreStatistics: + return self._statistics + + @override + def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | None: + collection = collection or DEFAULT_COLLECTION_NAME + + if value := self.store.get(collection=collection, key=key): + self.statistics.get_collection(collection=collection).get.increment_hit() + return value + + self.statistics.get_collection(collection=collection).get.increment_miss() + + return None + + @override + def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: + collection = collection or DEFAULT_COLLECTION_NAME + + (value, ttl) = self.store.ttl(collection=collection, key=key) + + if value: + self.statistics.get_collection(collection=collection).ttl.increment_hit() + return (value, ttl) + + self.statistics.get_collection(collection=collection).ttl.increment_miss() + return (None, None) + + @override + def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None: + collection = collection or DEFAULT_COLLECTION_NAME + + self.store.put(collection=collection, key=key, value=value, ttl=ttl) + + self.statistics.get_collection(collection=collection).put.increment() + + @override + def delete(self, key: str, *, collection: str | None = None) -> bool: + collection = collection or DEFAULT_COLLECTION_NAME + + if self.store.delete(collection=collection, key=key): + self.statistics.get_collection(collection=collection).delete.increment_hit() + return True + + self.statistics.get_collection(collection=collection).delete.increment_miss() + + return False + + @override + def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + collection = collection or DEFAULT_COLLECTION_NAME + + results: list[dict[str, Any] | None] = self.store.get_many(keys=keys, collection=collection) + + hits = len([result for result in results if result is not None]) + misses = len([result for result in results if result is None]) + + self.statistics.get_collection(collection=collection).get.increment_hit(increment=hits) + self.statistics.get_collection(collection=collection).get.increment_miss(increment=misses) + + return results + + @override + def put_many( + self, + keys: Sequence[str], + values: Sequence[dict[str, Any]], + *, + collection: str | None = None, + ttl: Sequence[float | None] | float | None = None, + ) -> None: + collection = collection or DEFAULT_COLLECTION_NAME + + 
self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) + + self.statistics.get_collection(collection=collection).put.increment(increment=len(keys)) + + @override + def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + collection = collection or DEFAULT_COLLECTION_NAME + + deleted_count: int = self.store.delete_many(keys=keys, collection=collection) + + hits = deleted_count + misses = len(keys) - deleted_count + + self.statistics.get_collection(collection=collection).delete.increment_hit(increment=hits) + self.statistics.get_collection(collection=collection).delete.increment_miss(increment=misses) + + return deleted_count + + @override + def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + collection = collection or DEFAULT_COLLECTION_NAME + + results: list[tuple[dict[str, Any] | None, float | None]] = self.store.ttl_many(keys=keys, collection=collection) + + hits = len([result for result in results if result[0] is not None]) + misses = len([result for result in results if result[0] is None]) + + self.statistics.get_collection(collection=collection).ttl.increment_hit(increment=hits) + self.statistics.get_collection(collection=collection).ttl.increment_miss(increment=misses) + + return results diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/__init__.py new file mode 100644 index 00000000..fb9603fd --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.ttl_clamp.wrapper import TTLClampWrapper + +__all__ = ["TTLClampWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py new file mode 100644 index 00000000..6f251566 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py @@ -0,0 +1,64 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'wrapper.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Sequence +from typing import Any, overload + +from typing_extensions import override + +from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.wrappers.base import BaseWrapper + + +class TTLClampWrapper(BaseWrapper): + """Wrapper that enforces a maximum TTL for puts into the store.""" + + def __init__(self, store: KeyValue, min_ttl: float, max_ttl: float, missing_ttl: float | None = None) -> None: + """Initialize the TTL clamp wrapper. + + Args: + store: The store to wrap. + min_ttl: The minimum TTL for puts into the store. + max_ttl: The maximum TTL for puts into the store. + missing_ttl: The TTL to use for entries that do not have a TTL. Defaults to None. + """ + self.store: KeyValue = store + self.min_ttl: float = min_ttl + self.max_ttl: float = max_ttl + self.missing_ttl: float | None = missing_ttl + + super().__init__() + + @overload + def _ttl_clamp(self, ttl: float) -> float: ... + + @overload + def _ttl_clamp(self, ttl: float | None) -> float | None: ... 
+
+    def _ttl_clamp(self, ttl: float | None) -> float | None:
+        if ttl is None:
+            return self.missing_ttl
+
+        return max(self.min_ttl, min(ttl, self.max_ttl))
+
+    @override
+    def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, ttl: float | None = None) -> None:
+        self.store.put(collection=collection, key=key, value=value, ttl=self._ttl_clamp(ttl=ttl))
+
+    @override
+    def put_many(
+        self,
+        keys: Sequence[str],
+        values: Sequence[dict[str, Any]],
+        *,
+        collection: str | None = None,
+        ttl: Sequence[float | None] | float | None = None,
+    ) -> None:
+        clamped_ttl: Sequence[float | None] | float | None = None
+
+        if isinstance(ttl, Sequence):
+            clamped_ttl = [self._ttl_clamp(ttl=t) for t in ttl]
+        elif ttl is not None:  # checking for None (rather than isinstance float) also clamps int TTLs
+            clamped_ttl = self._ttl_clamp(ttl=ttl)
+
+        self.store.put_many(keys=keys, values=values, collection=collection, ttl=clamped_ttl)
diff --git a/key-value/key-value-sync/src/key_value/sync/errors/__init__.py b/key-value/key-value-sync/src/key_value/sync/errors/__init__.py
new file mode 100644
index 00000000..3ca01799
--- /dev/null
+++ b/key-value/key-value-sync/src/key_value/sync/errors/__init__.py
@@ -0,0 +1,24 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file '__init__.py'
+# DO NOT CHANGE! Change the original file instead.
+from key_value.sync.code_gen.errors.base import BaseKeyValueError
+from key_value.sync.code_gen.errors.key_value import (
+    DeserializationError,
+    InvalidTTLError,
+    KeyValueOperationError,
+    MissingKeyError,
+    SerializationError,
+)
+from key_value.sync.code_gen.errors.store import KeyValueStoreError, StoreConnectionError, StoreSetupError
+
+__all__ = [
+    "BaseKeyValueError",
+    "DeserializationError",
+    "InvalidTTLError",
+    "KeyValueOperationError",
+    "KeyValueStoreError",
+    "MissingKeyError",
+    "SerializationError",
+    "StoreConnectionError",
+    "StoreSetupError",
+]
diff --git a/key-value/key-value-sync/src/key_value/sync/protocols/__init__.py b/key-value/key-value-sync/src/key_value/sync/protocols/__init__.py
new file mode 100644
index 00000000..1a152476
--- /dev/null
+++ b/key-value/key-value-sync/src/key_value/sync/protocols/__init__.py
@@ -0,0 +1,4 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file '__init__.py'
+# DO NOT CHANGE! Change the original file instead.
+from key_value.sync.code_gen.protocols.key_value import KeyValue as KeyValue
diff --git a/key-value/key-value-sync/src/key_value/sync/stores/disk/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/disk/__init__.py
new file mode 100644
index 00000000..68263221
--- /dev/null
+++ b/key-value/key-value-sync/src/key_value/sync/stores/disk/__init__.py
@@ -0,0 +1,7 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file '__init__.py'
+# DO NOT CHANGE! Change the original file instead.
+from key_value.sync.code_gen.stores.disk.multi_store import MultiDiskStore
+from key_value.sync.code_gen.stores.disk.store import DiskStore
+
+__all__ = ["DiskStore", "MultiDiskStore"]
diff --git a/key-value/key-value-sync/src/key_value/sync/stores/elasticsearch/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/elasticsearch/__init__.py
new file mode 100644
index 00000000..714d442f
--- /dev/null
+++ b/key-value/key-value-sync/src/key_value/sync/stores/elasticsearch/__init__.py
@@ -0,0 +1,6 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file '__init__.py'
+# DO NOT CHANGE!
Change the original file instead. +from key_value.sync.code_gen.stores.elasticsearch.store import ElasticsearchStore + +__all__ = ["ElasticsearchStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/stores/memory/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/memory/__init__.py new file mode 100644 index 00000000..a0028fd2 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/stores/memory/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.memory.store import MemoryStore + +__all__ = ["MemoryStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/stores/mongodb/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/mongodb/__init__.py new file mode 100644 index 00000000..6f0139ff --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/stores/mongodb/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.mongodb.store import MongoDBStore + +__all__ = ["MongoDBStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/stores/null/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/null/__init__.py new file mode 100644 index 00000000..f97eeae3 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/stores/null/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.null.store import NullStore + +__all__ = ["NullStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/stores/redis/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/redis/__init__.py new file mode 100644 index 00000000..c0f25a2a --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/stores/redis/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.redis.store import RedisStore + +__all__ = ["RedisStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/stores/simple/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/simple/__init__.py new file mode 100644 index 00000000..f93da560 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/stores/simple/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.stores.simple.store import SimpleStore + +__all__ = ["SimpleStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/stores/valkey/__init__.py b/key-value/key-value-sync/src/key_value/sync/stores/valkey/__init__.py new file mode 100644 index 00000000..d993f122 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/stores/valkey/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. 
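These store re-export modules make up the sync package's public import surface. A minimal consumer sketch under that assumption (store choice, collection, and key names are illustrative, not part of the patch):

    from key_value.sync.stores.memory import MemoryStore

    store = MemoryStore()
    store.put(collection="demo", key="k", value={"v": 1})
    assert store.get(collection="demo", key="k") == {"v": 1}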
+from key_value.sync.code_gen.stores.valkey.store import ValkeyStore + +__all__ = ["ValkeyStore"] diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/passthrough_cache/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/passthrough_cache/__init__.py new file mode 100644 index 00000000..7312b07c --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/passthrough_cache/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.passthrough_cache.wrapper import PassthroughCacheWrapper + +__all__ = ["PassthroughCacheWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/prefix_collections/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/prefix_collections/__init__.py new file mode 100644 index 00000000..32d0c5e1 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/prefix_collections/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.prefix_collections.wrapper import PrefixCollectionsWrapper + +__all__ = ["PrefixCollectionsWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/prefix_keys/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/prefix_keys/__init__.py new file mode 100644 index 00000000..e7153a57 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/prefix_keys/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.prefix_keys.wrapper import PrefixKeysWrapper + +__all__ = ["PrefixKeysWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/single_collection/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/single_collection/__init__.py new file mode 100644 index 00000000..81f319e1 --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/single_collection/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.single_collection.wrapper import SingleCollectionWrapper + +__all__ = ["SingleCollectionWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/statistics/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/statistics/__init__.py new file mode 100644 index 00000000..242bae1b --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/statistics/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! 
Change the original file instead. +from key_value.sync.code_gen.wrappers.statistics.wrapper import StatisticsWrapper + +__all__ = ["StatisticsWrapper"] diff --git a/key-value/key-value-sync/src/key_value/sync/wrappers/ttl_clamp/__init__.py b/key-value/key-value-sync/src/key_value/sync/wrappers/ttl_clamp/__init__.py new file mode 100644 index 00000000..fb9603fd --- /dev/null +++ b/key-value/key-value-sync/src/key_value/sync/wrappers/ttl_clamp/__init__.py @@ -0,0 +1,6 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.wrappers.ttl_clamp.wrapper import TTLClampWrapper + +__all__ = ["TTLClampWrapper"] diff --git a/key-value/key-value-sync/tests/code_gen/__init__.py b/key-value/key-value-sync/tests/code_gen/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/adapters/__init__.py b/key-value/key-value-sync/tests/code_gen/adapters/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/adapters/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/adapters/test_pydantic.py b/key-value/key-value-sync/tests/code_gen/adapters/test_pydantic.py new file mode 100644 index 00000000..5d3a9782 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/adapters/test_pydantic.py @@ -0,0 +1,76 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_pydantic.py' +# DO NOT CHANGE! Change the original file instead. 
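The two wrappers re-exported above compose around any KeyValue store. A minimal sketch using the constructors shown earlier in this patch (the TTL bounds and names are illustrative):

    from key_value.sync.stores.memory import MemoryStore
    from key_value.sync.wrappers.statistics import StatisticsWrapper
    from key_value.sync.wrappers.ttl_clamp import TTLClampWrapper

    base = MemoryStore()
    clamped = TTLClampWrapper(store=base, min_ttl=1, max_ttl=3600)  # TTLs outside [1, 3600] are clamped
    store = StatisticsWrapper(store=clamped)                        # counts per-collection hits and misses

    store.put(collection="users", key="alice", value={"role": "admin"}, ttl=999_999)  # stored with ttl=3600
    assert store.get(collection="users", key="alice") == {"role": "admin"}
    user_stats = store.statistics.get_collection(collection="users")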
+from datetime import datetime, timezone + +import pytest +from pydantic import AnyHttpUrl, BaseModel + +from key_value.sync.code_gen.adapters.pydantic import PydanticAdapter +from key_value.sync.code_gen.stores.memory.store import MemoryStore + + +class User(BaseModel): + name: str + age: int + email: str + + +class Product(BaseModel): + name: str + price: float + quantity: int + url: AnyHttpUrl + + +class Order(BaseModel): + created_at: datetime + updated_at: datetime + user: User + product: Product + paid: bool + + +FIXED_CREATED_AT: datetime = datetime(year=2021, month=1, day=1, hour=12, minute=0, second=0, tzinfo=timezone.utc) +FIXED_UPDATED_AT: datetime = datetime(year=2021, month=1, day=1, hour=15, minute=0, second=0, tzinfo=timezone.utc) + +SAMPLE_USER: User = User(name="John Doe", email="john.doe@example.com", age=30) +SAMPLE_PRODUCT: Product = Product(name="Widget", price=29.99, quantity=10, url=AnyHttpUrl(url="https://example.com")) +SAMPLE_ORDER: Order = Order(created_at=datetime.now(), updated_at=datetime.now(), user=SAMPLE_USER, product=SAMPLE_PRODUCT, paid=False) + + +class TestPydanticAdapter: + @pytest.fixture + def store(self) -> MemoryStore: + return MemoryStore() + + @pytest.fixture + def user_adapter(self, store: MemoryStore) -> PydanticAdapter[User]: + return PydanticAdapter[User](key_value=store, pydantic_model=User) + + @pytest.fixture + def product_adapter(self, store: MemoryStore) -> PydanticAdapter[Product]: + return PydanticAdapter[Product](key_value=store, pydantic_model=Product) + + @pytest.fixture + def order_adapter(self, store: MemoryStore) -> PydanticAdapter[Order]: + return PydanticAdapter[Order](key_value=store, pydantic_model=Order) + + def test_simple_adapter(self, user_adapter: PydanticAdapter[User]): + user_adapter.put(collection="test", key="test", value=SAMPLE_USER) + cached_user: User | None = user_adapter.get(collection="test", key="test") + assert cached_user == SAMPLE_USER + + assert user_adapter.delete(collection="test", key="test") + + cached_user = user_adapter.get(collection="test", key="test") + assert cached_user is None + + def test_complex_adapter(self, order_adapter: PydanticAdapter[Order]): + order_adapter.put(collection="test", key="test", value=SAMPLE_ORDER, ttl=10) + cached_order: Order | None = order_adapter.get(collection="test", key="test") + assert cached_order == SAMPLE_ORDER + + assert order_adapter.delete(collection="test", key="test") + cached_order = order_adapter.get(collection="test", key="test") + assert cached_order is None diff --git a/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py b/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py new file mode 100644 index 00000000..4be02fff --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py @@ -0,0 +1,40 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_raise.py' +# DO NOT CHANGE! Change the original file instead. 
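The PydanticAdapter round-trip exercised above, distilled into a minimal sketch (the Session model is invented for illustration; the adapter and store paths follow the generated modules in this patch):

    from pydantic import BaseModel

    from key_value.sync.code_gen.adapters.pydantic import PydanticAdapter
    from key_value.sync.stores.memory import MemoryStore

    class Session(BaseModel):
        token: str

    adapter = PydanticAdapter[Session](key_value=MemoryStore(), pydantic_model=Session)
    adapter.put(collection="sessions", key="s1", value=Session(token="abc"), ttl=60)
    restored = adapter.get(collection="sessions", key="s1")  # returns Session | None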
+import pytest + +from key_value.sync.code_gen.adapters.raise_on_missing import RaiseOnMissingAdapter +from key_value.sync.code_gen.errors import MissingKeyError +from key_value.sync.code_gen.stores.memory.store import MemoryStore + + +@pytest.fixture +def store() -> MemoryStore: + return MemoryStore() + + +@pytest.fixture +def adapter(store: MemoryStore) -> RaiseOnMissingAdapter: + return RaiseOnMissingAdapter(key_value=store) + + +def test_get(adapter: RaiseOnMissingAdapter): + adapter.put(collection="test", key="test", value={"test": "test"}) + assert adapter.get(collection="test", key="test") == {"test": "test"} + + +def test_get_missing(adapter: RaiseOnMissingAdapter): + with pytest.raises(MissingKeyError): + _ = adapter.get(collection="test", key="test", raise_on_missing=True) + + +def test_get_many(adapter: RaiseOnMissingAdapter): + adapter.put(collection="test", key="test", value={"test": "test"}) + adapter.put(collection="test", key="test_2", value={"test": "test_2"}) + assert adapter.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + + +def test_get_many_missing(adapter: RaiseOnMissingAdapter): + adapter.put(collection="test", key="test", value={"test": "test"}) + with pytest.raises(MissingKeyError): + _ = adapter.get_many(collection="test", keys=["test", "test_2"], raise_on_missing=True) diff --git a/key-value/key-value-sync/tests/code_gen/cases.py b/key-value/key-value-sync/tests/code_gen/cases.py new file mode 100644 index 00000000..0fffeebe --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/cases.py @@ -0,0 +1,64 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'cases.py' +# DO NOT CHANGE! Change the original file instead. +from datetime import datetime, timezone +from typing import Any + +FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) +FIXED_TIME = FIXED_DATETIME.time() + +LARGE_STRING: str = "a" * 10000 # 10KB +LARGE_INT: int = 1 * 10**18 # 18 digits +LARGE_FLOAT: float = 1.0 * 10**63 # 63 digits + +SIMPLE_CASE: dict[str, Any] = { + "key_1": "value_1", + "key_2": 1, + "key_3": 1.0, + "key_4": [1, 2, 3], + "key_5": {"nested": "value"}, + "key_6": True, + "key_7": False, + "key_8": None, +} + +SIMPLE_CASE_JSON: str = '{"key_1": "value_1", "key_2": 1, "key_3": 1.0, "key_4": [1, 2, 3], "key_5": {"nested": "value"}, "key_6": true, "key_7": false, "key_8": null}' + +# ({"key": (1, 2, 3)}, '{"key": [1, 2, 3]}'), +DICTIONARY_TO_JSON_TEST_CASES: list[tuple[dict[str, Any], str]] = [ + ({"key": "value"}, '{"key": "value"}'), + ({"key": 1}, '{"key": 1}'), + ({"key": 1.0}, '{"key": 1.0}'), + ({"key": [1, 2, 3]}, '{"key": [1, 2, 3]}'), + ({"key": {"nested": "value"}}, '{"key": {"nested": "value"}}'), + ({"key": True}, '{"key": true}'), + ({"key": False}, '{"key": false}'), + ({"key": None}, '{"key": null}'), + ( + {"key": {"int": 1, "float": 1.0, "list": [1, 2, 3], "dict": {"nested": "value"}, "bool": True, "null": None}}, + '{"key": {"int": 1, "float": 1.0, "list": [1, 2, 3], "dict": {"nested": "value"}, "bool": true, "null": null}}', + ), + ({"key": LARGE_STRING}, f'{{"key": "{LARGE_STRING}"}}'), + ({"key": LARGE_INT}, f'{{"key": {LARGE_INT}}}'), + ({"key": LARGE_FLOAT}, f'{{"key": {LARGE_FLOAT}}}'), +] + +# "tuple", +DICTIONARY_TO_JSON_TEST_CASES_NAMES: list[str] = [ + "string", + "int", + "float", + "list", + "dict", + "bool-true", + "bool-false", + "null", + "dict-nested", + "large-string", + "large-int", + "large-float", +] + +OBJECT_TEST_CASES: list[dict[str, 
Any]] = [test_case[0] for test_case in DICTIONARY_TO_JSON_TEST_CASES] + +JSON_TEST_CASES: list[str] = [test_case[1] for test_case in DICTIONARY_TO_JSON_TEST_CASES] diff --git a/key-value/key-value-sync/tests/code_gen/conftest.py b/key-value/key-value-sync/tests/code_gen/conftest.py new file mode 100644 index 00000000..6405f031 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/conftest.py @@ -0,0 +1,129 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'conftest.py' +# DO NOT CHANGE! Change the original file instead. +import asyncio +import logging +from collections.abc import Callable, Iterator +from contextlib import contextmanager + +import pytest +from docker import DockerClient + +logger = logging.getLogger(__name__) + +logging.basicConfig(level=logging.INFO) + + +@contextmanager +def try_import() -> Iterator[Callable[[], bool]]: + import_success = False + + def check_import() -> bool: + return import_success + + try: + yield check_import + except ImportError: + pass + else: + import_success = True + + +def get_docker_client() -> DockerClient: + return DockerClient.from_env() + + +@pytest.fixture(scope="session") +def docker_client() -> DockerClient: + return get_docker_client() + + +def docker_pull(image: str, raise_on_error: bool = False) -> bool: + logger.info(f"Pulling image {image}") + client = get_docker_client() + try: + client.images.pull(image) + except Exception: + logger.info(f"Image {image} failed to pull") + if raise_on_error: + raise + return False + return True + + +def docker_stop(name: str, raise_on_error: bool = False) -> bool: + logger.info(f"Stopping container {name}") + client = get_docker_client() + try: + client.containers.get(name).stop() + except Exception: + logger.info(f"Container {name} failed to stop") + if raise_on_error: + raise + return False + logger.info(f"Container {name} stopped") + return True + + +def docker_rm(name: str, raise_on_error: bool = False) -> bool: + logger.info(f"Removing container {name}") + client = get_docker_client() + try: + client.containers.get(container_id=name).remove() + except Exception: + logger.info(f"Container {name} failed to remove") + if raise_on_error: + raise + return False + logger.info(f"Container {name} removed") + return True + + +def docker_run(name: str, image: str, ports: dict[str, int], environment: dict[str, str], raise_on_error: bool = False) -> bool: + logger.info(f"Running container {name} with image {image} and ports {ports}") + client = get_docker_client() + try: + client.containers.run(name=name, image=image, ports=ports, environment=environment, detach=True) + except Exception: + logger.info(f"Container {name} failed to run") + if raise_on_error: + raise + return False + logger.info(f"Container {name} running") + return True + + +@contextmanager +def docker_container( + name: str, image: str, ports: dict[str, int], environment: dict[str, str] | None = None, raise_on_error: bool = True +) -> Iterator[None]: + logger.info(f"Creating container {name} with image {image} and ports {ports}") + try: + docker_pull(image=image, raise_on_error=True) + docker_stop(name=name, raise_on_error=False) + docker_rm(name=name, raise_on_error=False) + docker_run(name=name, image=image, ports=ports, environment=environment or {}, raise_on_error=True) + logger.info(f"Container {name} created") + yield + except Exception: + logger.info(f"Container {name} failed to create") + if raise_on_error: + raise + return + finally: + docker_stop(name, raise_on_error=False) + 
docker_rm(name, raise_on_error=False) + logger.info(f"Container {name} stopped and removed") + return + + +def async_running_in_event_loop() -> bool: + try: + asyncio.get_event_loop() + except RuntimeError: + return False + return True + + +def running_in_event_loop() -> bool: + return False diff --git a/key-value/key-value-sync/tests/code_gen/protocols/__init__.py b/key-value/key-value-sync/tests/code_gen/protocols/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/protocols/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/protocols/test_types.py b/key-value/key-value-sync/tests/code_gen/protocols/test_types.py new file mode 100644 index 00000000..2d4abd61 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/protocols/test_types.py @@ -0,0 +1,20 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_types.py' +# DO NOT CHANGE! Change the original file instead. +from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.stores.memory import MemoryStore + + +def test_key_value_protocol(): + def test_protocol(key_value: KeyValue): + assert key_value.get(collection="test", key="test") is None + key_value.put(collection="test", key="test", value={"test": "test"}) + assert key_value.delete(collection="test", key="test") + key_value.put(collection="test", key="test_2", value={"test": "test"}) + + memory_store = MemoryStore() + + test_protocol(key_value=memory_store) + + assert memory_store.get(collection="test", key="test") is None + assert memory_store.get(collection="test", key="test_2") == {"test": "test"} diff --git a/key-value/key-value-sync/tests/code_gen/stores/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/base/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/base/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/base/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/conftest.py b/key-value/key-value-sync/tests/code_gen/stores/conftest.py new file mode 100644 index 00000000..edea640a --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/conftest.py @@ -0,0 +1,287 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'conftest.py' +# DO NOT CHANGE! Change the original file instead. 
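The docker_container helper defined above wraps the pull/stop/remove/run lifecycle that the store suites below rely on. Typical use, sketched (image and port are examples):

    from tests.code_gen.conftest import docker_container

    # Pulls the image, clears any stale container of the same name, runs it
    # detached, and stops/removes it again when the block exits.
    with docker_container(name="redis-test", image="redis", ports={"6379": 6379}):
        ...  # run tests against localhost:6379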
+import hashlib
+import os
+import subprocess
+import sys
+from abc import ABC, abstractmethod
+from collections.abc import Callable, Generator
+from datetime import datetime, timedelta, timezone
+from typing import Any
+
+import pytest
+from dirty_equals import IsFloat
+from pydantic import AnyHttpUrl
+
+from key_value.sync.code_gen.errors import InvalidTTLError, SerializationError
+from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore
+from key_value.sync.code_gen.stores.memory.store import MemoryStore
+from key_value.sync.code_gen.utils.acompat import sleep
+from tests.code_gen.cases import DICTIONARY_TO_JSON_TEST_CASES_NAMES, OBJECT_TEST_CASES
+from tests.code_gen.conftest import running_in_event_loop
+
+
+@pytest.fixture
+def memory_store() -> MemoryStore:
+    return MemoryStore(max_entries_per_collection=500)
+
+
+def now() -> datetime:
+    return datetime.now(tz=timezone.utc)
+
+
+def now_plus(seconds: int) -> datetime:
+    return now() + timedelta(seconds=seconds)
+
+
+def is_around(value: float, expected: float, delta: float = 1) -> bool:
+    # True when `value` is within `delta` of `expected`.
+    return expected - delta < value < expected + delta
+
+
+def detect_docker() -> bool:
+    try:
+        result = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True)  # noqa: S607
+    except Exception:
+        return False
+    else:
+        return result.returncode == 0
+
+
+def detect_on_ci() -> bool:
+    return os.getenv("CI", "false") == "true"
+
+
+def detect_on_windows() -> bool:
+    return os.name == "nt"
+
+
+def detect_on_macos() -> bool:
+    # os.name reports "posix" on macOS, so check sys.platform instead.
+    return sys.platform == "darwin"
+
+
+def should_run_docker_tests() -> bool:
+    if detect_on_ci():
+        return all([detect_docker(), not detect_on_windows(), not detect_on_macos()])
+    return detect_docker()
+
+
+def should_skip_docker_tests() -> bool:
+    return not should_run_docker_tests()
+
+
+def wait_for_store(wait_fn: Callable[[], bool], max_time: int = 10) -> bool:
+    for _ in range(max_time):
+        if wait_fn():
+            return True
+        sleep(seconds=1)
+    return False
+
+
+class BaseStoreTests(ABC):
+    def eventually_consistent(self) -> None:  # noqa: B027
+        "Subclasses can override this to wait for eventually consistent operations."
+
+    @pytest.fixture
+    @abstractmethod
+    def store(self) -> BaseStore | Generator[BaseStore, None, None]: ...
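+    # Concrete suites override this fixture to supply the store under test, e.g. the
+    # memory suite below returns MemoryStore(max_entries_per_collection=500).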
+
+    # The first test requires a docker pull, so we only time the actual test
+
+    @pytest.mark.timeout(5, func_only=True)
+    def test_empty_get(self, store: BaseStore):
+        """Tests that the get method returns None from an empty store."""
+        assert store.get(collection="test", key="test") is None
+
+    def test_empty_put(self, store: BaseStore):
+        """Tests that the put method does not raise an exception when called on a new store."""
+        store.put(collection="test", key="test", value={"test": "test"})
+
+    def test_empty_ttl(self, store: BaseStore):
+        """Tests that the ttl method returns None from an empty store."""
+        assert store.ttl(collection="test", key="test") == (None, None)
+
+    def test_put_serialization_errors(self, store: BaseStore):
+        """Tests that the put method raises a SerializationError for values that cannot be serialized."""
+        with pytest.raises(SerializationError):
+            store.put(collection="test", key="test", value={"test": AnyHttpUrl("https://test.com")})
+
+    def test_get_put_get(self, store: BaseStore):
+        assert store.get(collection="test", key="test") is None
+        store.put(collection="test", key="test", value={"test": "test"})
+        assert store.get(collection="test", key="test") == {"test": "test"}
+
+    @pytest.mark.parametrize(argnames="value", argvalues=OBJECT_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES)
+    def test_get_complex_put_get(self, store: BaseStore, value: dict[str, Any]):
+        store.put(collection="test", key="test", value=value)
+        assert store.get(collection="test", key="test") == value
+
+    def test_put_many_get(self, store: BaseStore):
+        store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}])
+        assert store.get(collection="test", key="test") == {"test": "test"}
+        assert store.get(collection="test", key="test_2") == {"test": "test_2"}
+
+    def test_put_many_get_many(self, store: BaseStore):
+        store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}])
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}]
+
+    def test_put_put_get_many(self, store: BaseStore):
+        store.put(collection="test", key="test", value={"test": "test"})
+        store.put(collection="test", key="test_2", value={"test": "test_2"})
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}]
+
+    def test_put_put_get_many_missing_one(self, store: BaseStore):
+        store.put(collection="test", key="test", value={"test": "test"})
+        store.put(collection="test", key="test_2", value={"test": "test_2"})
+        assert store.get_many(collection="test", keys=["test", "test_2", "test_3"]) == [{"test": "test"}, {"test": "test_2"}, None]
+
+    def test_put_get_delete_get(self, store: BaseStore):
+        store.put(collection="test", key="test", value={"test": "test"})
+        assert store.get(collection="test", key="test") == {"test": "test"}
+        assert store.delete(collection="test", key="test")
+        assert store.get(collection="test", key="test") is None
+
+    def test_put_many_get_get_delete_many_get_many(self, store: BaseStore):
+        store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}])
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}]
+        assert store.delete_many(collection="test", keys=["test", "test_2"]) == 2
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None]
+
+    def test_put_many_get_many_delete_many_get_many(self, 
store: BaseStore):
+        store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}])
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}]
+        assert store.delete_many(collection="test", keys=["test", "test_2"]) == 2
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None]
+
+    def test_get_put_get_delete_get(self, store: BaseStore):
+        """Round-trips get, put, delete, and get against an initially empty store."""
+
+        assert store.get(collection="test", key="test") is None
+
+        store.put(collection="test", key="test", value={"test": "test"})
+
+        assert store.get(collection="test", key="test") == {"test": "test"}
+
+        assert store.delete(collection="test", key="test")
+
+        assert store.get(collection="test", key="test") is None
+
+    def test_get_put_get_put_delete_get(self, store: BaseStore):
+        """Round-trips get and put, overwrites the value, then deletes it and verifies the miss."""
+        store.put(collection="test", key="test", value={"test": "test"})
+        assert store.get(collection="test", key="test") == {"test": "test"}
+
+        store.put(collection="test", key="test", value={"test": "test_2"})
+
+        assert store.get(collection="test", key="test") == {"test": "test_2"}
+        assert store.delete(collection="test", key="test")
+        assert store.get(collection="test", key="test") is None
+
+    def test_put_many_delete_delete_get_many(self, store: BaseStore):
+        store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}])
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}]
+        assert store.delete(collection="test", key="test")
+        assert store.delete(collection="test", key="test_2")
+        assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None]
+
+    def test_put_ttl_get_ttl(self, store: BaseStore):
+        """Tests that the put and ttl methods work together to store and retrieve a value and its ttl."""
+        store.put(collection="test", key="test", value={"test": "test"}, ttl=100)
+        (value, ttl) = store.ttl(collection="test", key="test")
+
+        assert value == {"test": "test"}
+        assert ttl is not None
+        assert ttl == IsFloat(approx=100)
+
+    def test_negative_ttl(self, store: BaseStore):
+        """Tests that putting an entry with a negative ttl raises an InvalidTTLError."""
+        with pytest.raises(InvalidTTLError):
+            store.put(collection="test", key="test", value={"test": "test"}, ttl=-100)
+
+    @pytest.mark.timeout(10)
+    def test_put_expired_get_none(self, store: BaseStore):
+        """Tests that a put call with a short ttl returns None once the entry has expired."""
+        store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1)
+        sleep(seconds=3)
+        assert store.get(collection="test_collection", key="test_key") is None
+
+    def test_long_collection_name(self, store: BaseStore):
+        """Tests that a long collection name will not raise an error."""
+        store.put(collection="test_collection" * 100, key="test_key", value={"test": "test"})
+        assert store.get(collection="test_collection" * 100, key="test_key") == {"test": "test"}
+
+    def test_special_characters_in_collection_name(self, store: BaseStore):
+        """Tests that special characters in the collection name will not raise an error."""
+        store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"})
+        assert 
store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} + + def test_long_key_name(self, store: BaseStore): + """Tests that a long key name will not raise an error.""" + store.put(collection="test_collection", key="test_key" * 100, value={"test": "test"}) + assert store.get(collection="test_collection", key="test_key" * 100) == {"test": "test"} + + def test_special_characters_in_key_name(self, store: BaseStore): + """Tests that a special characters in the key name will not raise an error.""" + store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) + assert store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} + + @pytest.mark.timeout(20) + def test_not_unbounded(self, store: BaseStore): + """Tests that the store is not unbounded.""" + + for i in range(1000): + value = hashlib.sha256(f"test_{i}".encode()).hexdigest() + store.put(collection="test_collection", key=f"test_key_{i}", value={"test": value}) + + assert store.get(collection="test_collection", key="test_key_0") is None + assert store.get(collection="test_collection", key="test_key_999") is not None + + @pytest.mark.skipif(condition=not running_in_event_loop(), reason="Cannot run concurrent operations in event loop") + def test_concurrent_operations(self, store: BaseStore): + """Tests that the store can handle concurrent operations.""" + + def worker(store: BaseStore, worker_id: int): + for i in range(10): + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None + + store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}"}) + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}"} + + store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}_2"}) + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}_2"} + + assert store.delete(collection="test_collection", key=f"test_{worker_id}_{i}") + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None + + _ = asyncio.gather(*[worker(store, worker_id) for worker_id in range(5)]) + + @pytest.mark.timeout(15) + def test_minimum_put_many_get_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + store.put_many(collection="test_collection", keys=keys, values=values) + assert store.get_many(collection="test_collection", keys=keys) == values + + @pytest.mark.timeout(15) + def test_minimum_put_many_delete_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + store.put_many(collection="test_collection", keys=keys, values=values) + assert store.delete_many(collection="test_collection", keys=keys) == 10 + + +class ContextManagerStoreTestMixin: + @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) + def enter_exit_store( + self, request: pytest.FixtureRequest, store: BaseContextManagerStore + ) -> Generator[BaseContextManagerStore, None, None]: + context_manager = request.param # pyright: ignore[reportAny] + + if context_manager: + with store: + yield store + else: + yield store + store.close() diff --git 
a/key-value/key-value-sync/tests/code_gen/stores/disk/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/disk/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/disk/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py b/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py new file mode 100644 index 00000000..11428a0a --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py @@ -0,0 +1,27 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_disk.py' +# DO NOT CHANGE! Change the original file instead. +import tempfile +from collections.abc import Generator + +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.disk import DiskStore +from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin + +TEST_SIZE_LIMIT = 100 * 1024 # 100KB + + +class TestDiskStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(scope="session") + def disk_store(self) -> Generator[DiskStore, None, None]: + with tempfile.TemporaryDirectory() as temp_dir: + yield DiskStore(directory=temp_dir, max_size=TEST_SIZE_LIMIT) + + @override + @pytest.fixture + def store(self, disk_store: DiskStore) -> DiskStore: + disk_store._cache.clear() # pyright: ignore[reportPrivateUsage] + + return disk_store diff --git a/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py b/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py new file mode 100644 index 00000000..af7a6b22 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py @@ -0,0 +1,29 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_multi_disk.py' +# DO NOT CHANGE! Change the original file instead. +import tempfile +from collections.abc import Generator +from pathlib import Path + +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.disk.multi_store import MultiDiskStore +from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin + +TEST_SIZE_LIMIT = 100 * 1024 # 100KB + + +class TestMultiDiskStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(scope="session") + def multi_disk_store(self) -> Generator[MultiDiskStore, None, None]: + with tempfile.TemporaryDirectory() as temp_dir: + yield MultiDiskStore(base_directory=Path(temp_dir), max_size=TEST_SIZE_LIMIT) + + @override + @pytest.fixture + def store(self, multi_disk_store: MultiDiskStore) -> MultiDiskStore: + for collection in multi_disk_store._cache: # pyright: ignore[reportPrivateUsage] + multi_disk_store._cache[collection].clear() # pyright: ignore[reportPrivateUsage] + + return multi_disk_store diff --git a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! 
Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py new file mode 100644 index 00000000..d013d0ea --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py @@ -0,0 +1,64 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_elasticsearch.py' +# DO NOT CHANGE! Change the original file instead. +import os +from collections.abc import Generator + +import pytest +from elasticsearch import Elasticsearch +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseStore +from key_value.sync.code_gen.stores.elasticsearch import ElasticsearchStore +from tests.code_gen.conftest import docker_container +from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, wait_for_store + +TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB +ES_HOST = "localhost" +ES_PORT = 9200 +ES_URL = f"http://{ES_HOST}:{ES_PORT}" +ES_VERSION = "9.1.4" +ES_IMAGE = f"docker.elastic.co/elasticsearch/elasticsearch:{ES_VERSION}" + + +def get_elasticsearch_client() -> Elasticsearch: + return Elasticsearch(hosts=[ES_URL]) + + +def ping_elasticsearch() -> bool: + es_client: Elasticsearch = get_elasticsearch_client() + + return es_client.ping() + + +class ElasticsearchFailedToStartError(Exception): + pass + + +@pytest.mark.skipif(os.getenv("ES_URL") is None, reason="Elasticsearch is not configured") +class TestElasticsearchStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(autouse=True, scope="session") + def setup_elasticsearch(self) -> Generator[None, None, None]: + with docker_container( + "elasticsearch-test", ES_IMAGE, {"9200": 9200}, {"discovery.type": "single-node", "xpack.security.enabled": "false"} + ): + if not wait_for_store(wait_fn=ping_elasticsearch): + msg = "Elasticsearch failed to start" + raise ElasticsearchFailedToStartError(msg) + + yield + + @override + @pytest.fixture + def store(self) -> ElasticsearchStore: + es_client = get_elasticsearch_client() + _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") + return ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") + + @pytest.mark.skip(reason="Distributed Caches are unbounded") + @override + def test_not_unbounded(self, store: BaseStore): ... + + @pytest.mark.skip(reason="Skip concurrent tests on distributed caches") + @override + def test_concurrent_operations(self, store: BaseStore): ... diff --git a/key-value/key-value-sync/tests/code_gen/stores/memory/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/memory/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/memory/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py b/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py new file mode 100644 index 00000000..6c83981a --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py @@ -0,0 +1,15 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_memory.py' +# DO NOT CHANGE! Change the original file instead. 
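The Elasticsearch suite above constructs the store from a URL plus a dedicated index. The application-side equivalent, sketched (URL and index name are illustrative):

    from key_value.sync.stores.elasticsearch import ElasticsearchStore

    store = ElasticsearchStore(url="http://localhost:9200", index="kv-store")
    store.put(collection="demo", key="k", value={"v": 1}, ttl=300)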
+import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestMemoryStore(BaseStoreTests): + @override + @pytest.fixture + def store(self) -> MemoryStore: + return MemoryStore(max_entries_per_collection=500) diff --git a/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py b/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py new file mode 100644 index 00000000..b2d0993e --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py @@ -0,0 +1,75 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_mongodb.py' +# DO NOT CHANGE! Change the original file instead. +import contextlib +from collections.abc import Generator +from typing import Any + +import pytest +from inline_snapshot import snapshot +from pymongo import MongoClient +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseStore +from key_value.sync.code_gen.stores.mongodb import MongoDBStore +from tests.code_gen.conftest import docker_container +from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store + +# MongoDB test configuration +MONGODB_HOST = "localhost" +MONGODB_HOST_PORT = 27017 +MONGODB_TEST_DB = "kv-store-adapter-tests" + +WAIT_FOR_MONGODB_TIMEOUT = 30 + + +def ping_mongodb() -> bool: + try: + client: MongoClient[Any] = MongoClient[Any](host=MONGODB_HOST, port=MONGODB_HOST_PORT) + _ = client.list_database_names() + except Exception: + return False + + return True + + +class MongoDBFailedToStartError(Exception): + pass + + +@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not available") +class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(autouse=True, scope="session") + def setup_mongodb(self) -> Generator[None, None, None]: + with docker_container("mongodb-test", "mongo:7", {"27017": 27017}): + if not wait_for_store(wait_fn=ping_mongodb): + msg = "MongoDB failed to start" + raise MongoDBFailedToStartError(msg) + + yield + + @override + @pytest.fixture + def store(self, setup_mongodb: None) -> MongoDBStore: + store = MongoDBStore(url=f"mongodb://{MONGODB_HOST}:{MONGODB_HOST_PORT}", db_name=MONGODB_TEST_DB) + # Ensure a clean db by dropping our default test collection if it exists + with contextlib.suppress(Exception): + _ = store._client.drop_database(name_or_database=MONGODB_TEST_DB) # pyright: ignore[reportPrivateUsage] + + return store + + @pytest.fixture + def mongodb_store(self, store: MongoDBStore) -> MongoDBStore: + return store + + @pytest.mark.skip(reason="Distributed Caches are unbounded") + @override + def test_not_unbounded(self, store: BaseStore): ... 
+ + def test_mongodb_collection_name_sanitization(self, mongodb_store: MongoDBStore): + """Tests that a special characters in the collection name will not raise an error.""" + mongodb_store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"}) + assert mongodb_store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} + + collections = mongodb_store.collections() + assert collections == snapshot(["test_collection_-daf4a2ec"]) diff --git a/key-value/key-value-sync/tests/code_gen/stores/redis/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/redis/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/redis/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py b/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py new file mode 100644 index 00000000..ad49a248 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py @@ -0,0 +1,82 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_redis.py' +# DO NOT CHANGE! Change the original file instead. +from collections.abc import Generator + +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.base import BaseStore +from key_value.sync.code_gen.stores.redis import RedisStore +from tests.code_gen.conftest import docker_container, docker_stop +from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store + +# Redis test configuration +REDIS_HOST = "localhost" +REDIS_PORT = 6379 +REDIS_DB = 15 # Use a separate database for tests + +WAIT_FOR_REDIS_TIMEOUT = 30 + + +def ping_redis() -> bool: + from redis import Redis + + client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) + try: + return client.ping() # pyright: ignore[reportUnknownMemberType, reportAny, reportReturnType] + except Exception: + return False + + +class RedisFailedToStartError(Exception): + pass + + +@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running") +class TestRedisStore(ContextManagerStoreTestMixin, BaseStoreTests): + @pytest.fixture(autouse=True, scope="session") + def setup_redis(self) -> Generator[None, None, None]: + # Double-check that the Valkey test container is stopped + docker_stop("valkey-test", raise_on_error=False) + + with docker_container("redis-test", "redis", {"6379": 6379}): + if not wait_for_store(wait_fn=ping_redis): + msg = "Redis failed to start" + raise RedisFailedToStartError(msg) + + yield + + @override + @pytest.fixture + def store(self, setup_redis: RedisStore) -> RedisStore: + """Create a Redis store for testing.""" + # Create the store with test database + redis_store = RedisStore(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB) + _ = redis_store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType, reportAny] + return redis_store + + def test_redis_url_connection(self): + """Test Redis store creation with URL.""" + redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}" + store = RedisStore(url=redis_url) + _ = store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType, reportAny] + 
store.put(collection="test", key="url_test", value={"test": "value"}) + result = store.get(collection="test", key="url_test") + assert result == {"test": "value"} + + def test_redis_client_connection(self): + """Test Redis store creation with existing client.""" + from redis import Redis + + client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) + store = RedisStore(client=client) + + _ = store._client.flushdb() # pyright: ignore[reportPrivateUsage, reportUnknownMemberType, reportAny] + store.put(collection="test", key="client_test", value={"test": "value"}) + result = store.get(collection="test", key="client_test") + assert result == {"test": "value"} + + @pytest.mark.skip(reason="Distributed Caches are unbounded") + @override + def test_not_unbounded(self, store: BaseStore): ... diff --git a/key-value/key-value-sync/tests/code_gen/stores/simple/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/simple/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/simple/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py b/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py new file mode 100644 index 00000000..0f869c6c --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py @@ -0,0 +1,15 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_store.py' +# DO NOT CHANGE! Change the original file instead. +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.simple.store import SimpleStore +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestSimpleStore(BaseStoreTests): + @override + @pytest.fixture + def store(self) -> SimpleStore: + return SimpleStore(max_entries=500) diff --git a/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py b/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py new file mode 100644 index 00000000..f4b988c9 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py @@ -0,0 +1,79 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_valkey.py' +# DO NOT CHANGE! Change the original file instead. 
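The Redis suite above covers three ways of constructing the store: host/port/db, a URL, and an existing client. Sketched side by side (connection details are illustrative):

    from redis import Redis

    from key_value.sync.stores.redis import RedisStore

    by_host = RedisStore(host="localhost", port=6379, db=15)
    by_url = RedisStore(url="redis://localhost:6379/15")
    by_client = RedisStore(client=Redis(host="localhost", port=6379, db=15, decode_responses=True))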
+from collections.abc import Generator
+
+import pytest
+from typing_extensions import override
+
+from key_value.sync.code_gen.stores.base import BaseStore
+from tests.code_gen.conftest import docker_container, docker_stop
+from tests.code_gen.stores.conftest import (
+    BaseStoreTests,
+    ContextManagerStoreTestMixin,
+    detect_on_windows,
+    should_skip_docker_tests,
+    wait_for_store,
+)
+
+# Valkey test configuration
+VALKEY_HOST = "localhost"
+VALKEY_PORT = 6379  # shares port 6379 with the Redis tests, so the redis-test container is stopped first
+VALKEY_DB = 15
+
+WAIT_FOR_VALKEY_TIMEOUT = 30
+
+
+class ValkeyFailedToStartError(Exception):
+    pass
+
+
+@pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running")
+@pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows")
+class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests):
+    def get_valkey_client(self):
+        from glide_shared.config import GlideClientConfiguration, NodeAddress
+        from glide_sync.glide_client import GlideClient
+
+        client_config: GlideClientConfiguration = GlideClientConfiguration(
+            addresses=[NodeAddress(host=VALKEY_HOST, port=VALKEY_PORT)], database_id=VALKEY_DB
+        )
+        return GlideClient.create(config=client_config)
+
+    def ping_valkey(self) -> bool:
+        try:
+            client = self.get_valkey_client()
+            _ = client.ping()
+        except Exception:
+            return False
+
+        return True
+
+    @pytest.fixture(scope="session")
+    def setup_valkey(self) -> Generator[None, None, None]:
+        # Double-check that the Redis test container is stopped
+        docker_stop("redis-test", raise_on_error=False)
+
+        with docker_container("valkey-test", "valkey/valkey:latest", {"6379": 6379}):
+            if not wait_for_store(wait_fn=self.ping_valkey):
+                msg = "Valkey failed to start"
+                raise ValkeyFailedToStartError(msg)
+
+            yield
+
+    @override
+    @pytest.fixture
+    def store(self, setup_valkey: None):
+        from key_value.sync.code_gen.stores.valkey import ValkeyStore
+
+        store: ValkeyStore = ValkeyStore(host=VALKEY_HOST, port=VALKEY_PORT, db=VALKEY_DB)
+
+        # This is a synchronous client
+        client = self.get_valkey_client()
+        _ = client.flushdb()
+
+        return store
+
+    @pytest.mark.skip(reason="Distributed Caches are unbounded")
+    @override
+    def test_not_unbounded(self, store: BaseStore): ...
diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/__init__.py
new file mode 100644
index 00000000..b1835176
--- /dev/null
+++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/__init__.py
@@ -0,0 +1,4 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file '__init__.py'
+# DO NOT CHANGE! Change the original file instead.
+
diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py
new file mode 100644
index 00000000..56bda02f
--- /dev/null
+++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py
@@ -0,0 +1,51 @@
+# WARNING: this file is auto-generated by 'build_sync_library.py'
+# from the original file 'test_clamp_ttl.py'
+# DO NOT CHANGE! Change the original file instead. 
+import pytest +from dirty_equals import IsFloat +from typing_extensions import override + +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from key_value.sync.code_gen.wrappers.ttl_clamp import TTLClampWrapper +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestTTLClampWrapper(BaseStoreTests): + @override + @pytest.fixture + def store(self, memory_store: MemoryStore) -> TTLClampWrapper: + return TTLClampWrapper(store=memory_store, min_ttl=0, max_ttl=100) + + def test_put_below_min_ttl(self, memory_store: MemoryStore): + ttl_clamp_store: TTLClampWrapper = TTLClampWrapper(store=memory_store, min_ttl=50, max_ttl=100) + + ttl_clamp_store.put(collection="test", key="test", value={"test": "test"}, ttl=5) + assert ttl_clamp_store.get(collection="test", key="test") is not None + + (value, ttl) = ttl_clamp_store.ttl(collection="test", key="test") + assert value is not None + assert ttl is not None + assert ttl == IsFloat(approx=50) + + def test_put_above_max_ttl(self, memory_store: MemoryStore): + ttl_clamp_store: TTLClampWrapper = TTLClampWrapper(store=memory_store, min_ttl=0, max_ttl=100) + + ttl_clamp_store.put(collection="test", key="test", value={"test": "test"}, ttl=1000) + assert ttl_clamp_store.get(collection="test", key="test") is not None + + (value, ttl) = ttl_clamp_store.ttl(collection="test", key="test") + assert value is not None + assert ttl is not None + assert ttl == IsFloat(approx=100) + + def test_put_missing_ttl(self, memory_store: MemoryStore): + ttl_clamp_store: TTLClampWrapper = TTLClampWrapper(store=memory_store, min_ttl=0, max_ttl=100, missing_ttl=50) + + ttl_clamp_store.put(collection="test", key="test", value={"test": "test"}, ttl=None) + assert ttl_clamp_store.get(collection="test", key="test") is not None + + (value, ttl) = ttl_clamp_store.ttl(collection="test", key="test") + assert value is not None + assert ttl is not None + + assert ttl == IsFloat(approx=50) diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py new file mode 100644 index 00000000..4724f694 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py @@ -0,0 +1,32 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_passthrough_cache.py' +# DO NOT CHANGE! Change the original file instead. 
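+# Editor's note: a sketch of the read path these tests assume -- cache first,
+# then primary, populating the cache on a miss (illustrative only, not the
+# wrapper's actual code):
+#
+#     def get(self, *, collection: str, key: str):
+#         if (hit := self.cache_store.get(collection=collection, key=key)) is not None:
+#             return hit
+#         value = self.primary_store.get(collection=collection, key=key)
+#         if value is not None:
+#             self.cache_store.put(collection=collection, key=key, value=value)
+#         return value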
+import tempfile +from collections.abc import Generator + +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.disk.store import DiskStore +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from key_value.sync.code_gen.wrappers.passthrough_cache import PassthroughCacheWrapper +from tests.code_gen.stores.conftest import BaseStoreTests + +DISK_STORE_SIZE_LIMIT = 100 * 1024 # 100KB + + +class TestPassthroughCacheWrapper(BaseStoreTests): + @pytest.fixture(scope="session") + def primary_store(self) -> Generator[DiskStore, None, None]: + with tempfile.TemporaryDirectory() as temp_dir, DiskStore(directory=temp_dir, max_size=DISK_STORE_SIZE_LIMIT) as disk_store: + yield disk_store + + @pytest.fixture + def cache_store(self, memory_store: MemoryStore) -> MemoryStore: + return memory_store + + @override + @pytest.fixture + def store(self, primary_store: DiskStore, cache_store: MemoryStore) -> PassthroughCacheWrapper: + primary_store._cache.clear() # pyright: ignore[reportPrivateUsage] + return PassthroughCacheWrapper(primary_store=primary_store, cache_store=cache_store) diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py new file mode 100644 index 00000000..85b2e89b --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py @@ -0,0 +1,16 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_prefix_collection.py' +# DO NOT CHANGE! Change the original file instead. +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from key_value.sync.code_gen.wrappers.prefix_collections import PrefixCollectionsWrapper +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestPrefixCollectionWrapper(BaseStoreTests): + @override + @pytest.fixture + def store(self, memory_store: MemoryStore) -> PrefixCollectionsWrapper: + return PrefixCollectionsWrapper(store=memory_store, prefix="collection_prefix") diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py new file mode 100644 index 00000000..15b94f41 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py @@ -0,0 +1,16 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_prefix_key.py' +# DO NOT CHANGE! Change the original file instead. 
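+# Editor's note: PrefixKeysWrapper is expected to rewrite every key before
+# delegating to the wrapped store, e.g. key "k" becomes something like
+# "key_prefix:k" (the exact separator is an implementation detail).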
+import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from key_value.sync.code_gen.wrappers.prefix_keys import PrefixKeysWrapper +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestPrefixKeyWrapper(BaseStoreTests): + @override + @pytest.fixture + def store(self, memory_store: MemoryStore) -> PrefixKeysWrapper: + return PrefixKeysWrapper(store=memory_store, prefix="key_prefix") diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py new file mode 100644 index 00000000..16f4d63b --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py @@ -0,0 +1,16 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_single_collection.py' +# DO NOT CHANGE! Change the original file instead. +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from key_value.sync.code_gen.wrappers.single_collection import SingleCollectionWrapper +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestSingleCollectionWrapper(BaseStoreTests): + @override + @pytest.fixture + def store(self, memory_store: MemoryStore) -> SingleCollectionWrapper: + return SingleCollectionWrapper(store=memory_store, single_collection="test") diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py new file mode 100644 index 00000000..90e8bc75 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py @@ -0,0 +1,16 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_statistics.py' +# DO NOT CHANGE! Change the original file instead. +import pytest +from typing_extensions import override + +from key_value.sync.code_gen.stores.memory.store import MemoryStore +from key_value.sync.code_gen.wrappers.statistics import StatisticsWrapper +from tests.code_gen.stores.conftest import BaseStoreTests + + +class TestStatisticsWrapper(BaseStoreTests): + @override + @pytest.fixture + def store(self, memory_store: MemoryStore) -> StatisticsWrapper: + return StatisticsWrapper(store=memory_store) diff --git a/key-value/key-value-sync/tests/code_gen/utils/__init__.py b/key-value/key-value-sync/tests/code_gen/utils/__init__.py new file mode 100644 index 00000000..b1835176 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/utils/__init__.py @@ -0,0 +1,4 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file '__init__.py' +# DO NOT CHANGE! Change the original file instead. + diff --git a/key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py b/key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py new file mode 100644 index 00000000..60e55744 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py @@ -0,0 +1,30 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_managed_entry.py' +# DO NOT CHANGE! Change the original file instead. 
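+# Editor's note: the invariant under test is a lossless round trip between
+# dictionaries and their canonical JSON form:
+#
+#     dumped = dump_to_json(obj)
+#     assert load_from_json(dumped) == obj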
+from datetime import datetime, timezone +from typing import Any + +import pytest + +from key_value.sync.code_gen.utils.managed_entry import dump_to_json, load_from_json +from tests.code_gen.cases import DICTIONARY_TO_JSON_TEST_CASES, DICTIONARY_TO_JSON_TEST_CASES_NAMES + +FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) +FIXED_DATETIME_STRING = FIXED_DATETIME.isoformat() + + +@pytest.mark.parametrize(argnames=("obj", "expected"), argvalues=DICTIONARY_TO_JSON_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) +def test_dump_to_json(obj: dict[str, Any], expected: str): + assert dump_to_json(obj) == expected + + +@pytest.mark.parametrize(argnames=("obj", "expected"), argvalues=DICTIONARY_TO_JSON_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) +def test_load_from_json(obj: dict[str, Any], expected: str): + assert load_from_json(expected) == obj + + +@pytest.mark.parametrize(argnames=("obj", "expected"), argvalues=DICTIONARY_TO_JSON_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) +def test_roundtrip_json(obj: dict[str, Any], expected: str): + dumped_json: str = dump_to_json(obj) + assert dumped_json == expected + assert load_from_json(dumped_json) == obj diff --git a/key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py b/key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py new file mode 100644 index 00000000..be11c580 --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py @@ -0,0 +1,82 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'test_sanitize.py' +# DO NOT CHANGE! Change the original file instead. +import pytest +from inline_snapshot import snapshot + +from key_value.sync.code_gen.utils.sanitize import ( + ALPHANUMERIC_CHARACTERS, + LOWERCASE_ALPHABET, + NUMBERS, + UPPERCASE_ALPHABET, + HashFragmentMode, + sanitize_string, +) + +ALWAYS_HASH = HashFragmentMode.ALWAYS +ONLY_IF_CHANGED_HASH = HashFragmentMode.ONLY_IF_CHANGED +NEVER_HASH = HashFragmentMode.NEVER + + +def test_sanitize_string(): + sanitized_string = sanitize_string(value="test string", max_length=16) + assert sanitized_string == snapshot("test string") + + sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=ALWAYS_HASH) + assert sanitized_string == snapshot("test st-d5579c46") + + sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=ONLY_IF_CHANGED_HASH) + assert sanitized_string == snapshot("test string") + + sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=NEVER_HASH) + assert sanitized_string == snapshot("test string") + + +@pytest.mark.parametrize(argnames="hash_fragment_mode", argvalues=[ONLY_IF_CHANGED_HASH, NEVER_HASH]) +@pytest.mark.parametrize(argnames="max_length", argvalues=[16, 32]) +@pytest.mark.parametrize( + argnames=("value", "allowed_chars"), + argvalues=[ + ("test", None), + ("test", "test"), + ("test_test", "test_"), + ("!@#$%^&*()", "!@#$%^&*()"), + ("test", LOWERCASE_ALPHABET), + ("test", ALPHANUMERIC_CHARACTERS), + ], +) +def test_unchanged_strings(value: str, allowed_chars: str | None, max_length: int, hash_fragment_mode: HashFragmentMode): + sanitized_string = sanitize_string( + value=value, allowed_characters=allowed_chars, max_length=max_length, hash_fragment_mode=hash_fragment_mode + ) + assert sanitized_string == value + + +@pytest.mark.parametrize(argnames="hash_fragment_mode", argvalues=[ONLY_IF_CHANGED_HASH, ALWAYS_HASH]) +def 
test_changed_strings(hash_fragment_mode: HashFragmentMode): + def process_string(value: str, allowed_characters: str | None) -> str: + return sanitize_string(value=value, allowed_characters=allowed_characters, max_length=16, hash_fragment_mode=hash_fragment_mode) + + sanitized_string = process_string(value="test", allowed_characters=NUMBERS) + assert sanitized_string == snapshot("9f86d081") + + sanitized_string = process_string(value="test", allowed_characters=UPPERCASE_ALPHABET) + assert sanitized_string == snapshot("9f86d081") + + sanitized_string = process_string(value="test with spaces", allowed_characters=LOWERCASE_ALPHABET) + assert sanitized_string == snapshot("test_wi-ed2daf39") + + sanitized_string = process_string(value="test too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) + assert sanitized_string == snapshot("test_to-479b94c3") + + sanitized_string = process_string(value="test too long with spaces", allowed_characters=None) + assert sanitized_string == snapshot("test to-479b94c3") + + sanitized_string = process_string(value="test too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) + assert sanitized_string == snapshot("test_to-479b94c3") + + sanitized_string = process_string(value="test way too long with spaces", allowed_characters=None) + assert sanitized_string == snapshot("test wa-3d014b9b") + + sanitized_string = process_string(value="test way too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) + assert sanitized_string == snapshot("test_wa-3d014b9b") diff --git a/py-key-value.code-workspace b/py-key-value.code-workspace new file mode 100644 index 00000000..8281d9c4 --- /dev/null +++ b/py-key-value.code-workspace @@ -0,0 +1,17 @@ +{ + "folders": [ + { + "name": "key-value-aio", + "path": "key-value/key-value-aio", + }, + { + "name": "key-value-sync", + "path": "key-value/key-value-sync" + }, + { + "name": "root", + "path": "." + } + ], + "settings": {} +} \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 40fcdd9e..faed800d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,7 @@ requires-python = ">=3.10" [tool.uv.workspace] members = [ "key-value/key-value-aio", + "key-value/key-value-sync", ] [tool.pyright] @@ -79,3 +80,6 @@ line-length = 140 dev = [ "docker>=7.1.0", ] + +[tool.pytest.ini_options] +consider_namespace_packages = true diff --git a/scripts/build_sync_library.py b/scripts/build_sync_library.py new file mode 100644 index 00000000..db22e116 --- /dev/null +++ b/scripts/build_sync_library.py @@ -0,0 +1,626 @@ +#!/usr/bin/env python +"""Convert async code in the project to sync code. + +Note: the version of Python used to run this script affects the output. + +Hint: in order to explore the AST of a module you can run: + + python -m ast path/to/module.py + +""" + +from __future__ import annotations + +import logging +import subprocess as sp +import sys +from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter +from collections.abc import Sequence +from copy import deepcopy +from dataclasses import dataclass, field +from pathlib import Path +from typing import TYPE_CHECKING, Any, ClassVar, Literal + +import ast_comments as ast # type: ignore +from typing_extensions import override + +if TYPE_CHECKING: + from collections.abc import Sequence + + from ast_comments import AST + +# The version of Python officially used for the conversion. +# Output may differ in other versions. 
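+# (For instance, the AST unparser's formatting rules change between CPython
+# minor versions, so regenerating with a different interpreter can produce
+# textually different, though semantically equivalent, files.)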
+# Should be consistent with the Python version used in lint.yml
+PYVER = "3.10"
+
+PROJECT_ROOT = Path(__file__).parent.parent
+ASYNC_PROJECT_DIR = PROJECT_ROOT / "key-value" / "key-value-aio"
+ASYNC_PROJECT_MODULE_DIR = ASYNC_PROJECT_DIR / "src" / "key_value" / "aio"
+ASYNC_PROJECT_TESTS_DIR = ASYNC_PROJECT_DIR / "tests"
+
+SYNC_PROJECT_DIR = PROJECT_ROOT / "key-value" / "key-value-sync"
+SYNC_PROJECT_MODULE_DIR = SYNC_PROJECT_DIR / "src" / "key_value" / "sync"
+SYNC_PROJECT_CODE_GEN_DIR = SYNC_PROJECT_MODULE_DIR / "code_gen"
+SYNC_PROJECT_TESTS_DIR = SYNC_PROJECT_DIR / "tests"
+SYNC_PROJECT_TESTS_CODE_GEN_DIR = SYNC_PROJECT_TESTS_DIR / "code_gen"
+
+PATHS_TO_LINT = [SYNC_PROJECT_MODULE_DIR, SYNC_PROJECT_TESTS_DIR]
+EXCLUDE_FILES = ["key-value/key-value-aio/src/key_value/aio/__init__.py"]
+EXCLUDE_DIRECTORIES = ["key-value/key-value-aio/src/key_value/aio/stores/memcached", "key-value/key-value-aio/tests/stores/memcached"]
+
+SCRIPT_NAME = Path(sys.argv[0]).name
+
+logger = logging.getLogger()
+
+
+def main() -> int:
+    options: ConversionOptions = parse_cmdline()
+
+    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")
+
+    if not check_python_version(version=options.python_version):
+        msg = f"Expecting output generated by Python {options.python_version}, but running under {get_python_version()}."
+        raise RuntimeError(msg)
+
+    if options.wipe_code_gen_dir:
+        logger.info("Wiping code gen directory: %s", SYNC_PROJECT_CODE_GEN_DIR)
+        # Rather than `shutil.rmtree(SYNC_PROJECT_CODE_GEN_DIR)`, delete only the
+        # files that carry the auto-generated header, leaving hand-written files intact:
+        for file in SYNC_PROJECT_MODULE_DIR.rglob("*.py"):
+            if file_has_header(filepath=file):
+                file.unlink()
+
+        for file in SYNC_PROJECT_TESTS_DIR.rglob("*.py"):
+            if file_has_header(filepath=file):
+                file.unlink()
+
+    for file_to_convert in options.files_to_convert:
+        for output_path in file_to_convert.output_path:
+            logger.info("Starting to convert: %s", file_to_convert.input_path)
+            convert(source_path=file_to_convert.input_path, output_path=output_path)
+            logger.info("Finished converting: %s", output_path)
+
+    for path in options.paths_to_lint:
+        logger.info("Starting to lint: %s", path)
+        lint(path=path)
+        logger.info("Finished linting: %s", path)
+
+    return 0
+
+
+def lint(path: Path) -> None:
+    """Call the linting tool on the given path."""
+    _ = sp.check_call(["uv", "run", "ruff", "format", "-q", str(path)])  # noqa: S603, S607
+    _ = sp.check_call(["uv", "run", "ruff", "check", "--fix", str(path)])  # noqa: S603, S607
+
+
+def convert(source_path: Path, output_path: Path) -> None:
+    """Convert the given source path to the given output path."""
+    with source_path.open() as f:
+        source = f.read()
+
+    tree: AST = ast.parse(source=source, filename=str(object=source_path))  # pyright: ignore[reportUnknownMemberType]
+    tree = async_to_sync(tree=tree, filepath=source_path)
+    output: str = tree_to_str(tree=tree, filepath=source_path)
+
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+
+    with output_path.open("w") as f:
+        print(output, file=f)
+
+
+def async_to_sync(tree: ast.AST, filepath: Path | None = None) -> ast.AST:  # noqa: ARG001
+    tree = BlanksInserter().visit(tree)
+    tree = RenameAsyncToSync().visit(tree)
+    tree = AsyncToSync().visit(tree)
+    return tree  # noqa: RET504
+
+
+def file_has_header(filepath: Path) -> bool:
+    return filepath.read_text().startswith("# WARNING: this file is auto-generated by")
+
+
+def tree_to_str(tree: ast.AST, filepath: Path) -> str:
+    rv = f"""\
+# WARNING: this file is auto-generated by '{SCRIPT_NAME}'
+# from the
original file '{filepath.name}'
+# DO NOT CHANGE! Change the original file instead.
+"""
+    rv += unparse(tree)
+    return rv
+
+
+class AsyncToSync(ast.NodeTransformer):  # type: ignore
+    def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST:  # noqa: N802
+        new_node: ast.FunctionDef = ast.FunctionDef(**node.__dict__)
+        ast.copy_location(new_node, old_node=node)
+        self.visit(node=new_node)
+        return new_node
+
+    def visit_AsyncFor(self, node: ast.AsyncFor) -> ast.AST:  # noqa: N802
+        new_node: ast.For = ast.For(**node.__dict__)
+        ast.copy_location(new_node, old_node=node)
+        self.visit(node=new_node)
+        return new_node
+
+    def visit_AsyncWith(self, node: ast.AsyncWith) -> ast.AST:  # noqa: N802
+        new_node: ast.With = ast.With(**node.__dict__)
+        ast.copy_location(new_node, old_node=node)
+        self.visit(node=new_node)
+        return new_node
+
+    def visit_Await(self, node: ast.Await) -> ast.AST:  # noqa: N802
+        new_node: ast.expr = node.value
+        self.visit(new_node)
+        return new_node
+
+    def visit_If(self, node: ast.If) -> ast.AST:  # noqa: N802
+        # Drop `if is_async()` branch.
+        #
+        # Assume that the test guards an async object becoming sync and remove
+        # the async side, because it will likely contain `await` constructs
+        # illegal in a sync function. For example, with:
+        #
+        #     if True:  # ASYNC
+        #         await asleep(1)
+        #     else:
+        #         sleep(1)
+        #
+        # only the `sleep(1)` arm survives in the generated sync code.
+        value: bool
+        comment: str
+        match node:
+            # manage `is_async()`
+            case ast.If(test=ast.Call(func=ast.Name(id="is_async"))):
+                for child in node.orelse:
+                    self.visit(child)
+                return node.orelse
+
+            # Manage `if True|False:  # ASYNC`
+            # drop the unneeded branch
+            case ast.If(  # pyright: ignore[reportUnnecessaryComparison]
+                test=ast.Constant(value=bool(value)),
+                body=[ast.Comment(value=comment), *_],  # pyright: ignore[reportUnknownVariableType]
+            ) if comment.startswith("# ASYNC"):
+                stmts: list[ast.AST]
+                # body[0] is the ASYNC comment, drop it
+                stmts = node.orelse if value else node.body[1:]
+                for child in stmts:
+                    self.visit(child)
+                return stmts
+            case _:
+                pass
+
+        self.generic_visit(node)
+        return node
+
+
+class RenameAsyncToSync(ast.NodeTransformer):  # type: ignore
+    module_map: ClassVar[dict[str, str]] = {
+        "key_value.aio": "key_value.sync.code_gen",
+        "pymongo.asynchronous": "pymongo",
+        "tests.stores.conftest": "tests.code_gen.stores.conftest",
+        "tests.conftest": "tests.code_gen.conftest",
+        "tests.cases": "tests.code_gen.cases",
+    }
+    names_map: ClassVar[dict[str, str]] = {
+        "__aenter__": "__enter__",
+        "__aexit__": "__exit__",
+        "__aiter__": "__iter__",
+        "asyncio.locks": "threading",
+        "AsyncElasticsearch": "Elasticsearch",
+        "AsyncDatabase": "Database",
+        "AsyncCollection": "Collection",
+        "AsyncMongoClient": "MongoClient",
+        "redis.asyncio": "redis",
+        "glide.glide_client": "glide_sync.glide_client",
+        "asynccontextmanager": "contextmanager",
+        "AsyncKeyValueProtocol": "KeyValueProtocol",
+        "AsyncCullProtocol": "CullProtocol",
+        "AsyncDestroyCollectionProtocol": "DestroyCollectionProtocol",
+        "AsyncDestroyStoreProtocol": "DestroyStoreProtocol",
+        "AsyncEnumerateCollectionsProtocol": "EnumerateCollectionsProtocol",
+        "AsyncEnumerateKeysProtocol": "EnumerateKeysProtocol",
+        "AsyncKeyValue": "KeyValue",
+        "AsyncGenerator": "Generator",
+        "asyncio.sleep": "time.sleep",
+        "async_running_in_event_loop": "running_in_event_loop",
+        "asleep": "sleep",
+    }
+    _skip_imports: ClassVar[dict[str, set[str]]] = {
+        "acompat": {"alist", "anext"},
+    }
+
+    def visit_Module(self, node: ast.Module) -> ast.AST:  # noqa: N802
+        self._fix_docstring(node.body)
+        self.generic_visit(node)
+        return node
+
+    def visit_AsyncFunctionDef(self, node: 
ast.AsyncFunctionDef) -> ast.AST:  # noqa: N802
+        self._fix_docstring(node.body)
+        node.name = self.names_map.get(node.name, node.name)
+        for arg in node.args.args:
+            arg.arg = self.names_map.get(arg.arg, arg.arg)
+        for arg in node.args.args:
+            attr: str
+            match arg:
+                case ast.arg(annotation=ast.Attribute(attr=attr)):
+                    arg.annotation.attr = self.names_map.get(attr, attr)
+                case ast.arg(annotation=ast.Subscript(value=ast.Attribute(attr=attr))):
+                    arg.annotation.value.attr = self.names_map.get(attr, attr)
+
+        self.generic_visit(node)
+        return node
+
+    @override
+    def visit_Call(self, node: ast.Call) -> ast.AST:
+        # match node:
+        #     case ast.Call(func=ast.Name(id="cast")):
+        #         node.args[0] = self._convert_if_literal_string(node.args[0])
+
+        _ = self.generic_visit(node)
+        return node
+
+    @override
+    def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.AST:
+        self._fix_docstring(node.body)
+        if node.decorator_list:
+            self._fix_decorator(node.decorator_list)
+        _ = self.generic_visit(node)
+        return node
+
+    def _fix_docstring(self, body: Sequence[ast.AST]) -> None:
+        doc: str
+        match body and body[0]:
+            case ast.Expr(value=ast.Constant(value=str(doc))):
+                doc = doc.replace("Async", "")
+                doc = doc.replace("(async", "(sync")
+                body[0].value.value = doc
+
+    def _fix_decorator(self, decorator_list: Sequence[ast.AST]) -> None:
+        for dec in decorator_list:
+            match dec:
+                case ast.Call(
+                    func=ast.Attribute(value=ast.Name(id="pytest"), attr="fixture"),
+                    keywords=[ast.keyword(arg="params", value=ast.List())],
+                ):
+                    elts = dec.keywords[0].value.elts
+                    for i, elt in enumerate(elts):
+                        elts[i] = self._convert_if_literal_string(elt)
+
+    def _convert_if_literal_string(self, node: ast.AST) -> ast.AST:
+        value: str
+        match node:
+            case ast.Constant(value=str(value)):
+                node.value = self._visit_type_string(value)
+
+        return node
+
+    def _visit_type_string(self, source: str) -> str:
+        # Convert the string to tree, visit, and convert it back to string
+        tree = ast.parse(source, type_comments=False)
+        tree = async_to_sync(tree)
+        rv = unparse(tree)
+        return rv
+
+    def visit_ClassDef(self, node: ast.ClassDef) -> ast.AST:  # noqa: N802
+        self._fix_docstring(node.body)
+        node.name = self.names_map.get(node.name, node.name)
+        node = self._fix_base_params(node)
+        self.generic_visit(node)
+        return node
+
+    def _fix_base_params(self, node: ast.ClassDef) -> ast.AST:
+        # Handle:
+        #   class AsyncCursor(BaseCursor["AsyncConnection[Any]", Row]):
+        # the base cannot be a token, even with __future__ annotation.
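+        # (Illustration: with a names_map entry like AsyncConnection -> Connection,
+        # the literal is parsed, run through async_to_sync(), and unparsed, so the
+        # base above would become BaseCursor["Connection[Any]", Row].)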
+        elts: list[ast.AST]
+        for base in node.bases:
+            match base:
+                case ast.Name():  # handle myClass(AsyncBaseClass) -> myClass(BaseClass)
+                    base.id = self.names_map.get(base.id, base.id)
+                case ast.Subscript(slice=ast.Tuple(elts=elts)):
+                    for i, elt in enumerate(elts):
+                        elts[i] = self._convert_if_literal_string(elt)
+                case ast.Subscript(slice=ast.Constant()):
+                    base.slice = self._convert_if_literal_string(base.slice)
+
+        return node
+
+    def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.AST | None:  # noqa: N802
+        if node.module:
+            # Remove imports of async utils eclipsing builtins
+            if skips := self._skip_imports.get(node.module):
+                node.names = [n for n in node.names if n.name not in skips]
+                if not node.names:
+                    return None
+
+            # Handle any custom module mappings: key_value.aio.utils -> key_value.sync.utils
+            node.module = self.names_map.get(node.module) or node.module
+            for find_module, replace_module in self.module_map.items():
+                if node.module.startswith(find_module):
+                    node.module = node.module.replace(find_module, replace_module)
+                    break
+
+        # Handle any import names: from ... import AsyncKeyValue -> from ... import KeyValue
+        for n in node.names:
+            n.name = self.names_map.get(n.name, n.name)
+
+            # Handle any import aliases:
+            # from: ... import ... as AsyncThing
+            # to:   ... import ... as Thing
+            if n.asname:
+                n.asname = self.names_map.get(n.asname, n.asname)
+
+        return node
+
+    def visit_Name(self, node: ast.Name) -> ast.AST:  # noqa: N802
+        if node.id in self.names_map:
+            node.id = self.names_map[node.id]
+        return node
+
+    def visit_Attribute(self, node: ast.Attribute) -> ast.AST:  # noqa: N802
+        if node.attr in self.names_map:
+            node.attr = self.names_map[node.attr]
+        self.generic_visit(node)
+        return node
+
+    def visit_Subscript(self, node: ast.Subscript) -> ast.AST:  # noqa: N802
+        # Manage AsyncGenerator[X, Y] -> Generator[X, Y, Y]
+        # (the send type is duplicated as the return type; in practice it is None)
+        self._manage_async_generator(node)
+        # # Won't result in a recursion because we change the args number
+        # self.visit(node)
+        # return node
+
+        self.generic_visit(node)
+        return node
+
+    def _manage_async_generator(self, node: ast.Subscript) -> ast.AST | None:
+        match node:
+            case ast.Subscript(value=ast.Name(id="AsyncGenerator"), slice=ast.Tuple(elts=[_, _])):
+                node.slice.elts.insert(1, deepcopy(node.slice.elts[1]))
+                self.generic_visit(node)
+                return node
+        return None
+
+
+class BlanksInserter(ast.NodeTransformer):  # type: ignore
+    """
+    Restore the missing spaces in the source (or something similar)
+    """
+
+    def generic_visit(self, node: ast.AST) -> ast.AST:
+        if isinstance(getattr(node, "body", None), list):
+            node.body = self._inject_blanks(node.body)
+        super().generic_visit(node)
+        return node
+
+    def _inject_blanks(self, body: list[ast.AST]) -> list[ast.AST]:
+        if not body:
+            return body
+
+        new_body = []
+        before = body[0]
+        new_body.append(before)
+        for i in range(1, len(body)):
+            after = body[i]
+            nblanks = after.lineno - before.end_lineno - 1
+            if nblanks > 0:
+                # Inserting one blank is enough.
+                blank = ast.Comment(
+                    value="",
+                    inline=False,
+                    lineno=before.end_lineno + 1,
+                    end_lineno=before.end_lineno + 1,
+                    col_offset=0,
+                    end_col_offset=0,
+                )
+                new_body.append(blank)
+            new_body.append(after)
+            before = after
+
+        return new_body
+
+
+def unparse(tree: ast.AST) -> str:
+    rv: str = Unparser().visit(tree)
+    rv = _fix_comment_on_decorators(rv)
+    return rv
+
+
+def _fix_comment_on_decorators(source: str) -> str:
+    """
+    Re-associate comments to decorators.
+
+    In a case like:
+
+    1   @deco  # comment
+    2   def func(x):
+    3       pass
+
+    it seems that Function lineno is 2 instead of 1 (Python 3.10). Because
+    the Comment lineno is 1, it ends up printed above the function, instead
+    of inline. This is a problem for '# type: ignore' comments.
+
+    Maybe the problem could be fixed in the tree, but this solution is a
+    simpler way to start.
+    """
+    lines = source.splitlines()
+
+    comment_at = None
+    for i, line in enumerate(lines):
+        if line.lstrip().startswith("#"):
+            comment_at = i
+        elif not line.strip():
+            pass
+        elif line.lstrip().startswith("@classmethod"):
+            if comment_at is not None:
+                lines[i] = lines[i] + "  " + lines[comment_at].lstrip()
+                lines[comment_at] = ""
+        else:
+            comment_at = None
+
+    return "\n".join(lines)
+
+
+class Unparser(ast._Unparser):  # type: ignore
+    """
+    Try to emit long strings as multiline.
+
+    The normal class only tries to emit docstrings as multiline,
+    but the resulting source doesn't pass flake8.
+    """
+
+    # Beware: private method. Tested in Python 3.10 and 3.11.
+    def _write_constant(self, value: Any) -> None:
+        if isinstance(value, str) and len(value) > 50:
+            self._write_str_avoiding_backslashes(value)
+        else:
+            super()._write_constant(value)
+
+
+def swap_key_value_aio_to_key_value_sync(path: Path) -> Path:
+    """Swap key-value-aio to key-value-sync in each path part."""
+    return Path(*[part.replace("key-value-aio", "key-value-sync") for part in path.parts])
+
+
+def swap_aio_to_sync(path: Path) -> Path:
+    """Swap aio to sync in each path part."""
+    return Path(*[part.replace("aio", "sync") for part in path.parts])
+
+
+def swap_sync_to_sync_code_gen(path: Path) -> Path:
+    """Insert a code_gen segment after the sync path segment."""
+    new_parts: list[str] = []
+    for part in path.parts:
+        if part == "sync":
+            new_parts.extend(["sync", "code_gen"])
+        else:
+            new_parts.append(part)
+    return Path(*new_parts)
+
+
+def get_sync_path_for_file(path: Path) -> Path:
+    """Use the current file structure to redirect to the sync tree.
+
+    e.g.
+    this:    key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py
+    to this: key-value/key-value-sync/src/key_value/sync/stores/elasticsearch/store.py
+    """
+    path = swap_key_value_aio_to_key_value_sync(path)
+    return swap_aio_to_sync(path)
+
+
+def get_sync_codegen_path_for_file(path: Path) -> Path:
+    """Use the current file structure to redirect to the sync tree.
+
+    e.g.
this:    key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py
+    to this: key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py
+
+    If the file is an __init__.py, we don't want to put it under code_gen.
+    """
+    path = swap_key_value_aio_to_key_value_sync(path)
+    path = swap_aio_to_sync(path)
+    return swap_sync_to_sync_code_gen(path)
+
+
+@dataclass
+class FileToConvert:
+    input_path: Path
+    output_path: list[Path]
+
+    def __post_init__(self) -> None:
+        if not self.input_path.exists() or not self.input_path.is_file():
+            msg = f"Input path {self.input_path} does not exist or is not a file"
+            raise ValueError(msg)
+
+
+@dataclass
+class ConversionOptions:
+    files_to_convert: list[FileToConvert] = field(default_factory=list)
+    paths_to_lint: list[Path] = field(default_factory=list)
+    log_level: Literal["INFO", "DEBUG"] = field(default="INFO")
+    python_version: str = field(default=PYVER)
+    wipe_code_gen_dir: bool = field(default=False)
+
+
+def get_python_version() -> str:
+    """Get the running Python version as "major.minor"."""
+    return ".".join(map(str, sys.version_info[:2]))
+
+
+def check_python_version(version: str) -> bool:
+    """Check that the running Python version matches the expected one."""
+    return get_python_version() == version
+
+
+def get_inputs_from_opt(opt: Namespace) -> list[Path]:
+    """Type-safe way to get the inputs from the options."""
+    return opt.inputs  # pyright: ignore[reportAny]
+
+
+def path_is_excluded(path: Path, exclude_files: list[Path], exclude_directories: list[Path]) -> bool:
+    """Check if the path is excluded."""
+    resolved_path = path.resolve()
+    for exclude_file in exclude_files:
+        if resolved_path == exclude_file.resolve():
+            return True
+
+    for exclude_directory in exclude_directories:  # noqa: SIM110
+        if resolved_path.is_relative_to(exclude_directory.resolve()):
+            return True
+
+    return False
+
+
+def parse_cmdline() -> ConversionOptions:
+    parser = ArgumentParser(description=__doc__, formatter_class=RawDescriptionHelpFormatter)
+    _ = parser.add_argument(
+        "inputs",
+        metavar="FILE",
+        nargs="*",
+        type=Path,
+        help="the files to process (process all files if not specified)",
+    )
+
+    opt: Namespace = parser.parse_args()
+
+    files_to_convert: list[FileToConvert] = []
+
+    if opt.inputs:
+        files_to_convert = [FileToConvert(input_path=Path(input_path).resolve(), output_path=[]) for input_path in opt.inputs]
+        return ConversionOptions(files_to_convert=files_to_convert, paths_to_lint=PATHS_TO_LINT, wipe_code_gen_dir=False)
+
+    excluded_paths: list[Path] = [Path(excluded_file).resolve() for excluded_file in EXCLUDE_FILES]
+    excluded_directories: list[Path] = [Path(excluded_directory).resolve() for excluded_directory in EXCLUDE_DIRECTORIES]
+
+    for file_path in ASYNC_PROJECT_MODULE_DIR.rglob(pattern="*.py"):
+        if path_is_excluded(path=file_path, exclude_files=excluded_paths, exclude_directories=excluded_directories):
+            continue
+
+        output_paths: list[Path] = []
+        # Convert
+        # From: key-value-aio/src/key_value/aio/**.py
+        # To:   key-value-sync/src/key_value/sync/code_gen/**.py
+        relative_path = file_path.relative_to(ASYNC_PROJECT_MODULE_DIR)
+        output_paths.append(SYNC_PROJECT_CODE_GEN_DIR / relative_path)
+        if file_path.name == "__init__.py":
+            # We also want to copy the __init__.py files to match the structure of the aio package
+            # From: key-value-aio/src/key_value/aio/**/__init__.py
+            # To:   key-value-sync/src/key_value/sync/**/__init__.py
+            output_paths.append(SYNC_PROJECT_MODULE_DIR / relative_path)
+
+        files_to_convert.append(FileToConvert(input_path=file_path, 
output_path=output_paths)) + + for file_path in ASYNC_PROJECT_TESTS_DIR.rglob(pattern="*.py"): + if path_is_excluded(path=file_path, exclude_files=excluded_paths, exclude_directories=excluded_directories): + continue + + # Convert + # From: key-value-aio/tests/**.py + # To: key-value-sync/tests/code_gen/**.py + relative_path = file_path.relative_to(ASYNC_PROJECT_TESTS_DIR) + output_path: Path = SYNC_PROJECT_TESTS_CODE_GEN_DIR / relative_path + files_to_convert.append(FileToConvert(input_path=file_path, output_path=[output_path])) + + return ConversionOptions(files_to_convert=files_to_convert, paths_to_lint=PATHS_TO_LINT, wipe_code_gen_dir=True) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/uv.lock b/uv.lock index 07569bda..c526e0ef 100644 --- a/uv.lock +++ b/uv.lock @@ -10,6 +10,7 @@ resolution-markers = [ members = [ "py-key-value", "py-key-value-aio", + "py-key-value-sync", ] [[package]] @@ -231,6 +232,88 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.3" @@ -945,6 +1028,108 @@ dev = [ ] lint = [{ 
name = "ruff" }] +[[package]] +name = "py-key-value-sync" +version = "0.2.0" +source = { editable = "key-value/key-value-sync" } +dependencies = [ + { name = "py-key-value-aio" }, +] + +[package.optional-dependencies] +disk = [ + { name = "diskcache" }, + { name = "pathvalidate" }, +] +elasticsearch = [ + { name = "aiohttp" }, + { name = "elasticsearch" }, +] +memcached = [ + { name = "aiomcache" }, +] +memory = [ + { name = "cachetools" }, +] +mongodb = [ + { name = "pymongo" }, +] +pydantic = [ + { name = "pydantic" }, +] +redis = [ + { name = "redis" }, +] +valkey = [ + { name = "valkey-glide-sync" }, +] + +[package.dev-dependencies] +dev = [ + { name = "ast-comments" }, + { name = "basedpyright" }, + { name = "dirty-equals" }, + { name = "diskcache-stubs" }, + { name = "inline-snapshot" }, + { name = "py-key-value-sync", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] }, + { name = "py-key-value-sync", extra = ["valkey"], marker = "sys_platform != 'win32'" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-dotenv" }, + { name = "pytest-mock" }, + { name = "pytest-redis" }, + { name = "pytest-timeout" }, + { name = "ruff" }, +] +lint = [ + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", marker = "extra == 'elasticsearch'", specifier = ">=3.12" }, + { name = "aiomcache", marker = "extra == 'memcached'", specifier = ">=0.8.0" }, + { name = "cachetools", marker = "extra == 'memory'", specifier = ">=6.0.0" }, + { name = "diskcache", marker = "extra == 'disk'", specifier = ">=5.6.0" }, + { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, + { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, + { name = "py-key-value-aio", editable = "key-value/key-value-aio" }, + { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, + { name = "pymongo", marker = "extra == 'mongodb'", specifier = ">=4.15.0" }, + { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, + { name = "valkey-glide-sync", marker = "extra == 'valkey'", specifier = ">=2.1.0" }, +] +provides-extras = ["memory", "disk", "redis", "mongodb", "valkey", "memcached", "elasticsearch", "pydantic"] + +[package.metadata.requires-dev] +dev = [ + { name = "ast-comments", specifier = ">=1.2.3" }, + { name = "basedpyright", specifier = ">=1.31.5" }, + { name = "dirty-equals", specifier = ">=0.10.0" }, + { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, + { name = "inline-snapshot", specifier = ">=0.29.0" }, + { name = "py-key-value-sync", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] }, + { name = "py-key-value-sync", extras = ["pydantic"] }, + { name = "py-key-value-sync", extras = ["valkey"], marker = "sys_platform != 'win32'" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-dotenv", specifier = ">=0.5.2" }, + { name = "pytest-mock" }, + { name = "pytest-redis", specifier = ">=3.1.3" }, + { name = "pytest-timeout", specifier = ">=2.4.0" }, + { name = "ruff" }, +] +lint = [{ name = "ruff" }] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" 
} +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + [[package]] name = "pydantic" version = "2.11.9" @@ -1434,6 +1619,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/ea/9833c8e95b02dcd2ff31e0f688e34d5dc8416b8ffd08d8b7ca068a1983d8/valkey_glide-2.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717f6ae2bbc6aefe7c68a6961a24ac024ca790ba1e048e252bbe661e4d7efe98", size = 5135392, upload-time = "2025-09-17T14:58:53.606Z" }, ] +[[package]] +name = "valkey-glide-sync" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "protobuf" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/a3/2ea00e43e864fba51ef9aaa5c57200879e6ec1cde1f8c5f4dbf961ccd539/valkey_glide_sync-2.1.0.tar.gz", hash = "sha256:54ceef782c5022e42e1d6c0ef8acbff2a0575cbe9f9e90d7f009914b1080fa45", size = 557595, upload-time = "2025-09-17T15:00:15.132Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/83/e595385dd9868a466c42a8185989beb9605ce8978d1f6f1cad377043b9ef/valkey_glide_sync-2.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:0dc6cd16d2c735478ff6b2c7992af9184a21f17e605737ac1b473e8fe72de160", size = 3752114, upload-time = "2025-09-17T14:59:23.495Z" }, + { url = "https://files.pythonhosted.org/packages/8a/91/238482f79cafd16f39218704bda6fc69faec8d4f4c2da6334a42ec406b8c/valkey_glide_sync-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f4123d19245f8343c0e10801ff198dd9eeea3e525abda9ab36269bf551465de", size = 3479471, upload-time = "2025-09-17T14:59:25.279Z" }, + { url = "https://files.pythonhosted.org/packages/27/61/ae7fae8a6959b9d6bf0f50b2e4dd28f929beae6edf5f546ede1b48a8d870/valkey_glide_sync-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8075dd369dcf2ded79f82be3541cc06a09ccb804c844a72f6bbeac45694a125d", size = 3579331, upload-time = "2025-09-17T14:59:27.054Z" }, + { url = "https://files.pythonhosted.org/packages/87/92/b4297214e061541ca2422258d32bc111c4028ea9a092c231acb3699b050f/valkey_glide_sync-2.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:fef25d2ff0a2d2308f1545d268eea493533683cf6e3b3131bb73dfa77591e4ef", size = 3882363, upload-time = "2025-09-17T14:59:28.476Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4d/e6ad56ca96365671777e2253bae0a5714e6f656b19e3258e7b3097a5a567/valkey_glide_sync-2.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:fd62978c414fc74ad31a560f6e6c7fbc7e2f63d58a63f7a9e6ed338ca02abfd4", size = 3752114, upload-time = "2025-09-17T14:59:29.792Z" }, + { url = "https://files.pythonhosted.org/packages/ee/a7/6f58c84b8bf725fdad6dddfe7f4cee21636988c2d90e7abba45f61f14784/valkey_glide_sync-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c79924f2dcb685e54e0b856f337a3bddc076aa604caf151c6c8b6ffeb1f3cb0a", size = 3479470, upload-time = "2025-09-17T14:59:31.14Z" }, + { url = "https://files.pythonhosted.org/packages/24/ea/d97b71f42ce07aa4bb06e541cca0d55fba463cacdc45abe9f4287db80643/valkey_glide_sync-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd933427c2285302387146e819ea17f521c4a503db1911a0adfba9459c73fc43", size = 3579331, upload-time = 
"2025-09-17T14:59:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/43/9f/19281474fc814aa4a46c49f17fe97209e6c27984a37a0bcbd130028d7b44/valkey_glide_sync-2.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9838c1eccb3851a1a5af1e41c8806dc93c9a684486c926b47b2fdc72c72a767b", size = 3882364, upload-time = "2025-09-17T14:59:33.778Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3e/a7f9469c2c984839e72515acc861c6cc0f7cad1a9b7e4710cd6d3da8454b/valkey_glide_sync-2.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0dc1dc7b70e47602382466ec430eb6e560f7f8a28bc3bfe9ec5aae34edaaa53f", size = 3752115, upload-time = "2025-09-17T14:59:35.156Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2d/ec8dae655732267ece2b9fc7c80df9bc84d05022b425c0f86dcdbe79f8a9/valkey_glide_sync-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fbd22b9a94cb082fdda024f59248a932253c1cfe3d86e8d88ea27eb456d41d5", size = 3479471, upload-time = "2025-09-17T14:59:36.503Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c6/965d7a877a69d97284449da6d0464eef16bad46d324d13a6f683e705f871/valkey_glide_sync-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e09356f3f8f6be1911c60a724ac4fa390dd2a182fd59a01bd9323ae29d2fb4c", size = 3579332, upload-time = "2025-09-17T14:59:38.629Z" }, + { url = "https://files.pythonhosted.org/packages/ab/44/38455df16ec1b0d9aa178f58e6bf608597ca7e5d22a67b2a63bd199076fb/valkey_glide_sync-2.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:219ee418c8952e2f83601b39ab4a984da91c55331e241c75c41bfbc445800dd2", size = 3882364, upload-time = "2025-09-17T14:59:40.341Z" }, + { url = "https://files.pythonhosted.org/packages/12/c9/24aa888f3a2d2791e0a13c81f6f65173e598508c58a0b2c8031700985435/valkey_glide_sync-2.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1f0661486cfa88e89a7b571b354a221056956e0c63f5083791dc61728d8bc55c", size = 3752114, upload-time = "2025-09-17T14:59:42.088Z" }, + { url = "https://files.pythonhosted.org/packages/30/14/a6be66a19a5b3b5217b4df336739960510e76e2e21dad73bef94c9fda20e/valkey_glide_sync-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:057f0e76fa86cc9b7bdcfb9e0695acf322f8990885ee3041d894f2a933d9de7b", size = 3479470, upload-time = "2025-09-17T14:59:43.706Z" }, + { url = "https://files.pythonhosted.org/packages/d6/cc/11b694a2030376fd31bd8ea8d4b3e47cb99f22a2f956835818904c6edd7e/valkey_glide_sync-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea22f033a82f3127ba10591165266d169843604943610dd751b71c0ee9f7d94e", size = 3579333, upload-time = "2025-09-17T14:59:45.462Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8b/2d0ff9891bce4e7f71da5369cfd157063988b1c5b9856de858ce71473edd/valkey_glide_sync-2.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9680a9da38faec149db11f8978574c5884db831d66fed17b60565cf535be23d1", size = 3882363, upload-time = "2025-09-17T14:59:46.926Z" }, + { url = "https://files.pythonhosted.org/packages/80/51/e6af84741a8cfb8a338405ebb507b098bf1f2b8dd110e85df2aa0d0c7336/valkey_glide_sync-2.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:253305311ead9aa8fa59fcb14279fecfcafb89d06e2837d307af6c7f6f4cd7aa", size = 3752456, upload-time = "2025-09-17T14:59:54.865Z" }, + { url = "https://files.pythonhosted.org/packages/83/e8/12aa6a3b75814caa6f2abe02a6798780d263c7300f64bfbe30628ed5307e/valkey_glide_sync-2.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:2f62427cf5612c5897ffba295c151bb67bca5c4243ece8945a2e8d4878e22e6f", size = 3479477, upload-time = "2025-09-17T14:59:56.235Z" }, + { url = "https://files.pythonhosted.org/packages/4c/12/3f002aef8e6cb45cda56875c89428f4f46bd32b37f4dc70b728b282b19e2/valkey_glide_sync-2.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b8396c1f93a54869c44f1dcd5a0a6f0fd6425d5a795b00d77784b3463e3de5f", size = 3579337, upload-time = "2025-09-17T14:59:57.641Z" }, + { url = "https://files.pythonhosted.org/packages/ea/e9/47ec2b1699e2a69a17bc06b111922f959f4730c6c583928422b8146ce1dd/valkey_glide_sync-2.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afbe77e3ea49b0b0e6e5ae29bd8f44ce8b185b72015da0355bbc7b27756eb6fd", size = 3882369, upload-time = "2025-09-17T14:59:59.411Z" }, + { url = "https://files.pythonhosted.org/packages/d8/dc/c561c5a58629f93ce113b2ae8d830f684be913e99e1aec256dd662ef1d4a/valkey_glide_sync-2.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac0c1d68102474d897e0d59706b789cad4e4f8cd87c96e6e680e2fbe63819423", size = 3752453, upload-time = "2025-09-17T15:00:01.686Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f7/67330c5683e29cd93489601edaf823cfae8b8a169e31a779fb8550b0a3ff/valkey_glide_sync-2.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2dbeaf67f252a8951b95a99fe9680897deb8beffc99b81d3459bfe66d4463fcd", size = 3479478, upload-time = "2025-09-17T15:00:03.156Z" }, + { url = "https://files.pythonhosted.org/packages/9a/11/2eff76497855df7d8388e9e575492f34fcb5a946eeea2ce061a1353892b7/valkey_glide_sync-2.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9058b65f3f63ad022d62fcf1fff1c31531c60930a05ae290b51fb2f80c2ed188", size = 3579337, upload-time = "2025-09-17T15:00:04.669Z" }, + { url = "https://files.pythonhosted.org/packages/fd/7d/7aea15d11c59b98b18bc672b789d2305ab269906eb0ff659686824ff57ec/valkey_glide_sync-2.1.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9739a8910099908ab6192e4b25c4a0c703e66bd0134bc3fd0658350946318cf0", size = 3882367, upload-time = "2025-09-17T15:00:06.049Z" }, +] + [[package]] name = "yarl" version = "1.20.1" From 945030772514292b2e56cb6fd28abdaca88faf12 Mon Sep 17 00:00:00 2001 From: William Easton Date: Sun, 28 Sep 2025 17:26:10 -0500 Subject: [PATCH 27/31] Checkpoint progress --- .vscode/settings.json | 9 + key-value/key-value-aio/pyproject.toml | 9 +- .../aio/adapters/pydantic/adapter.py | 2 +- .../aio/adapters/raise_on_missing/adapter.py | 3 +- .../src/key_value/aio/errors/__init__.py | 21 -- .../src/key_value/aio/stores/base.py | 6 +- .../key_value/aio/stores/disk/multi_store.py | 4 +- .../src/key_value/aio/stores/disk/store.py | 4 +- .../aio/stores/elasticsearch/store.py | 20 +- .../key_value/aio/stores/memcached/store.py | 4 +- .../src/key_value/aio/stores/memory/store.py | 4 +- .../src/key_value/aio/stores/mongodb/store.py | 6 +- .../src/key_value/aio/stores/null/store.py | 2 +- .../src/key_value/aio/stores/redis/store.py | 4 +- .../src/key_value/aio/stores/simple/store.py | 6 +- .../src/key_value/aio/stores/valkey/store.py | 4 +- .../wrappers/prefix_collections/wrapper.py | 2 +- .../aio/wrappers/prefix_keys/wrapper.py | 2 +- .../aio/wrappers/single_collection/wrapper.py | 2 +- .../tests/adapters/test_raise.py | 2 +- key-value/key-value-aio/tests/conftest.py | 33 +++ key-value/key-value-aio/tests/stores/base.py | 224 +++++++++++++++ .../key-value-aio/tests/stores/conftest.py | 261 ----------------- 
.../tests/stores/disk/test_disk.py | 2 +- .../tests/stores/disk/test_multi_disk.py | 2 +- .../elasticsearch/test_elasticsearch.py | 25 +- .../tests/stores/memcached/test_memcached.py | 19 +- .../tests/stores/memory/test_memory.py | 2 +- .../tests/stores/mongodb/test_mongodb.py | 15 +- .../tests/stores/redis/test_redis.py | 14 +- .../tests/stores/simple/test_store.py | 2 +- .../tests/stores/valkey/test_valkey.py | 26 +- .../tests/stores/wrappers/test_clamp_ttl.py | 2 +- .../stores/wrappers/test_passthrough_cache.py | 2 +- .../stores/wrappers/test_prefix_collection.py | 2 +- .../tests/stores/wrappers/test_prefix_key.py | 2 +- .../stores/wrappers/test_single_collection.py | 2 +- .../tests/stores/wrappers/test_statistics.py | 2 +- .../tests/utils/test_managed_entry.py | 39 --- key-value/key-value-shared-test/README.md | 1 + .../key-value-shared-test/pyproject.toml | 42 +++ .../src/key_value/shared_test}/__init__.py | 0 .../src/key_value/shared_test/cases.py | 115 ++++++++ key-value/key-value-shared/README.md | 1 + key-value/key-value-shared/pyproject.toml | 63 +++++ .../src/key_value/shared}/__init__.py | 0 .../src/key_value/shared/code_gen/gather.py | 17 ++ .../src/key_value/shared/code_gen/sleep.py} | 6 +- .../src/key_value/shared/errors/__init__.py | 13 + .../src/key_value/shared}/errors/base.py | 0 .../src/key_value/shared}/errors/key_value.py | 2 +- .../src/key_value/shared}/errors/store.py | 2 +- .../src/key_value/shared/stores/wait.py | 25 ++ .../src/key_value/shared/utils/__init__.py | 0 .../src/key_value/shared}/utils/compound.py | 0 .../key_value/shared}/utils/managed_entry.py | 4 +- .../src/key_value/shared}/utils/sanitize.py | 0 .../key_value/shared}/utils/time_to_live.py | 0 .../key-value-shared/tests/utils/__init__.py | 0 .../tests/utils/test_managed_entry.py | 39 +++ .../tests/utils/test_sanitize.py | 2 +- key-value/key-value-sync/pyproject.toml | 9 +- .../src/key_value/sync/code_gen/__init__.py | 0 .../code_gen/adapters/pydantic/adapter.py | 2 +- .../adapters/raise_on_missing/adapter.py | 3 +- .../sync/code_gen/errors/__init__.py | 24 -- .../key_value/sync/code_gen/errors/base.py | 23 -- .../sync/code_gen/errors/key_value.py | 33 --- .../key_value/sync/code_gen/errors/store.py | 16 -- .../sync/code_gen/stores/__init__.py | 0 .../key_value/sync/code_gen/stores/base.py | 6 +- .../sync/code_gen/stores/disk/multi_store.py | 4 +- .../sync/code_gen/stores/disk/store.py | 4 +- .../code_gen/stores/elasticsearch/store.py | 25 +- .../sync/code_gen/stores/memory/store.py | 4 +- .../sync/code_gen/stores/mongodb/store.py | 6 +- .../sync/code_gen/stores/null/store.py | 2 +- .../sync/code_gen/stores/redis/store.py | 4 +- .../sync/code_gen/stores/simple/store.py | 6 +- .../sync/code_gen/stores/valkey/store.py | 4 +- .../key_value/sync/code_gen/utils/acompat.py | 21 -- .../key_value/sync/code_gen/utils/compound.py | 78 ------ .../sync/code_gen/utils/managed_entry.py | 102 ------- .../key_value/sync/code_gen/utils/sanitize.py | 159 ----------- .../sync/code_gen/utils/time_to_live.py | 41 --- .../wrappers/prefix_collections/wrapper.py | 2 +- .../code_gen/wrappers/prefix_keys/wrapper.py | 2 +- .../wrappers/single_collection/wrapper.py | 2 +- .../src/key_value/sync/errors/__init__.py | 24 -- key-value/key-value-sync/tests/__init__.py | 0 .../tests/code_gen/adapters/test_raise.py | 2 +- .../key-value-sync/tests/code_gen/conftest.py | 33 +++ .../tests/code_gen/stores/base.py | 228 +++++++++++++++ .../tests/code_gen/stores/base/__init__.py | 4 - .../tests/code_gen/stores/conftest.py | 262 
------------------ .../tests/code_gen/stores/disk/test_disk.py | 2 +- .../code_gen/stores/disk/test_multi_disk.py | 2 +- .../elasticsearch/test_elasticsearch.py | 13 +- .../code_gen/stores/memory/test_memory.py | 2 +- .../code_gen/stores/mongodb/test_mongodb.py | 7 +- .../tests/code_gen/stores/redis/test_redis.py | 12 +- .../code_gen/stores/simple/test_store.py | 2 +- .../code_gen/stores/valkey/test_valkey.py | 13 +- .../stores/wrappers/test_clamp_ttl.py | 2 +- .../stores/wrappers/test_passthrough_cache.py | 2 +- .../stores/wrappers/test_prefix_collection.py | 2 +- .../stores/wrappers/test_prefix_key.py | 2 +- .../stores/wrappers/test_single_collection.py | 2 +- .../stores/wrappers/test_statistics.py | 2 +- .../tests/code_gen/utils/__init__.py | 4 - .../code_gen/utils/test_managed_entry.py | 30 -- .../tests/code_gen/utils/test_sanitize.py | 82 ------ pyproject.toml | 31 +-- scripts/build_sync_library.py | 3 + uv.lock | 72 ++++- 115 files changed, 1132 insertions(+), 1414 deletions(-) create mode 100644 .vscode/settings.json delete mode 100644 key-value/key-value-aio/src/key_value/aio/errors/__init__.py create mode 100644 key-value/key-value-aio/tests/stores/base.py delete mode 100644 key-value/key-value-aio/tests/utils/test_managed_entry.py create mode 100644 key-value/key-value-shared-test/README.md create mode 100644 key-value/key-value-shared-test/pyproject.toml rename key-value/{key-value-aio/tests/stores/base => key-value-shared-test/src/key_value/shared_test}/__init__.py (100%) create mode 100644 key-value/key-value-shared-test/src/key_value/shared_test/cases.py create mode 100644 key-value/key-value-shared/README.md create mode 100644 key-value/key-value-shared/pyproject.toml rename key-value/{key-value-aio/tests/utils => key-value-shared/src/key_value/shared}/__init__.py (100%) create mode 100644 key-value/key-value-shared/src/key_value/shared/code_gen/gather.py rename key-value/{key-value-aio/src/key_value/aio/utils/acompat.py => key-value-shared/src/key_value/shared/code_gen/sleep.py} (64%) create mode 100644 key-value/key-value-shared/src/key_value/shared/errors/__init__.py rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/errors/base.py (100%) rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/errors/key_value.py (94%) rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/errors/store.py (84%) create mode 100644 key-value/key-value-shared/src/key_value/shared/stores/wait.py create mode 100644 key-value/key-value-shared/src/key_value/shared/utils/__init__.py rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/utils/compound.py (100%) rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/utils/managed_entry.py (95%) rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/utils/sanitize.py (100%) rename key-value/{key-value-aio/src/key_value/aio => key-value-shared/src/key_value/shared}/utils/time_to_live.py (100%) create mode 100644 key-value/key-value-shared/tests/utils/__init__.py create mode 100644 key-value/key-value-shared/tests/utils/test_managed_entry.py rename key-value/{key-value-aio => key-value-shared}/tests/utils/test_sanitize.py (98%) create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/__init__.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py delete mode 100644 
key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py create mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/stores/__init__.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py delete mode 100644 key-value/key-value-sync/src/key_value/sync/errors/__init__.py create mode 100644 key-value/key-value-sync/tests/__init__.py create mode 100644 key-value/key-value-sync/tests/code_gen/stores/base.py delete mode 100644 key-value/key-value-sync/tests/code_gen/stores/base/__init__.py delete mode 100644 key-value/key-value-sync/tests/code_gen/utils/__init__.py delete mode 100644 key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py delete mode 100644 key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..648d3133 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,9 @@ +{ + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.testing.pytestArgs": [ + "key-value", + "--import-mode=importlib", + "-vv", + ] +} \ No newline at end of file diff --git a/key-value/key-value-aio/pyproject.toml b/key-value/key-value-aio/pyproject.toml index 6659456c..a82c61ef 100644 --- a/key-value/key-value-aio/pyproject.toml +++ b/key-value/key-value-aio/pyproject.toml @@ -15,6 +15,7 @@ classifiers = [ "Programming Language :: Python :: 3.13", ] dependencies = [ + "py-key-value-shared>=0.2.0", ] @@ -25,6 +26,10 @@ build-backend = "uv_build" [tool.uv.build-backend] module-name = "key_value.aio" +[tool.uv.sources] +py-key-value-shared = { workspace = true } +py-key-value-shared-test = { workspace = true } + [project.optional-dependencies] memory = ["cachetools>=6.0.0"] disk = ["diskcache>=5.6.0", "pathvalidate>=3.3.1",] @@ -63,6 +68,8 @@ dev = [ "pytest-timeout>=2.4.0", "ast-comments>=1.2.3", "docker>=7.1.0", + "py-key-value-shared-test>=0.2.0", + ] lint = [ "ruff" @@ -72,4 +79,4 @@ lint = [ extend="../../pyproject.toml" [tool.pyright] -extends = "../../pyproject.toml" +extends = "../../pyproject.toml" \ No newline at end of file diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py index 1661d657..2446f704 100644 --- a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py @@ -1,10 +1,10 @@ from collections.abc import Sequence from typing import Any, Generic, TypeVar +from key_value.shared.errors import DeserializationError, SerializationError from pydantic import BaseModel, ValidationError from pydantic_core import PydanticSerializationError -from key_value.aio.errors import DeserializationError, SerializationError from key_value.aio.protocols.key_value import AsyncKeyValue T = TypeVar("T", bound=BaseModel) diff --git 
a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py index 448e7c7a..cc39d86b 100644 --- a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py @@ -1,7 +1,8 @@ from collections.abc import Sequence from typing import Any, Literal, overload -from key_value.aio.errors import MissingKeyError +from key_value.shared.errors import MissingKeyError + from key_value.aio.protocols.key_value import AsyncKeyValue diff --git a/key-value/key-value-aio/src/key_value/aio/errors/__init__.py b/key-value/key-value-aio/src/key_value/aio/errors/__init__.py deleted file mode 100644 index eab3e7ef..00000000 --- a/key-value/key-value-aio/src/key_value/aio/errors/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -from key_value.aio.errors.base import BaseKeyValueError -from key_value.aio.errors.key_value import ( - DeserializationError, - InvalidTTLError, - KeyValueOperationError, - MissingKeyError, - SerializationError, -) -from key_value.aio.errors.store import KeyValueStoreError, StoreConnectionError, StoreSetupError - -__all__ = [ - "BaseKeyValueError", - "DeserializationError", - "InvalidTTLError", - "KeyValueOperationError", - "KeyValueStoreError", - "MissingKeyError", - "SerializationError", - "StoreConnectionError", - "StoreSetupError", -] diff --git a/key-value/key-value-aio/src/key_value/aio/stores/base.py b/key-value/key-value-aio/src/key_value/aio/stores/base.py index 6e93e9b9..d2fb4fd7 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/base.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/base.py @@ -9,9 +9,11 @@ from types import TracebackType from typing import Any +from key_value.shared.errors import InvalidTTLError, StoreSetupError +from key_value.shared.utils.managed_entry import ManagedEntry +from key_value.shared.utils.time_to_live import now from typing_extensions import Self, override -from key_value.aio.errors import InvalidTTLError, StoreSetupError from key_value.aio.protocols.key_value import ( AsyncCullProtocol, AsyncDestroyCollectionProtocol, @@ -20,8 +22,6 @@ AsyncEnumerateKeysProtocol, AsyncKeyValueProtocol, ) -from key_value.aio.utils.managed_entry import ManagedEntry -from key_value.aio.utils.time_to_live import now DEFAULT_COLLECTION_NAME = "default_collection" diff --git a/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py index 1f6522b9..ea3f1a88 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py @@ -3,11 +3,11 @@ from pathlib import Path from typing import overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.aio.stores.base import BaseContextManagerStore, BaseStore -from key_value.aio.utils.compound import compound_key -from key_value.aio.utils.managed_entry import ManagedEntry try: from diskcache import Cache diff --git a/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py index be3a84b6..9ea8c038 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py @@ -2,11 +2,11 @@ 
from pathlib import Path from typing import overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.aio.stores.base import BaseContextManagerStore, BaseStore -from key_value.aio.utils.compound import compound_key -from key_value.aio.utils.managed_entry import ManagedEntry try: from diskcache import Cache diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py index 115ad586..741bf539 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py @@ -1,6 +1,9 @@ import hashlib from typing import TYPE_CHECKING, Any, overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry, load_from_json +from key_value.shared.utils.time_to_live import now_as_epoch, try_parse_datetime_str from typing_extensions import override from key_value.aio.stores.base import ( @@ -11,9 +14,6 @@ BaseEnumerateKeysStore, BaseStore, ) -from key_value.aio.utils.compound import compound_key -from key_value.aio.utils.managed_entry import ManagedEntry, load_from_json -from key_value.aio.utils.time_to_live import now_as_epoch, try_parse_datetime_str try: from elasticsearch import AsyncElasticsearch @@ -71,6 +71,8 @@ class ElasticsearchStore( _client: AsyncElasticsearch + _is_serverless: bool + _index: str @overload @@ -112,6 +114,8 @@ def __init__( raise ValueError(msg) self._index = index or DEFAULT_INDEX + self._is_serverless = False + super().__init__(default_collection=default_collection) @override @@ -119,9 +123,14 @@ async def _setup(self) -> None: if await self._client.options(ignore_status=404).indices.exists(index=self._index): return + cluster_info = await self._client.options(ignore_status=404).info() + + self._is_serverless = cluster_info.get("version", {}).get("build_flavor") == "serverless" + _ = await self._client.options(ignore_status=404).indices.create( index=self._index, mappings=DEFAULT_MAPPING, + settings={}, ) @override @@ -159,6 +168,10 @@ async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry expires_at=expires_at, ) + @property + def _should_refresh_on_put(self) -> bool: + return not self._is_serverless + @override async def _put_managed_entry( self, @@ -184,6 +197,7 @@ async def _put_managed_entry( index=self._index, id=self.sanitize_document_id(key=combo_key), body=document, + refresh=self._should_refresh_on_put, ) @override diff --git a/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py b/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py index 6700acae..a5289a0e 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memcached/store.py @@ -1,11 +1,11 @@ import hashlib from typing import overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.aio.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseStore -from key_value.aio.utils.compound import compound_key -from key_value.aio.utils.managed_entry import ManagedEntry try: from aiomcache import Client diff --git a/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py 
b/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py index b9746a37..b49c5708 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/memory/store.py @@ -3,6 +3,8 @@ from datetime import datetime from typing import Any +from key_value.shared.utils.managed_entry import ManagedEntry +from key_value.shared.utils.time_to_live import epoch_to_datetime from typing_extensions import Self, override from key_value.aio.stores.base import ( @@ -11,8 +13,6 @@ BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, ) -from key_value.aio.utils.managed_entry import ManagedEntry -from key_value.aio.utils.time_to_live import epoch_to_datetime try: from cachetools import TLRUCache diff --git a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py index 4b67db1c..e1a340f5 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py @@ -1,14 +1,14 @@ from datetime import datetime from typing import TYPE_CHECKING, Any, TypedDict, overload +from key_value.shared.utils.managed_entry import ManagedEntry +from key_value.shared.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string +from key_value.shared.utils.time_to_live import now from pymongo.asynchronous.collection import AsyncCollection from pymongo.asynchronous.database import AsyncDatabase from typing_extensions import Self, override from key_value.aio.stores.base import BaseContextManagerStore, BaseDestroyCollectionStore, BaseEnumerateCollectionsStore, BaseStore -from key_value.aio.utils.managed_entry import ManagedEntry -from key_value.aio.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string -from key_value.aio.utils.time_to_live import now if TYPE_CHECKING: from pymongo.results import DeleteResult diff --git a/key-value/key-value-aio/src/key_value/aio/stores/null/store.py b/key-value/key-value-aio/src/key_value/aio/stores/null/store.py index 2f7e1fc5..49eb126f 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/null/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/null/store.py @@ -1,7 +1,7 @@ +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.aio.stores.base import BaseStore -from key_value.aio.utils.managed_entry import ManagedEntry class NullStore(BaseStore): diff --git a/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py b/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py index d9758097..989b1a77 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/redis/store.py @@ -1,11 +1,11 @@ from typing import Any, overload from urllib.parse import urlparse +from key_value.shared.utils.compound import compound_key, get_keys_from_compound_keys +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.aio.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseEnumerateKeysStore, BaseStore -from key_value.aio.utils.compound import compound_key, get_keys_from_compound_keys -from key_value.aio.utils.managed_entry import ManagedEntry try: from redis.asyncio import Redis diff --git a/key-value/key-value-aio/src/key_value/aio/stores/simple/store.py b/key-value/key-value-aio/src/key_value/aio/stores/simple/store.py index 
93a5bf7c..98ce2df9 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/simple/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/simple/store.py @@ -2,6 +2,9 @@ from dataclasses import dataclass from datetime import datetime +from key_value.shared.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys +from key_value.shared.utils.managed_entry import ManagedEntry, load_from_json +from key_value.shared.utils.time_to_live import seconds_to from typing_extensions import override from key_value.aio.stores.base import ( @@ -10,9 +13,6 @@ BaseEnumerateKeysStore, BaseStore, ) -from key_value.aio.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys -from key_value.aio.utils.managed_entry import ManagedEntry, load_from_json -from key_value.aio.utils.time_to_live import seconds_to DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 10000 diff --git a/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py b/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py index 0cc76641..372be31f 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/valkey/store.py @@ -3,11 +3,11 @@ from glide.glide_client import BaseClient from glide_shared.commands.core_options import ExpirySet, ExpiryType from glide_shared.config import GlideClientConfiguration, NodeAddress, ServerCredentials +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.aio.stores.base import BaseContextManagerStore, BaseStore -from key_value.aio.utils.compound import compound_key -from key_value.aio.utils.managed_entry import ManagedEntry try: # Use redis-py asyncio client to communicate with a Valkey server (protocol compatible) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py index cc2d155f..046437a9 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py @@ -1,11 +1,11 @@ from collections.abc import Sequence from typing import Any +from key_value.shared.utils.compound import prefix_collection, unprefix_collection from typing_extensions import override from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME -from key_value.aio.utils.compound import prefix_collection, unprefix_collection from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py index a656642c..98bf4343 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py @@ -1,10 +1,10 @@ from collections.abc import Sequence from typing import Any +from key_value.shared.utils.compound import prefix_key, unprefix_key from typing_extensions import override from key_value.aio.protocols.key_value import AsyncKeyValue -from key_value.aio.utils.compound import prefix_key, unprefix_key from key_value.aio.wrappers.base import BaseWrapper diff --git 
a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py index 5f351546..397f0c7d 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py @@ -1,11 +1,11 @@ from collections.abc import Sequence from typing import Any +from key_value.shared.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key from typing_extensions import override from key_value.aio.protocols.key_value import AsyncKeyValue from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME -from key_value.aio.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key from key_value.aio.wrappers.base import BaseWrapper diff --git a/key-value/key-value-aio/tests/adapters/test_raise.py b/key-value/key-value-aio/tests/adapters/test_raise.py index 72110b67..7d7bb798 100644 --- a/key-value/key-value-aio/tests/adapters/test_raise.py +++ b/key-value/key-value-aio/tests/adapters/test_raise.py @@ -1,7 +1,7 @@ import pytest +from key_value.shared.errors import MissingKeyError from key_value.aio.adapters.raise_on_missing import RaiseOnMissingAdapter -from key_value.aio.errors import MissingKeyError from key_value.aio.stores.memory.store import MemoryStore diff --git a/key-value/key-value-aio/tests/conftest.py b/key-value/key-value-aio/tests/conftest.py index fd6f77f1..34a19558 100644 --- a/key-value/key-value-aio/tests/conftest.py +++ b/key-value/key-value-aio/tests/conftest.py @@ -1,5 +1,8 @@ import asyncio import logging +import os +import subprocess +import sys from collections.abc import Callable, Iterator from contextlib import contextmanager @@ -124,3 +127,34 @@ def async_running_in_event_loop() -> bool: def running_in_event_loop() -> bool: return False + + +def detect_docker() -> bool: + try: + result = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 + except Exception: + return False + else: + return result.returncode == 0 + + +def detect_on_ci() -> bool: + return os.getenv("CI", "false") == "true" + + +def detect_on_windows() -> bool: + return os.name == "nt" + + +def detect_on_macos() -> bool: + return sys.platform == "darwin" # os.name is "posix" on macOS, so sys.platform is the reliable check + + +def should_run_docker_tests() -> bool: + if detect_on_ci(): + return all([detect_docker(), not detect_on_windows(), not detect_on_macos()]) + return detect_docker() + + +def should_skip_docker_tests() -> bool: + return not should_run_docker_tests() diff --git a/key-value/key-value-aio/tests/stores/base.py b/key-value/key-value-aio/tests/stores/base.py new file mode 100644 index 00000000..86353406 --- /dev/null +++ b/key-value/key-value-aio/tests/stores/base.py @@ -0,0 +1,224 @@ +import hashlib +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator +from typing import Any + +import pytest +from dirty_equals import IsFloat +from key_value.shared.code_gen.gather import async_gather +from key_value.shared.code_gen.sleep import asleep +from key_value.shared.errors import InvalidTTLError, SerializationError +from key_value.shared_test.cases import SIMPLE_TEST_DATA_ARGNAMES, SIMPLE_TEST_DATA_ARGVALUES, SIMPLE_TEST_DATA_IDS +from pydantic import AnyHttpUrl + +from key_value.aio.stores.base import BaseContextManagerStore, BaseStore +from tests.conftest import async_running_in_event_loop + + +class BaseStoreTests(ABC): + async def eventually_consistent(self) -> None: # noqa: B027 + """Subclasses can
override this to wait for eventually consistent operations.""" + + @pytest.fixture + @abstractmethod + async def store(self) -> BaseStore | AsyncGenerator[BaseStore, None]: ... + + # The first test requires a docker pull, so we only time the actual test + @pytest.mark.timeout(5, func_only=True) + async def test_empty_get(self, store: BaseStore): + """Tests that the get method returns None from an empty store.""" + assert await store.get(collection="test", key="test") is None + + async def test_empty_put(self, store: BaseStore): + """Tests that the put method does not raise an exception when called on a new store.""" + await store.put(collection="test", key="test", value={"test": "test"}) + + async def test_empty_ttl(self, store: BaseStore): + """Tests that the ttl method returns None from an empty store.""" + ttl = await store.ttl(collection="test", key="test") + assert ttl == (None, None) + + async def test_put_serialization_errors(self, store: BaseStore): + """Tests that the put method raises a SerializationError when the value is not JSON-serializable.""" + with pytest.raises(SerializationError): + await store.put(collection="test", key="test", value={"test": AnyHttpUrl("https://test.com")}) + + async def test_get_put_get(self, store: BaseStore): + assert await store.get(collection="test", key="test") is None + await store.put(collection="test", key="test", value={"test": "test"}) + assert await store.get(collection="test", key="test") == {"test": "test"} + + @pytest.mark.parametrize(argnames=SIMPLE_TEST_DATA_ARGNAMES, argvalues=SIMPLE_TEST_DATA_ARGVALUES, ids=SIMPLE_TEST_DATA_IDS) + async def test_get_complex_put_get(self, store: BaseStore, data: dict[str, Any], json: str): # pyright: ignore[reportUnusedParameter] # noqa: ARG002 + await store.put(collection="test", key="test", value=data) + assert await store.get(collection="test", key="test") == data + + async def test_put_many_get(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get(collection="test", key="test") == {"test": "test"} + assert await store.get(collection="test", key="test_2") == {"test": "test_2"} + + async def test_put_many_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + + async def test_put_put_get_many(self, store: BaseStore): + await store.put(collection="test", key="test", value={"test": "test"}) + await store.put(collection="test", key="test_2", value={"test": "test_2"}) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + + async def test_put_put_get_many_missing_one(self, store: BaseStore): + await store.put(collection="test", key="test", value={"test": "test"}) + await store.put(collection="test", key="test_2", value={"test": "test_2"}) + assert await store.get_many(collection="test", keys=["test", "test_2", "test_3"]) == [{"test": "test"}, {"test": "test_2"}, None] + + async def test_put_get_delete_get(self, store: BaseStore): + await store.put(collection="test", key="test", value={"test": "test"}) + assert await store.get(collection="test", key="test") == {"test": "test"} + assert await store.delete(collection="test", key="test") + assert await store.get(collection="test", key="test") is None + + async def
test_put_many_get_get_delete_many_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert await store.delete_many(collection="test", keys=["test", "test_2"]) == 2 + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + async def test_put_many_get_many_delete_many_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert await store.delete_many(collection="test", keys=["test", "test_2"]) == 2 + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + async def test_get_put_get_delete_get(self, store: BaseStore): + """Tests that the get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" + + assert await store.get(collection="test", key="test") is None + + await store.put(collection="test", key="test", value={"test": "test"}) + + assert await store.get(collection="test", key="test") == {"test": "test"} + + assert await store.delete(collection="test", key="test") + + assert await store.get(collection="test", key="test") is None + + async def test_get_put_get_put_delete_get(self, store: BaseStore): + """Tests that the get, put, get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" + await store.put(collection="test", key="test", value={"test": "test"}) + assert await store.get(collection="test", key="test") == {"test": "test"} + + await store.put(collection="test", key="test", value={"test": "test_2"}) + + assert await store.get(collection="test", key="test") == {"test": "test_2"} + assert await store.delete(collection="test", key="test") + assert await store.get(collection="test", key="test") is None + + async def test_put_many_delete_delete_get_many(self, store: BaseStore): + await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert await store.delete(collection="test", key="test") + assert await store.delete(collection="test", key="test_2") + assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + async def test_put_ttl_get_ttl(self, store: BaseStore): + """Tests that the put and get ttl methods work together to store and retrieve a ttl from an empty store.""" + await store.put(collection="test", key="test", value={"test": "test"}, ttl=100) + value, ttl = await store.ttl(collection="test", key="test") + + assert value == {"test": "test"} + assert ttl is not None + assert ttl == IsFloat(approx=100) + + async def test_negative_ttl(self, store: BaseStore): + """Tests that putting a value with a negative ttl raises an InvalidTTLError.""" + with pytest.raises(InvalidTTLError): + await store.put(collection="test", key="test", value={"test": "test"}, ttl=-100) + + @pytest.mark.timeout(10) + async def test_put_expired_get_none(self, store: BaseStore): + """Tests that a key put with a short ttl expires and a subsequent get returns None.""" + await store.put(collection="test_collection", key="test_key",
value={"test": "test"}, ttl=1) + await asleep(seconds=3) + assert await store.get(collection="test_collection", key="test_key") is None + + async def test_long_collection_name(self, store: BaseStore): + """Tests that a long collection name will not raise an error.""" + await store.put(collection="test_collection" * 100, key="test_key", value={"test": "test"}) + assert await store.get(collection="test_collection" * 100, key="test_key") == {"test": "test"} + + async def test_special_characters_in_collection_name(self, store: BaseStore): + """Tests that special characters in the collection name will not raise an error.""" + await store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"}) + assert await store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} + + async def test_long_key_name(self, store: BaseStore): + """Tests that a long key name will not raise an error.""" + await store.put(collection="test_collection", key="test_key" * 100, value={"test": "test"}) + assert await store.get(collection="test_collection", key="test_key" * 100) == {"test": "test"} + + async def test_special_characters_in_key_name(self, store: BaseStore): + """Tests that special characters in the key name will not raise an error.""" + await store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) + assert await store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} + + @pytest.mark.timeout(20) + async def test_not_unbounded(self, store: BaseStore): + """Tests that the store is not unbounded.""" + + for i in range(1000): + value = hashlib.sha256(f"test_{i}".encode()).hexdigest() + await store.put(collection="test_collection", key=f"test_key_{i}", value={"test": value}) + + assert await store.get(collection="test_collection", key="test_key_0") is None + assert await store.get(collection="test_collection", key="test_key_999") is not None + + @pytest.mark.skipif(condition=not async_running_in_event_loop(), reason="Concurrent operations require a running event loop") + async def test_concurrent_operations(self, store: BaseStore): + """Tests that the store can handle concurrent operations.""" + + async def worker(store: BaseStore, worker_id: int): + for i in range(10): + assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None + + await store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}"}) + assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}"} + + await store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}_2"}) + assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}_2"} + + assert await store.delete(collection="test_collection", key=f"test_{worker_id}_{i}") + assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None + + _ = await async_gather(*[worker(store, worker_id) for worker_id in range(5)]) + + @pytest.mark.timeout(15) + async def test_minimum_put_many_get_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + await store.put_many(collection="test_collection", keys=keys, values=values) + assert await store.get_many(collection="test_collection", keys=keys) == values + +
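For orientation, a concrete suite plugs into this harness by overriding the abstract `store` fixture declared above; the following is a minimal sketch against the in-memory backend, where the class name and fixture body are illustrative assumptions rather than part of the patch:

```python
# Minimal sketch: a concrete test class reusing BaseStoreTests.
# MemoryStore is the repo's in-memory backend; the fixture must be named
# `store` so it satisfies the abstract `store` fixture on BaseStoreTests.
import pytest

from key_value.aio.stores.memory.store import MemoryStore
from tests.stores.base import BaseStoreTests


class TestMemoryStore(BaseStoreTests):
    @pytest.fixture
    async def store(self) -> MemoryStore:
        return MemoryStore()
```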
@pytest.mark.timeout(15) + async def test_minimum_put_many_delete_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + await store.put_many(collection="test_collection", keys=keys, values=values) + assert await store.delete_many(collection="test_collection", keys=keys) == 10 + + +class ContextManagerStoreTestMixin: + @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) + async def enter_exit_store( + self, request: pytest.FixtureRequest, store: BaseContextManagerStore + ) -> AsyncGenerator[BaseContextManagerStore, None]: + context_manager = request.param # pyright: ignore[reportAny] + + if context_manager: + async with store: + yield store + else: + yield store + await store.close() diff --git a/key-value/key-value-aio/tests/stores/conftest.py b/key-value/key-value-aio/tests/stores/conftest.py index 9b2faaae..f501d1ce 100644 --- a/key-value/key-value-aio/tests/stores/conftest.py +++ b/key-value/key-value-aio/tests/stores/conftest.py @@ -1,22 +1,8 @@ -import asyncio -import hashlib -import os -import subprocess -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator, Callable from datetime import datetime, timedelta, timezone -from typing import Any import pytest -from dirty_equals import IsFloat -from pydantic import AnyHttpUrl -from key_value.aio.errors import InvalidTTLError, SerializationError -from key_value.aio.stores.base import BaseContextManagerStore, BaseStore from key_value.aio.stores.memory.store import MemoryStore -from key_value.aio.utils.acompat import asleep, sleep -from tests.cases import DICTIONARY_TO_JSON_TEST_CASES_NAMES, OBJECT_TEST_CASES -from tests.conftest import async_running_in_event_loop @pytest.fixture @@ -34,250 +20,3 @@ def now_plus(seconds: int) -> datetime: def is_around(value: float, delta: float = 1) -> bool: return value - delta < value < value + delta - - -def detect_docker() -> bool: - try: - result = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 - except Exception: - return False - else: - return result.returncode == 0 - - -def detect_on_ci() -> bool: - return os.getenv("CI", "false") == "true" - - -def detect_on_windows() -> bool: - return os.name == "nt" - - -def detect_on_macos() -> bool: - return os.name == "darwin" - - -def should_run_docker_tests() -> bool: - if detect_on_ci(): - return all([detect_docker(), not detect_on_windows(), not detect_on_macos()]) - return detect_docker() - - -def should_skip_docker_tests() -> bool: - return not should_run_docker_tests() - - -def wait_for_store(wait_fn: Callable[[], bool], max_time: int = 10) -> bool: - for _ in range(max_time): - if wait_fn(): - return True - sleep(seconds=1) - return False - - -class BaseStoreTests(ABC): - async def eventually_consistent(self) -> None: # noqa: B027 - """Subclasses can override this to wait for eventually consistent operations.""" - - @pytest.fixture - @abstractmethod - async def store(self) -> BaseStore | AsyncGenerator[BaseStore, None]: ... 
- - # The first test requires a docker pull, so we only time the actual test - @pytest.mark.timeout(5, func_only=True) - async def test_empty_get(self, store: BaseStore): - """Tests that the get method returns None from an empty store.""" - assert await store.get(collection="test", key="test") is None - - async def test_empty_put(self, store: BaseStore): - """Tests that the put method does not raise an exception when called on a new store.""" - await store.put(collection="test", key="test", value={"test": "test"}) - - async def test_empty_ttl(self, store: BaseStore): - """Tests that the ttl method returns None from an empty store.""" - assert await store.ttl(collection="test", key="test") == (None, None) - - async def test_put_serialization_errors(self, store: BaseStore): - """Tests that the put method does not raise an exception when called on a new store.""" - with pytest.raises(SerializationError): - await store.put(collection="test", key="test", value={"test": AnyHttpUrl("https://test.com")}) - - async def test_get_put_get(self, store: BaseStore): - assert await store.get(collection="test", key="test") is None - await store.put(collection="test", key="test", value={"test": "test"}) - assert await store.get(collection="test", key="test") == {"test": "test"} - - @pytest.mark.parametrize(argnames="value", argvalues=OBJECT_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) - async def test_get_complex_put_get(self, store: BaseStore, value: dict[str, Any]): - await store.put(collection="test", key="test", value=value) - assert await store.get(collection="test", key="test") == value - - async def test_put_many_get(self, store: BaseStore): - await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert await store.get(collection="test", key="test") == {"test": "test"} - assert await store.get(collection="test", key="test_2") == {"test": "test_2"} - - async def test_put_many_get_many(self, store: BaseStore): - await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - - async def test_put_put_get_many(self, store: BaseStore): - await store.put(collection="test", key="test", value={"test": "test"}) - await store.put(collection="test", key="test_2", value={"test": "test_2"}) - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - - async def test_put_put_get_many_missing_one(self, store: BaseStore): - await store.put(collection="test", key="test", value={"test": "test"}) - await store.put(collection="test", key="test_2", value={"test": "test_2"}) - assert await store.get_many(collection="test", keys=["test", "test_2", "test_3"]) == [{"test": "test"}, {"test": "test_2"}, None] - - async def test_put_get_delete_get(self, store: BaseStore): - await store.put(collection="test", key="test", value={"test": "test"}) - assert await store.get(collection="test", key="test") == {"test": "test"} - assert await store.delete(collection="test", key="test") - assert await store.get(collection="test", key="test") is None - - async def test_put_many_get_get_delete_many_get_many(self, store: BaseStore): - await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": 
"test_2"}] - assert await store.delete_many(collection="test", keys=["test", "test_2"]) == 2 - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] - - async def test_put_many_get_many_delete_many_get_many(self, store: BaseStore): - await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - assert await store.delete_many(collection="test", keys=["test", "test_2"]) == 2 - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] - - async def test_get_put_get_delete_get(self, store: BaseStore): - """Tests that the get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" - - assert await store.get(collection="test", key="test") is None - - await store.put(collection="test", key="test", value={"test": "test"}) - - assert await store.get(collection="test", key="test") == {"test": "test"} - - assert await store.delete(collection="test", key="test") - - assert await store.get(collection="test", key="test") is None - - async def test_get_put_get_put_delete_get(self, store: BaseStore): - """Tests that the get, put, get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" - await store.put(collection="test", key="test", value={"test": "test"}) - assert await store.get(collection="test", key="test") == {"test": "test"} - - await store.put(collection="test", key="test", value={"test": "test_2"}) - - assert await store.get(collection="test", key="test") == {"test": "test_2"} - assert await store.delete(collection="test", key="test") - assert await store.get(collection="test", key="test") is None - - async def test_put_many_delete_delete_get_many(self, store: BaseStore): - await store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - assert await store.delete(collection="test", key="test") - assert await store.delete(collection="test", key="test_2") - assert await store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] - - async def test_put_ttl_get_ttl(self, store: BaseStore): - """Tests that the put and get ttl methods work together to store and retrieve a ttl from an empty store.""" - await store.put(collection="test", key="test", value={"test": "test"}, ttl=100) - value, ttl = await store.ttl(collection="test", key="test") - - assert value == {"test": "test"} - assert ttl is not None - assert ttl == IsFloat(approx=100) - - async def test_negative_ttl(self, store: BaseStore): - """Tests that a negative ttl will return None when getting the key.""" - with pytest.raises(InvalidTTLError): - await store.put(collection="test", key="test", value={"test": "test"}, ttl=-100) - - @pytest.mark.timeout(10) - async def test_put_expired_get_none(self, store: BaseStore): - """Tests that a put call with a negative ttl will return None when getting the key.""" - await store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1) - await asleep(seconds=3) - assert await store.get(collection="test_collection", key="test_key") is None - - async def test_long_collection_name(self, store: BaseStore): - """Tests that a long collection name will not raise an error.""" - await 
store.put(collection="test_collection" * 100, key="test_key", value={"test": "test"}) - assert await store.get(collection="test_collection" * 100, key="test_key") == {"test": "test"} - - async def test_special_characters_in_collection_name(self, store: BaseStore): - """Tests that a special characters in the collection name will not raise an error.""" - await store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"}) - assert await store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} - - async def test_long_key_name(self, store: BaseStore): - """Tests that a long key name will not raise an error.""" - await store.put(collection="test_collection", key="test_key" * 100, value={"test": "test"}) - assert await store.get(collection="test_collection", key="test_key" * 100) == {"test": "test"} - - async def test_special_characters_in_key_name(self, store: BaseStore): - """Tests that a special characters in the key name will not raise an error.""" - await store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) - assert await store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} - - @pytest.mark.timeout(20) - async def test_not_unbounded(self, store: BaseStore): - """Tests that the store is not unbounded.""" - - for i in range(1000): - value = hashlib.sha256(f"test_{i}".encode()).hexdigest() - await store.put(collection="test_collection", key=f"test_key_{i}", value={"test": value}) - - assert await store.get(collection="test_collection", key="test_key_0") is None - assert await store.get(collection="test_collection", key="test_key_999") is not None - - @pytest.mark.skipif(condition=not async_running_in_event_loop(), reason="Cannot run concurrent operations in event loop") - async def test_concurrent_operations(self, store: BaseStore): - """Tests that the store can handle concurrent operations.""" - - async def worker(store: BaseStore, worker_id: int): - for i in range(10): - assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None - - await store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}"}) - assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}"} - - await store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}_2"}) - assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}_2"} - - assert await store.delete(collection="test_collection", key=f"test_{worker_id}_{i}") - assert await store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None - - _ = await asyncio.gather(*[worker(store, worker_id) for worker_id in range(5)]) - - @pytest.mark.timeout(15) - async def test_minimum_put_many_get_many_performance(self, store: BaseStore): - """Tests that the store meets minimum performance requirements.""" - keys = [f"test_{i}" for i in range(10)] - values = [{"test": f"test_{i}"} for i in range(10)] - await store.put_many(collection="test_collection", keys=keys, values=values) - assert await store.get_many(collection="test_collection", keys=keys) == values - - @pytest.mark.timeout(15) - async def test_minimum_put_many_delete_many_performance(self, store: BaseStore): - """Tests that the store meets minimum performance requirements.""" - keys = [f"test_{i}" for i in range(10)] - values = [{"test": f"test_{i}"} for i in range(10)] - await 
store.put_many(collection="test_collection", keys=keys, values=values) - assert await store.delete_many(collection="test_collection", keys=keys) == 10 - - -class ContextManagerStoreTestMixin: - @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) - async def enter_exit_store( - self, request: pytest.FixtureRequest, store: BaseContextManagerStore - ) -> AsyncGenerator[BaseContextManagerStore, None]: - context_manager = request.param # pyright: ignore[reportAny] - - if context_manager: - async with store: - yield store - else: - yield store - await store.close() diff --git a/key-value/key-value-aio/tests/stores/disk/test_disk.py b/key-value/key-value-aio/tests/stores/disk/test_disk.py index b8471bf1..2aaf7ceb 100644 --- a/key-value/key-value-aio/tests/stores/disk/test_disk.py +++ b/key-value/key-value-aio/tests/stores/disk/test_disk.py @@ -5,7 +5,7 @@ from typing_extensions import override from key_value.aio.stores.disk import DiskStore -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin +from tests.stores.base import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 100 * 1024 # 100KB diff --git a/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py b/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py index d838100f..09d226e3 100644 --- a/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py +++ b/key-value/key-value-aio/tests/stores/disk/test_multi_disk.py @@ -6,7 +6,7 @@ from typing_extensions import override from key_value.aio.stores.disk.multi_store import MultiDiskStore -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin +from tests.stores.base import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 100 * 1024 # 100KB diff --git a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py index 7e5052b2..b8d52363 100644 --- a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py @@ -2,13 +2,14 @@ from collections.abc import AsyncGenerator import pytest -from elasticsearch import Elasticsearch +from elasticsearch import AsyncElasticsearch +from key_value.shared.stores.wait import async_wait_for_true from typing_extensions import override from key_value.aio.stores.base import BaseStore from key_value.aio.stores.elasticsearch import ElasticsearchStore from tests.conftest import docker_container -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, wait_for_store +from tests.stores.base import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB ES_HOST = "localhost" @@ -18,14 +19,15 @@ ES_IMAGE = f"docker.elastic.co/elasticsearch/elasticsearch:{ES_VERSION}" -def get_elasticsearch_client() -> Elasticsearch: - return Elasticsearch(hosts=[ES_URL]) +def get_elasticsearch_client() -> AsyncElasticsearch: + return AsyncElasticsearch(hosts=[ES_URL]) -def ping_elasticsearch() -> bool: - es_client: Elasticsearch = get_elasticsearch_client() +async def ping_elasticsearch() -> bool: + es_client: AsyncElasticsearch = get_elasticsearch_client() - return es_client.ping() + async with es_client: + return await es_client.ping() class ElasticsearchFailedToStartError(Exception): @@ -39,7 +41,7 @@ async def setup_elasticsearch(self) -> AsyncGenerator[None, None]: with docker_container( "elasticsearch-test", ES_IMAGE, 
{"9200": 9200}, {"discovery.type": "single-node", "xpack.security.enabled": "false"} ): - if not wait_for_store(wait_fn=ping_elasticsearch, max_time=30): + if not await async_wait_for_true(bool_fn=ping_elasticsearch, tries=30, wait_time=1): msg = "Elasticsearch failed to start" raise ElasticsearchFailedToStartError(msg) @@ -47,10 +49,11 @@ async def setup_elasticsearch(self) -> AsyncGenerator[None, None]: @override @pytest.fixture - async def store(self) -> ElasticsearchStore: + async def store(self) -> AsyncGenerator[ElasticsearchStore, None]: es_client = get_elasticsearch_client() - _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") - return ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") + _ = await es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") + async with ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") as store: + yield store @pytest.mark.skip(reason="Distributed Caches are unbounded") @override diff --git a/key-value/key-value-aio/tests/stores/memcached/test_memcached.py b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py index 38e7358b..52483fa8 100644 --- a/key-value/key-value-aio/tests/stores/memcached/test_memcached.py +++ b/key-value/key-value-aio/tests/stores/memcached/test_memcached.py @@ -1,16 +1,15 @@ -import asyncio import contextlib from collections.abc import AsyncGenerator import pytest from aiomcache import Client +from key_value.shared.stores.wait import async_wait_for_true from typing_extensions import override from key_value.aio.stores.base import BaseStore from key_value.aio.stores.memcached import MemcachedStore -from key_value.aio.utils.acompat import asleep -from tests.conftest import docker_container -from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests +from tests.conftest import docker_container, should_skip_docker_tests +from tests.stores.base import BaseStoreTests, ContextManagerStoreTestMixin # Memcached test configuration MEMCACHED_HOST = "localhost" @@ -33,15 +32,6 @@ async def ping_memcached() -> bool: await client.close() -async def wait_memcached() -> bool: - for _ in range(WAIT_FOR_MEMCACHED_TIMEOUT): - result = await asyncio.wait_for(ping_memcached(), timeout=1) - if result: - return True - await asleep(1) - return False - - class MemcachedFailedToStartError(Exception): pass @@ -50,9 +40,8 @@ class MemcachedFailedToStartError(Exception): class TestMemcachedStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") async def setup_memcached(self) -> AsyncGenerator[None, None]: - await wait_memcached() with docker_container("memcached-test", "memcached:1.6-alpine", {"11211": 11211}): - if not await wait_memcached(): + if not await async_wait_for_true(bool_fn=ping_memcached, tries=30, wait_time=1): msg = "Memcached failed to start" raise MemcachedFailedToStartError(msg) diff --git a/key-value/key-value-aio/tests/stores/memory/test_memory.py b/key-value/key-value-aio/tests/stores/memory/test_memory.py index 810c3a25..ffd21235 100644 --- a/key-value/key-value-aio/tests/stores/memory/test_memory.py +++ b/key-value/key-value-aio/tests/stores/memory/test_memory.py @@ -2,7 +2,7 @@ from typing_extensions import override from key_value.aio.stores.memory.store import MemoryStore -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests class TestMemoryStore(BaseStoreTests): diff --git 
a/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py
index a9e0b5cd..15ec4470 100644
--- a/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py
+++ b/key-value/key-value-aio/tests/stores/mongodb/test_mongodb.py
@@ -4,13 +4,14 @@
 
 import pytest
 from inline_snapshot import snapshot
-from pymongo import MongoClient
+from key_value.shared.stores.wait import async_wait_for_true
+from pymongo import AsyncMongoClient
 from typing_extensions import override
 
 from key_value.aio.stores.base import BaseStore
 from key_value.aio.stores.mongodb import MongoDBStore
-from tests.conftest import docker_container
-from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store
+from tests.conftest import docker_container, should_skip_docker_tests
+from tests.stores.base import BaseStoreTests, ContextManagerStoreTestMixin
 
 # MongoDB test configuration
 MONGODB_HOST = "localhost"
@@ -20,10 +21,10 @@
 WAIT_FOR_MONGODB_TIMEOUT = 30
 
 
-def ping_mongodb() -> bool:
+async def ping_mongodb() -> bool:
     try:
-        client: MongoClient[Any] = MongoClient[Any](host=MONGODB_HOST, port=MONGODB_HOST_PORT)
-        _ = client.list_database_names()
+        client: AsyncMongoClient[Any] = AsyncMongoClient[Any](host=MONGODB_HOST, port=MONGODB_HOST_PORT)
+        _ = await client.list_database_names()
     except Exception:
         return False
 
@@ -39,7 +40,7 @@ class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests):
     @pytest.fixture(autouse=True, scope="session")
     async def setup_mongodb(self) -> AsyncGenerator[None, None]:
         with docker_container("mongodb-test", "mongo:7", {"27017": 27017}):
-            if not wait_for_store(wait_fn=ping_mongodb):
+            if not await async_wait_for_true(bool_fn=ping_mongodb, tries=30, wait_time=1):
                 msg = "MongoDB failed to start"
                 raise MongoDBFailedToStartError(msg)
diff --git a/key-value/key-value-aio/tests/stores/redis/test_redis.py b/key-value/key-value-aio/tests/stores/redis/test_redis.py
index dce86313..e879ffc3 100644
--- a/key-value/key-value-aio/tests/stores/redis/test_redis.py
+++ b/key-value/key-value-aio/tests/stores/redis/test_redis.py
@@ -1,12 +1,14 @@
 from collections.abc import AsyncGenerator
 
 import pytest
+from key_value.shared.stores.wait import async_wait_for_true
+from redis.asyncio.client import Redis
 from typing_extensions import override
 
 from key_value.aio.stores.base import BaseStore
 from key_value.aio.stores.redis import RedisStore
-from tests.conftest import docker_container, docker_stop
-from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store
+from tests.conftest import docker_container, docker_stop, should_skip_docker_tests
+from tests.stores.base import BaseStoreTests, ContextManagerStoreTestMixin
 
 # Redis test configuration
 REDIS_HOST = "localhost"
@@ -16,10 +18,8 @@
 WAIT_FOR_REDIS_TIMEOUT = 30
 
 
-def ping_redis() -> bool:
-    from redis import Redis
-
-    client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
+async def ping_redis() -> bool:
+    client: Redis = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True)
     try:
-        return client.ping()  # pyright: ignore[reportUnknownMemberType, reportAny, reportReturnType]
+        return await client.ping()  # pyright: ignore[reportUnknownMemberType, reportAny]
     except Exception:
@@ -38,7 +38,7 @@ async def setup_redis(self) -> AsyncGenerator[None, None]:
         docker_stop("valkey-test", raise_on_error=False)
 
         with docker_container("redis-test", "redis", {"6379": 6379}):
-            if not wait_for_store(wait_fn=ping_redis):
+            if not await async_wait_for_true(bool_fn=ping_redis, tries=30, wait_time=1):
                 msg = "Redis failed to start"
                 raise RedisFailedToStartError(msg)
diff --git a/key-value/key-value-aio/tests/stores/simple/test_store.py b/key-value/key-value-aio/tests/stores/simple/test_store.py
index e4104986..1ac0341f 100644
--- a/key-value/key-value-aio/tests/stores/simple/test_store.py
+++ b/key-value/key-value-aio/tests/stores/simple/test_store.py
@@ -2,7 +2,7 @@
 from typing_extensions import override
 
 from key_value.aio.stores.simple.store import SimpleStore
-from tests.stores.conftest import BaseStoreTests
+from tests.stores.base import BaseStoreTests
 
 
 class TestSimpleStore(BaseStoreTests):
diff --git a/key-value/key-value-aio/tests/stores/valkey/test_valkey.py b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py
index 3102bc49..411c0f8c 100644
--- a/key-value/key-value-aio/tests/stores/valkey/test_valkey.py
+++ b/key-value/key-value-aio/tests/stores/valkey/test_valkey.py
@@ -1,11 +1,15 @@
 from collections.abc import AsyncGenerator
 
 import pytest
+from key_value.shared.stores.wait import async_wait_for_true
 from typing_extensions import override
 
 from key_value.aio.stores.base import BaseStore
-from tests.conftest import docker_container, docker_stop
-from tests.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, detect_on_windows, should_skip_docker_tests, wait_for_store
+from tests.conftest import detect_on_windows, docker_container, docker_stop, should_skip_docker_tests
+from tests.stores.base import (
+    BaseStoreTests,
+    ContextManagerStoreTestMixin,
+)
 
 # Valkey test configuration
 VALKEY_HOST = "localhost"
@@ -22,19 +26,19 @@ class ValkeyFailedToStartError(Exception):
 @pytest.mark.skipif(should_skip_docker_tests(), reason="Docker is not running")
 @pytest.mark.skipif(detect_on_windows(), reason="Valkey is not supported on Windows")
 class TestValkeyStore(ContextManagerStoreTestMixin, BaseStoreTests):
-    def get_valkey_client(self):
+    async def get_valkey_client(self):
+        from glide.glide_client import GlideClient
         from glide_shared.config import GlideClientConfiguration, NodeAddress
-        from glide_sync.glide_client import GlideClient
 
         client_config: GlideClientConfiguration = GlideClientConfiguration(
             addresses=[NodeAddress(host=VALKEY_HOST, port=VALKEY_PORT)], database_id=VALKEY_DB
         )
-        return GlideClient.create(config=client_config)
+        return await GlideClient.create(config=client_config)
 
-    def ping_valkey(self) -> bool:
+    async def ping_valkey(self) -> bool:
         try:
-            client = self.get_valkey_client()
-            _ = client.ping()
+            client = await self.get_valkey_client()
+            _ = await client.ping()
         except Exception:
             return False
 
@@ -46,7 +50,7 @@ async def setup_valkey(self) -> AsyncGenerator[None, None]:
         docker_stop("redis-test", raise_on_error=False)
 
         with docker_container("valkey-test", "valkey/valkey:latest", {"6379": 6379}):
-            if not wait_for_store(wait_fn=self.ping_valkey):
+            if not await async_wait_for_true(bool_fn=self.ping_valkey, tries=30, wait_time=1):
                 msg = "Valkey failed to start"
                 raise ValkeyFailedToStartError(msg)
@@ -60,8 +64,8 @@ async def store(self, setup_valkey: None):
         store: ValkeyStore = ValkeyStore(host=VALKEY_HOST, port=VALKEY_PORT, db=VALKEY_DB)
 
-        # This is a syncronous client
-        client = self.get_valkey_client()
-        _ = client.flushdb()
+        # Flush the database with the now-asynchronous Glide client
+        client = await self.get_valkey_client()
+        _ = await client.flushdb()
 
         return store
diff --git a/key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py b/key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py
index 374f8dd2..3e059a03 100644
---
a/key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_clamp_ttl.py @@ -4,7 +4,7 @@ from key_value.aio.stores.memory.store import MemoryStore from key_value.aio.wrappers.ttl_clamp import TTLClampWrapper -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests class TestTTLClampWrapper(BaseStoreTests): diff --git a/key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py b/key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py index 6ebb259c..830b6d24 100644 --- a/key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_passthrough_cache.py @@ -7,7 +7,7 @@ from key_value.aio.stores.disk.store import DiskStore from key_value.aio.stores.memory.store import MemoryStore from key_value.aio.wrappers.passthrough_cache import PassthroughCacheWrapper -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests DISK_STORE_SIZE_LIMIT = 100 * 1024 # 100KB diff --git a/key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py index 379f2ba3..b1669ee0 100644 --- a/key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_collection.py @@ -3,7 +3,7 @@ from key_value.aio.stores.memory.store import MemoryStore from key_value.aio.wrappers.prefix_collections import PrefixCollectionsWrapper -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests class TestPrefixCollectionWrapper(BaseStoreTests): diff --git a/key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py index d96a572e..f59c823c 100644 --- a/key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_prefix_key.py @@ -3,7 +3,7 @@ from key_value.aio.stores.memory.store import MemoryStore from key_value.aio.wrappers.prefix_keys import PrefixKeysWrapper -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests class TestPrefixKeyWrapper(BaseStoreTests): diff --git a/key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py b/key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py index f579a1a1..56ab5da7 100644 --- a/key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_single_collection.py @@ -3,7 +3,7 @@ from key_value.aio.stores.memory.store import MemoryStore from key_value.aio.wrappers.single_collection import SingleCollectionWrapper -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests class TestSingleCollectionWrapper(BaseStoreTests): diff --git a/key-value/key-value-aio/tests/stores/wrappers/test_statistics.py b/key-value/key-value-aio/tests/stores/wrappers/test_statistics.py index 818ec182..36b4d976 100644 --- a/key-value/key-value-aio/tests/stores/wrappers/test_statistics.py +++ b/key-value/key-value-aio/tests/stores/wrappers/test_statistics.py @@ -3,7 +3,7 @@ from key_value.aio.stores.memory.store import MemoryStore from key_value.aio.wrappers.statistics import StatisticsWrapper -from tests.stores.conftest import BaseStoreTests +from tests.stores.base import BaseStoreTests class 
TestStatisticsWrapper(BaseStoreTests): diff --git a/key-value/key-value-aio/tests/utils/test_managed_entry.py b/key-value/key-value-aio/tests/utils/test_managed_entry.py deleted file mode 100644 index 5de129c2..00000000 --- a/key-value/key-value-aio/tests/utils/test_managed_entry.py +++ /dev/null @@ -1,39 +0,0 @@ -from datetime import datetime, timezone -from typing import Any - -import pytest - -from key_value.aio.utils.managed_entry import dump_to_json, load_from_json -from tests.cases import DICTIONARY_TO_JSON_TEST_CASES, DICTIONARY_TO_JSON_TEST_CASES_NAMES - -FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) -FIXED_DATETIME_STRING = FIXED_DATETIME.isoformat() - - -@pytest.mark.parametrize( - argnames=("obj", "expected"), - argvalues=DICTIONARY_TO_JSON_TEST_CASES, - ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES, -) -def test_dump_to_json(obj: dict[str, Any], expected: str): - assert dump_to_json(obj) == expected - - -@pytest.mark.parametrize( - argnames=("obj", "expected"), - argvalues=DICTIONARY_TO_JSON_TEST_CASES, - ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES, -) -def test_load_from_json(obj: dict[str, Any], expected: str): - assert load_from_json(expected) == obj - - -@pytest.mark.parametrize( - argnames=("obj", "expected"), - argvalues=DICTIONARY_TO_JSON_TEST_CASES, - ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES, -) -def test_roundtrip_json(obj: dict[str, Any], expected: str): - dumped_json: str = dump_to_json(obj) - assert dumped_json == expected - assert load_from_json(dumped_json) == obj diff --git a/key-value/key-value-shared-test/README.md b/key-value/key-value-shared-test/README.md new file mode 100644 index 00000000..e5ceda05 --- /dev/null +++ b/key-value/key-value-shared-test/README.md @@ -0,0 +1 @@ +Shared data for tests between key-value-aio and key-value-sync \ No newline at end of file diff --git a/key-value/key-value-shared-test/pyproject.toml b/key-value/key-value-shared-test/pyproject.toml new file mode 100644 index 00000000..c99f7c65 --- /dev/null +++ b/key-value/key-value-shared-test/pyproject.toml @@ -0,0 +1,42 @@ +[project] +name = "py-key-value-shared-test" +version = "0.2.0" +description = "Shared Key-Value Test" +readme = "README.md" +requires-python = ">=3.10" +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] + + + +[build-system] +requires = ["uv_build>=0.8.2,<0.9.0"] +build-backend = "uv_build" + +[tool.uv.build-backend] +module-name = "key_value.shared_test" + +[dependency-groups] +dev = [ + "ruff", + "basedpyright>=1.31.5", +] + +[tool.ruff] +extend="../../pyproject.toml" + +[tool.pyright] +extends = "../../pyproject.toml" + +executionEnvironments = [ + { root = "tests", reportPrivateUsage = false, extraPaths = ["src"]}, + { root = "src" } +] \ No newline at end of file diff --git a/key-value/key-value-aio/tests/stores/base/__init__.py b/key-value/key-value-shared-test/src/key_value/shared_test/__init__.py similarity index 100% rename from key-value/key-value-aio/tests/stores/base/__init__.py rename to key-value/key-value-shared-test/src/key_value/shared_test/__init__.py diff --git a/key-value/key-value-shared-test/src/key_value/shared_test/cases.py b/key-value/key-value-shared-test/src/key_value/shared_test/cases.py new file mode 100644 index 
00000000..764261f7 --- /dev/null +++ b/key-value/key-value-shared-test/src/key_value/shared_test/cases.py @@ -0,0 +1,115 @@ +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Any + +FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) +FIXED_TIME = FIXED_DATETIME.time() + + +@dataclass +class Case: + name: str + data: dict[str, Any] + json: str + + +NULL_CASE: Case = Case(name="null", data={"null_key": None}, json='{"null_key": null}') + +BOOL_TRUE_CASE: Case = Case(name="bool-true", data={"bool_true_key": True}, json='{"bool_true_key": true}') +BOOL_FALSE_CASE: Case = Case(name="bool-false", data={"bool_false_key": False}, json='{"bool_false_key": false}') + +INT_CASE: Case = Case(name="int", data={"int_key": 1}, json='{"int_key": 1}') +LARGE_INT_CASE: Case = Case(name="large-int", data={"large_int_key": 1 * 10**18}, json=f'{{"large_int_key": {1 * 10**18}}}') + +FLOAT_CASE: Case = Case(name="float", data={"float_key": 1.0}, json='{"float_key": 1.0}') +LARGE_FLOAT_CASE: Case = Case(name="large-float", data={"large_float_key": 1.0 * 10**63}, json=f'{{"large_float_key": {1.0 * 10**63}}}') + +STRING_CASE: Case = Case(name="string", data={"string_key": "string_value"}, json='{"string_key": "string_value"}') +LARGE_STRING_CASE: Case = Case(name="large-string", data={"large_string_key": "a" * 10000}, json=f'{{"large_string_key": "{"a" * 10000}"}}') + +DICT_CASE_ONE: Case = Case(name="dict-one", data={"dict_key_1": {"nested": "value"}}, json='{"dict_key_1": {"nested": "value"}}') +DICT_CASE_TWO: Case = Case( + name="dict-two", + data={"dict_key_1": {"nested": "value"}, "dict_key_2": {"nested": "value"}}, + json='{"dict_key_1": {"nested": "value"}, "dict_key_2": {"nested": "value"}}', +) +DICT_CASE_THREE: Case = Case( + name="dict-three", + data={"dict_key_1": {"nested": "value"}, "dict_key_2": {"nested": "value"}, "dict_key_3": {"nested": "value"}}, + json='{"dict_key_1": {"nested": "value"}, "dict_key_2": {"nested": "value"}, "dict_key_3": {"nested": "value"}}', +) + +LIST_CASE_ONE: Case = Case(name="list", data={"list_key": [1, 2, 3]}, json='{"list_key": [1, 2, 3]}') +LIST_CASE_TWO: Case = Case( + name="list-two", data={"list_key_1": [1, 2, 3], "list_key_2": [1, 2, 3]}, json='{"list_key_1": [1, 2, 3], "list_key_2": [1, 2, 3]}' +) +LIST_CASE_THREE: Case = Case( + name="list-three", data={"list_key_1": [1, True, 3.0, "string"]}, json='{"list_key_1": [1, true, 3.0, "string"]}' +) + + +TEST_CASE_DATA: list[dict[str, Any]] = [ + case.data + for case in [ + NULL_CASE, + BOOL_TRUE_CASE, + BOOL_FALSE_CASE, + INT_CASE, + LARGE_INT_CASE, + FLOAT_CASE, + LARGE_FLOAT_CASE, + STRING_CASE, + LARGE_STRING_CASE, + DICT_CASE_ONE, + DICT_CASE_TWO, + DICT_CASE_THREE, + LIST_CASE_ONE, + LIST_CASE_TWO, + LIST_CASE_THREE, + ] +] +TEST_CASE_JSON: list[str] = [ + case.json + for case in [ + NULL_CASE, + BOOL_TRUE_CASE, + BOOL_FALSE_CASE, + INT_CASE, + LARGE_INT_CASE, + FLOAT_CASE, + LARGE_FLOAT_CASE, + STRING_CASE, + LARGE_STRING_CASE, + DICT_CASE_ONE, + DICT_CASE_TWO, + DICT_CASE_THREE, + LIST_CASE_ONE, + LIST_CASE_TWO, + LIST_CASE_THREE, + ] +] + +SIMPLE_TEST_DATA_ARGNAMES: tuple[str, str] = ("data", "json") +SIMPLE_TEST_DATA_ARGVALUES: list[tuple[dict[str, Any], str]] = list(zip(TEST_CASE_DATA, TEST_CASE_JSON, strict=True)) +SIMPLE_TEST_DATA_IDS: list[str] = [ + case.name + for case in [ + NULL_CASE, + BOOL_TRUE_CASE, + BOOL_FALSE_CASE, + INT_CASE, + LARGE_INT_CASE, + FLOAT_CASE, + LARGE_FLOAT_CASE, + STRING_CASE, + LARGE_STRING_CASE, + DICT_CASE_ONE, + 
DICT_CASE_TWO,
+        DICT_CASE_THREE,
+        LIST_CASE_ONE,
+        LIST_CASE_TWO,
+        LIST_CASE_THREE,
+    ]
+]
+
+__all__ = ["SIMPLE_TEST_DATA_ARGNAMES", "SIMPLE_TEST_DATA_ARGVALUES", "SIMPLE_TEST_DATA_IDS"]
diff --git a/key-value/key-value-shared/README.md b/key-value/key-value-shared/README.md
new file mode 100644
index 00000000..8c367936
--- /dev/null
+++ b/key-value/key-value-shared/README.md
@@ -0,0 +1 @@
+Shared code between key-value-aio and key-value-sync
\ No newline at end of file
diff --git a/key-value/key-value-shared/pyproject.toml b/key-value/key-value-shared/pyproject.toml
new file mode 100644
index 00000000..61db48d8
--- /dev/null
+++ b/key-value/key-value-shared/pyproject.toml
@@ -0,0 +1,63 @@
+[project]
+name = "py-key-value-shared"
+version = "0.2.0"
+description = "Shared Key-Value"
+readme = "README.md"
+requires-python = ">=3.10"
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: MIT License",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+]
+
+
+
+[build-system]
+requires = ["uv_build>=0.8.2,<0.9.0"]
+build-backend = "uv_build"
+
+[tool.uv.build-backend]
+module-name = "key_value.shared"
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+addopts = ["--inline-snapshot=disable","-vv","-s"]
+markers = [
+    "skip_on_ci: Skip running the test when running on CI",
+]
+timeout = 10
+
+
+env_files = [".env"]
+
+[dependency-groups]
+dev = [
+    "pytest",
+    "pytest-mock",
+    "ruff",
+    "pytest-dotenv>=0.5.2",
+    "dirty-equals>=0.10.0",
+    "inline-snapshot>=0.29.0",
+    "basedpyright>=1.31.5",
+    "pytest-timeout>=2.4.0",
+    "py-key-value-shared-test>=0.2.0",
+]
+
+[tool.uv.sources]
+py-key-value-shared-test = { workspace = true }
+
+[tool.ruff]
+extend="../../pyproject.toml"
+
+[tool.pyright]
+extends = "../../pyproject.toml"
+
+executionEnvironments = [
+    { root = "tests", reportPrivateUsage = false, extraPaths = ["src"]},
+    { root = "src" }
+]
\ No newline at end of file
diff --git a/key-value/key-value-aio/tests/utils/__init__.py b/key-value/key-value-shared/src/key_value/shared/__init__.py
similarity index 100%
rename from key-value/key-value-aio/tests/utils/__init__.py
rename to key-value/key-value-shared/src/key_value/shared/__init__.py
diff --git a/key-value/key-value-shared/src/key_value/shared/code_gen/gather.py b/key-value/key-value-shared/src/key_value/shared/code_gen/gather.py
new file mode 100644
index 00000000..dc159553
--- /dev/null
+++ b/key-value/key-value-shared/src/key_value/shared/code_gen/gather.py
@@ -0,0 +1,17 @@
+import asyncio
+from collections.abc import Awaitable
+from typing import Any
+
+
+async def async_gather(*aws: Awaitable[Any], return_exceptions: bool = False) -> list[Any]:
+    """
+    Equivalent to asyncio.gather(), converted to gather() by async_to_sync.
+    """
+    return await asyncio.gather(*aws, return_exceptions=return_exceptions)
+
+
+def gather(*args: Any, **kwargs: Any) -> tuple[Any, ...]:  # noqa: ARG001 # pyright: ignore[reportUnusedParameter]
+    """
+    Sync replacement for asyncio.gather(): after async_to_sync conversion the arguments are already plain results, so they are returned as-is.
+ """ + return args diff --git a/key-value/key-value-aio/src/key_value/aio/utils/acompat.py b/key-value/key-value-shared/src/key_value/shared/code_gen/sleep.py similarity index 64% rename from key-value/key-value-aio/src/key_value/aio/utils/acompat.py rename to key-value/key-value-shared/src/key_value/shared/code_gen/sleep.py index 72fc2eb5..ec723bee 100644 --- a/key-value/key-value-aio/src/key_value/aio/utils/acompat.py +++ b/key-value/key-value-shared/src/key_value/shared/code_gen/sleep.py @@ -1,14 +1,12 @@ import asyncio import time -from collections.abc import Coroutine -from typing import Any -def asleep(seconds: float) -> Coroutine[Any, Any, None]: +async def asleep(seconds: float) -> None: """ Equivalent to asyncio.sleep(), converted to time.sleep() by async_to_sync. """ - return asyncio.sleep(seconds) + await asyncio.sleep(seconds) def sleep(seconds: float) -> None: diff --git a/key-value/key-value-shared/src/key_value/shared/errors/__init__.py b/key-value/key-value-shared/src/key_value/shared/errors/__init__.py new file mode 100644 index 00000000..8f2e0ea1 --- /dev/null +++ b/key-value/key-value-shared/src/key_value/shared/errors/__init__.py @@ -0,0 +1,13 @@ +from .key_value import DeserializationError, InvalidTTLError, KeyValueOperationError, MissingKeyError, SerializationError +from .store import KeyValueStoreError, StoreConnectionError, StoreSetupError + +__all__ = [ + "DeserializationError", + "InvalidTTLError", + "KeyValueOperationError", + "KeyValueStoreError", + "MissingKeyError", + "SerializationError", + "StoreConnectionError", + "StoreSetupError", +] diff --git a/key-value/key-value-aio/src/key_value/aio/errors/base.py b/key-value/key-value-shared/src/key_value/shared/errors/base.py similarity index 100% rename from key-value/key-value-aio/src/key_value/aio/errors/base.py rename to key-value/key-value-shared/src/key_value/shared/errors/base.py diff --git a/key-value/key-value-aio/src/key_value/aio/errors/key_value.py b/key-value/key-value-shared/src/key_value/shared/errors/key_value.py similarity index 94% rename from key-value/key-value-aio/src/key_value/aio/errors/key_value.py rename to key-value/key-value-shared/src/key_value/shared/errors/key_value.py index e439f47b..d0eec474 100644 --- a/key-value/key-value-aio/src/key_value/aio/errors/key_value.py +++ b/key-value/key-value-shared/src/key_value/shared/errors/key_value.py @@ -1,4 +1,4 @@ -from key_value.aio.errors.base import BaseKeyValueError +from key_value.shared.errors.base import BaseKeyValueError class KeyValueOperationError(BaseKeyValueError): diff --git a/key-value/key-value-aio/src/key_value/aio/errors/store.py b/key-value/key-value-shared/src/key_value/shared/errors/store.py similarity index 84% rename from key-value/key-value-aio/src/key_value/aio/errors/store.py rename to key-value/key-value-shared/src/key_value/shared/errors/store.py index 1772ab05..cac82d74 100644 --- a/key-value/key-value-aio/src/key_value/aio/errors/store.py +++ b/key-value/key-value-shared/src/key_value/shared/errors/store.py @@ -1,4 +1,4 @@ -from key_value.aio.errors.base import BaseKeyValueError +from key_value.shared.errors.base import BaseKeyValueError class KeyValueStoreError(BaseKeyValueError): diff --git a/key-value/key-value-shared/src/key_value/shared/stores/wait.py b/key-value/key-value-shared/src/key_value/shared/stores/wait.py new file mode 100644 index 00000000..fced0a5d --- /dev/null +++ b/key-value/key-value-shared/src/key_value/shared/stores/wait.py @@ -0,0 +1,25 @@ +from collections.abc import Awaitable, Callable + 
+from key_value.shared.code_gen.sleep import asleep, sleep + + +async def async_wait_for_true(bool_fn: Callable[[], Awaitable[bool]], tries: int = 10, wait_time: float = 1) -> bool: + """ + Wait for a store to be ready. + """ + for _ in range(tries): + if await bool_fn(): + return True + await asleep(seconds=wait_time) + return False + + +def wait_for_true(bool_fn: Callable[[], bool], tries: int = 10, wait_time: float = 1) -> bool: + """ + Wait for a store to be ready. + """ + for _ in range(tries): + if bool_fn(): + return True + sleep(seconds=wait_time) + return False diff --git a/key-value/key-value-shared/src/key_value/shared/utils/__init__.py b/key-value/key-value-shared/src/key_value/shared/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-aio/src/key_value/aio/utils/compound.py b/key-value/key-value-shared/src/key_value/shared/utils/compound.py similarity index 100% rename from key-value/key-value-aio/src/key_value/aio/utils/compound.py rename to key-value/key-value-shared/src/key_value/shared/utils/compound.py diff --git a/key-value/key-value-aio/src/key_value/aio/utils/managed_entry.py b/key-value/key-value-shared/src/key_value/shared/utils/managed_entry.py similarity index 95% rename from key-value/key-value-aio/src/key_value/aio/utils/managed_entry.py rename to key-value/key-value-shared/src/key_value/shared/utils/managed_entry.py index 714d76d9..bee322e1 100644 --- a/key-value/key-value-aio/src/key_value/aio/utils/managed_entry.py +++ b/key-value/key-value-shared/src/key_value/shared/utils/managed_entry.py @@ -5,8 +5,8 @@ from typing_extensions import Self -from key_value.aio.errors import DeserializationError, SerializationError -from key_value.aio.utils.time_to_live import now, now_plus, try_parse_datetime_str +from key_value.shared.errors import DeserializationError, SerializationError +from key_value.shared.utils.time_to_live import now, now_plus, try_parse_datetime_str @dataclass(kw_only=True) diff --git a/key-value/key-value-aio/src/key_value/aio/utils/sanitize.py b/key-value/key-value-shared/src/key_value/shared/utils/sanitize.py similarity index 100% rename from key-value/key-value-aio/src/key_value/aio/utils/sanitize.py rename to key-value/key-value-shared/src/key_value/shared/utils/sanitize.py diff --git a/key-value/key-value-aio/src/key_value/aio/utils/time_to_live.py b/key-value/key-value-shared/src/key_value/shared/utils/time_to_live.py similarity index 100% rename from key-value/key-value-aio/src/key_value/aio/utils/time_to_live.py rename to key-value/key-value-shared/src/key_value/shared/utils/time_to_live.py diff --git a/key-value/key-value-shared/tests/utils/__init__.py b/key-value/key-value-shared/tests/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-shared/tests/utils/test_managed_entry.py b/key-value/key-value-shared/tests/utils/test_managed_entry.py new file mode 100644 index 00000000..00304615 --- /dev/null +++ b/key-value/key-value-shared/tests/utils/test_managed_entry.py @@ -0,0 +1,39 @@ +from datetime import datetime, timezone +from typing import Any + +import pytest +from key_value.shared_test.cases import SIMPLE_TEST_DATA_ARGNAMES, SIMPLE_TEST_DATA_ARGVALUES, SIMPLE_TEST_DATA_IDS + +from key_value.shared.utils.managed_entry import dump_to_json, load_from_json + +FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) +FIXED_DATETIME_STRING = FIXED_DATETIME.isoformat() + + +@pytest.mark.parametrize( + argnames=SIMPLE_TEST_DATA_ARGNAMES, + 
argvalues=SIMPLE_TEST_DATA_ARGVALUES, + ids=SIMPLE_TEST_DATA_IDS, +) +def test_dump_to_json(data: dict[str, Any], json: str): + assert dump_to_json(data) == json + + +@pytest.mark.parametrize( + argnames=SIMPLE_TEST_DATA_ARGNAMES, + argvalues=SIMPLE_TEST_DATA_ARGVALUES, + ids=SIMPLE_TEST_DATA_IDS, +) +def test_load_from_json(data: dict[str, Any], json: str): + assert load_from_json(json) == data + + +@pytest.mark.parametrize( + argnames=SIMPLE_TEST_DATA_ARGNAMES, + argvalues=SIMPLE_TEST_DATA_ARGVALUES, + ids=SIMPLE_TEST_DATA_IDS, +) +def test_roundtrip_json(data: dict[str, Any], json: str): + dumped_json: str = dump_to_json(data) + assert dumped_json == json + assert load_from_json(dumped_json) == data diff --git a/key-value/key-value-aio/tests/utils/test_sanitize.py b/key-value/key-value-shared/tests/utils/test_sanitize.py similarity index 98% rename from key-value/key-value-aio/tests/utils/test_sanitize.py rename to key-value/key-value-shared/tests/utils/test_sanitize.py index 86f425e3..f0bad5b4 100644 --- a/key-value/key-value-aio/tests/utils/test_sanitize.py +++ b/key-value/key-value-shared/tests/utils/test_sanitize.py @@ -1,7 +1,7 @@ import pytest from inline_snapshot import snapshot -from key_value.aio.utils.sanitize import ( +from key_value.shared.utils.sanitize import ( ALPHANUMERIC_CHARACTERS, LOWERCASE_ALPHABET, NUMBERS, diff --git a/key-value/key-value-sync/pyproject.toml b/key-value/key-value-sync/pyproject.toml index 209113a9..99fbec58 100644 --- a/key-value/key-value-sync/pyproject.toml +++ b/key-value/key-value-sync/pyproject.toml @@ -15,13 +15,12 @@ classifiers = [ "Programming Language :: Python :: 3.13", ] dependencies = [ - "py-key-value-aio>=0.2.0", + "py-key-value-shared>=0.2.0", ] [tool.uv.sources] -py-key-value-aio = { workspace = true } - - +py-key-value-shared = { workspace = true } +py-key-value-shared-test = { workspace = true } [build-system] requires = ["uv_build>=0.8.2,<0.9.0"] @@ -48,7 +47,6 @@ markers = [ ] timeout = 10 - env_files = [".env"] [dependency-groups] @@ -68,6 +66,7 @@ dev = [ "basedpyright>=1.31.5", "pytest-timeout>=2.4.0", "ast-comments>=1.2.3", + "py-key-value-shared-test>=0.2.0", ] lint = [ "ruff" diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py index 4b1810a0..186113dd 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py @@ -4,10 +4,10 @@ from collections.abc import Sequence from typing import Any, Generic, TypeVar +from key_value.shared.errors import DeserializationError, SerializationError from pydantic import BaseModel, ValidationError from pydantic_core import PydanticSerializationError -from key_value.sync.code_gen.errors import DeserializationError, SerializationError from key_value.sync.code_gen.protocols.key_value import KeyValue T = TypeVar("T", bound=BaseModel) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py index c1424fb8..c04782cb 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py +++ 
b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py @@ -4,7 +4,8 @@ from collections.abc import Sequence from typing import Any, Literal, overload -from key_value.sync.code_gen.errors import MissingKeyError +from key_value.shared.errors import MissingKeyError + from key_value.sync.code_gen.protocols.key_value import KeyValue diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py deleted file mode 100644 index 3ca01799..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file '__init__.py' -# DO NOT CHANGE! Change the original file instead. -from key_value.sync.code_gen.errors.base import BaseKeyValueError -from key_value.sync.code_gen.errors.key_value import ( - DeserializationError, - InvalidTTLError, - KeyValueOperationError, - MissingKeyError, - SerializationError, -) -from key_value.sync.code_gen.errors.store import KeyValueStoreError, StoreConnectionError, StoreSetupError - -__all__ = [ - "BaseKeyValueError", - "DeserializationError", - "InvalidTTLError", - "KeyValueOperationError", - "KeyValueStoreError", - "MissingKeyError", - "SerializationError", - "StoreConnectionError", - "StoreSetupError", -] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py deleted file mode 100644 index 7c7d4ba6..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/base.py +++ /dev/null @@ -1,23 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'base.py' -# DO NOT CHANGE! Change the original file instead. -ExtraInfoType = dict[str, str | int | float | bool | None] - - -class BaseKeyValueError(Exception): - """Base exception for all KV Store Adapter errors.""" - - def __init__(self, message: str | None = None, extra_info: ExtraInfoType | None = None): - message_parts: list[str] = [] - - if message: - message_parts.append(message) - - if extra_info: - extra_info_str = ";".join(f"{k}: {v}" for (k, v) in extra_info.items()) - if message: - extra_info_str = "(" + extra_info_str + ")" - - message_parts.append(extra_info_str) - - super().__init__(": ".join(message_parts)) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py deleted file mode 100644 index 7d14831c..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/key_value.py +++ /dev/null @@ -1,33 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'key_value.py' -# DO NOT CHANGE! Change the original file instead. 
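The generated error modules being deleted here are superseded by the single hierarchy in key_value.shared.errors, so the same exception types now serve both the async and sync packages. A sketch of what calling code looks like after the move (it assumes dump_to_json wraps JSON failures in SerializationError, as the managed_entry imports above suggest):

    from key_value.shared.errors import SerializationError
    from key_value.shared.utils.managed_entry import dump_to_json

    try:
        payload = dump_to_json({"bad": object()})  # object() is not JSON-serializable
    except SerializationError:
        payload = "{}"  # fall back to an empty document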
-from key_value.sync.code_gen.errors.base import BaseKeyValueError - - -class KeyValueOperationError(BaseKeyValueError): - """Base exception for all Key-Value operation errors.""" - - -class SerializationError(KeyValueOperationError): - """Raised when data cannot be serialized for storage.""" - - -class DeserializationError(KeyValueOperationError): - """Raised when stored data cannot be deserialized back to its original form.""" - - -class MissingKeyError(KeyValueOperationError): - """Raised when a key is missing from the store.""" - - def __init__(self, operation: str, collection: str | None = None, key: str | None = None): - super().__init__( - message="A key was requested that was required but not found in the store.", - extra_info={"operation": operation, "collection": collection or "default", "key": key}, - ) - - -class InvalidTTLError(KeyValueOperationError): - """Raised when a TTL is invalid.""" - - def __init__(self, ttl: float): - super().__init__(message="A TTL is invalid.", extra_info={"ttl": ttl}) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py deleted file mode 100644 index df55b09a..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/errors/store.py +++ /dev/null @@ -1,16 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'store.py' -# DO NOT CHANGE! Change the original file instead. -from key_value.sync.code_gen.errors.base import BaseKeyValueError - - -class KeyValueStoreError(BaseKeyValueError): - """Base exception for all Key-Value store errors.""" - - -class StoreSetupError(KeyValueStoreError): - """Raised when a store setup fails.""" - - -class StoreConnectionError(KeyValueStoreError): - """Raised when unable to connect to or communicate with the underlying store.""" diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/__init__.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py index 29b59fdd..9a2b6715 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py @@ -12,9 +12,11 @@ from types import TracebackType from typing import Any +from key_value.shared.errors import InvalidTTLError, StoreSetupError +from key_value.shared.utils.managed_entry import ManagedEntry +from key_value.shared.utils.time_to_live import now from typing_extensions import Self, override -from key_value.sync.code_gen.errors import InvalidTTLError, StoreSetupError from key_value.sync.code_gen.protocols.key_value import ( CullProtocol, DestroyCollectionProtocol, @@ -23,8 +25,6 @@ EnumerateKeysProtocol, KeyValueProtocol, ) -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry -from key_value.sync.code_gen.utils.time_to_live import now DEFAULT_COLLECTION_NAME = "default_collection" diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py index 6571d60d..05aced78 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/multi_store.py @@ -6,11 +6,11 @@ from pathlib 
import Path from typing import overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore -from key_value.sync.code_gen.utils.compound import compound_key -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry try: from diskcache import Cache diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py index 47e5ecbc..a93b161b 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py @@ -5,11 +5,11 @@ from pathlib import Path from typing import overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore -from key_value.sync.code_gen.utils.compound import compound_key -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry try: from diskcache import Cache diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py index 62d6e8f2..17e3482d 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py @@ -4,6 +4,9 @@ import hashlib from typing import TYPE_CHECKING, Any, overload +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry, load_from_json +from key_value.shared.utils.time_to_live import now_as_epoch, try_parse_datetime_str from typing_extensions import override from key_value.sync.code_gen.stores.base import ( @@ -14,12 +17,10 @@ BaseEnumerateKeysStore, BaseStore, ) -from key_value.sync.code_gen.utils.compound import compound_key -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry, load_from_json -from key_value.sync.code_gen.utils.time_to_live import now_as_epoch, try_parse_datetime_str try: from elasticsearch import Elasticsearch + from key_value.sync.code_gen.stores.elasticsearch.utils import ( get_aggregations_from_body, get_body_from_response, @@ -61,6 +62,8 @@ class ElasticsearchStore( _client: Elasticsearch + _is_serverless: bool + _index: str @overload @@ -102,6 +105,8 @@ def __init__( raise ValueError(msg) self._index = index or DEFAULT_INDEX + self._is_serverless = False + super().__init__(default_collection=default_collection) @override @@ -109,7 +114,11 @@ def _setup(self) -> None: if self._client.options(ignore_status=404).indices.exists(index=self._index): return - _ = self._client.options(ignore_status=404).indices.create(index=self._index, mappings=DEFAULT_MAPPING) + cluster_info = self._client.options(ignore_status=404).info() + + self._is_serverless = cluster_info.get("version", {}).get("build_flavor") == "serverless" + + _ = self._client.options(ignore_status=404).indices.create(index=self._index, mappings=DEFAULT_MAPPING, settings={}) @override def _setup_collection(self, *, collection: str) -> None: @@ -140,6 +149,10 @@ def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | Non return 
ManagedEntry(value=load_from_json(value_str), created_at=created_at, expires_at=expires_at) + @property + def _should_refresh_on_put(self) -> bool: + return not self._is_serverless + @override def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: combo_key: str = compound_key(collection=collection, key=key) @@ -151,7 +164,9 @@ def _put_managed_entry(self, *, key: str, collection: str, managed_entry: Manage if managed_entry.expires_at: document["expires_at"] = managed_entry.expires_at.isoformat() - _ = self._client.index(index=self._index, id=self.sanitize_document_id(key=combo_key), body=document) + _ = self._client.index( + index=self._index, id=self.sanitize_document_id(key=combo_key), body=document, refresh=self._should_refresh_on_put + ) @override def _delete_managed_entry(self, *, key: str, collection: str) -> bool: diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py index 7b8c71d3..ce27d003 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/memory/store.py @@ -6,6 +6,8 @@ from datetime import datetime from typing import Any +from key_value.shared.utils.managed_entry import ManagedEntry +from key_value.shared.utils.time_to_live import epoch_to_datetime from typing_extensions import Self, override from key_value.sync.code_gen.stores.base import ( @@ -14,8 +16,6 @@ BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, ) -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry -from key_value.sync.code_gen.utils.time_to_live import epoch_to_datetime try: from cachetools import TLRUCache diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py index c175277f..24921dc9 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py @@ -4,6 +4,9 @@ from datetime import datetime from typing import TYPE_CHECKING, Any, TypedDict, overload +from key_value.shared.utils.managed_entry import ManagedEntry +from key_value.shared.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string +from key_value.shared.utils.time_to_live import now from pymongo.collection import Collection from pymongo.database import Database from typing_extensions import Self, override @@ -14,9 +17,6 @@ BaseEnumerateCollectionsStore, BaseStore, ) -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry -from key_value.sync.code_gen.utils.sanitize import ALPHANUMERIC_CHARACTERS, sanitize_string -from key_value.sync.code_gen.utils.time_to_live import now if TYPE_CHECKING: from pymongo.results import DeleteResult diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py index b6f23ea4..7c262494 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/null/store.py @@ -1,10 +1,10 @@ # WARNING: this file is auto-generated by 'build_sync_library.py' # from the original file 'store.py' # DO NOT CHANGE! Change the original file instead. 
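The serverless detection added to the Elasticsearch store above condenses to the following standalone sketch. The host, index, and document are illustrative; the refresh gating mirrors the patch, which only forces a per-request refresh when the cluster's build flavor is not "serverless":

    from elasticsearch import Elasticsearch

    client = Elasticsearch(hosts=["http://localhost:9200"])  # illustrative host

    # Detect the build flavor once, as _setup does above.
    is_serverless = client.info().get("version", {}).get("build_flavor") == "serverless"

    # Force immediate visibility of the write only where the patch does.
    client.index(
        index="kv-store",
        id="collection::key",
        body={"value": "{}"},
        refresh=not is_serverless,
    )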
+from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseStore -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry class NullStore(BaseStore): diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py index f788209c..07051cc6 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/redis/store.py @@ -4,11 +4,11 @@ from typing import Any, overload from urllib.parse import urlparse +from key_value.shared.utils.compound import compound_key, get_keys_from_compound_keys +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseDestroyStore, BaseEnumerateKeysStore, BaseStore -from key_value.sync.code_gen.utils.compound import compound_key, get_keys_from_compound_keys -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry try: from redis import Redis diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py index af820ae4..d24d1d19 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/simple/store.py @@ -5,12 +5,12 @@ from dataclasses import dataclass from datetime import datetime +from key_value.shared.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys +from key_value.shared.utils.managed_entry import ManagedEntry, load_from_json +from key_value.shared.utils.time_to_live import seconds_to from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseDestroyStore, BaseEnumerateCollectionsStore, BaseEnumerateKeysStore, BaseStore -from key_value.sync.code_gen.utils.compound import compound_key, get_collections_from_compound_keys, get_keys_from_compound_keys -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry, load_from_json -from key_value.sync.code_gen.utils.time_to_live import seconds_to DEFAULT_SIMPLE_STORE_MAX_ENTRIES = 10000 diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py index 486725c6..828704bd 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/valkey/store.py @@ -6,11 +6,11 @@ from glide_shared.commands.core_options import ExpirySet, ExpiryType from glide_shared.config import GlideClientConfiguration, NodeAddress, ServerCredentials from glide_sync.glide_client import BaseClient +from key_value.shared.utils.compound import compound_key +from key_value.shared.utils.managed_entry import ManagedEntry from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore -from key_value.sync.code_gen.utils.compound import compound_key -from key_value.sync.code_gen.utils.managed_entry import ManagedEntry try: # Use redis-py asyncio client to communicate with a Valkey server (protocol compatible) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py 
b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py deleted file mode 100644 index fcaaefd9..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/acompat.py +++ /dev/null @@ -1,21 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'acompat.py' -# DO NOT CHANGE! Change the original file instead. -import asyncio -import time -from collections.abc import Coroutine -from typing import Any - - -def asleep(seconds: float) -> Coroutine[Any, Any, None]: - """ - Equivalent to asyncio.sleep(), converted to time.sleep() by async_to_sync. - """ - return asyncio.sleep(seconds) - - -def sleep(seconds: float) -> None: - """ - Equivalent to time.sleep(), converted to asyncio.sleep() by async_to_sync. - """ - time.sleep(seconds) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py deleted file mode 100644 index 709b00f8..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/compound.py +++ /dev/null @@ -1,78 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'compound.py' -# DO NOT CHANGE! Change the original file instead. -DEFAULT_COMPOUND_SEPARATOR = "::" -DEFAULT_PREFIX_SEPARATOR = "__" - - -def compound_string(first: str, second: str, separator: str | None = None) -> str: - separator = separator or DEFAULT_COMPOUND_SEPARATOR - return f"{first}{separator}{second}" - - -def uncompound_string(string: str, separator: str | None = None) -> tuple[str, str]: - separator = separator or DEFAULT_COMPOUND_SEPARATOR - if separator not in string: - msg: str = f"String {string} is not a compound identifier" - raise TypeError(msg) from None - - split_key: list[str] = string.split(separator, 1) - - if len(split_key) != 2: # noqa: PLR2004 - msg = f"String {string} is not a compound identifier" - raise TypeError(msg) from None - - return (split_key[0], split_key[1]) - - -def uncompound_strings(strings: list[str], separator: str | None = None) -> list[tuple[str, str]]: - separator = separator or DEFAULT_COMPOUND_SEPARATOR - return [uncompound_string(string=string, separator=separator) for string in strings] - - -def compound_key(collection: str, key: str, separator: str | None = None) -> str: - separator = separator or DEFAULT_COMPOUND_SEPARATOR - return compound_string(first=collection, second=key, separator=separator) - - -def uncompound_key(key: str, separator: str | None = None) -> tuple[str, str]: - separator = separator or DEFAULT_COMPOUND_SEPARATOR - return uncompound_string(string=key, separator=separator) - - -def prefix_key(key: str, prefix: str, separator: str | None = None) -> str: - separator = separator or DEFAULT_PREFIX_SEPARATOR - return compound_string(first=prefix, second=key, separator=separator) - - -def unprefix_key(key: str, prefix: str, separator: str | None = None) -> str: - separator = separator or DEFAULT_PREFIX_SEPARATOR - if not key.startswith(prefix + separator): - msg = f"Key {key} is not prefixed with {prefix}{separator}" - raise ValueError(msg) - return key[len(prefix + separator) :] - - -def prefix_collection(collection: str, prefix: str, separator: str | None = None) -> str: - separator = separator or DEFAULT_PREFIX_SEPARATOR - return compound_string(first=prefix, second=collection, separator=separator) - - -def unprefix_collection(collection: str, prefix: str, separator: str | None = None) -> str: - separator = separator or 
DEFAULT_PREFIX_SEPARATOR - if not collection.startswith(prefix + separator): - msg = f"Collection {collection} is not prefixed with {prefix}{separator}" - raise ValueError(msg) - return collection[len(prefix + separator) :] - - -def get_collections_from_compound_keys(compound_keys: list[str], separator: str | None = None) -> list[str]: - """Return a unique list of collections from a list of compound keys.""" - separator = separator or DEFAULT_COMPOUND_SEPARATOR - return list({key_collection for (key_collection, _) in uncompound_strings(strings=compound_keys, separator=separator)}) - - -def get_keys_from_compound_keys(compound_keys: list[str], collection: str, separator: str | None = None) -> list[str]: - """Return all keys from a list of compound keys for a given collection.""" - separator = separator or DEFAULT_COMPOUND_SEPARATOR - return [key for (key_collection, key) in uncompound_strings(strings=compound_keys, separator=separator) if key_collection == collection] diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py deleted file mode 100644 index ea38dc97..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/managed_entry.py +++ /dev/null @@ -1,102 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'managed_entry.py' -# DO NOT CHANGE! Change the original file instead. -import json -from dataclasses import dataclass, field -from datetime import datetime -from typing import Any, cast - -from typing_extensions import Self - -from key_value.sync.code_gen.errors import DeserializationError, SerializationError -from key_value.sync.code_gen.utils.time_to_live import now, now_plus, try_parse_datetime_str - - -@dataclass(kw_only=True) -class ManagedEntry: - """A managed cache entry containing value data and TTL metadata. - - The entry supports either TTL seconds or absolute expiration datetime. On init: - - If `ttl` is provided but `expires_at` is not, an `expires_at` will be computed. - - If `expires_at` is provided but `ttl` is not, a live TTL will be computed on access. 
- """ - - value: dict[str, Any] - - created_at: datetime | None = field(default=None) - ttl: float | None = field(default=None) - expires_at: datetime | None = field(default=None) - - def __post_init__(self) -> None: - if self.ttl is not None and self.expires_at is None: - self.expires_at = now_plus(seconds=self.ttl) - elif self.expires_at is not None and self.ttl is None: - self.recalculate_ttl() - - @property - def is_expired(self) -> bool: - if self.expires_at is None: - return False - return self.expires_at <= now() - - def recalculate_ttl(self) -> None: - if self.expires_at is not None and self.ttl is None: - self.ttl = (self.expires_at - now()).total_seconds() - - def to_json(self, include_metadata: bool = True, include_expiration: bool = True, include_creation: bool = True) -> str: - data: dict[str, Any] = {} - - if include_metadata: - data["value"] = self.value - if include_creation and self.created_at: - data["created_at"] = self.created_at.isoformat() - if include_expiration and self.expires_at: - data["expires_at"] = self.expires_at.isoformat() - else: - data = self.value - - return dump_to_json(obj=data) - - @classmethod - def from_json(cls, json_str: str, includes_metadata: bool = True, ttl: float | None = None) -> Self: - data: dict[str, Any] = load_from_json(json_str=json_str) - - if not includes_metadata: - return cls(value=data) - - created_at: datetime | None = try_parse_datetime_str(value=data.get("created_at")) - expires_at: datetime | None = try_parse_datetime_str(value=data.get("expires_at")) - - value: dict[str, Any] | None = data.get("value") - - if value is None: - msg = "Value is None" - raise DeserializationError(msg) - - return cls(created_at=created_at, expires_at=expires_at, ttl=ttl, value=value) - - -def dump_to_json(obj: dict[str, Any]) -> str: - try: - return json.dumps(obj) - except (json.JSONDecodeError, TypeError) as e: - msg: str = f"Failed to serialize object to JSON: {e}" - raise SerializationError(msg) from e - - -def load_from_json(json_str: str) -> dict[str, Any]: - try: - deserialized_obj: Any = json.loads(json_str) # pyright: ignore[reportAny] - except (json.JSONDecodeError, TypeError) as e: - msg: str = f"Failed to deserialize JSON string: {e}" - raise DeserializationError(msg) from e - - if not isinstance(deserialized_obj, dict): - msg = "Deserialized object is not a dictionary" - raise DeserializationError(msg) - - if not all(isinstance(key, str) for key in deserialized_obj): # pyright: ignore[reportUnknownVariableType] - msg = "Deserialized object contains non-string keys" - raise DeserializationError(msg) - - return cast(typ="dict[str, Any]", val=deserialized_obj) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py deleted file mode 100644 index d43a3655..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/sanitize.py +++ /dev/null @@ -1,159 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'sanitize.py' -# DO NOT CHANGE! Change the original file instead. 
-import hashlib -from enum import Enum - -MINIMUM_MAX_LENGTH = 16 - -DEFAULT_HASH_FRAGMENT_SIZE = 8 - -DEFAULT_HASH_FRAGMENT_SEPARATOR = "-" -DEFAULT_REPLACEMENT_CHARACTER = "_" - -LOWERCASE_ALPHABET = "abcdefghijklmnopqrstuvwxyz" -UPPERCASE_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" -NUMBERS = "0123456789" -ALPHANUMERIC_CHARACTERS = LOWERCASE_ALPHABET + UPPERCASE_ALPHABET + NUMBERS - - -def generate_hash_fragment(value: str, size: int = DEFAULT_HASH_FRAGMENT_SIZE) -> str: - """Generate a hash fragment of the value.""" - - return hashlib.sha256(value.encode()).hexdigest()[:size] - - -class HashFragmentMode(str, Enum): - ALWAYS = "always" - NEVER = "never" - ONLY_IF_CHANGED = "only_if_changed" - - -def sanitize_characters_in_string(value: str, allowed_characters: str, replace_with: str) -> str: - """Replace characters in a string. If multiple characters are in a row that are not allowed, only - the first one will be replaced. The rest will be removed. If all characters are not allowed, an - empty string will be returned. - - Args: - value: The value to replace characters in. - to_replace: The characters to replace. - replace_with: The characters to replace with. - """ - new_value = "" - last_char_was_replaced = False - - for char in value: - if char in allowed_characters: - new_value += char - last_char_was_replaced = False - else: - if last_char_was_replaced: - continue - new_value += replace_with - last_char_was_replaced = True - - if len(new_value) == 1 and last_char_was_replaced: - return "" - - return new_value - - -def sanitize_string( - value: str, - max_length: int, - allowed_characters: str | None = None, - replacement_character: str = DEFAULT_REPLACEMENT_CHARACTER, - hash_fragment_separator: str = DEFAULT_HASH_FRAGMENT_SEPARATOR, - hash_fragment_mode: HashFragmentMode = HashFragmentMode.ONLY_IF_CHANGED, - hash_fragment_length: int = DEFAULT_HASH_FRAGMENT_SIZE, -) -> str: - """Sanitize the value, replacing characters and optionally adding a fragment a hash of the value if requested. - - If the entire value is sanitized and hash_fragment_mode is HashFragmentMode.ALWAYS or HashFragmentMode.ONLY_IF_CHANGED, - the value returned will be the hash fragment only. - - If the entire value is sanitized and hash_fragment_mode is HashFragmentMode.NEVER, an error will be raised. - - Args: - value: The value to sanitize. - allowed_characters: The allowed characters in the value. - max_length: The maximum length of the value (with the hash fragment added). - hash_fragment_separator: The separator to add between the value and the hash fragment. - hash_fragment_mode: The mode to add the hash fragment. 
- """ - if max_length < MINIMUM_MAX_LENGTH: - msg = f"max_length must be greater than or equal to {MINIMUM_MAX_LENGTH}" - raise ValueError(msg) - - if hash_fragment_length > max_length // 2: - msg = "hash_fragment_length must be less than or equal to half of max_length" - raise ValueError(msg) - - hash_fragment: str = generate_hash_fragment(value=value, size=hash_fragment_length) - hash_fragment_size_required: int = len(hash_fragment_separator) + len(hash_fragment) - - sanitized_value: str = ( - sanitize_characters_in_string(value=value, allowed_characters=allowed_characters, replace_with=replacement_character) - if allowed_characters - else value - ) - - actual_max_length: int - - if hash_fragment_mode == HashFragmentMode.ALWAYS: - actual_max_length = max_length - hash_fragment_size_required - - sanitized_value = sanitized_value[:actual_max_length] - - if not sanitized_value: - return hash_fragment - - return sanitized_value + hash_fragment_separator + hash_fragment - - if hash_fragment_mode == HashFragmentMode.ONLY_IF_CHANGED: - sanitized_value = sanitized_value[:max_length] - - if value == sanitized_value: - return value - - actual_max_length = max_length - hash_fragment_size_required - - sanitized_value = sanitized_value[:actual_max_length] - - if not sanitized_value: - return hash_fragment - - return sanitized_value + hash_fragment_separator + hash_fragment - - if not sanitized_value: - msg = "Entire value was sanitized and hash_fragment_mode is HashFragmentMode.NEVER" - raise ValueError(msg) - - return sanitized_value - - -def hash_excess_length(value: str, max_length: int) -> str: - """Hash part of the value if it exceeds the maximum length. This operation - will truncate the value to the maximum length minus 8 characters and will swap - the last 8 characters with the first 8 characters of the generated hash. - - Args: - value: The value to hash. - max_length: The maximum length of the value. Must be greater than 32. - - Returns: - The hashed value if the value exceeds the maximum length, otherwise the original value. - """ - if max_length <= MINIMUM_MAX_LENGTH: - msg = f"max_length must be greater than {MINIMUM_MAX_LENGTH}" - raise ValueError(msg) - - if len(value) <= max_length: - return value - - truncated_value = value[: max_length - 8] - - hash_of_value = hashlib.sha256(value.encode()).hexdigest() - first_eight_of_hash = hash_of_value[:8] - - return truncated_value + first_eight_of_hash diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py b/key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py deleted file mode 100644 index c8c46dca..00000000 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/utils/time_to_live.py +++ /dev/null @@ -1,41 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'time_to_live.py' -# DO NOT CHANGE! Change the original file instead. 
-import time -from datetime import datetime, timedelta, timezone -from typing import Any - - -def epoch_to_datetime(epoch: float) -> datetime: - """Convert an epoch timestamp to a datetime object.""" - return datetime.fromtimestamp(epoch, tz=timezone.utc) - - -def now_as_epoch() -> float: - """Get the current time as epoch seconds.""" - return time.time() - - -def now() -> datetime: - """Get the current time as a datetime object.""" - return datetime.now(tz=timezone.utc) - - -def seconds_to(datetime: datetime) -> float: - """Get the number of seconds between the current time and a datetime object.""" - return (datetime - now()).total_seconds() - - -def now_plus(seconds: float) -> datetime: - """Get the current time plus a number of seconds as a datetime object.""" - return datetime.now(tz=timezone.utc) + timedelta(seconds=seconds) - - -def try_parse_datetime_str(value: Any) -> datetime | None: # pyright: ignore[reportAny] - try: - if isinstance(value, str): - return datetime.fromisoformat(value) - except ValueError: - return None - - return None diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py index fe370eda..501d8105 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py @@ -4,11 +4,11 @@ from collections.abc import Sequence from typing import Any +from key_value.shared.utils.compound import prefix_collection, unprefix_collection from typing_extensions import override from key_value.sync.code_gen.protocols.key_value import KeyValue from key_value.sync.code_gen.stores.base import DEFAULT_COLLECTION_NAME -from key_value.sync.code_gen.utils.compound import prefix_collection, unprefix_collection from key_value.sync.code_gen.wrappers.base import BaseWrapper diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py index 751f2710..f905228b 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py @@ -4,10 +4,10 @@ from collections.abc import Sequence from typing import Any +from key_value.shared.utils.compound import prefix_key, unprefix_key from typing_extensions import override from key_value.sync.code_gen.protocols.key_value import KeyValue -from key_value.sync.code_gen.utils.compound import prefix_key, unprefix_key from key_value.sync.code_gen.wrappers.base import BaseWrapper diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py index b67a8d22..08c6627a 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py @@ -4,11 +4,11 @@ from collections.abc import Sequence from typing import Any +from key_value.shared.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key from typing_extensions import override from key_value.sync.code_gen.protocols.key_value import KeyValue from key_value.sync.code_gen.stores.base import DEFAULT_COLLECTION_NAME -from 
key_value.sync.code_gen.utils.compound import DEFAULT_PREFIX_SEPARATOR, prefix_key, unprefix_key from key_value.sync.code_gen.wrappers.base import BaseWrapper diff --git a/key-value/key-value-sync/src/key_value/sync/errors/__init__.py b/key-value/key-value-sync/src/key_value/sync/errors/__init__.py deleted file mode 100644 index 3ca01799..00000000 --- a/key-value/key-value-sync/src/key_value/sync/errors/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file '__init__.py' -# DO NOT CHANGE! Change the original file instead. -from key_value.sync.code_gen.errors.base import BaseKeyValueError -from key_value.sync.code_gen.errors.key_value import ( - DeserializationError, - InvalidTTLError, - KeyValueOperationError, - MissingKeyError, - SerializationError, -) -from key_value.sync.code_gen.errors.store import KeyValueStoreError, StoreConnectionError, StoreSetupError - -__all__ = [ - "BaseKeyValueError", - "DeserializationError", - "InvalidTTLError", - "KeyValueOperationError", - "KeyValueStoreError", - "MissingKeyError", - "SerializationError", - "StoreConnectionError", - "StoreSetupError", -] diff --git a/key-value/key-value-sync/tests/__init__.py b/key-value/key-value-sync/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py b/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py index 4be02fff..92efa388 100644 --- a/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py +++ b/key-value/key-value-sync/tests/code_gen/adapters/test_raise.py @@ -2,9 +2,9 @@ # from the original file 'test_raise.py' # DO NOT CHANGE! Change the original file instead. import pytest +from key_value.shared.errors import MissingKeyError from key_value.sync.code_gen.adapters.raise_on_missing import RaiseOnMissingAdapter -from key_value.sync.code_gen.errors import MissingKeyError from key_value.sync.code_gen.stores.memory.store import MemoryStore diff --git a/key-value/key-value-sync/tests/code_gen/conftest.py b/key-value/key-value-sync/tests/code_gen/conftest.py index 6405f031..2e32a894 100644 --- a/key-value/key-value-sync/tests/code_gen/conftest.py +++ b/key-value/key-value-sync/tests/code_gen/conftest.py @@ -3,6 +3,8 @@ # DO NOT CHANGE! Change the original file instead. 
import asyncio import logging +import os +import subprocess from collections.abc import Callable, Iterator from contextlib import contextmanager @@ -127,3 +129,34 @@ def async_running_in_event_loop() -> bool: def running_in_event_loop() -> bool: return False + + +def detect_docker() -> bool: + try: + result = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 + except Exception: + return False + else: + return result.returncode == 0 + + +def detect_on_ci() -> bool: + return os.getenv("CI", "false") == "true" + + +def detect_on_windows() -> bool: + return os.name == "nt" + + +def detect_on_macos() -> bool: + return os.name == "posix" and os.uname().sysname == "Darwin" + + +def should_run_docker_tests() -> bool: + if detect_on_ci(): + return all([detect_docker(), not detect_on_windows(), not detect_on_macos()]) + return detect_docker() + + +def should_skip_docker_tests() -> bool: + return not should_run_docker_tests() diff --git a/key-value/key-value-sync/tests/code_gen/stores/base.py b/key-value/key-value-sync/tests/code_gen/stores/base.py new file mode 100644 index 00000000..31222a1b --- /dev/null +++ b/key-value/key-value-sync/tests/code_gen/stores/base.py @@ -0,0 +1,228 @@ +# WARNING: this file is auto-generated by 'build_sync_library.py' +# from the original file 'base.py' +# DO NOT CHANGE! Change the original file instead. +import hashlib +from abc import ABC, abstractmethod +from collections.abc import Generator +from typing import Any + +import pytest +from dirty_equals import IsFloat +from key_value.shared.code_gen.gather import gather +from key_value.shared.code_gen.sleep import sleep +from key_value.shared.errors import InvalidTTLError, SerializationError +from key_value.shared_test.cases import SIMPLE_TEST_DATA_ARGNAMES, SIMPLE_TEST_DATA_ARGVALUES, SIMPLE_TEST_DATA_IDS +from pydantic import AnyHttpUrl + +from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore +from tests.code_gen.conftest import running_in_event_loop + + +class BaseStoreTests(ABC): + def eventually_consistent(self) -> None: # noqa: B027 + "Subclasses can override this to wait for eventually consistent operations." + + @pytest.fixture + @abstractmethod + def store(self) -> BaseStore | Generator[BaseStore, None, None]: ...
+ + # The first test requires a docker pull, so we only time the actual test + + @pytest.mark.timeout(5, func_only=True) + def test_empty_get(self, store: BaseStore): + """Tests that the get method returns None from an empty store.""" + assert store.get(collection="test", key="test") is None + + def test_empty_put(self, store: BaseStore): + """Tests that the put method does not raise an exception when called on a new store.""" + store.put(collection="test", key="test", value={"test": "test"}) + + def test_empty_ttl(self, store: BaseStore): + """Tests that the ttl method returns None from an empty store.""" + ttl = store.ttl(collection="test", key="test") + assert ttl == (None, None) + + def test_put_serialization_errors(self, store: BaseStore): + """Tests that the put method raises a SerializationError when given a value that cannot be serialized to JSON.""" + with pytest.raises(SerializationError): + store.put(collection="test", key="test", value={"test": AnyHttpUrl("https://test.com")}) + + def test_get_put_get(self, store: BaseStore): + assert store.get(collection="test", key="test") is None + store.put(collection="test", key="test", value={"test": "test"}) + assert store.get(collection="test", key="test") == {"test": "test"} + + @pytest.mark.parametrize(argnames=SIMPLE_TEST_DATA_ARGNAMES, argvalues=SIMPLE_TEST_DATA_ARGVALUES, ids=SIMPLE_TEST_DATA_IDS) + def test_get_complex_put_get(self, store: BaseStore, data: dict[str, Any], json: str): # pyright: ignore[reportUnusedParameter] # noqa: ARG002 + store.put(collection="test", key="test", value=data) + assert store.get(collection="test", key="test") == data + + def test_put_many_get(self, store: BaseStore): + store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert store.get(collection="test", key="test") == {"test": "test"} + assert store.get(collection="test", key="test_2") == {"test": "test_2"} + + def test_put_many_get_many(self, store: BaseStore): + store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + + def test_put_put_get_many(self, store: BaseStore): + store.put(collection="test", key="test", value={"test": "test"}) + store.put(collection="test", key="test_2", value={"test": "test_2"}) + assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + + def test_put_put_get_many_missing_one(self, store: BaseStore): + store.put(collection="test", key="test", value={"test": "test"}) + store.put(collection="test", key="test_2", value={"test": "test_2"}) + assert store.get_many(collection="test", keys=["test", "test_2", "test_3"]) == [{"test": "test"}, {"test": "test_2"}, None] + + def test_put_get_delete_get(self, store: BaseStore): + store.put(collection="test", key="test", value={"test": "test"}) + assert store.get(collection="test", key="test") == {"test": "test"} + assert store.delete(collection="test", key="test") + assert store.get(collection="test", key="test") is None + + def test_put_many_get_get_delete_many_get_many(self, store: BaseStore): + store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert store.delete_many(collection="test", keys=["test", "test_2"]) == 2 + assert
store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + def test_put_many_get_many_delete_many_get_many(self, store: BaseStore): + store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert store.delete_many(collection="test", keys=["test", "test_2"]) == 2 + assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + def test_get_put_get_delete_get(self, store: BaseStore): + """Tests that the get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" + + assert store.get(collection="test", key="test") is None + + store.put(collection="test", key="test", value={"test": "test"}) + + assert store.get(collection="test", key="test") == {"test": "test"} + + assert store.delete(collection="test", key="test") + + assert store.get(collection="test", key="test") is None + + def test_get_put_get_put_delete_get(self, store: BaseStore): + """Tests that the get, put, get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" + store.put(collection="test", key="test", value={"test": "test"}) + assert store.get(collection="test", key="test") == {"test": "test"} + + store.put(collection="test", key="test", value={"test": "test_2"}) + + assert store.get(collection="test", key="test") == {"test": "test_2"} + assert store.delete(collection="test", key="test") + assert store.get(collection="test", key="test") is None + + def test_put_many_delete_delete_get_many(self, store: BaseStore): + store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) + assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] + assert store.delete(collection="test", key="test") + assert store.delete(collection="test", key="test_2") + assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] + + def test_put_ttl_get_ttl(self, store: BaseStore): + """Tests that the put and get ttl methods work together to store and retrieve a ttl from an empty store.""" + store.put(collection="test", key="test", value={"test": "test"}, ttl=100) + (value, ttl) = store.ttl(collection="test", key="test") + + assert value == {"test": "test"} + assert ttl is not None + assert ttl == IsFloat(approx=100) + + def test_negative_ttl(self, store: BaseStore): + """Tests that putting a key with a negative ttl raises an InvalidTTLError.""" + with pytest.raises(InvalidTTLError): + store.put(collection="test", key="test", value={"test": "test"}, ttl=-100) + + @pytest.mark.timeout(10) + def test_put_expired_get_none(self, store: BaseStore): + """Tests that a key put with a short ttl returns None once the ttl has elapsed.""" + store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1) + sleep(seconds=3) + assert store.get(collection="test_collection", key="test_key") is None + + def test_long_collection_name(self, store: BaseStore): + """Tests that a long collection name will not raise an error.""" + store.put(collection="test_collection" * 100, key="test_key", value={"test": "test"}) + assert store.get(collection="test_collection" * 100, key="test_key") == {"test": "test"} + + def test_special_characters_in_collection_name(self, store: BaseStore): + """Tests that special characters in the collection name will not raise an
error.""" + store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"}) + assert store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} + + def test_long_key_name(self, store: BaseStore): + """Tests that a long key name will not raise an error.""" + store.put(collection="test_collection", key="test_key" * 100, value={"test": "test"}) + assert store.get(collection="test_collection", key="test_key" * 100) == {"test": "test"} + + def test_special_characters_in_key_name(self, store: BaseStore): + """Tests that a special characters in the key name will not raise an error.""" + store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) + assert store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} + + @pytest.mark.timeout(20) + def test_not_unbounded(self, store: BaseStore): + """Tests that the store is not unbounded.""" + + for i in range(1000): + value = hashlib.sha256(f"test_{i}".encode()).hexdigest() + store.put(collection="test_collection", key=f"test_key_{i}", value={"test": value}) + + assert store.get(collection="test_collection", key="test_key_0") is None + assert store.get(collection="test_collection", key="test_key_999") is not None + + @pytest.mark.skipif(condition=not running_in_event_loop(), reason="Cannot run concurrent operations in event loop") + def test_concurrent_operations(self, store: BaseStore): + """Tests that the store can handle concurrent operations.""" + + def worker(store: BaseStore, worker_id: int): + for i in range(10): + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None + + store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}"}) + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}"} + + store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}_2"}) + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}_2"} + + assert store.delete(collection="test_collection", key=f"test_{worker_id}_{i}") + assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None + + _ = gather(*[worker(store, worker_id) for worker_id in range(5)]) + + @pytest.mark.timeout(15) + def test_minimum_put_many_get_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + store.put_many(collection="test_collection", keys=keys, values=values) + assert store.get_many(collection="test_collection", keys=keys) == values + + @pytest.mark.timeout(15) + def test_minimum_put_many_delete_many_performance(self, store: BaseStore): + """Tests that the store meets minimum performance requirements.""" + keys = [f"test_{i}" for i in range(10)] + values = [{"test": f"test_{i}"} for i in range(10)] + store.put_many(collection="test_collection", keys=keys, values=values) + assert store.delete_many(collection="test_collection", keys=keys) == 10 + + +class ContextManagerStoreTestMixin: + @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) + def enter_exit_store( + self, request: pytest.FixtureRequest, store: BaseContextManagerStore + ) -> Generator[BaseContextManagerStore, None, None]: + context_manager = request.param # pyright: ignore[reportAny] + + if context_manager: + with 
store: + yield store + else: + yield store + store.close() diff --git a/key-value/key-value-sync/tests/code_gen/stores/base/__init__.py b/key-value/key-value-sync/tests/code_gen/stores/base/__init__.py deleted file mode 100644 index b1835176..00000000 --- a/key-value/key-value-sync/tests/code_gen/stores/base/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file '__init__.py' -# DO NOT CHANGE! Change the original file instead. - diff --git a/key-value/key-value-sync/tests/code_gen/stores/conftest.py b/key-value/key-value-sync/tests/code_gen/stores/conftest.py index edea640a..114e8c57 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/conftest.py +++ b/key-value/key-value-sync/tests/code_gen/stores/conftest.py @@ -1,25 +1,11 @@ # WARNING: this file is auto-generated by 'build_sync_library.py' # from the original file 'conftest.py' # DO NOT CHANGE! Change the original file instead. -import asyncio -import hashlib -import os -import subprocess -from abc import ABC, abstractmethod -from collections.abc import Callable, Generator from datetime import datetime, timedelta, timezone -from typing import Any import pytest -from dirty_equals import IsFloat -from pydantic import AnyHttpUrl -from key_value.sync.code_gen.errors import InvalidTTLError, SerializationError -from key_value.sync.code_gen.stores.base import BaseContextManagerStore, BaseStore from key_value.sync.code_gen.stores.memory.store import MemoryStore -from key_value.sync.code_gen.utils.acompat import sleep -from tests.code_gen.cases import DICTIONARY_TO_JSON_TEST_CASES_NAMES, OBJECT_TEST_CASES -from tests.code_gen.conftest import running_in_event_loop @pytest.fixture @@ -37,251 +23,3 @@ def now_plus(seconds: int) -> datetime: def is_around(value: float, delta: float = 1) -> bool: return value - delta < value < value + delta - - -def detect_docker() -> bool: - try: - result = subprocess.run(["docker", "ps"], check=False, capture_output=True, text=True) # noqa: S607 - except Exception: - return False - else: - return result.returncode == 0 - - -def detect_on_ci() -> bool: - return os.getenv("CI", "false") == "true" - - -def detect_on_windows() -> bool: - return os.name == "nt" - - -def detect_on_macos() -> bool: - return os.name == "darwin" - - -def should_run_docker_tests() -> bool: - if detect_on_ci(): - return all([detect_docker(), not detect_on_windows(), not detect_on_macos()]) - return detect_docker() - - -def should_skip_docker_tests() -> bool: - return not should_run_docker_tests() - - -def wait_for_store(wait_fn: Callable[[], bool], max_time: int = 10) -> bool: - for _ in range(max_time): - if wait_fn(): - return True - sleep(seconds=1) - return False - - -class BaseStoreTests(ABC): - def eventually_consistent(self) -> None: # noqa: B027 - "Subclasses can override this to wait for eventually consistent operations." - - @pytest.fixture - @abstractmethod - def store(self) -> BaseStore | Generator[BaseStore, None, None]: ... 
- - # The first test requires a docker pull, so we only time the actual test - - @pytest.mark.timeout(5, func_only=True) - def test_empty_get(self, store: BaseStore): - """Tests that the get method returns None from an empty store.""" - assert store.get(collection="test", key="test") is None - - def test_empty_put(self, store: BaseStore): - """Tests that the put method does not raise an exception when called on a new store.""" - store.put(collection="test", key="test", value={"test": "test"}) - - def test_empty_ttl(self, store: BaseStore): - """Tests that the ttl method returns None from an empty store.""" - assert store.ttl(collection="test", key="test") == (None, None) - - def test_put_serialization_errors(self, store: BaseStore): - """Tests that the put method does not raise an exception when called on a new store.""" - with pytest.raises(SerializationError): - store.put(collection="test", key="test", value={"test": AnyHttpUrl("https://test.com")}) - - def test_get_put_get(self, store: BaseStore): - assert store.get(collection="test", key="test") is None - store.put(collection="test", key="test", value={"test": "test"}) - assert store.get(collection="test", key="test") == {"test": "test"} - - @pytest.mark.parametrize(argnames="value", argvalues=OBJECT_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) - def test_get_complex_put_get(self, store: BaseStore, value: dict[str, Any]): - store.put(collection="test", key="test", value=value) - assert store.get(collection="test", key="test") == value - - def test_put_many_get(self, store: BaseStore): - store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert store.get(collection="test", key="test") == {"test": "test"} - assert store.get(collection="test", key="test_2") == {"test": "test_2"} - - def test_put_many_get_many(self, store: BaseStore): - store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - - def test_put_put_get_many(self, store: BaseStore): - store.put(collection="test", key="test", value={"test": "test"}) - store.put(collection="test", key="test_2", value={"test": "test_2"}) - assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - - def test_put_put_get_many_missing_one(self, store: BaseStore): - store.put(collection="test", key="test", value={"test": "test"}) - store.put(collection="test", key="test_2", value={"test": "test_2"}) - assert store.get_many(collection="test", keys=["test", "test_2", "test_3"]) == [{"test": "test"}, {"test": "test_2"}, None] - - def test_put_get_delete_get(self, store: BaseStore): - store.put(collection="test", key="test", value={"test": "test"}) - assert store.get(collection="test", key="test") == {"test": "test"} - assert store.delete(collection="test", key="test") - assert store.get(collection="test", key="test") is None - - def test_put_many_get_get_delete_many_get_many(self, store: BaseStore): - store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - assert store.delete_many(collection="test", keys=["test", "test_2"]) == 2 - assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] - - def test_put_many_get_many_delete_many_get_many(self, 
store: BaseStore): - store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - assert store.delete_many(collection="test", keys=["test", "test_2"]) == 2 - assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] - - def test_get_put_get_delete_get(self, store: BaseStore): - """Tests that the get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" - - assert store.get(collection="test", key="test") is None - - store.put(collection="test", key="test", value={"test": "test"}) - - assert store.get(collection="test", key="test") == {"test": "test"} - - assert store.delete(collection="test", key="test") - - assert store.get(collection="test", key="test") is None - - def test_get_put_get_put_delete_get(self, store: BaseStore): - """Tests that the get, put, get, put, delete, and get methods work together to store and retrieve a value from an empty store.""" - store.put(collection="test", key="test", value={"test": "test"}) - assert store.get(collection="test", key="test") == {"test": "test"} - - store.put(collection="test", key="test", value={"test": "test_2"}) - - assert store.get(collection="test", key="test") == {"test": "test_2"} - assert store.delete(collection="test", key="test") - assert store.get(collection="test", key="test") is None - - def test_put_many_delete_delete_get_many(self, store: BaseStore): - store.put_many(collection="test", keys=["test", "test_2"], values=[{"test": "test"}, {"test": "test_2"}]) - assert store.get_many(collection="test", keys=["test", "test_2"]) == [{"test": "test"}, {"test": "test_2"}] - assert store.delete(collection="test", key="test") - assert store.delete(collection="test", key="test_2") - assert store.get_many(collection="test", keys=["test", "test_2"]) == [None, None] - - def test_put_ttl_get_ttl(self, store: BaseStore): - """Tests that the put and get ttl methods work together to store and retrieve a ttl from an empty store.""" - store.put(collection="test", key="test", value={"test": "test"}, ttl=100) - (value, ttl) = store.ttl(collection="test", key="test") - - assert value == {"test": "test"} - assert ttl is not None - assert ttl == IsFloat(approx=100) - - def test_negative_ttl(self, store: BaseStore): - """Tests that a negative ttl will return None when getting the key.""" - with pytest.raises(InvalidTTLError): - store.put(collection="test", key="test", value={"test": "test"}, ttl=-100) - - @pytest.mark.timeout(10) - def test_put_expired_get_none(self, store: BaseStore): - """Tests that a put call with a negative ttl will return None when getting the key.""" - store.put(collection="test_collection", key="test_key", value={"test": "test"}, ttl=1) - sleep(seconds=3) - assert store.get(collection="test_collection", key="test_key") is None - - def test_long_collection_name(self, store: BaseStore): - """Tests that a long collection name will not raise an error.""" - store.put(collection="test_collection" * 100, key="test_key", value={"test": "test"}) - assert store.get(collection="test_collection" * 100, key="test_key") == {"test": "test"} - - def test_special_characters_in_collection_name(self, store: BaseStore): - """Tests that a special characters in the collection name will not raise an error.""" - store.put(collection="test_collection!@#$%^&*()", key="test_key", value={"test": "test"}) - assert 
store.get(collection="test_collection!@#$%^&*()", key="test_key") == {"test": "test"} - - def test_long_key_name(self, store: BaseStore): - """Tests that a long key name will not raise an error.""" - store.put(collection="test_collection", key="test_key" * 100, value={"test": "test"}) - assert store.get(collection="test_collection", key="test_key" * 100) == {"test": "test"} - - def test_special_characters_in_key_name(self, store: BaseStore): - """Tests that a special characters in the key name will not raise an error.""" - store.put(collection="test_collection", key="test_key!@#$%^&*()", value={"test": "test"}) - assert store.get(collection="test_collection", key="test_key!@#$%^&*()") == {"test": "test"} - - @pytest.mark.timeout(20) - def test_not_unbounded(self, store: BaseStore): - """Tests that the store is not unbounded.""" - - for i in range(1000): - value = hashlib.sha256(f"test_{i}".encode()).hexdigest() - store.put(collection="test_collection", key=f"test_key_{i}", value={"test": value}) - - assert store.get(collection="test_collection", key="test_key_0") is None - assert store.get(collection="test_collection", key="test_key_999") is not None - - @pytest.mark.skipif(condition=not running_in_event_loop(), reason="Cannot run concurrent operations in event loop") - def test_concurrent_operations(self, store: BaseStore): - """Tests that the store can handle concurrent operations.""" - - def worker(store: BaseStore, worker_id: int): - for i in range(10): - assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None - - store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}"}) - assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}"} - - store.put(collection="test_collection", key=f"test_{worker_id}_{i}", value={"test": f"test_{i}_2"}) - assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") == {"test": f"test_{i}_2"} - - assert store.delete(collection="test_collection", key=f"test_{worker_id}_{i}") - assert store.get(collection="test_collection", key=f"test_{worker_id}_{i}") is None - - _ = asyncio.gather(*[worker(store, worker_id) for worker_id in range(5)]) - - @pytest.mark.timeout(15) - def test_minimum_put_many_get_many_performance(self, store: BaseStore): - """Tests that the store meets minimum performance requirements.""" - keys = [f"test_{i}" for i in range(10)] - values = [{"test": f"test_{i}"} for i in range(10)] - store.put_many(collection="test_collection", keys=keys, values=values) - assert store.get_many(collection="test_collection", keys=keys) == values - - @pytest.mark.timeout(15) - def test_minimum_put_many_delete_many_performance(self, store: BaseStore): - """Tests that the store meets minimum performance requirements.""" - keys = [f"test_{i}" for i in range(10)] - values = [{"test": f"test_{i}"} for i in range(10)] - store.put_many(collection="test_collection", keys=keys, values=values) - assert store.delete_many(collection="test_collection", keys=keys) == 10 - - -class ContextManagerStoreTestMixin: - @pytest.fixture(params=[True, False], ids=["with_ctx_manager", "no_ctx_manager"], autouse=True) - def enter_exit_store( - self, request: pytest.FixtureRequest, store: BaseContextManagerStore - ) -> Generator[BaseContextManagerStore, None, None]: - context_manager = request.param # pyright: ignore[reportAny] - - if context_manager: - with store: - yield store - else: - yield store - store.close() diff --git 
a/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py b/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py index 11428a0a..994738a2 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py +++ b/key-value/key-value-sync/tests/code_gen/stores/disk/test_disk.py @@ -8,7 +8,7 @@ from typing_extensions import override from key_value.sync.code_gen.stores.disk import DiskStore -from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin +from tests.code_gen.stores.base import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 100 * 1024 # 100KB diff --git a/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py b/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py index af7a6b22..e6341075 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py +++ b/key-value/key-value-sync/tests/code_gen/stores/disk/test_multi_disk.py @@ -9,7 +9,7 @@ from typing_extensions import override from key_value.sync.code_gen.stores.disk.multi_store import MultiDiskStore -from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin +from tests.code_gen.stores.base import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 100 * 1024 # 100KB diff --git a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py index d013d0ea..6383157f 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py @@ -6,12 +6,13 @@ import pytest from elasticsearch import Elasticsearch +from key_value.shared.stores.wait import wait_for_true from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseStore from key_value.sync.code_gen.stores.elasticsearch import ElasticsearchStore from tests.code_gen.conftest import docker_container -from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, wait_for_store +from tests.code_gen.stores.base import BaseStoreTests, ContextManagerStoreTestMixin TEST_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB ES_HOST = "localhost" @@ -28,7 +29,8 @@ def get_elasticsearch_client() -> Elasticsearch: def ping_elasticsearch() -> bool: es_client: Elasticsearch = get_elasticsearch_client() - return es_client.ping() + with es_client: + return es_client.ping() class ElasticsearchFailedToStartError(Exception): @@ -42,7 +44,7 @@ def setup_elasticsearch(self) -> Generator[None, None, None]: with docker_container( "elasticsearch-test", ES_IMAGE, {"9200": 9200}, {"discovery.type": "single-node", "xpack.security.enabled": "false"} ): - if not wait_for_store(wait_fn=ping_elasticsearch): + if not wait_for_true(bool_fn=ping_elasticsearch, tries=30, wait_time=1): msg = "Elasticsearch failed to start" raise ElasticsearchFailedToStartError(msg) @@ -50,10 +52,11 @@ def setup_elasticsearch(self) -> Generator[None, None, None]: @override @pytest.fixture - def store(self) -> ElasticsearchStore: + def store(self) -> Generator[ElasticsearchStore, None, None]: es_client = get_elasticsearch_client() _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") - return ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") + with ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") as store: + yield store @pytest.mark.skip(reason="Distributed 
Caches are unbounded") @override diff --git a/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py b/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py index 6c83981a..3b75dec3 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py +++ b/key-value/key-value-sync/tests/code_gen/stores/memory/test_memory.py @@ -5,7 +5,7 @@ from typing_extensions import override from key_value.sync.code_gen.stores.memory.store import MemoryStore -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestMemoryStore(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py b/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py index b2d0993e..49374074 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py +++ b/key-value/key-value-sync/tests/code_gen/stores/mongodb/test_mongodb.py @@ -7,13 +7,14 @@ import pytest from inline_snapshot import snapshot +from key_value.shared.stores.wait import wait_for_true from pymongo import MongoClient from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseStore from key_value.sync.code_gen.stores.mongodb import MongoDBStore -from tests.code_gen.conftest import docker_container -from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store +from tests.code_gen.conftest import docker_container, should_skip_docker_tests +from tests.code_gen.stores.base import BaseStoreTests, ContextManagerStoreTestMixin # MongoDB test configuration MONGODB_HOST = "localhost" @@ -42,7 +43,7 @@ class TestMongoDBStore(ContextManagerStoreTestMixin, BaseStoreTests): @pytest.fixture(autouse=True, scope="session") def setup_mongodb(self) -> Generator[None, None, None]: with docker_container("mongodb-test", "mongo:7", {"27017": 27017}): - if not wait_for_store(wait_fn=ping_mongodb): + if not wait_for_true(bool_fn=ping_mongodb, tries=30, wait_time=1): msg = "MongoDB failed to start" raise MongoDBFailedToStartError(msg) diff --git a/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py b/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py index ad49a248..01f223d6 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py +++ b/key-value/key-value-sync/tests/code_gen/stores/redis/test_redis.py @@ -4,12 +4,14 @@ from collections.abc import Generator import pytest +from key_value.shared.stores.wait import wait_for_true +from redis import Redis from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseStore from key_value.sync.code_gen.stores.redis import RedisStore -from tests.code_gen.conftest import docker_container, docker_stop -from tests.code_gen.stores.conftest import BaseStoreTests, ContextManagerStoreTestMixin, should_skip_docker_tests, wait_for_store +from tests.code_gen.conftest import docker_container, docker_stop, should_skip_docker_tests +from tests.code_gen.stores.base import BaseStoreTests, ContextManagerStoreTestMixin # Redis test configuration REDIS_HOST = "localhost" @@ -20,9 +22,7 @@ def ping_redis() -> bool: - from redis import Redis - - client = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) + client: Redis = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) try: return client.ping() # pyright: ignore[reportUnknownMemberType,
reportAny, reportReturnType] except Exception: @@ -41,7 +41,7 @@ def setup_redis(self) -> Generator[None, None, None]: docker_stop("valkey-test", raise_on_error=False) with docker_container("redis-test", "redis", {"6379": 6379}): - if not wait_for_store(wait_fn=ping_redis): + if not wait_for_true(bool_fn=ping_redis, tries=30, wait_time=1): msg = "Redis failed to start" raise RedisFailedToStartError(msg) diff --git a/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py b/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py index 0f869c6c..1ee92614 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py +++ b/key-value/key-value-sync/tests/code_gen/stores/simple/test_store.py @@ -5,7 +5,7 @@ from typing_extensions import override from key_value.sync.code_gen.stores.simple.store import SimpleStore -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestSimpleStore(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py b/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py index f4b988c9..e0449ddc 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py +++ b/key-value/key-value-sync/tests/code_gen/stores/valkey/test_valkey.py @@ -4,17 +4,12 @@ from collections.abc import Generator import pytest +from key_value.shared.stores.wait import wait_for_true from typing_extensions import override from key_value.sync.code_gen.stores.base import BaseStore -from tests.code_gen.conftest import docker_container, docker_stop -from tests.code_gen.stores.conftest import ( - BaseStoreTests, - ContextManagerStoreTestMixin, - detect_on_windows, - should_skip_docker_tests, - wait_for_store, -) +from tests.code_gen.conftest import detect_on_windows, docker_container, docker_stop, should_skip_docker_tests +from tests.code_gen.stores.base import BaseStoreTests, ContextManagerStoreTestMixin # Valkey test configuration VALKEY_HOST = "localhost" @@ -55,7 +50,7 @@ def setup_valkey(self) -> Generator[None, None, None]: docker_stop("redis-test", raise_on_error=False) with docker_container("valkey-test", "valkey/valkey:latest", {"6379": 6379}): - if not wait_for_store(wait_fn=self.ping_valkey): + if not wait_for_true(bool_fn=self.ping_valkey, tries=30, wait_time=1): msg = "Valkey failed to start" raise ValkeyFailedToStartError(msg) diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py index 56bda02f..3e1b0996 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_clamp_ttl.py @@ -7,7 +7,7 @@ from key_value.sync.code_gen.stores.memory.store import MemoryStore from key_value.sync.code_gen.wrappers.ttl_clamp import TTLClampWrapper -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestTTLClampWrapper(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py index 4724f694..e34bb353 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_passthrough_cache.py @@ -10,7 +10,7 @@ from 
key_value.sync.code_gen.stores.disk.store import DiskStore from key_value.sync.code_gen.stores.memory.store import MemoryStore from key_value.sync.code_gen.wrappers.passthrough_cache import PassthroughCacheWrapper -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests DISK_STORE_SIZE_LIMIT = 100 * 1024 # 100KB diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py index 85b2e89b..d2609235 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_collection.py @@ -6,7 +6,7 @@ from key_value.sync.code_gen.stores.memory.store import MemoryStore from key_value.sync.code_gen.wrappers.prefix_collections import PrefixCollectionsWrapper -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestPrefixCollectionWrapper(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py index 15b94f41..65ff79cf 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_prefix_key.py @@ -6,7 +6,7 @@ from key_value.sync.code_gen.stores.memory.store import MemoryStore from key_value.sync.code_gen.wrappers.prefix_keys import PrefixKeysWrapper -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestPrefixKeyWrapper(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py index 16f4d63b..2f5658c2 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_single_collection.py @@ -6,7 +6,7 @@ from key_value.sync.code_gen.stores.memory.store import MemoryStore from key_value.sync.code_gen.wrappers.single_collection import SingleCollectionWrapper -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestSingleCollectionWrapper(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py index 90e8bc75..3678cb34 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py +++ b/key-value/key-value-sync/tests/code_gen/stores/wrappers/test_statistics.py @@ -6,7 +6,7 @@ from key_value.sync.code_gen.stores.memory.store import MemoryStore from key_value.sync.code_gen.wrappers.statistics import StatisticsWrapper -from tests.code_gen.stores.conftest import BaseStoreTests +from tests.code_gen.stores.base import BaseStoreTests class TestStatisticsWrapper(BaseStoreTests): diff --git a/key-value/key-value-sync/tests/code_gen/utils/__init__.py b/key-value/key-value-sync/tests/code_gen/utils/__init__.py deleted file mode 100644 index b1835176..00000000 --- a/key-value/key-value-sync/tests/code_gen/utils/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file '__init__.py' 
-# DO NOT CHANGE! Change the original file instead. - diff --git a/key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py b/key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py deleted file mode 100644 index 60e55744..00000000 --- a/key-value/key-value-sync/tests/code_gen/utils/test_managed_entry.py +++ /dev/null @@ -1,30 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'test_managed_entry.py' -# DO NOT CHANGE! Change the original file instead. -from datetime import datetime, timezone -from typing import Any - -import pytest - -from key_value.sync.code_gen.utils.managed_entry import dump_to_json, load_from_json -from tests.code_gen.cases import DICTIONARY_TO_JSON_TEST_CASES, DICTIONARY_TO_JSON_TEST_CASES_NAMES - -FIXED_DATETIME = datetime(2025, 1, 1, 0, 0, 0, tzinfo=timezone.utc) -FIXED_DATETIME_STRING = FIXED_DATETIME.isoformat() - - -@pytest.mark.parametrize(argnames=("obj", "expected"), argvalues=DICTIONARY_TO_JSON_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) -def test_dump_to_json(obj: dict[str, Any], expected: str): - assert dump_to_json(obj) == expected - - -@pytest.mark.parametrize(argnames=("obj", "expected"), argvalues=DICTIONARY_TO_JSON_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) -def test_load_from_json(obj: dict[str, Any], expected: str): - assert load_from_json(expected) == obj - - -@pytest.mark.parametrize(argnames=("obj", "expected"), argvalues=DICTIONARY_TO_JSON_TEST_CASES, ids=DICTIONARY_TO_JSON_TEST_CASES_NAMES) -def test_roundtrip_json(obj: dict[str, Any], expected: str): - dumped_json: str = dump_to_json(obj) - assert dumped_json == expected - assert load_from_json(dumped_json) == obj diff --git a/key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py b/key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py deleted file mode 100644 index be11c580..00000000 --- a/key-value/key-value-sync/tests/code_gen/utils/test_sanitize.py +++ /dev/null @@ -1,82 +0,0 @@ -# WARNING: this file is auto-generated by 'build_sync_library.py' -# from the original file 'test_sanitize.py' -# DO NOT CHANGE! Change the original file instead. 
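These deleted sync-side sanitize tests pin down behavior that resurfaces in `key_value.shared.utils.sanitize` later in the series. In particular, the 8-character fragments in their snapshots ("9f86d081", "d5579c46") are the leading hex of a SHA-256 digest of the original value; a quick illustration (the helper name is illustrative, not part of the library):

```python
# The snapshot fragments below are reproducible as the first eight hex
# characters of a SHA-256 digest of the unsanitized input.
import hashlib

def hash_fragment(value: str) -> str:
    return hashlib.sha256(value.encode()).hexdigest()[:8]

assert hash_fragment("test") == "9f86d081"
assert hash_fragment("test string") == "d5579c46"
```

The removed listing continues below.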
-import pytest -from inline_snapshot import snapshot - -from key_value.sync.code_gen.utils.sanitize import ( - ALPHANUMERIC_CHARACTERS, - LOWERCASE_ALPHABET, - NUMBERS, - UPPERCASE_ALPHABET, - HashFragmentMode, - sanitize_string, -) - -ALWAYS_HASH = HashFragmentMode.ALWAYS -ONLY_IF_CHANGED_HASH = HashFragmentMode.ONLY_IF_CHANGED -NEVER_HASH = HashFragmentMode.NEVER - - -def test_sanitize_string(): - sanitized_string = sanitize_string(value="test string", max_length=16) - assert sanitized_string == snapshot("test string") - - sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=ALWAYS_HASH) - assert sanitized_string == snapshot("test st-d5579c46") - - sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=ONLY_IF_CHANGED_HASH) - assert sanitized_string == snapshot("test string") - - sanitized_string = sanitize_string(value="test string", max_length=16, hash_fragment_mode=NEVER_HASH) - assert sanitized_string == snapshot("test string") - - -@pytest.mark.parametrize(argnames="hash_fragment_mode", argvalues=[ONLY_IF_CHANGED_HASH, NEVER_HASH]) -@pytest.mark.parametrize(argnames="max_length", argvalues=[16, 32]) -@pytest.mark.parametrize( - argnames=("value", "allowed_chars"), - argvalues=[ - ("test", None), - ("test", "test"), - ("test_test", "test_"), - ("!@#$%^&*()", "!@#$%^&*()"), - ("test", LOWERCASE_ALPHABET), - ("test", ALPHANUMERIC_CHARACTERS), - ], -) -def test_unchanged_strings(value: str, allowed_chars: str | None, max_length: int, hash_fragment_mode: HashFragmentMode): - sanitized_string = sanitize_string( - value=value, allowed_characters=allowed_chars, max_length=max_length, hash_fragment_mode=hash_fragment_mode - ) - assert sanitized_string == value - - -@pytest.mark.parametrize(argnames="hash_fragment_mode", argvalues=[ONLY_IF_CHANGED_HASH, ALWAYS_HASH]) -def test_changed_strings(hash_fragment_mode: HashFragmentMode): - def process_string(value: str, allowed_characters: str | None) -> str: - return sanitize_string(value=value, allowed_characters=allowed_characters, max_length=16, hash_fragment_mode=hash_fragment_mode) - - sanitized_string = process_string(value="test", allowed_characters=NUMBERS) - assert sanitized_string == snapshot("9f86d081") - - sanitized_string = process_string(value="test", allowed_characters=UPPERCASE_ALPHABET) - assert sanitized_string == snapshot("9f86d081") - - sanitized_string = process_string(value="test with spaces", allowed_characters=LOWERCASE_ALPHABET) - assert sanitized_string == snapshot("test_wi-ed2daf39") - - sanitized_string = process_string(value="test too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) - assert sanitized_string == snapshot("test_to-479b94c3") - - sanitized_string = process_string(value="test too long with spaces", allowed_characters=None) - assert sanitized_string == snapshot("test to-479b94c3") - - sanitized_string = process_string(value="test too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) - assert sanitized_string == snapshot("test_to-479b94c3") - - sanitized_string = process_string(value="test way too long with spaces", allowed_characters=None) - assert sanitized_string == snapshot("test wa-3d014b9b") - - sanitized_string = process_string(value="test way too long with spaces", allowed_characters=ALPHANUMERIC_CHARACTERS) - assert sanitized_string == snapshot("test_wa-3d014b9b") diff --git a/pyproject.toml b/pyproject.toml index faed800d..c3d23806 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,20 +10,8 @@ 
requires-python = ">=3.10" members = [ "key-value/key-value-aio", "key-value/key-value-sync", -] - -[tool.pyright] -pythonVersion = "3.10" -typeCheckingMode = "strict" -reportExplicitAny = false -reportMissingTypeStubs = false -include = ["**/tests/**", "**/src/**"] -exclude = [ - "**/playground/**", - "**/examples/**", - "**/references/**", - "**/docs/**", - "**/.venv/**", + "key-value/key-value-shared", + "key-value/key-value-shared-test", ] [tool.ruff] @@ -79,7 +67,18 @@ line-length = 140 [dependency-groups] dev = [ "docker>=7.1.0", + "basedpyright>=1.31.5", ] -[tool.pytest.ini_options] -consider_namespace_packages = true +[tool.pyright] +pythonVersion = "3.10" +typeCheckingMode = "strict" +reportExplicitAny = false +reportMissingTypeStubs = false +exclude = [ + "**/playground/**", + "**/examples/**", + "**/references/**", + "**/docs/**", + "**/.venv/**", +] diff --git a/scripts/build_sync_library.py b/scripts/build_sync_library.py index db22e116..64e0ff13 100644 --- a/scripts/build_sync_library.py +++ b/scripts/build_sync_library.py @@ -197,6 +197,7 @@ class RenameAsyncToSync(ast.NodeTransformer): # type: ignore "tests.stores.conftest": "tests.code_gen.stores.conftest", "tests.conftest": "tests.code_gen.conftest", "tests.cases": "tests.code_gen.cases", + "tests.stores.base": "tests.code_gen.stores.base", } names_map: ClassVar[dict[str, str]] = { "__aenter__": "__enter__", @@ -221,6 +222,8 @@ class RenameAsyncToSync(ast.NodeTransformer): # type: ignore "asyncio.sleep": "time.sleep", "async_running_in_event_loop": "running_in_event_loop", "asleep": "sleep", + "async_wait_for_true": "wait_for_true", + "async_gather": "gather", } _skip_imports: ClassVar[dict[str, set[str]]] = { "acompat": {"alist", "anext"}, diff --git a/uv.lock b/uv.lock index c526e0ef..90fd9e35 100644 --- a/uv.lock +++ b/uv.lock @@ -10,6 +10,8 @@ resolution-markers = [ members = [ "py-key-value", "py-key-value-aio", + "py-key-value-shared", + "py-key-value-shared-test", "py-key-value-sync", ] @@ -929,18 +931,25 @@ source = { virtual = "." 
} [package.dev-dependencies] dev = [ + { name = "basedpyright" }, { name = "docker" }, ] [package.metadata] [package.metadata.requires-dev] -dev = [{ name = "docker", specifier = ">=7.1.0" }] +dev = [ + { name = "basedpyright", specifier = ">=1.31.5" }, + { name = "docker", specifier = ">=7.1.0" }, +] [[package]] name = "py-key-value-aio" version = "0.2.0" source = { editable = "key-value/key-value-aio" } +dependencies = [ + { name = "py-key-value-shared" }, +] [package.optional-dependencies] disk = [ @@ -980,6 +989,7 @@ dev = [ { name = "inline-snapshot" }, { name = "py-key-value-aio", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] }, { name = "py-key-value-aio", extra = ["valkey"], marker = "sys_platform != 'win32'" }, + { name = "py-key-value-shared-test" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-dotenv" }, @@ -1000,6 +1010,7 @@ requires-dist = [ { name = "diskcache", marker = "extra == 'disk'", specifier = ">=5.6.0" }, { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, + { name = "py-key-value-shared", editable = "key-value/key-value-shared" }, { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, { name = "pymongo", marker = "extra == 'mongodb'", specifier = ">=4.15.0" }, { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, @@ -1018,6 +1029,7 @@ dev = [ { name = "py-key-value-aio", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] }, { name = "py-key-value-aio", extras = ["pydantic"] }, { name = "py-key-value-aio", extras = ["valkey"], marker = "sys_platform != 'win32'" }, + { name = "py-key-value-shared-test", editable = "key-value/key-value-shared-test" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-dotenv", specifier = ">=0.5.2" }, @@ -1028,12 +1040,64 @@ dev = [ ] lint = [{ name = "ruff" }] +[[package]] +name = "py-key-value-shared" +version = "0.2.0" +source = { editable = "key-value/key-value-shared" } + +[package.dev-dependencies] +dev = [ + { name = "basedpyright" }, + { name = "dirty-equals" }, + { name = "inline-snapshot" }, + { name = "py-key-value-shared-test" }, + { name = "pytest" }, + { name = "pytest-dotenv" }, + { name = "pytest-mock" }, + { name = "pytest-timeout" }, + { name = "ruff" }, +] + +[package.metadata] + +[package.metadata.requires-dev] +dev = [ + { name = "basedpyright", specifier = ">=1.31.5" }, + { name = "dirty-equals", specifier = ">=0.10.0" }, + { name = "inline-snapshot", specifier = ">=0.29.0" }, + { name = "py-key-value-shared-test", editable = "key-value/key-value-shared-test" }, + { name = "pytest" }, + { name = "pytest-dotenv", specifier = ">=0.5.2" }, + { name = "pytest-mock" }, + { name = "pytest-timeout", specifier = ">=2.4.0" }, + { name = "ruff" }, +] + +[[package]] +name = "py-key-value-shared-test" +version = "0.2.0" +source = { editable = "key-value/key-value-shared-test" } + +[package.dev-dependencies] +dev = [ + { name = "basedpyright" }, + { name = "ruff" }, +] + +[package.metadata] + +[package.metadata.requires-dev] +dev = [ + { name = "basedpyright", specifier = ">=1.31.5" }, + { name = "ruff" }, +] + [[package]] name = "py-key-value-sync" version = "0.2.0" source = { editable = "key-value/key-value-sync" } dependencies = [ - { name = "py-key-value-aio" }, + { name = "py-key-value-shared" }, ] [package.optional-dependencies] @@ -1071,6 +1135,7 @@ dev = [ { 
name = "dirty-equals" }, { name = "diskcache-stubs" }, { name = "inline-snapshot" }, + { name = "py-key-value-shared-test" }, { name = "py-key-value-sync", extra = ["disk", "elasticsearch", "memcached", "memory", "mongodb", "pydantic", "redis"] }, { name = "py-key-value-sync", extra = ["valkey"], marker = "sys_platform != 'win32'" }, { name = "pytest" }, @@ -1093,7 +1158,7 @@ requires-dist = [ { name = "diskcache", marker = "extra == 'disk'", specifier = ">=5.6.0" }, { name = "elasticsearch", marker = "extra == 'elasticsearch'", specifier = ">=9.0.0" }, { name = "pathvalidate", marker = "extra == 'disk'", specifier = ">=3.3.1" }, - { name = "py-key-value-aio", editable = "key-value/key-value-aio" }, + { name = "py-key-value-shared", editable = "key-value/key-value-shared" }, { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.11.9" }, { name = "pymongo", marker = "extra == 'mongodb'", specifier = ">=4.15.0" }, { name = "redis", marker = "extra == 'redis'", specifier = ">=6.0.0" }, @@ -1108,6 +1173,7 @@ dev = [ { name = "dirty-equals", specifier = ">=0.10.0" }, { name = "diskcache-stubs", specifier = ">=5.6.3.6.20240818" }, { name = "inline-snapshot", specifier = ">=0.29.0" }, + { name = "py-key-value-shared-test", editable = "key-value/key-value-shared-test" }, { name = "py-key-value-sync", extras = ["memory", "disk", "redis", "elasticsearch", "memcached", "mongodb"] }, { name = "py-key-value-sync", extras = ["pydantic"] }, { name = "py-key-value-sync", extras = ["valkey"], marker = "sys_platform != 'win32'" }, From c36fe22e13b4cea4596807ce09af9cf6ae2f6178 Mon Sep 17 00:00:00 2001 From: William Easton Date: Sun, 28 Sep 2025 17:30:52 -0500 Subject: [PATCH 28/31] Fix redis test --- key-value/key-value-aio/tests/stores/redis/test_redis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/key-value/key-value-aio/tests/stores/redis/test_redis.py b/key-value/key-value-aio/tests/stores/redis/test_redis.py index e879ffc3..938643a3 100644 --- a/key-value/key-value-aio/tests/stores/redis/test_redis.py +++ b/key-value/key-value-aio/tests/stores/redis/test_redis.py @@ -21,7 +21,7 @@ async def ping_redis() -> bool: client: Redis = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True) try: - return client.ping() # pyright: ignore[reportUnknownMemberType, reportAny, reportReturnType] + return await client.ping() # pyright: ignore[reportUnknownMemberType, reportAny, reportReturnType] except Exception: return False From 5a188246463879872de3a05b5afc0d91d95e0aa7 Mon Sep 17 00:00:00 2001 From: William Easton Date: Sun, 28 Sep 2025 17:47:58 -0500 Subject: [PATCH 29/31] Add collection support to elasticsearch --- .../aio/stores/elasticsearch/store.py | 76 +++++++++++-------- .../elasticsearch/test_elasticsearch.py | 4 +- .../code_gen/stores/elasticsearch/store.py | 66 ++++++++++------ .../elasticsearch/test_elasticsearch.py | 4 +- scripts/build_sync_library.py | 36 +++++---- 5 files changed, 113 insertions(+), 73 deletions(-) diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py index 741bf539..9208ffa2 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py @@ -1,8 +1,13 @@ -import hashlib from typing import TYPE_CHECKING, Any, overload from key_value.shared.utils.compound import compound_key from key_value.shared.utils.managed_entry import 
ManagedEntry, load_from_json +from key_value.shared.utils.sanitize import ( + ALPHANUMERIC_CHARACTERS, + LOWERCASE_ALPHABET, + NUMBERS, + sanitize_string, +) from key_value.shared.utils.time_to_live import now_as_epoch, try_parse_datetime_str from typing_extensions import override @@ -33,7 +38,7 @@ from elastic_transport import ObjectApiResponse -DEFAULT_INDEX = "kv-store" +DEFAULT_INDEX_PREFIX = "kv_store" DEFAULT_MAPPING = { "properties": { @@ -62,6 +67,10 @@ PAGE_LIMIT = 10000 MAX_KEY_LENGTH = 256 +ALLOWED_KEY_CHARACTERS: str = ALPHANUMERIC_CHARACTERS + +MAX_INDEX_LENGTH = 240 +ALLOWED_INDEX_CHARACTERS: str = LOWERCASE_ALPHABET + NUMBERS + "_" + "-" + "." class ElasticsearchStore( @@ -76,10 +85,10 @@ class ElasticsearchStore( _index: str @overload - def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index: str, default_collection: str | None = None) -> None: ... + def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index_prefix: str, default_collection: str | None = None) -> None: ... @overload - def __init__(self, *, url: str, api_key: str | None = None, index: str, default_collection: str | None = None) -> None: ... + def __init__(self, *, url: str, api_key: str | None = None, index_prefix: str, default_collection: str | None = None) -> None: ... def __init__( self, @@ -87,7 +96,7 @@ def __init__( elasticsearch_client: AsyncElasticsearch | None = None, url: str | None = None, api_key: str | None = None, - index: str, + index_prefix: str, default_collection: str | None = None, ) -> None: """Initialize the elasticsearch store. @@ -96,7 +105,7 @@ def __init__( elasticsearch_client: The elasticsearch client to use. url: The url of the elasticsearch cluster. api_key: The api key to use. - index: The index to use. + index_prefix: The index prefix to use. Collections will be prefixed with this prefix. default_collection: The default collection to use if no collection is provided. 
""" if elasticsearch_client is None and url is None: @@ -113,42 +122,49 @@ def __init__( msg = "Either elasticsearch_client or url must be provided" raise ValueError(msg) - self._index = index or DEFAULT_INDEX + self._index_prefix = index_prefix self._is_serverless = False super().__init__(default_collection=default_collection) @override async def _setup(self) -> None: - if await self._client.options(ignore_status=404).indices.exists(index=self._index): - return - cluster_info = await self._client.options(ignore_status=404).info() self._is_serverless = cluster_info.get("version", {}).get("build_flavor") == "serverless" - _ = await self._client.options(ignore_status=404).indices.create( - index=self._index, - mappings=DEFAULT_MAPPING, - settings={}, - ) - @override async def _setup_collection(self, *, collection: str) -> None: - pass + index_name = self._sanitize_index_name(collection=collection) - def sanitize_document_id(self, key: str) -> str: - if len(key) > MAX_KEY_LENGTH: - sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() - return sha256_hash[:64] - return key + if await self._client.options(ignore_status=404).indices.exists(index=index_name): + return + + _ = await self._client.options(ignore_status=404).indices.create(index=index_name, mappings=DEFAULT_MAPPING, settings={}) + + def _sanitize_index_name(self, collection: str) -> str: + sanitized_collection = sanitize_string( + value=collection, + replacement_character="_", + max_length=MAX_INDEX_LENGTH, + allowed_characters=ALLOWED_INDEX_CHARACTERS, + ) + return f"{self._index_prefix}-{sanitized_collection}" + + def _sanitize_document_id(self, key: str) -> str: + return sanitize_string( + value=key, + replacement_character="_", + max_length=MAX_KEY_LENGTH, + allowed_characters=ALLOWED_KEY_CHARACTERS, + ) @override async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: combo_key: str = compound_key(collection=collection, key=key) elasticsearch_response = await self._client.options(ignore_status=404).get( - index=self._index, id=self.sanitize_document_id(key=combo_key) + index=self._index, id=self._sanitize_document_id(key=combo_key) ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) @@ -194,8 +210,8 @@ async def _put_managed_entry( document["expires_at"] = managed_entry.expires_at.isoformat() _ = await self._client.index( - index=self._index, - id=self.sanitize_document_id(key=combo_key), + index=self._sanitize_index_name(collection=collection), + id=self._sanitize_document_id(key=combo_key), body=document, refresh=self._should_refresh_on_put, ) @@ -205,7 +221,7 @@ async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) elasticsearch_response: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).delete( - index=self._index, id=self.sanitize_document_id(key=combo_key) + index=self._sanitize_index_name(collection=collection), id=self._sanitize_document_id(key=combo_key) ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) @@ -222,7 +238,7 @@ async def _get_collection_keys(self, *, collection: str, limit: int | None = Non limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( - index=self._index, + index=self._sanitize_index_name(collection=collection), fields=[{"key": None}], body={ "query": { @@ -255,7 +271,7 @@ async def _get_collection_names(self, *, 
limit: int | None = None) -> list[str]: limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) search_response: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( - index=self._index, + index=f"{self._index_prefix}-*", aggregations={ "collections": { "terms": { @@ -276,7 +292,7 @@ async def _get_collection_names(self, *, limit: int | None = None) -> list[str]: @override async def _delete_collection(self, *, collection: str) -> bool: result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).delete_by_query( - index=self._index, + index=self._sanitize_index_name(collection=collection), body={ "query": { "term": { @@ -296,7 +312,7 @@ async def _delete_collection(self, *, collection: str) -> bool: @override async def _cull(self) -> None: _ = await self._client.options(ignore_status=404).delete_by_query( - index=self._index, + index=f"{self._index_prefix}-*", body={ "query": { "range": { diff --git a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py index b8d52363..e4a1be4f 100644 --- a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py @@ -51,8 +51,8 @@ async def setup_elasticsearch(self) -> AsyncGenerator[None, None]: @pytest.fixture async def store(self) -> AsyncGenerator[ElasticsearchStore, None]: es_client = get_elasticsearch_client() - _ = await es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") - async with ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") as store: + _ = await es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test-*") + async with ElasticsearchStore(url=ES_URL, index_prefix="kv-store-e2e-test") as store: yield store @pytest.mark.skip(reason="Distributed Caches are unbounded") diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py index 17e3482d..6f444d65 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py @@ -1,11 +1,11 @@ # WARNING: this file is auto-generated by 'build_sync_library.py' # from the original file 'store.py' # DO NOT CHANGE! Change the original file instead. -import hashlib from typing import TYPE_CHECKING, Any, overload from key_value.shared.utils.compound import compound_key from key_value.shared.utils.managed_entry import ManagedEntry, load_from_json +from key_value.shared.utils.sanitize import ALPHANUMERIC_CHARACTERS, LOWERCASE_ALPHABET, NUMBERS, sanitize_string from key_value.shared.utils.time_to_live import now_as_epoch, try_parse_datetime_str from typing_extensions import override @@ -37,7 +37,7 @@ from elastic_transport import ObjectApiResponse -DEFAULT_INDEX = "kv-store" +DEFAULT_INDEX_PREFIX = "kv_store" DEFAULT_MAPPING = { "properties": { @@ -53,6 +53,10 @@ PAGE_LIMIT = 10000 MAX_KEY_LENGTH = 256 +ALLOWED_KEY_CHARACTERS: str = ALPHANUMERIC_CHARACTERS + +MAX_INDEX_LENGTH = 240 +ALLOWED_INDEX_CHARACTERS: str = LOWERCASE_ALPHABET + NUMBERS + "_" + "-" + "." class ElasticsearchStore( @@ -67,10 +71,10 @@ class ElasticsearchStore( _index: str @overload - def __init__(self, *, elasticsearch_client: Elasticsearch, index: str, default_collection: str | None = None) -> None: ... 
+ def __init__(self, *, elasticsearch_client: Elasticsearch, index_prefix: str, default_collection: str | None = None) -> None: ... @overload - def __init__(self, *, url: str, api_key: str | None = None, index: str, default_collection: str | None = None) -> None: ... + def __init__(self, *, url: str, api_key: str | None = None, index_prefix: str, default_collection: str | None = None) -> None: ... def __init__( self, @@ -78,7 +82,7 @@ def __init__( elasticsearch_client: Elasticsearch | None = None, url: str | None = None, api_key: str | None = None, - index: str, + index_prefix: str, default_collection: str | None = None, ) -> None: """Initialize the elasticsearch store. @@ -87,7 +91,7 @@ def __init__( elasticsearch_client: The elasticsearch client to use. url: The url of the elasticsearch cluster. api_key: The api key to use. - index: The index to use. + index_prefix: The index prefix to use. Collections will be prefixed with this prefix. default_collection: The default collection to use if no collection is provided. """ if elasticsearch_client is None and url is None: @@ -104,37 +108,42 @@ def __init__( msg = "Either elasticsearch_client or url must be provided" raise ValueError(msg) - self._index = index or DEFAULT_INDEX + self._index_prefix = index_prefix self._is_serverless = False super().__init__(default_collection=default_collection) @override def _setup(self) -> None: - if self._client.options(ignore_status=404).indices.exists(index=self._index): - return - cluster_info = self._client.options(ignore_status=404).info() self._is_serverless = cluster_info.get("version", {}).get("build_flavor") == "serverless" - _ = self._client.options(ignore_status=404).indices.create(index=self._index, mappings=DEFAULT_MAPPING, settings={}) - @override def _setup_collection(self, *, collection: str) -> None: - pass + index_name = self._sanitize_index_name(collection=collection) + + if self._client.options(ignore_status=404).indices.exists(index=index_name): + return + + _ = self._client.options(ignore_status=404).indices.create(index=index_name, mappings=DEFAULT_MAPPING, settings={}) - def sanitize_document_id(self, key: str) -> str: - if len(key) > MAX_KEY_LENGTH: - sha256_hash: str = hashlib.sha256(key.encode()).hexdigest() - return sha256_hash[:64] - return key + def _sanitize_index_name(self, collection: str) -> str: + sanitized_collection = sanitize_string( + value=collection, replacement_character="_", max_length=MAX_INDEX_LENGTH, allowed_characters=ALLOWED_INDEX_CHARACTERS + ) + return f"{self._index_prefix}-{sanitized_collection}" + + def _sanitize_document_id(self, key: str) -> str: + return sanitize_string(value=key, replacement_character="_", max_length=MAX_KEY_LENGTH, allowed_characters=ALLOWED_KEY_CHARACTERS) @override def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | None: combo_key: str = compound_key(collection=collection, key=key) - elasticsearch_response = self._client.options(ignore_status=404).get(index=self._index, id=self.sanitize_document_id(key=combo_key)) + elasticsearch_response = self._client.options(ignore_status=404).get( + index=self._index, id=self._sanitize_document_id(key=combo_key) + ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) @@ -165,7 +174,10 @@ def _put_managed_entry(self, *, key: str, collection: str, managed_entry: Manage document["expires_at"] = managed_entry.expires_at.isoformat() _ = self._client.index( - index=self._index, id=self.sanitize_document_id(key=combo_key), body=document, 
refresh=self._should_refresh_on_put + index=self._sanitize_index_name(collection=collection), + id=self._sanitize_document_id(key=combo_key), + body=document, + refresh=self._should_refresh_on_put, ) @override @@ -173,7 +185,7 @@ def _delete_managed_entry(self, *, key: str, collection: str) -> bool: combo_key: str = compound_key(collection=collection, key=key) elasticsearch_response: ObjectApiResponse[Any] = self._client.options(ignore_status=404).delete( - index=self._index, id=self.sanitize_document_id(key=combo_key) + index=self._sanitize_index_name(collection=collection), id=self._sanitize_document_id(key=combo_key) ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) @@ -190,7 +202,11 @@ def _get_collection_keys(self, *, collection: str, limit: int | None = None) -> limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) result: ObjectApiResponse[Any] = self._client.options(ignore_status=404).search( - index=self._index, fields=[{"key": None}], body={"query": {"term": {"collection": collection}}}, source_includes=[], size=limit + index=self._sanitize_index_name(collection=collection), + fields=[{"key": None}], + body={"query": {"term": {"collection": collection}}}, + source_includes=[], + size=limit, ) if not (hits := get_hits_from_response(response=result)): @@ -213,7 +229,7 @@ def _get_collection_names(self, *, limit: int | None = None) -> list[str]: limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) search_response: ObjectApiResponse[Any] = self._client.options(ignore_status=404).search( - index=self._index, aggregations={"collections": {"terms": {"field": "collection"}}}, size=limit + index=f"{self._index_prefix}-*", aggregations={"collections": {"terms": {"field": "collection"}}}, size=limit ) body: dict[str, Any] = get_body_from_response(response=search_response) @@ -226,7 +242,7 @@ def _get_collection_names(self, *, limit: int | None = None) -> list[str]: @override def _delete_collection(self, *, collection: str) -> bool: result: ObjectApiResponse[Any] = self._client.options(ignore_status=404).delete_by_query( - index=self._index, body={"query": {"term": {"collection": collection}}} + index=self._sanitize_index_name(collection=collection), body={"query": {"term": {"collection": collection}}} ) body: dict[str, Any] = get_body_from_response(response=result) @@ -239,7 +255,7 @@ def _delete_collection(self, *, collection: str) -> bool: @override def _cull(self) -> None: _ = self._client.options(ignore_status=404).delete_by_query( - index=self._index, body={"query": {"range": {"expires_at": {"lt": now_as_epoch()}}}} + index=f"{self._index_prefix}-*", body={"query": {"range": {"expires_at": {"lt": now_as_epoch()}}}} ) @override diff --git a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py index 6383157f..088b90d5 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py @@ -54,8 +54,8 @@ def setup_elasticsearch(self) -> Generator[None, None, None]: @pytest.fixture def store(self) -> Generator[ElasticsearchStore, None, None]: es_client = get_elasticsearch_client() - _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test") - with ElasticsearchStore(url=ES_URL, index="kv-store-e2e-test") as store: + _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test-*") + with 
ElasticsearchStore(url=ES_URL, index_prefix="kv-store-e2e-test") as store: yield store @pytest.mark.skip(reason="Distributed Caches are unbounded") diff --git a/scripts/build_sync_library.py b/scripts/build_sync_library.py index 64e0ff13..f1a00064 100644 --- a/scripts/build_sync_library.py +++ b/scripts/build_sync_library.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# ruff: noqa: N802 """Convert async code in the project to sync code. Note: the version of Python used to run this script affects the output. @@ -133,30 +134,30 @@ def tree_to_str(tree: ast.AST, filepath: Path) -> str: class AsyncToSync(ast.NodeTransformer): # type: ignore - def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: # noqa: N802 + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: new_node: ast.FunctionDef = ast.FunctionDef(**node.__dict__) ast.copy_location(new_node, old_node=node) self.visit(node=new_node) return new_node - def visit_AsyncFor(self, node: ast.AsyncFor) -> ast.AST: # noqa: N802 + def visit_AsyncFor(self, node: ast.AsyncFor) -> ast.AST: new_node: ast.For = ast.For(**node.__dict__) ast.copy_location(new_node, old_node=node) self.visit(node=new_node) return new_node - def visit_AsyncWith(self, node: ast.AsyncWith) -> ast.AST: # noqa: N802 + def visit_AsyncWith(self, node: ast.AsyncWith) -> ast.AST: new_node: ast.With = ast.With(**node.__dict__) ast.copy_location(new_node, old_node=node) self.visit(node=new_node) return new_node - def visit_Await(self, node: ast.Await) -> ast.AST: # noqa: N802 + def visit_Await(self, node: ast.Await) -> ast.AST: new_node: ast.expr = node.value self.visit(new_node) return new_node - def visit_If(self, node: ast.If) -> ast.AST: # noqa: N802 + def visit_If(self, node: ast.If) -> ast.AST: # Drop `if is_async()` branch. 
# # Assume that the test guards an async object becoming sync and remove @@ -229,12 +230,12 @@ class RenameAsyncToSync(ast.NodeTransformer): # type: ignore "acompat": {"alist", "anext"}, } - def visit_Module(self, node: ast.Module) -> ast.AST: # noqa: N802 + def visit_Module(self, node: ast.Module) -> ast.AST: self._fix_docstring(node.body) self.generic_visit(node) return node - def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: # noqa: N802 + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: self._fix_docstring(node.body) node.name = self.names_map.get(node.name, node.name) for arg in node.args.args: @@ -274,6 +275,8 @@ def _fix_docstring(self, body: Sequence[ast.AST]) -> None: doc = doc.replace("Async", "") doc = doc.replace("(async", "(sync") body[0].value.value = doc + case _: + pass def _fix_decorator(self, decorator_list: Sequence[ast.AST]) -> None: for dec in decorator_list: @@ -285,21 +288,24 @@ def _fix_decorator(self, decorator_list: Sequence[ast.AST]) -> None: elts = dec.keywords[0].value.elts for i, elt in enumerate(elts): elts[i] = self._convert_if_literal_string(elt) + case _: + pass def _convert_if_literal_string(self, node: ast.AST) -> ast.AST: value: str match node: case ast.Constant(value=str(value)): node.value = self._visit_type_string(value) + case _: + pass return node def _visit_type_string(self, source: str) -> str: # Convert the string to tree, visit, and convert it back to string - tree = ast.parse(source, type_comments=False) + tree = ast.parse(source, type_comments=False) # pyright: ignore[reportUnknownMemberType] tree = async_to_sync(tree) - rv = unparse(tree) - return rv + return unparse(tree) def visit_ClassDef(self, node: ast.ClassDef) -> ast.AST: self._fix_docstring(node.body) @@ -325,7 +331,7 @@ def _fix_base_params(self, node: ast.ClassDef) -> ast.AST: return node - def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.AST | None: # noqa: N802 + def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.AST | None: if node.module: # Remove import of async utils eclypsing builtins if skips := self._skip_imports.get(node.module): @@ -352,18 +358,18 @@ def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.AST | None: # noqa: N80 return node - def visit_Name(self, node: ast.Name) -> ast.AST: # noqa: N802 + def visit_Name(self, node: ast.Name) -> ast.AST: if node.id in self.names_map: node.id = self.names_map[node.id] return node - def visit_Attribute(self, node: ast.Attribute) -> ast.AST: # noqa: N802 + def visit_Attribute(self, node: ast.Attribute) -> ast.AST: if node.attr in self.names_map: node.attr = self.names_map[node.attr] self.generic_visit(node) return node - def visit_Subscript(self, node: ast.Subscript) -> ast.AST: # noqa: N802 + def visit_Subscript(self, node: ast.Subscript) -> ast.AST: # Manage AsyncGenerator[X, Y] -> Generator[X, None, Y] self._manage_async_generator(node) # # Won't result in a recursion because we change the args number @@ -379,6 +385,8 @@ def _manage_async_generator(self, node: ast.Subscript) -> ast.AST | None: node.slice.elts.insert(1, deepcopy(node.slice.elts[1])) self.generic_visit(node) return node + case _: + pass return None From 87c67d90f8a350e2a0145f8040c3271ff553214c Mon Sep 17 00:00:00 2001 From: William Easton Date: Sun, 28 Sep 2025 18:05:12 -0500 Subject: [PATCH 30/31] Fix Elasticsearch tests --- Makefile | 31 ++++++ .../aio/stores/elasticsearch/store.py | 9 +- .../elasticsearch/test_elasticsearch.py | 20 +++- .../code_gen/stores/elasticsearch/store.py | 12 ++- 
.../elasticsearch/test_elasticsearch.py | 20 +++- py-key-value.code-workspace | 17 ---- scripts/build_sync_library.py | 5 +- scripts/bump_versions.py | 99 +++++++++++++++++++ 8 files changed, 181 insertions(+), 32 deletions(-) create mode 100644 Makefile delete mode 100644 py-key-value.code-workspace create mode 100644 scripts/bump_versions.py diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..2afa0712 --- /dev/null +++ b/Makefile @@ -0,0 +1,31 @@ +.PHONY: bump-version bump-version-dry codegen lint typecheck sync precommit + + +bump-version: + @if [ -z "$(VERSION)" ]; then echo "VERSION is required, usage: make bump-version VERSION=1.2.3"; exit 1; fi + @echo "Bumping version..." + @uv run python scripts/bump_versions.py $(VERSION) + +bump-version-dry: + @if [ -z "$(VERSION)" ]; then echo "VERSION is required, usage: make bump-version-dry VERSION=1.2.3"; exit 1; fi + @echo "Bumping version (dry run)..." + @uv run python scripts/bump_versions.py $(VERSION) --dry-run + +codegen: + @echo "Codegen..." + @uv run python scripts/build_sync_library.py + +lint: + @echo "Linting..." + @uv run ruff format + @uv run ruff check --fix + +typecheck: + @echo "Type checking..." + @uv run basedpyright + +sync: + @echo "Syncing..." + @uv sync --all-packages + +precommit: lint typecheck codegen \ No newline at end of file diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py index 9208ffa2..026c2e3b 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py @@ -82,7 +82,7 @@ class ElasticsearchStore( _is_serverless: bool - _index: str + _index_prefix: str @overload def __init__(self, *, elasticsearch_client: AsyncElasticsearch, index_prefix: str, default_collection: str | None = None) -> None: ... 
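The effect of sanitizing the full `prefix-collection` string (next hunk) is easiest to see in isolation. A self-contained sketch, with constants mirroring those introduced in the previous patch; the helper itself is hypothetical and ignores the sanitizer's hash-fragment handling, which the real `key_value.shared.utils.sanitize.sanitize_string` may apply when a value is changed or truncated:

```python
# Hypothetical stand-in for _sanitize_index_name: build "{prefix}-{collection}",
# replace disallowed characters, and clamp to the index-name length limit.
MAX_INDEX_LENGTH = 240
ALLOWED_INDEX_CHARACTERS = set("abcdefghijklmnopqrstuvwxyz0123456789_-.")

def sketch_index_name(index_prefix: str, collection: str) -> str:
    raw = f"{index_prefix}-{collection}"
    cleaned = "".join(c if c in ALLOWED_INDEX_CHARACTERS else "_" for c in raw)
    return cleaned[:MAX_INDEX_LENGTH]

# Matches the index names the updated tests assert further below.
assert sketch_index_name("kv-store-e2e-test", "test_collection") == "kv-store-e2e-test-test_collection"
```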
@@ -143,13 +143,12 @@ async def _setup_collection(self, *, collection: str) -> None: _ = await self._client.options(ignore_status=404).indices.create(index=index_name, mappings=DEFAULT_MAPPING, settings={}) def _sanitize_index_name(self, collection: str) -> str: - sanitized_collection = sanitize_string( - value=collection, + return sanitize_string( + value=self._index_prefix + "-" + collection, replacement_character="_", max_length=MAX_INDEX_LENGTH, allowed_characters=ALLOWED_INDEX_CHARACTERS, ) - return f"{self._index_prefix}-{sanitized_collection}" def _sanitize_document_id(self, key: str) -> str: return sanitize_string( @@ -164,7 +163,7 @@ async def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry combo_key: str = compound_key(collection=collection, key=key) elasticsearch_response = await self._client.options(ignore_status=404).get( - index=self._index, id=self._sanitize_document_id(key=combo_key) + index=self._sanitize_index_name(collection=collection), id=self._sanitize_document_id(key=combo_key) ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) diff --git a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py index e4a1be4f..00cca366 100644 --- a/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-aio/tests/stores/elasticsearch/test_elasticsearch.py @@ -47,11 +47,18 @@ async def setup_elasticsearch(self) -> AsyncGenerator[None, None]: yield + @pytest.fixture + async def es_client(self) -> AsyncGenerator[AsyncElasticsearch, None]: + async with AsyncElasticsearch(hosts=[ES_URL]) as es_client: + yield es_client + @override @pytest.fixture async def store(self) -> AsyncGenerator[ElasticsearchStore, None]: es_client = get_elasticsearch_client() - _ = await es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test-*") + indices = await es_client.options(ignore_status=404).indices.get(index="kv-store-e2e-test-*") + for index in indices: + _ = await es_client.options(ignore_status=404).indices.delete(index=index) async with ElasticsearchStore(url=ES_URL, index_prefix="kv-store-e2e-test") as store: yield store @@ -62,3 +69,14 @@ async def test_not_unbounded(self, store: BaseStore): ... @pytest.mark.skip(reason="Skip concurrent tests on distributed caches") @override async def test_concurrent_operations(self, store: BaseStore): ... 
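A note on the fixture change above: wildcard index deletion is rejected when `action.destructive_requires_name` is enabled, the default since Elasticsearch 8, which is one plausible reason the fixture now resolves concrete names before deleting. A minimal sketch of the pattern (the helper name is illustrative):

```python
# Resolve concrete index names first, then delete each one; wildcard deletes
# can fail when the cluster requires explicit names for destructive actions.
from elasticsearch import AsyncElasticsearch

async def delete_matching_indices(client: AsyncElasticsearch, pattern: str) -> None:
    indices = await client.options(ignore_status=404).indices.get(index=pattern)
    for name in indices:
        _ = await client.options(ignore_status=404).indices.delete(index=name)
```

The test added just below then asserts that each collection gets its own index.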
+ + async def test_put_put_two_indices(self, store: ElasticsearchStore, es_client: AsyncElasticsearch): + await store.put(collection="test_collection", key="test_key", value={"test": "test"}) + await store.put(collection="test_collection_2", key="test_key", value={"test": "test"}) + assert await store.get(collection="test_collection", key="test_key") == {"test": "test"} + assert await store.get(collection="test_collection_2", key="test_key") == {"test": "test"} + + indices = await es_client.options(ignore_status=404).indices.get(index="kv-store-e2e-test-*") + assert len(indices.body) == 2 + assert "kv-store-e2e-test-test_collection" in indices + assert "kv-store-e2e-test-test_collection_2" in indices diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py index 6f444d65..1d5f71b4 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py @@ -68,7 +68,7 @@ class ElasticsearchStore( _is_serverless: bool - _index: str + _index_prefix: str @overload def __init__(self, *, elasticsearch_client: Elasticsearch, index_prefix: str, default_collection: str | None = None) -> None: ... @@ -129,10 +129,12 @@ def _setup_collection(self, *, collection: str) -> None: _ = self._client.options(ignore_status=404).indices.create(index=index_name, mappings=DEFAULT_MAPPING, settings={}) def _sanitize_index_name(self, collection: str) -> str: - sanitized_collection = sanitize_string( - value=collection, replacement_character="_", max_length=MAX_INDEX_LENGTH, allowed_characters=ALLOWED_INDEX_CHARACTERS + return sanitize_string( + value=self._index_prefix + "-" + collection, + replacement_character="_", + max_length=MAX_INDEX_LENGTH, + allowed_characters=ALLOWED_INDEX_CHARACTERS, ) - return f"{self._index_prefix}-{sanitized_collection}" def _sanitize_document_id(self, key: str) -> str: return sanitize_string(value=key, replacement_character="_", max_length=MAX_KEY_LENGTH, allowed_characters=ALLOWED_KEY_CHARACTERS) @@ -142,7 +144,7 @@ def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | Non combo_key: str = compound_key(collection=collection, key=key) elasticsearch_response = self._client.options(ignore_status=404).get( - index=self._index, id=self._sanitize_document_id(key=combo_key) + index=self._sanitize_index_name(collection=collection), id=self._sanitize_document_id(key=combo_key) ) body: dict[str, Any] = get_body_from_response(response=elasticsearch_response) diff --git a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py index 088b90d5..1dea0584 100644 --- a/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py +++ b/key-value/key-value-sync/tests/code_gen/stores/elasticsearch/test_elasticsearch.py @@ -50,11 +50,18 @@ def setup_elasticsearch(self) -> Generator[None, None, None]: yield + @pytest.fixture + def es_client(self) -> Generator[Elasticsearch, None, None]: + with Elasticsearch(hosts=[ES_URL]) as es_client: + yield es_client + @override @pytest.fixture def store(self) -> Generator[ElasticsearchStore, None, None]: es_client = get_elasticsearch_client() - _ = es_client.options(ignore_status=404).indices.delete(index="kv-store-e2e-test-*") + indices = 
es_client.options(ignore_status=404).indices.get(index="kv-store-e2e-test-*") + for index in indices: + _ = es_client.options(ignore_status=404).indices.delete(index=index) with ElasticsearchStore(url=ES_URL, index_prefix="kv-store-e2e-test") as store: yield store @@ -65,3 +72,14 @@ def test_not_unbounded(self, store: BaseStore): ... @pytest.mark.skip(reason="Skip concurrent tests on distributed caches") @override def test_concurrent_operations(self, store: BaseStore): ... + + def test_put_put_two_indices(self, store: ElasticsearchStore, es_client: Elasticsearch): + store.put(collection="test_collection", key="test_key", value={"test": "test"}) + store.put(collection="test_collection_2", key="test_key", value={"test": "test"}) + assert store.get(collection="test_collection", key="test_key") == {"test": "test"} + assert store.get(collection="test_collection_2", key="test_key") == {"test": "test"} + + indices = es_client.options(ignore_status=404).indices.get(index="kv-store-e2e-test-*") + assert len(indices.body) == 2 + assert "kv-store-e2e-test-test_collection" in indices + assert "kv-store-e2e-test-test_collection_2" in indices diff --git a/py-key-value.code-workspace b/py-key-value.code-workspace deleted file mode 100644 index 8281d9c4..00000000 --- a/py-key-value.code-workspace +++ /dev/null @@ -1,17 +0,0 @@ -{ - "folders": [ - { - "name": "key-value-aio", - "path": "key-value/key-value-aio", - }, - { - "name": "key-value-sync", - "path": "key-value/key-value-sync" - }, - { - "name": "root", - "path": "." - } - ], - "settings": {} -} \ No newline at end of file diff --git a/scripts/build_sync_library.py b/scripts/build_sync_library.py index f1a00064..a3ce1055 100644 --- a/scripts/build_sync_library.py +++ b/scripts/build_sync_library.py @@ -430,8 +430,7 @@ def _inject_blanks(self, body: list[ast.Node]) -> list[ast.AST]: def unparse(tree: ast.AST) -> str: rv: str = Unparser().visit(tree) - rv = _fix_comment_on_decorators(rv) - return rv + return _fix_comment_on_decorators(source=rv) def _fix_comment_on_decorators(source: str) -> str: @@ -479,7 +478,7 @@ class Unparser(ast._Unparser): # type: ignore # Beware: private method. Tested with in Python 3.10, 3.11. def _write_constant(self, value: Any) -> None: - if isinstance(value, str) and len(value) > 50: + if isinstance(value, str) and len(value) > 50: # noqa: PLR2004 self._write_str_avoiding_backslashes(value) else: super()._write_constant(value) diff --git a/scripts/bump_versions.py b/scripts/bump_versions.py new file mode 100644 index 00000000..f44fdd79 --- /dev/null +++ b/scripts/bump_versions.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 +""" +Simple version bump script. + +Assumptions (per project layout): + - One `pyproject.toml` at repo root + - One `pyproject.toml` inside each direct subdirectory of `key-value` + - The version lives under the `[project]` section as `version = "..."` + +Usage: + python scripts/bump_versions.py 1.2.3 [--dry-run] +""" + +from __future__ import annotations + +import argparse +import re +import sys +from pathlib import Path + + +def get_pyproject_paths(root: Path) -> list[Path]: + paths: list[Path] = [] + root_py = root / "pyproject.toml" + if root_py.exists(): + paths.append(root_py) + + kv_dir = root / "key-value" + if kv_dir.is_dir(): + for entry in sorted(kv_dir.iterdir()): + sub_py = entry / "pyproject.toml" + if sub_py.exists(): + paths.append(sub_py) + return paths + + +def bump_in_text(text: str, new_version: str) -> tuple[str, bool]: + """Update `version` inside the `[project]` section. 
Returns (new_text, changed).""" + # Locate the [project] section block + section_re = re.compile(r"^\[project\]\s*$", re.MULTILINE) + match = section_re.search(text) + if not match: + return text, False + + start = match.end() + next_section = re.search(r"^\[.*?\]\s*$", text[start:], flags=re.MULTILINE) + end = start + next_section.start() if next_section else len(text) + block = text[start:end] + + # Replace version line within the block, preserving quote style + version_line_re = re.compile(r"^(\s*version\s*=\s*)([\"\'])(.+?)(\2)\s*$", re.MULTILINE) + if not version_line_re.search(block): + return text, False + + new_block = version_line_re.sub(lambda m: f"{m.group(1)}{m.group(2)}{new_version}{m.group(2)}", block) + if new_block == block: + return text, False + + new_text = text[:start] + new_block + text[end:] + return new_text, True + + +def main(argv: list[str]) -> int: + parser = argparse.ArgumentParser(description="Bump version in all pyproject.toml files") + parser.add_argument("version", help="New version string, e.g. 1.2.3") + parser.add_argument("--dry-run", action="store_true", help="Only print changes, do not write") + args = parser.parse_args(argv) + + repo_root = Path(__file__).resolve().parent.parent + targets = get_pyproject_paths(repo_root) + if not targets: + print("No pyproject.toml files found.") + return 1 + + exit_code = 0 + for path in targets: + try: + original = path.read_text(encoding="utf-8") + except Exception as e: + print(f"Skipping {path}: {e}") + exit_code = 1 + continue + + updated, changed = bump_in_text(original, args.version) + if not changed: + continue + + rel = path.relative_to(repo_root) + if args.dry_run: + print(f"Would update {rel} -> {args.version}") + else: + path.write_text(updated, encoding="utf-8") + print(f"Updated {rel} -> {args.version}") + + return exit_code + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) From 086898eca239f35f4878d340a6f169d60823c730 Mon Sep 17 00:00:00 2001 From: William Easton Date: Sun, 28 Sep 2025 20:39:30 -0500 Subject: [PATCH 31/31] Cleanup from code review --- .vscode/tasks.json | 45 ++++ README.md | 8 +- key-value/key-value-aio/AIO_CODE_REVIEW.md | 204 ++++++++++++++++++ .../aio/adapters/pydantic/adapter.py | 8 +- .../aio/adapters/raise_on_missing/adapter.py | 16 +- .../src/key_value/aio/protocols/key_value.py | 8 +- .../src/key_value/aio/stores/base.py | 52 ++--- .../src/key_value/aio/stores/disk/store.py | 4 +- .../aio/stores/elasticsearch/store.py | 4 +- .../aio/stores/elasticsearch/utils.py | 16 +- .../src/key_value/aio/stores/mongodb/store.py | 32 ++- .../src/key_value/aio/wrappers/base.py | 8 +- .../aio/wrappers/passthrough_cache/wrapper.py | 16 +- .../wrappers/prefix_collections/wrapper.py | 8 +- .../aio/wrappers/prefix_keys/wrapper.py | 8 +- .../aio/wrappers/single_collection/wrapper.py | 8 +- .../aio/wrappers/statistics/wrapper.py | 12 +- .../aio/wrappers/ttl_clamp/wrapper.py | 14 +- .../src/key_value/shared/constants.py | 1 + .../src/key_value/shared/errors/key_value.py | 14 +- .../key_value/shared/utils/time_to_live.py | 55 ++++- .../code_gen/adapters/pydantic/adapter.py | 8 +- .../adapters/raise_on_missing/adapter.py | 16 +- .../sync/code_gen/protocols/key_value.py | 8 +- .../key_value/sync/code_gen/stores/base.py | 52 ++--- .../sync/code_gen/stores/disk/store.py | 4 +- .../code_gen/stores/elasticsearch/store.py | 5 +- .../code_gen/stores/elasticsearch/utils.py | 16 +- .../sync/code_gen/stores/mongodb/store.py | 32 ++- .../key_value/sync/code_gen/wrappers/base.py | 8 +- 
.../wrappers/passthrough_cache/wrapper.py | 16 +- .../wrappers/prefix_collections/wrapper.py | 8 +- .../code_gen/wrappers/prefix_keys/wrapper.py | 8 +- .../wrappers/single_collection/wrapper.py | 8 +- .../code_gen/wrappers/statistics/wrapper.py | 12 +- .../code_gen/wrappers/ttl_clamp/wrapper.py | 14 +- 36 files changed, 523 insertions(+), 233 deletions(-) create mode 100644 .vscode/tasks.json create mode 100644 key-value/key-value-aio/AIO_CODE_REVIEW.md create mode 100644 key-value/key-value-shared/src/key_value/shared/constants.py diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..f3fe3c77 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,45 @@ +{ + // Simple VS Code tasks that call Makefile targets + "version": "2.0.0", + "tasks": [ + { + "label": "Bump version (make)", + "type": "shell", + "command": "make", + "args": [ + "bump-version", + "VERSION=${input:newVersion}" + ], + "problemMatcher": [] + }, + { + "label": "Bump version (dry-run) (make)", + "type": "shell", + "command": "make", + "args": [ + "bump-version-dry", + "VERSION=${input:newVersion}" + ], + "problemMatcher": [] + }, + { + "label": "Build sync library (make)", + "type": "shell", + "command": "make", + "args": [ + "build-sync" + ], + "problemMatcher": [] + } + ], + "inputs": [ + { + "id": "newVersion", + "type": "promptString", + "description": "Enter new version (e.g. 1.2.3)", + "default": "0.0.0" + } + ] +} + + diff --git a/README.md b/README.md index b343b52a..f6cb3eb7 100644 --- a/README.md +++ b/README.md @@ -69,16 +69,16 @@ The protocols offer a simple interface for your application to interact with the ```python get(key: str, collection: str | None = None) -> dict[str, Any] | None: -get_many(keys: Sequence[str], collection: str | None = None) -> list[dict[str, Any] | None]: +get_many(keys: list[str], collection: str | None = None) -> list[dict[str, Any] | None]: put(key: str, value: dict[str, Any], collection: str | None = None, ttl: float | None = None) -> None: -put_many(keys: Sequence[str], values: Sequence[dict[str, Any]], collection: str | None = None, ttl: Sequence[float | None] | float | None = None) -> None: +put_many(keys: list[str], values: Sequence[dict[str, Any]], collection: str | None = None, ttl: Sequence[float | None] | float | None = None) -> None: delete(key: str, collection: str | None = None) -> bool: -delete_many(keys: Sequence[str], collection: str | None = None) -> int: +delete_many(keys: list[str], collection: str | None = None) -> int: ttl(key: str, collection: str | None = None) -> tuple[dict[str, Any] | None, float | None]: -ttl_many(keys: Sequence[str], collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: +ttl_many(keys: list[str], collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: ``` ### Stores diff --git a/key-value/key-value-aio/AIO_CODE_REVIEW.md b/key-value/key-value-aio/AIO_CODE_REVIEW.md new file mode 100644 index 00000000..ff2d8354 --- /dev/null +++ b/key-value/key-value-aio/AIO_CODE_REVIEW.md @@ -0,0 +1,204 @@ +## AIO Code Review – Larger Findings and Recommendations + +### Executive summary +- Elasticsearch: date handling and aggregation usage issues; potential under-counting and stale culling. +- MongoDB: implementation does not match documented design; async import path risk. +- Disk stores: inconsistent persistence of expiration metadata vs backend TTL. +- API consistency: `list[str]` vs `Sequence[str]` divergence across wrappers/stores. 
+- Performance: default bulk ops are sequential; consider native batch ops and concurrency. +- Constants duplication: `DEFAULT_COLLECTION_NAME` duplicated in wrappers. + +--- + +### 1) Elasticsearch date handling (culling) uses seconds where ES expects ISO or epoch_millis +Problem: The cull query compares a `date` field against `now_as_epoch()` (seconds). ES date range comparisons should use an ISO string or epoch milliseconds. + +Snippet (reference): + +```23:27:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py + _ = await self._client.options(ignore_status=404).delete_by_query( + index=f"{self._index_prefix}-*", + body={ + "query": { + "range": { + "expires_at": {"lt": now_as_epoch()}, + }, + }, + }, + ) +``` + +Impact: Expired documents may not be culled reliably in clusters expecting epoch_millis. + +Recommendation: Compare against ISO-8601 or epoch_millis, e.g. `now().isoformat()` or `int(now_as_epoch() * 1000)` with a matching mapping/format hint. + +--- + +### 2) Elasticsearch aggregations/fields usage likely incorrect or incomplete +- Collection listing uses a `terms` aggregation without setting `size`, which defaults to 10 buckets. + +Snippet (reference): + +```72:89:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py + search_response: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( + index=f"{self._index_prefix}-*", + aggregations={ + "collections": { + "terms": { + "field": "collection", + }, + }, + }, + size=limit, + ) +``` + +Impact: Only 10 unique collections will be returned regardless of `limit`. + +Recommendation: Set `aggregations.terms.size = min(limit, PAGE_LIMIT)`. + +- Key enumeration passes `fields=[{"key": None}]` and `source_includes=[]`. ES 8 expects `fields=["key"]` and either `"_source": false` or `"_source": {"includes": [...]}`. + +Snippet (reference): + +```39:51:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py + result: ObjectApiResponse[Any] = await self._client.options(ignore_status=404).search( + index=self._sanitize_index_name(collection=collection), + fields=[{"key": None}], + body={ + "query": { + "term": { + "collection": collection, + }, + }, + }, + source_includes=[], + size=limit, + ) +``` + +Impact: May return no `fields` and/or trigger parameter validation issues depending on client/server versions. + +Recommendation: Use `fields=["key"]` and `"_source": false` (or the modern `_source` structure) to reduce payload. + +--- + +### 3) MongoDB store design mismatch vs documentation; async import stability +- The docstring states a single backing collection using compound keys, but the code provisions per-collection collections and stores raw `key` (not compound). + +Snippet (reference – documentation vs code paths): + +```76:82:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py + The store uses a single MongoDB collection to persist entries for all adapter collections. + We store compound keys "{collection}::{key}" and a JSON string payload. Optional TTL is persisted + as ISO timestamps in the JSON payload itself to maintain consistent semantics across backends. 
+``` + +```112:127:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py + async def _setup_collection(self, *, collection: str) -> None: + collection = self._sanitize_collection_name(collection=collection) + matching_collections: list[str] = await self._db.list_collection_names(filter={"name": collection}) + if matching_collections: + self._collections_by_name[collection] = self._db[collection] + return + new_collection: AsyncCollection[dict[str, Any]] = await self._db.create_collection(name=collection) + _ = await new_collection.create_index(keys="key") + self._collections_by_name[collection] = new_collection +``` + +Impact: Behavior diverges from stated contract and from backends that rely on compound keys in a single collection. + +Recommendation: Decide on one of: +- Align implementation to the docstring: use a single physical collection, key as `"{collection}::{key}"`, and index `key`. +- Or update the documentation to specify per-collection collections and ensure key naming, indexing, and cleanup semantics are consistent. + +- Async imports: the package path `pymongo.asynchronous` may vary across PyMongo versions; ensure compatibility with the installed major version (PyMongo 5 uses the asyncio client under `pymongo` with different import paths). Consider isolating imports behind a small compatibility shim. + +--- + +### 4) Disk-backed stores: inconsistent use of expiration metadata +- `DiskStore` writes with metadata (`to_json()`), while `MultiDiskStore` writes without expiration metadata (`include_expiration=False`). Both rely on the backend’s TTL for actual expiry. + +Snippets (reference): + +```100:108:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py + _ = self._cache.set(key=combo_key, value=managed_entry.to_json(), expire=managed_entry.ttl) +``` + +```132:135:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/disk/multi_store.py + _ = self._cache[collection].set(key=combo_key, value=managed_entry.to_json(include_expiration=False), expire=managed_entry.ttl) +``` + +Impact: Mixed on-disk payload formats; may confuse downstream tools or future migrations. `DiskStore` also stores an `expires_at` alongside relying on the cache’s own TTL, which can drift over time. + +Recommendation: Standardize on payload format (with or without expiration metadata). If the backend TTL is authoritative, prefer omitting `expires_at` in stored JSON for consistency. + +--- + +### 5) API consistency: `keys` parameters as `list[str]` vs `Sequence[str]` +- The protocol now specifies `list[str]` for bulk `keys`. Several wrappers still type them as `Sequence[str]`. + +Snippets (reference – protocol): + +```60:73:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/protocols/key_value.py + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + ... + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + ... + async def put_many(self, keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, ttl: Sequence[float | None] | float | None = None) -> None: + ... + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: +``` + +Impact: Mixed method signatures can cause type-checking friction and confusion. 
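+
+For reference, the protocol's target shape that wrappers should mirror (a sketch; body elided):
+
+```python
+async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: ...
+```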
+ +Recommendation: Normalize wrapper/store method signatures to accept `list[str]` for `keys` to match the protocol. + +--- + +### 6) Performance: default bulk operations are sequential +- Base fallbacks fetch and delete entries one-by-one with awaited calls in a loop. + +Snippets (reference): + +```98:101:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/base.py + async def _get_managed_entries(...): + return [await self._get_managed_entry(..., key=key) for key in keys] +``` + +```234:243:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/stores/base.py + async def _delete_managed_entries(...): + for key in keys: + if await self._delete_managed_entry(..., key=key): + deleted_count += 1 +``` + +Impact: Increased latency and load for backends that support native batch operations. + +Recommendation: Override bulk methods in backends where feasible (e.g., Redis `MGET`/`PIPELINE`, MongoDB bulk operations, Elasticsearch multi-get). Where not feasible, consider `asyncio.gather` with an upper bound on concurrency. + +--- + +### 7) Constants duplication: `DEFAULT_COLLECTION_NAME` +- `StatisticsWrapper` defines its own `DEFAULT_COLLECTION_NAME` rather than importing it, while other wrappers import it from the base store. + +Snippets (reference): + +```97:104:/Users/bill.easton/repos/py-kv-store-adapter/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py +DEFAULT_COLLECTION_NAME = "default_collection" +class StatisticsWrapper(BaseWrapper): +``` + +Impact: Drift risk if the default name changes in the base store. + +Recommendation: Import `DEFAULT_COLLECTION_NAME` from `key_value.aio.stores.base`. + +--- + +### Appendix: items already addressed with simple edits +- Accept int-like TTLs consistently and clamp correctly. +- Fixed misuse of `enumerate(iterable=...)` in the passthrough cache wrapper. +- Hardened Elasticsearch utils casting. +- MongoDB collection setup now maps existing collections and closes the client. +- Clarified DiskStore parameter error message. + diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py index 2446f704..c3cf8bdb 100644 --- a/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/pydantic/adapter.py @@ -45,7 +45,7 @@ async def get(self, key: str, *, collection: str | None = None) -> T | None: return None - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[T | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[T | None]: """Batch get and validate models by keys, preserving order. Each element is either a parsed model instance or None if missing. 
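
A minimal sketch of the bounded-concurrency fallback suggested in the review's performance section (the helper name and the semaphore bound are illustrative, not part of the patch):

```python
import asyncio
from collections.abc import Awaitable, Callable
from typing import Any


async def gather_bounded(
    fetch: Callable[[str], Awaitable[dict[str, Any] | None]],
    keys: list[str],
    limit: int = 16,  # illustrative cap on in-flight requests
) -> list[dict[str, Any] | None]:
    # Cap concurrency so large batches don't overwhelm the backend.
    semaphore = asyncio.Semaphore(limit)

    async def one(key: str) -> dict[str, Any] | None:
        async with semaphore:
            return await fetch(key)

    # gather preserves input order, matching the get_many contract.
    return list(await asyncio.gather(*(one(key) for key in keys)))
```
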
@@ -67,7 +67,7 @@ async def put(self, key: str, value: T, *, collection: str | None = None, ttl: f await self.key_value.put(key=key, value=value_dict, collection=collection, ttl=ttl) - async def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: + async def put_many(self, keys: list[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: """Serialize and store multiple models, preserving order alignment with keys.""" collection = collection or self.default_collection @@ -81,7 +81,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self.key_value.delete(key=key, collection=collection) - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple models by key. Returns the count of deleted entries.""" collection = collection or self.default_collection @@ -105,7 +105,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | Non return (None, None) - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: """Batch get models with TTLs. Each element is (model|None, ttl_seconds|None).""" collection = collection or self.default_collection diff --git a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py index cc39d86b..899f8efe 100644 --- a/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py +++ b/key-value/key-value-aio/src/key_value/aio/adapters/raise_on_missing/adapter.py @@ -50,16 +50,16 @@ async def get( @overload async def get_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False ) -> list[dict[str, Any] | None]: ... @overload async def get_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True] + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[True] ) -> list[dict[str, Any]]: ... async def get_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: bool = False ) -> list[dict[str, Any]] | list[dict[str, Any] | None]: """Retrieve multiple values by key from the specified collection. @@ -113,16 +113,16 @@ async def ttl( @overload async def ttl_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False ) -> list[tuple[dict[str, Any] | None, float | None]]: ... @overload async def ttl_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True] + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[True] ) -> list[tuple[dict[str, Any], float | None]]: ... 
async def ttl_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: bool = False ) -> list[tuple[dict[str, Any], float | None]] | list[tuple[dict[str, Any] | None, float | None]]: """Retrieve multiple values and TTL information by key from the specified collection. @@ -152,7 +152,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -178,7 +178,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: """ return await self.key_value.delete(key=key, collection=collection) - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple key-value pairs from the specified collection. Args: diff --git a/key-value/key-value-aio/src/key_value/aio/protocols/key_value.py b/key-value/key-value-aio/src/key_value/aio/protocols/key_value.py index 983b2ab1..fd48f140 100644 --- a/key-value/key-value-aio/src/key_value/aio/protocols/key_value.py +++ b/key-value/key-value-aio/src/key_value/aio/protocols/key_value.py @@ -57,7 +57,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: """ ... - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: """Retrieve multiple values by key from the specified collection. Args: @@ -69,7 +69,7 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) """ ... - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: """Retrieve multiple values and TTL information by key from the specified collection. Args: @@ -84,7 +84,7 @@ async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -101,7 +101,7 @@ async def put_many( """ ... - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple key-value pairs from the specified collection. 
Args: diff --git a/key-value/key-value-aio/src/key_value/aio/stores/base.py b/key-value/key-value-aio/src/key_value/aio/stores/base.py index d2fb4fd7..e450d376 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/base.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/base.py @@ -9,9 +9,10 @@ from types import TracebackType from typing import Any -from key_value.shared.errors import InvalidTTLError, StoreSetupError +from key_value.shared.constants import DEFAULT_COLLECTION_NAME +from key_value.shared.errors import StoreSetupError from key_value.shared.utils.managed_entry import ManagedEntry -from key_value.shared.utils.time_to_live import now +from key_value.shared.utils.time_to_live import now, prepare_ttls, validate_ttl from typing_extensions import Self, override from key_value.aio.protocols.key_value import ( @@ -23,24 +24,6 @@ AsyncKeyValueProtocol, ) -DEFAULT_COLLECTION_NAME = "default_collection" - - -def validate_one_ttl(t: float | None, raise_error: bool = False) -> bool: - if t is None: - return True - if t <= 0: - if raise_error: - raise InvalidTTLError(ttl=t) - return False - return True - - -def validate_ttls(t: list[float | None] | float | None, raise_error: bool = False) -> bool: - if not isinstance(t, Sequence): - t = [t] - return all(validate_one_ttl(t=ttl, raise_error=raise_error) for ttl in t) - class BaseStore(AsyncKeyValueProtocol, ABC): """An opinionated Abstract base class for managed key-value stores using ManagedEntry objects. @@ -111,7 +94,7 @@ async def setup_collection(self, *, collection: str) -> None: async def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None: """Retrieve a cache entry by key from the specified collection.""" - async def _get_managed_entries(self, *, collection: str, keys: Sequence[str]) -> list[ManagedEntry | None]: + async def _get_managed_entries(self, *, collection: str, keys: list[str]) -> list[ManagedEntry | None]: """Retrieve multiple managed entries by key from the specified collection.""" return [await self._get_managed_entry(collection=collection, key=key) for key in keys] @@ -146,7 +129,7 @@ async def get( return managed_entry.value @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: collection = collection or self.default_collection await self.setup_collection(collection=collection) @@ -168,7 +151,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[st @override async def ttl_many( self, - keys: Sequence[str], + keys: list[str], *, collection: str | None = None, ) -> list[tuple[dict[str, Any] | None, float | None]]: @@ -188,7 +171,7 @@ async def _put_managed_entry(self, *, collection: str, key: str, managed_entry: """Store a managed entry by key in the specified collection.""" ... 
- async def _put_managed_entries(self, *, collection: str, keys: Sequence[str], managed_entries: Sequence[ManagedEntry]) -> None: + async def _put_managed_entries(self, *, collection: str, keys: list[str], managed_entries: Sequence[ManagedEntry]) -> None: """Store multiple managed entries by key in the specified collection.""" for key, managed_entry in zip(keys, managed_entries, strict=True): @@ -204,9 +187,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = collection = collection or self.default_collection await self.setup_collection(collection=collection) - _ = validate_ttls(t=ttl, raise_error=True) - - managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=ttl, created_at=now()) + managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=validate_ttl(t=ttl), created_at=now()) await self._put_managed_entry( collection=collection, @@ -217,7 +198,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -235,16 +216,7 @@ async def put_many( collection = collection or self.default_collection await self.setup_collection(collection=collection) - ttl_for_entries: list[float | None] = [] - - if ttl is None: - ttl_for_entries = [None] * len(keys) - elif isinstance(ttl, Sequence): - ttl_for_entries = list(ttl) - elif isinstance(ttl, float): - ttl_for_entries = [ttl] * len(keys) - - _ = validate_ttls(t=ttl_for_entries, raise_error=True) + ttl_for_entries: list[float | None] = prepare_ttls(t=ttl, count=len(keys)) managed_entries: list[ManagedEntry] = [] @@ -258,7 +230,7 @@ async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: """Delete a managed entry by key from the specified collection.""" ... - async def _delete_managed_entries(self, *, keys: Sequence[str], collection: str) -> int: + async def _delete_managed_entries(self, *, keys: list[str], collection: str) -> int: """Delete multiple managed entries by key from the specified collection.""" deleted_count: int = 0 @@ -277,7 +249,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self._delete_managed_entry(key=key, collection=collection) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple managed entries by key from the specified collection.""" collection = collection or self.default_collection await self.setup_collection(collection=collection) diff --git a/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py index 9ea8c038..e0f5276a 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/disk/store.py @@ -58,7 +58,7 @@ def __init__( default_collection: The default collection to use if no collection is provided. 
""" if disk_cache is not None and directory is not None: - msg = "Either disk_cache or directory must be provided" + msg = "Provide only one of disk_cache or directory" raise ValueError(msg) if disk_cache is None and directory is None: @@ -103,7 +103,7 @@ async def _put_managed_entry( ) -> None: combo_key: str = compound_key(collection=collection, key=key) - _ = self._cache.set(key=combo_key, value=managed_entry.to_json(), expire=managed_entry.ttl) + _ = self._cache.set(key=combo_key, value=managed_entry.to_json(include_expiration=False), expire=managed_entry.ttl) @override async def _delete_managed_entry(self, *, key: str, collection: str) -> bool: diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py index 026c2e3b..695ebb15 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/store.py @@ -275,6 +275,7 @@ async def _get_collection_names(self, *, limit: int | None = None) -> list[str]: "collections": { "terms": { "field": "collection", + "size": limit, }, }, }, @@ -310,12 +311,13 @@ async def _delete_collection(self, *, collection: str) -> bool: @override async def _cull(self) -> None: + ms_epoch = int(now_as_epoch() * 1000) _ = await self._client.options(ignore_status=404).delete_by_query( index=f"{self._index_prefix}-*", body={ "query": { "range": { - "expires_at": {"lt": now_as_epoch()}, + "expires_at": {"lt": ms_epoch}, }, }, }, diff --git a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/utils.py b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/utils.py index f931d8a4..e13dfc43 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/utils.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/elasticsearch/utils.py @@ -10,7 +10,7 @@ def get_body_from_response(response: ObjectApiResponse[Any]) -> dict[str, Any]: if not isinstance(body, dict) or not all(isinstance(key, str) for key in body): # pyright: ignore[reportUnknownVariableType] return {} - return cast(typ="dict[str, Any]", val=body) + return cast("dict[str, Any]", body) def get_source_from_body(body: dict[str, Any]) -> dict[str, Any]: @@ -20,7 +20,7 @@ def get_source_from_body(body: dict[str, Any]) -> dict[str, Any]: if not isinstance(source, dict) or not all(isinstance(key, str) for key in source): # pyright: ignore[reportUnknownVariableType] return {} - return cast(typ="dict[str, Any]", val=source) + return cast("dict[str, Any]", source) def get_aggregations_from_body(body: dict[str, Any]) -> dict[str, Any]: @@ -30,7 +30,7 @@ def get_aggregations_from_body(body: dict[str, Any]) -> dict[str, Any]: if not isinstance(aggregations, dict) or not all(isinstance(key, str) for key in aggregations): # pyright: ignore[reportUnknownVariableType] return {} - return cast(typ="dict[str, Any]", val=aggregations) + return cast("dict[str, Any]", aggregations) def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, Any]]: @@ -40,12 +40,12 @@ def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, A if not isinstance(body, dict) or not all(isinstance(key, str) for key in body): # pyright: ignore[reportUnknownVariableType] return [] - body_dict: dict[str, Any] = cast(typ="dict[str, Any]", val=body) + body_dict: dict[str, Any] = cast("dict[str, Any]", body) if not (hits := body_dict.get("hits")): return [] - hits_dict: dict[str, Any] = 
cast(typ="dict[str, Any]", val=hits) + hits_dict: dict[str, Any] = cast("dict[str, Any]", hits) if not (hits_list := hits_dict.get("hits")): return [] @@ -53,7 +53,7 @@ def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, A if not all(isinstance(hit, dict) for hit in hits_list): # pyright: ignore[reportAny] return [] - hits_list_dict: list[dict[str, Any]] = cast(typ="list[dict[str, Any]]", val=hits_list) + hits_list_dict: list[dict[str, Any]] = cast("list[dict[str, Any]]", hits_list) return hits_list_dict @@ -73,7 +73,7 @@ def get_fields_from_hit(hit: dict[str, Any]) -> dict[str, list[Any]]: msg = f"Fields in hit {hit} is not a dict of lists" raise TypeError(msg) - return cast(typ="dict[str, list[Any]]", val=fields) + return cast("dict[str, list[Any]]", fields) def get_field_from_hit(hit: dict[str, Any], field: str) -> list[Any]: @@ -96,7 +96,7 @@ def get_values_from_field_in_hit(hit: dict[str, Any], field: str, value_type: ty msg = f"Field {field} in hit {hit} is not a list of {value_type}" raise TypeError(msg) - return cast(typ="list[T]", val=value) + return cast("list[T]", value) def get_first_value_from_field_in_hit(hit: dict[str, Any], field: str, value_type: type[T]) -> T: diff --git a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py index e1a340f5..cf7b8a91 100644 --- a/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py +++ b/key-value/key-value-aio/src/key_value/aio/stores/mongodb/store.py @@ -57,12 +57,28 @@ def __init__( db_name: str | None = None, coll_name: str | None = None, default_collection: str | None = None, - ) -> None: ... + ) -> None: + """Initialize the MongoDB store. + + Args: + client: The MongoDB client to use. + db_name: The name of the MongoDB database. + coll_name: The name of the MongoDB collection. + default_collection: The default collection to use if no collection is provided. + """ @overload def __init__( self, *, url: str, db_name: str | None = None, coll_name: str | None = None, default_collection: str | None = None - ) -> None: ... + ) -> None: + """Initialize the MongoDB store. + + Args: + url: The url of the MongoDB cluster. + db_name: The name of the MongoDB database. + coll_name: The name of the MongoDB collection. + default_collection: The default collection to use if no collection is provided. + """ def __init__( self, @@ -73,12 +89,7 @@ def __init__( coll_name: str | None = None, default_collection: str | None = None, ) -> None: - """Initialize the MongoDB store. - - The store uses a single MongoDB collection to persist entries for all adapter collections. - We store compound keys "{collection}::{key}" and a JSON string payload. Optional TTL is persisted - as ISO timestamps in the JSON payload itself to maintain consistent semantics across backends. 
- """ + """Initialize the MongoDB store.""" if client: self._client = client @@ -117,6 +128,7 @@ async def _setup_collection(self, *, collection: str) -> None: matching_collections: list[str] = await self._db.list_collection_names(filter=collection_filter) if matching_collections: + self._collections_by_name[collection] = self._db[collection] return new_collection: AsyncCollection[dict[str, Any]] = await self._db.create_collection(name=collection) @@ -186,8 +198,10 @@ async def _delete_collection(self, *, collection: str) -> bool: collection = self._sanitize_collection_name(collection=collection) _ = await self._db.drop_collection(name_or_collection=collection) + if collection in self._collections_by_name: + del self._collections_by_name[collection] return True @override async def _close(self) -> None: - pass + await self._client.close() diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/base.py b/key-value/key-value-aio/src/key_value/aio/wrappers/base.py index b8385f49..685f7d52 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/base.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/base.py @@ -16,7 +16,7 @@ async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any return await self.store.get(collection=collection, key=key) @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: return await self.store.get_many(collection=collection, keys=keys) @override @@ -24,7 +24,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[st return await self.store.ttl(collection=collection, key=key) @override - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: return await self.store.ttl_many(collection=collection, keys=keys) @override @@ -34,7 +34,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -47,5 +47,5 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self.store.delete(collection=collection, key=key) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: return await self.store.delete_many(keys=keys, collection=collection) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py index d6bdd4b4..3101016a 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/passthrough_cache/wrapper.py @@ -59,13 +59,13 @@ async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any return uncached_entry @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: key_to_value: dict[str, 
dict[str, Any] | None] = dict.fromkeys(keys, None) # First check the cache store for the entries cached_entries: list[dict[str, Any] | None] = await self.cache_store.get_many(collection=collection, keys=keys) - for i, key in enumerate(iterable=keys): + for i, key in enumerate(keys): key_to_value[key] = cached_entries[i] uncached_keys = [key for key, value in key_to_value.items() if value is None] @@ -78,7 +78,7 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) entries_to_cache_keys: list[str] = [] entries_to_cache_ttls: list[float | None] = [] - for i, key in enumerate(iterable=uncached_keys): + for i, key in enumerate(uncached_keys): entry, ttl = uncached_entries[i] if entry is not None: entries_to_cache_keys.append(key) @@ -114,13 +114,13 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[st return uncached_entry, ttl @override - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: key_to_value: dict[str, tuple[dict[str, Any] | None, float | None]] = dict.fromkeys(keys, (None, None)) # type: ignore # First check the cache store for the entries cached_entries: list[tuple[dict[str, Any] | None, float | None]] = await self.cache_store.ttl_many(collection=collection, keys=keys) - for i, key in enumerate(iterable=keys): + for i, key in enumerate(keys): key_to_value[key] = (cached_entries[i][0], cached_entries[i][1]) uncached_keys = [key for key, value in key_to_value.items() if value == (None, None)] @@ -133,7 +133,7 @@ async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) entries_to_cache_keys: list[str] = [] entries_to_cache_ttls: list[float | None] = [] - for i, key in enumerate(iterable=uncached_keys): + for i, key in enumerate(uncached_keys): entry, ttl = uncached_entries[i] if entry is not None: entries_to_cache_keys.append(key) @@ -161,7 +161,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -178,7 +178,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self.store.delete(collection=collection, key=key) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: _ = await self.cache_store.delete_many(collection=collection, keys=keys) return await self.store.delete_many(collection=collection, keys=keys) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py index 046437a9..f67fc38b 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_collections/wrapper.py @@ -37,7 +37,7 @@ async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any return await self.store.get(key=key, collection=new_collection) @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> 
list[dict[str, Any] | None]: new_collection: str = self._prefix_collection(collection=collection) return await self.store.get_many(keys=keys, collection=new_collection) @@ -47,7 +47,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[st return await self.store.ttl(key=key, collection=new_collection) @override - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: new_collection: str = self._prefix_collection(collection=collection) return await self.store.ttl_many(keys=keys, collection=new_collection) @@ -59,7 +59,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -74,6 +74,6 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self.store.delete(key=key, collection=new_collection) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: new_collection: str = self._prefix_collection(collection=collection) return await self.store.delete_many(keys=keys, collection=new_collection) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py index 98bf4343..451dee26 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/prefix_keys/wrapper.py @@ -34,7 +34,7 @@ async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any return await self.store.get(key=new_key, collection=collection) @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: new_keys: list[str] = [self._prefix_key(key=key) for key in keys] return await self.store.get_many(keys=new_keys, collection=collection) @@ -44,7 +44,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[st return await self.store.ttl(key=new_key, collection=collection) @override - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: new_keys: list[str] = [self._prefix_key(key=key) for key in keys] return await self.store.ttl_many(keys=new_keys, collection=collection) @@ -56,7 +56,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -71,6 +71,6 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self.store.delete(key=new_key, collection=collection) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: 
new_keys: list[str] = [self._prefix_key(key=key) for key in keys] return await self.store.delete_many(keys=new_keys, collection=collection) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py index 397f0c7d..ee2dc7cd 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/single_collection/wrapper.py @@ -41,7 +41,7 @@ async def get(self, key: str, *, collection: str | None = None) -> dict[str, Any return await self.store.get(key=new_key, collection=self.single_collection) @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] return await self.store.get_many(keys=new_keys, collection=self.single_collection) @@ -51,7 +51,7 @@ async def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[st return await self.store.ttl(key=new_key, collection=self.single_collection) @override - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] return await self.store.ttl_many(keys=new_keys, collection=self.single_collection) @@ -63,7 +63,7 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -78,6 +78,6 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return await self.store.delete(key=new_key, collection=self.single_collection) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] return await self.store.delete_many(keys=new_keys, collection=self.single_collection) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py index 80eb4adc..d7a48cbe 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/statistics/wrapper.py @@ -5,6 +5,7 @@ from typing_extensions import override from key_value.aio.protocols.key_value import AsyncKeyValue +from key_value.aio.stores.base import DEFAULT_COLLECTION_NAME from key_value.aio.wrappers.base import BaseWrapper @@ -94,9 +95,6 @@ def get_collection(self, collection: str) -> KVStoreCollectionStatistics: return self.collections[collection] -DEFAULT_COLLECTION_NAME = "__no_collection__" - - class StatisticsWrapper(BaseWrapper): """Statistics wrapper around a KV Store that tracks operation statistics. 
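
One nuance of reusing the shared constant here: statistics recorded without an explicit collection now accumulate under the store-wide default rather than the wrapper's old `__no_collection__` sentinel. A minimal sketch (values follow the constants introduced in this patch):

```python
from key_value.shared.constants import DEFAULT_COLLECTION_NAME

# Calls made with collection=None are now bucketed under the shared default.
collection: str | None = None
bucket = collection or DEFAULT_COLLECTION_NAME
assert bucket == "default_collection"
```
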
@@ -157,7 +155,7 @@ async def delete(self, key: str, *, collection: str | None = None) -> bool: return False @override - async def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + async def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: collection = collection or DEFAULT_COLLECTION_NAME results: list[dict[str, Any] | None] = await self.store.get_many(keys=keys, collection=collection) @@ -173,7 +171,7 @@ async def get_many(self, keys: Sequence[str], *, collection: str | None = None) @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -186,7 +184,7 @@ async def put_many( self.statistics.get_collection(collection=collection).put.increment(increment=len(keys)) @override - async def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + async def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: collection = collection or DEFAULT_COLLECTION_NAME deleted_count: int = await self.store.delete_many(keys=keys, collection=collection) @@ -200,7 +198,7 @@ async def delete_many(self, keys: Sequence[str], *, collection: str | None = Non return deleted_count @override - async def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + async def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: collection = collection or DEFAULT_COLLECTION_NAME results: list[tuple[dict[str, Any] | None, float | None]] = await self.store.ttl_many(keys=keys, collection=collection) diff --git a/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py index ff4b2af9..c9d27dba 100644 --- a/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py +++ b/key-value/key-value-aio/src/key_value/aio/wrappers/ttl_clamp/wrapper.py @@ -1,6 +1,7 @@ from collections.abc import Sequence from typing import Any, overload +from key_value.shared.utils.time_to_live import validate_ttl from typing_extensions import override from key_value.aio.protocols.key_value import AsyncKeyValue @@ -36,6 +37,8 @@ def _ttl_clamp(self, ttl: float | None) -> float | None: if ttl is None: return self.missing_ttl + ttl = validate_ttl(t=ttl) + return max(self.min_ttl, min(ttl, self.max_ttl)) @override @@ -45,17 +48,16 @@ async def put(self, key: str, value: dict[str, Any], *, collection: str | None = @override async def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, ttl: Sequence[float | None] | float | None = None, ) -> None: - clamped_ttl: Sequence[float | None] | float | None = None + if isinstance(ttl, (float, int)): + ttl = self._ttl_clamp(ttl=ttl) if isinstance(ttl, Sequence): - clamped_ttl = [self._ttl_clamp(ttl=t) for t in ttl] - elif isinstance(ttl, float): - clamped_ttl = self._ttl_clamp(ttl=ttl) + ttl = [self._ttl_clamp(ttl=t) for t in ttl] - await self.store.put_many(keys=keys, values=values, collection=collection, ttl=clamped_ttl) + await self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl) diff --git a/key-value/key-value-shared/src/key_value/shared/constants.py b/key-value/key-value-shared/src/key_value/shared/constants.py new file mode 100644 index 
00000000..77d5c4e5 --- /dev/null +++ b/key-value/key-value-shared/src/key_value/shared/constants.py @@ -0,0 +1 @@ +DEFAULT_COLLECTION_NAME = "default_collection" diff --git a/key-value/key-value-shared/src/key_value/shared/errors/key_value.py b/key-value/key-value-shared/src/key_value/shared/errors/key_value.py index d0eec474..b233b48c 100644 --- a/key-value/key-value-shared/src/key_value/shared/errors/key_value.py +++ b/key-value/key-value-shared/src/key_value/shared/errors/key_value.py @@ -1,3 +1,5 @@ +from typing import Any + from key_value.shared.errors.base import BaseKeyValueError @@ -26,8 +28,18 @@ def __init__(self, operation: str, collection: str | None = None, key: str | Non class InvalidTTLError(KeyValueOperationError): """Raised when a TTL is invalid.""" - def __init__(self, ttl: float): + def __init__(self, ttl: Any): super().__init__( message="A TTL is invalid.", extra_info={"ttl": ttl}, ) + + +class IncorrectTTLCountError(KeyValueOperationError): + """Raised when the number of TTLs is incorrect.""" + + def __init__(self, ttl: Any, count: int): + super().__init__( + message="The number of TTLs is incorrect.", + extra_info={"ttl": ttl, "count": count}, + ) diff --git a/key-value/key-value-shared/src/key_value/shared/utils/time_to_live.py b/key-value/key-value-shared/src/key_value/shared/utils/time_to_live.py index b99d6e84..9b3eabb7 100644 --- a/key-value/key-value-shared/src/key_value/shared/utils/time_to_live.py +++ b/key-value/key-value-shared/src/key_value/shared/utils/time_to_live.py @@ -1,6 +1,10 @@ import time +from collections.abc import Sequence from datetime import datetime, timedelta, timezone -from typing import Any +from typing import Any, overload + +from key_value.shared.errors import InvalidTTLError +from key_value.shared.errors.key_value import IncorrectTTLCountError def epoch_to_datetime(epoch: float) -> datetime: @@ -36,3 +40,52 @@ def try_parse_datetime_str(value: Any) -> datetime | None: # pyright: ignore[re return None return None + + +@overload +def validate_ttl(t: float | int) -> float: ... + + +@overload +def validate_ttl(t: float | int | None) -> float | None: ... 
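+
+# Illustrative behavior of the implementation below:
+#   validate_ttl(t=60)   -> 60.0  (ints coerce to float)
+#   validate_ttl(t=None) -> None
+#   validate_ttl(t=-5)   raises InvalidTTLError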
+ + +def validate_ttl(t: float | int | None) -> float | None: + if t is None: + return None + + if not isinstance(t, float | int): # pyright: ignore[reportUnnecessaryIsInstance] + raise InvalidTTLError(ttl=t) + + if isinstance(t, int): + t = float(t) + + if t <= 0: + raise InvalidTTLError(ttl=t) + + return t + + +def validate_ttls(t: Sequence[float | None] | float | None) -> list[float | None]: + if not isinstance(t, Sequence): + t = [t] + return [validate_ttl(t=ttl) if ttl is not None else None for ttl in t] + + +def prepare_ttls(t: Sequence[float | None] | float | None, count: int) -> list[float | None]: + if t is None: + return [None] * count + + if isinstance(t, str): + raise InvalidTTLError(ttl=t) + + if isinstance(t, (int, float)): + t = [float(t)] * count + + if isinstance(t, Sequence): # pyright: ignore[reportUnnecessaryIsInstance] + if len(t) != count: + raise IncorrectTTLCountError(ttl=t, count=count) + + t = [validate_ttl(t=ttl) for ttl in t] + + return t diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py index 186113dd..63edfdcb 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/pydantic/adapter.py @@ -48,7 +48,7 @@ def get(self, key: str, *, collection: str | None = None) -> T | None: return None - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[T | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[T | None]: """Batch get and validate models by keys, preserving order. Each element is either a parsed model instance or None if missing. @@ -70,7 +70,7 @@ def put(self, key: str, value: T, *, collection: str | None = None, ttl: float | self.key_value.put(key=key, value=value_dict, collection=collection, ttl=ttl) - def put_many(self, keys: Sequence[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: + def put_many(self, keys: list[str], values: Sequence[T], *, collection: str | None = None, ttl: float | None = None) -> None: """Serialize and store multiple models, preserving order alignment with keys.""" collection = collection or self.default_collection @@ -84,7 +84,7 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return self.key_value.delete(key=key, collection=collection) - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple models by key. Returns the count of deleted entries.""" collection = collection or self.default_collection @@ -108,7 +108,7 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[T | None, flo return (None, None) - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[T | None, float | None]]: """Batch get models with TTLs. 
Each element is (model|None, ttl_seconds|None).""" collection = collection or self.default_collection diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py index c04782cb..493720a8 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/adapters/raise_on_missing/adapter.py @@ -47,14 +47,14 @@ def get(self, key: str, *, collection: str | None = None, raise_on_missing: bool @overload def get_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False ) -> list[dict[str, Any] | None]: ... @overload - def get_many(self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True]) -> list[dict[str, Any]]: ... + def get_many(self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[True]) -> list[dict[str, Any]]: ... def get_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: bool = False ) -> list[dict[str, Any]] | list[dict[str, Any] | None]: """Retrieve multiple values by key from the specified collection. @@ -104,16 +104,16 @@ def ttl(self, key: str, *, collection: str | None = None, raise_on_missing: bool @overload def ttl_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[False] = False ) -> list[tuple[dict[str, Any] | None, float | None]]: ... @overload def ttl_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: Literal[True] + self, keys: list[str], *, collection: str | None = None, raise_on_missing: Literal[True] ) -> list[tuple[dict[str, Any], float | None]]: ... def ttl_many( - self, keys: Sequence[str], *, collection: str | None = None, raise_on_missing: bool = False + self, keys: list[str], *, collection: str | None = None, raise_on_missing: bool = False ) -> list[tuple[dict[str, Any], float | None]] | list[tuple[dict[str, Any] | None, float | None]]: """Retrieve multiple values and TTL information by key from the specified collection. @@ -143,7 +143,7 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -169,7 +169,7 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: """ return self.key_value.delete(key=key, collection=collection) - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple key-value pairs from the specified collection. 
Args: diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py b/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py index 33c7d548..5b7c7949 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/protocols/key_value.py @@ -55,7 +55,7 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: """ ... - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: """Retrieve multiple values by key from the specified collection. Args: @@ -67,7 +67,7 @@ def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> lis """ ... - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: """Retrieve multiple values and TTL information by key from the specified collection. Args: @@ -82,7 +82,7 @@ def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> lis def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -99,7 +99,7 @@ def put_many( """ ... - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple key-value pairs from the specified collection. Args: diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py index 9a2b6715..df68e7c0 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/base.py @@ -12,9 +12,10 @@ from types import TracebackType from typing import Any -from key_value.shared.errors import InvalidTTLError, StoreSetupError +from key_value.shared.constants import DEFAULT_COLLECTION_NAME +from key_value.shared.errors import StoreSetupError from key_value.shared.utils.managed_entry import ManagedEntry -from key_value.shared.utils.time_to_live import now +from key_value.shared.utils.time_to_live import now, prepare_ttls, validate_ttl from typing_extensions import Self, override from key_value.sync.code_gen.protocols.key_value import ( @@ -26,24 +27,6 @@ KeyValueProtocol, ) -DEFAULT_COLLECTION_NAME = "default_collection" - - -def validate_one_ttl(t: float | None, raise_error: bool = False) -> bool: - if t is None: - return True - if t <= 0: - if raise_error: - raise InvalidTTLError(ttl=t) - return False - return True - - -def validate_ttls(t: list[float | None] | float | None, raise_error: bool = False) -> bool: - if not isinstance(t, Sequence): - t = [t] - return all(validate_one_ttl(t=ttl, raise_error=raise_error) for ttl in t) - class BaseStore(KeyValueProtocol, ABC): """An opinionated Abstract base class for managed key-value stores using ManagedEntry objects. 
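The hunks above delete the local `validate_one_ttl`/`validate_ttls` helpers from `base.py` in favor of the shared `validate_ttl` and `prepare_ttls` utilities. A minimal sketch of the contract those shared helpers provide, assuming both error types are exported from `key_value.shared.errors` (the path the removed import used):

```python
# Sketch only; not part of the patch. Exercises the shared TTL helpers
# defined in key_value.shared.utils.time_to_live.
from key_value.shared.errors import IncorrectTTLCountError, InvalidTTLError
from key_value.shared.utils.time_to_live import prepare_ttls, validate_ttl

assert validate_ttl(t=None) is None            # None means "never expires"
assert validate_ttl(t=60) == 60.0              # ints are coerced to float

assert prepare_ttls(t=None, count=3) == [None, None, None]  # broadcast None
assert prepare_ttls(t=30, count=2) == [30.0, 30.0]          # broadcast a scalar

try:
    validate_ttl(t=-1)                         # non-positive TTLs are rejected
except InvalidTTLError:
    pass

try:
    prepare_ttls(t=[10.0, 20.0], count=3)      # sequence length must match count
except IncorrectTTLCountError:
    pass
```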
@@ -114,7 +97,7 @@ def setup_collection(self, *, collection: str) -> None: def _get_managed_entry(self, *, collection: str, key: str) -> ManagedEntry | None: """Retrieve a cache entry by key from the specified collection.""" - def _get_managed_entries(self, *, collection: str, keys: Sequence[str]) -> list[ManagedEntry | None]: + def _get_managed_entries(self, *, collection: str, keys: list[str]) -> list[ManagedEntry | None]: """Retrieve multiple managed entries by key from the specified collection.""" return [self._get_managed_entry(collection=collection, key=key) for key in keys] @@ -144,7 +127,7 @@ def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | No return managed_entry.value @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: collection = collection or self.default_collection self.setup_collection(collection=collection) @@ -164,7 +147,7 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any return (managed_entry.value, managed_entry.ttl) @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: """Retrieve multiple values and TTLs by key from the specified collection. Returns a list of tuples of the form (value, ttl_seconds). Missing or expired @@ -181,7 +164,7 @@ def _put_managed_entry(self, *, collection: str, key: str, managed_entry: Manage """Store a managed entry by key in the specified collection.""" ... - def _put_managed_entries(self, *, collection: str, keys: Sequence[str], managed_entries: Sequence[ManagedEntry]) -> None: + def _put_managed_entries(self, *, collection: str, keys: list[str], managed_entries: Sequence[ManagedEntry]) -> None: """Store multiple managed entries by key in the specified collection.""" for key, managed_entry in zip(keys, managed_entries, strict=True): @@ -193,16 +176,14 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, collection = collection or self.default_collection self.setup_collection(collection=collection) - _ = validate_ttls(t=ttl, raise_error=True) - - managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=ttl, created_at=now()) + managed_entry: ManagedEntry = ManagedEntry(value=value, ttl=validate_ttl(t=ttl), created_at=now()) self._put_managed_entry(collection=collection, key=key, managed_entry=managed_entry) @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -220,16 +201,7 @@ def put_many( collection = collection or self.default_collection self.setup_collection(collection=collection) - ttl_for_entries: list[float | None] = [] - - if ttl is None: - ttl_for_entries = [None] * len(keys) - elif isinstance(ttl, Sequence): - ttl_for_entries = list(ttl) - elif isinstance(ttl, float): - ttl_for_entries = [ttl] * len(keys) - - _ = validate_ttls(t=ttl_for_entries, raise_error=True) + ttl_for_entries: list[float | None] = prepare_ttls(t=ttl, count=len(keys)) managed_entries: list[ManagedEntry] = [] @@ -243,7 +215,7 @@ def _delete_managed_entry(self, *, key: str, collection: str) -> bool: """Delete a managed entry by key from the specified collection.""" ... 
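Taken together, the `get_many`/`ttl_many`/`put_many` changes above keep the batch surface order-preserving while delegating TTL broadcasting to `prepare_ttls(t=ttl, count=len(keys))`. A short usage sketch; the sync `MemoryStore` import path is an assumption:

```python
# Sketch only; not part of the patch. The MemoryStore path is assumed.
from key_value.sync.code_gen.stores.memory.store import MemoryStore

store = MemoryStore()

# A scalar TTL is broadcast to every key by prepare_ttls.
store.put_many(keys=["a", "b"], values=[{"n": 1}, {"n": 2}], ttl=60)

# get_many preserves key order; missing keys come back as None.
assert store.get_many(keys=["a", "missing", "b"]) == [{"n": 1}, None, {"n": 2}]

# ttl_many returns (value, remaining_ttl_seconds) tuples aligned with keys.
value, remaining = store.ttl_many(keys=["a"])[0]
assert value == {"n": 1} and remaining is not None
```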
- def _delete_managed_entries(self, *, keys: Sequence[str], collection: str) -> int: + def _delete_managed_entries(self, *, keys: list[str], collection: str) -> int: """Delete multiple managed entries by key from the specified collection.""" deleted_count: int = 0 @@ -262,7 +234,7 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return self._delete_managed_entry(key=key, collection=collection) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: """Delete multiple managed entries by key from the specified collection.""" collection = collection or self.default_collection self.setup_collection(collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py index a93b161b..106a9c17 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/disk/store.py @@ -61,7 +61,7 @@ def __init__( default_collection: The default collection to use if no collection is provided. """ if disk_cache is not None and directory is not None: - msg = "Either disk_cache or directory must be provided" + msg = "Provide only one of disk_cache or directory" raise ValueError(msg) if disk_cache is None and directory is None: @@ -100,7 +100,7 @@ def _get_managed_entry(self, *, key: str, collection: str) -> ManagedEntry | Non def _put_managed_entry(self, *, key: str, collection: str, managed_entry: ManagedEntry) -> None: combo_key: str = compound_key(collection=collection, key=key) - _ = self._cache.set(key=combo_key, value=managed_entry.to_json(), expire=managed_entry.ttl) + _ = self._cache.set(key=combo_key, value=managed_entry.to_json(include_expiration=False), expire=managed_entry.ttl) @override def _delete_managed_entry(self, *, key: str, collection: str) -> bool: diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py index 1d5f71b4..6189c55b 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/store.py @@ -231,7 +231,7 @@ def _get_collection_names(self, *, limit: int | None = None) -> list[str]: limit = min(limit or DEFAULT_PAGE_SIZE, PAGE_LIMIT) search_response: ObjectApiResponse[Any] = self._client.options(ignore_status=404).search( - index=f"{self._index_prefix}-*", aggregations={"collections": {"terms": {"field": "collection"}}}, size=limit + index=f"{self._index_prefix}-*", aggregations={"collections": {"terms": {"field": "collection", "size": limit}}}, size=limit ) body: dict[str, Any] = get_body_from_response(response=search_response) @@ -256,8 +256,9 @@ def _delete_collection(self, *, collection: str) -> bool: @override def _cull(self) -> None: + ms_epoch = int(now_as_epoch() * 1000) _ = self._client.options(ignore_status=404).delete_by_query( - index=f"{self._index_prefix}-*", body={"query": {"range": {"expires_at": {"lt": now_as_epoch()}}}} + index=f"{self._index_prefix}-*", body={"query": {"range": {"expires_at": {"lt": ms_epoch}}}} ) @override diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py 
b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py index d4c95a3f..3da13eb4 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/elasticsearch/utils.py @@ -13,7 +13,7 @@ def get_body_from_response(response: ObjectApiResponse[Any]) -> dict[str, Any]: if not isinstance(body, dict) or not all(isinstance(key, str) for key in body): # pyright: ignore[reportUnknownVariableType] return {} - return cast(typ="dict[str, Any]", val=body) + return cast("dict[str, Any]", body) def get_source_from_body(body: dict[str, Any]) -> dict[str, Any]: @@ -23,7 +23,7 @@ def get_source_from_body(body: dict[str, Any]) -> dict[str, Any]: if not isinstance(source, dict) or not all(isinstance(key, str) for key in source): # pyright: ignore[reportUnknownVariableType] return {} - return cast(typ="dict[str, Any]", val=source) + return cast("dict[str, Any]", source) def get_aggregations_from_body(body: dict[str, Any]) -> dict[str, Any]: @@ -33,7 +33,7 @@ def get_aggregations_from_body(body: dict[str, Any]) -> dict[str, Any]: if not isinstance(aggregations, dict) or not all(isinstance(key, str) for key in aggregations): # pyright: ignore[reportUnknownVariableType] return {} - return cast(typ="dict[str, Any]", val=aggregations) + return cast("dict[str, Any]", aggregations) def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, Any]]: @@ -43,12 +43,12 @@ def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, A if not isinstance(body, dict) or not all(isinstance(key, str) for key in body): # pyright: ignore[reportUnknownVariableType] return [] - body_dict: dict[str, Any] = cast(typ="dict[str, Any]", val=body) + body_dict: dict[str, Any] = cast("dict[str, Any]", body) if not (hits := body_dict.get("hits")): return [] - hits_dict: dict[str, Any] = cast(typ="dict[str, Any]", val=hits) + hits_dict: dict[str, Any] = cast("dict[str, Any]", hits) if not (hits_list := hits_dict.get("hits")): return [] @@ -56,7 +56,7 @@ def get_hits_from_response(response: ObjectApiResponse[Any]) -> list[dict[str, A if not all(isinstance(hit, dict) for hit in hits_list): # pyright: ignore[reportAny] return [] - hits_list_dict: list[dict[str, Any]] = cast(typ="list[dict[str, Any]]", val=hits_list) + hits_list_dict: list[dict[str, Any]] = cast("list[dict[str, Any]]", hits_list) return hits_list_dict @@ -76,7 +76,7 @@ def get_fields_from_hit(hit: dict[str, Any]) -> dict[str, list[Any]]: msg = f"Fields in hit {hit} is not a dict of lists" raise TypeError(msg) - return cast(typ="dict[str, list[Any]]", val=fields) + return cast("dict[str, list[Any]]", fields) def get_field_from_hit(hit: dict[str, Any], field: str) -> list[Any]: @@ -99,7 +99,7 @@ def get_values_from_field_in_hit(hit: dict[str, Any], field: str, value_type: ty msg = f"Field {field} in hit {hit} is not a list of {value_type}" raise TypeError(msg) - return cast(typ="list[T]", val=value) + return cast("list[T]", value) def get_first_value_from_field_in_hit(hit: dict[str, Any], field: str, value_type: type[T]) -> T: diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py index 24921dc9..96995e73 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/stores/mongodb/store.py @@ -64,12 +64,28 @@ def __init__( 
db_name: str | None = None, coll_name: str | None = None, default_collection: str | None = None, - ) -> None: ... + ) -> None: + """Initialize the MongoDB store. + + Args: + client: The MongoDB client to use. + db_name: The name of the MongoDB database. + coll_name: The name of the MongoDB collection. + default_collection: The default collection to use if no collection is provided. + """ @overload def __init__( self, *, url: str, db_name: str | None = None, coll_name: str | None = None, default_collection: str | None = None - ) -> None: ... + ) -> None: + """Initialize the MongoDB store. + + Args: + url: The url of the MongoDB cluster. + db_name: The name of the MongoDB database. + coll_name: The name of the MongoDB collection. + default_collection: The default collection to use if no collection is provided. + """ def __init__( self, @@ -80,12 +96,7 @@ def __init__( coll_name: str | None = None, default_collection: str | None = None, ) -> None: - """Initialize the MongoDB store. - - The store uses a single MongoDB collection to persist entries for all adapter collections. - We store compound keys "{collection}::{key}" and a JSON string payload. Optional TTL is persisted - as ISO timestamps in the JSON payload itself to maintain consistent semantics across backends. - """ + """Initialize the MongoDB store.""" if client: self._client = client @@ -124,6 +135,7 @@ def _setup_collection(self, *, collection: str) -> None: matching_collections: list[str] = self._db.list_collection_names(filter=collection_filter) if matching_collections: + self._collections_by_name[collection] = self._db[collection] return new_collection: Collection[dict[str, Any]] = self._db.create_collection(name=collection) @@ -187,8 +199,10 @@ def _delete_collection(self, *, collection: str) -> bool: collection = self._sanitize_collection_name(collection=collection) _ = self._db.drop_collection(name_or_collection=collection) + if collection in self._collections_by_name: + del self._collections_by_name[collection] return True @override def _close(self) -> None: - pass + self._client.close() diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py index f301d3d8..d11688d6 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/base.py @@ -19,7 +19,7 @@ def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | No return self.store.get(collection=collection, key=key) @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: return self.store.get_many(collection=collection, keys=keys) @override @@ -27,7 +27,7 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any return self.store.ttl(collection=collection, key=key) @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: return self.store.ttl_many(collection=collection, keys=keys) @override @@ -37,7 +37,7 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: 
Sequence[dict[str, Any]], *, collection: str | None = None, @@ -50,5 +50,5 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return self.store.delete(collection=collection, key=key) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: return self.store.delete_many(keys=keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py index d7c9eddc..8018c759 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/passthrough_cache/wrapper.py @@ -55,13 +55,13 @@ def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | No return uncached_entry @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: key_to_value: dict[str, dict[str, Any] | None] = dict.fromkeys(keys, None) # First check the cache store for the entries cached_entries: list[dict[str, Any] | None] = self.cache_store.get_many(collection=collection, keys=keys) - for i, key in enumerate(iterable=keys): + for i, key in enumerate(keys): key_to_value[key] = cached_entries[i] uncached_keys = [key for (key, value) in key_to_value.items() if value is None] @@ -72,7 +72,7 @@ def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> lis entries_to_cache_keys: list[str] = [] entries_to_cache_ttls: list[float | None] = [] - for i, key in enumerate(iterable=uncached_keys): + for i, key in enumerate(uncached_keys): (entry, ttl) = uncached_entries[i] if entry is not None: entries_to_cache_keys.append(key) @@ -103,13 +103,13 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any return (uncached_entry, ttl) @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: key_to_value: dict[str, tuple[dict[str, Any] | None, float | None]] = dict.fromkeys(keys, (None, None)) # type: ignore # First check the cache store for the entries cached_entries: list[tuple[dict[str, Any] | None, float | None]] = self.cache_store.ttl_many(collection=collection, keys=keys) - for i, key in enumerate(iterable=keys): + for i, key in enumerate(keys): key_to_value[key] = (cached_entries[i][0], cached_entries[i][1]) uncached_keys = [key for (key, value) in key_to_value.items() if value == (None, None)] @@ -120,7 +120,7 @@ def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> lis entries_to_cache_keys: list[str] = [] entries_to_cache_ttls: list[float | None] = [] - for i, key in enumerate(iterable=uncached_keys): + for i, key in enumerate(uncached_keys): (entry, ttl) = uncached_entries[i] if entry is not None: entries_to_cache_keys.append(key) @@ -143,7 +143,7 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -160,7 +160,7 @@ 
def delete(self, key: str, *, collection: str | None = None) -> bool: return self.store.delete(collection=collection, key=key) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: _ = self.cache_store.delete_many(collection=collection, keys=keys) return self.store.delete_many(collection=collection, keys=keys) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py index 501d8105..649c6b1d 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_collections/wrapper.py @@ -40,7 +40,7 @@ def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | No return self.store.get(key=key, collection=new_collection) @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: new_collection: str = self._prefix_collection(collection=collection) return self.store.get_many(keys=keys, collection=new_collection) @@ -50,7 +50,7 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any return self.store.ttl(key=key, collection=new_collection) @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: new_collection: str = self._prefix_collection(collection=collection) return self.store.ttl_many(keys=keys, collection=new_collection) @@ -62,7 +62,7 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -77,6 +77,6 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return self.store.delete(key=key, collection=new_collection) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: new_collection: str = self._prefix_collection(collection=collection) return self.store.delete_many(keys=keys, collection=new_collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py index f905228b..f0dc00c0 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/prefix_keys/wrapper.py @@ -37,7 +37,7 @@ def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | No return self.store.get(key=new_key, collection=collection) @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: new_keys: list[str] = [self._prefix_key(key=key) for key in keys] return self.store.get_many(keys=new_keys, collection=collection) @@ 
-47,7 +47,7 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any return self.store.ttl(key=new_key, collection=collection) @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: new_keys: list[str] = [self._prefix_key(key=key) for key in keys] return self.store.ttl_many(keys=new_keys, collection=collection) @@ -59,7 +59,7 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -74,6 +74,6 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return self.store.delete(key=new_key, collection=collection) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: new_keys: list[str] = [self._prefix_key(key=key) for key in keys] return self.store.delete_many(keys=new_keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py index 08c6627a..80fd4dbd 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/single_collection/wrapper.py @@ -44,7 +44,7 @@ def get(self, key: str, *, collection: str | None = None) -> dict[str, Any] | No return self.store.get(key=new_key, collection=self.single_collection) @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] return self.store.get_many(keys=new_keys, collection=self.single_collection) @@ -54,7 +54,7 @@ def ttl(self, key: str, *, collection: str | None = None) -> tuple[dict[str, Any return self.store.ttl(key=new_key, collection=self.single_collection) @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] return self.store.ttl_many(keys=new_keys, collection=self.single_collection) @@ -66,7 +66,7 @@ def put(self, key: str, value: dict[str, Any], *, collection: str | None = None, @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -81,6 +81,6 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return self.store.delete(key=new_key, collection=self.single_collection) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: new_keys: list[str] = [self._prefix_key(key=key, collection=collection) for key in keys] return self.store.delete_many(keys=new_keys, 
collection=self.single_collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py index bdab60a8..35bb402b 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/statistics/wrapper.py @@ -8,6 +8,7 @@ from typing_extensions import override from key_value.sync.code_gen.protocols.key_value import KeyValue +from key_value.sync.code_gen.stores.base import DEFAULT_COLLECTION_NAME from key_value.sync.code_gen.wrappers.base import BaseWrapper @@ -97,9 +98,6 @@ def get_collection(self, collection: str) -> KVStoreCollectionStatistics: return self.collections[collection] -DEFAULT_COLLECTION_NAME = "__no_collection__" - - class StatisticsWrapper(BaseWrapper): """Statistics wrapper around a KV Store that tracks operation statistics. @@ -160,7 +158,7 @@ def delete(self, key: str, *, collection: str | None = None) -> bool: return False @override - def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: + def get_many(self, keys: list[str], *, collection: str | None = None) -> list[dict[str, Any] | None]: collection = collection or DEFAULT_COLLECTION_NAME results: list[dict[str, Any] | None] = self.store.get_many(keys=keys, collection=collection) @@ -176,7 +174,7 @@ def get_many(self, keys: Sequence[str], *, collection: str | None = None) -> lis @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, @@ -189,7 +187,7 @@ def put_many( self.statistics.get_collection(collection=collection).put.increment(increment=len(keys)) @override - def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> int: + def delete_many(self, keys: list[str], *, collection: str | None = None) -> int: collection = collection or DEFAULT_COLLECTION_NAME deleted_count: int = self.store.delete_many(keys=keys, collection=collection) @@ -203,7 +201,7 @@ def delete_many(self, keys: Sequence[str], *, collection: str | None = None) -> return deleted_count @override - def ttl_many(self, keys: Sequence[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: + def ttl_many(self, keys: list[str], *, collection: str | None = None) -> list[tuple[dict[str, Any] | None, float | None]]: collection = collection or DEFAULT_COLLECTION_NAME results: list[tuple[dict[str, Any] | None, float | None]] = self.store.ttl_many(keys=keys, collection=collection) diff --git a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py index 6f251566..a681c919 100644 --- a/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py +++ b/key-value/key-value-sync/src/key_value/sync/code_gen/wrappers/ttl_clamp/wrapper.py @@ -4,6 +4,7 @@ from collections.abc import Sequence from typing import Any, overload +from key_value.shared.utils.time_to_live import validate_ttl from typing_extensions import override from key_value.sync.code_gen.protocols.key_value import KeyValue @@ -39,6 +40,8 @@ def _ttl_clamp(self, ttl: float | None) -> float | None: if ttl is None: return self.missing_ttl + ttl = validate_ttl(t=ttl) + return max(self.min_ttl, min(ttl, self.max_ttl)) @override @@ -48,17 +51,16 @@ def put(self, key: 
str, value: dict[str, Any], *, collection: str | None = None, @override def put_many( self, - keys: Sequence[str], + keys: list[str], values: Sequence[dict[str, Any]], *, collection: str | None = None, ttl: Sequence[float | None] | float | None = None, ) -> None: - clamped_ttl: Sequence[float | None] | float | None = None + if isinstance(ttl, (float, int)): + ttl = self._ttl_clamp(ttl=ttl) if isinstance(ttl, Sequence): - clamped_ttl = [self._ttl_clamp(ttl=t) for t in ttl] - elif isinstance(ttl, float): - clamped_ttl = self._ttl_clamp(ttl=ttl) + ttl = [self._ttl_clamp(ttl=t) for t in ttl] - self.store.put_many(keys=keys, values=values, collection=collection, ttl=clamped_ttl) + self.store.put_many(keys=keys, values=values, collection=collection, ttl=ttl)
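With `validate_ttl` applied inside `_ttl_clamp`, invalid TTLs now fail fast before clamping. A behavior sketch follows; the wrapper's class name and constructor signature are assumptions inferred from the attributes used above (`min_ttl`, `max_ttl`, `missing_ttl`):

```python
# Sketch only; the TTLClampWrapper name and constructor are assumed, not confirmed.
from key_value.sync.code_gen.stores.memory.store import MemoryStore
from key_value.sync.code_gen.wrappers.ttl_clamp.wrapper import TTLClampWrapper

store = TTLClampWrapper(store=MemoryStore(), min_ttl=10, max_ttl=3600, missing_ttl=300)

# A scalar TTL is clamped once, then broadcast by the underlying store.
store.put_many(keys=["a", "b"], values=[{}, {}], ttl=1_000_000)  # stored with ttl=3600

# Per-item TTLs are clamped element-wise; None entries map to missing_ttl.
store.put_many(keys=["c", "d"], values=[{}, {}], ttl=[5, None])  # stored as [10, 300]
```

One edge worth noting: in `put_many`, a scalar `ttl=None` matches neither `isinstance` check and is forwarded to the underlying store unchanged, so `missing_ttl` is only applied to `None` entries that arrive inside a sequence.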