diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..04aad9c --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Bijay Nayak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/docs/api.md b/docs/api.md index fcaed08..0e8b1b7 100644 --- a/docs/api.md +++ b/docs/api.md @@ -32,6 +32,13 @@ show_signature: true show_root_heading: true + +::: fast_cache.MongoDBBackend + options: + show_source: true + show_signature: true + show_root_heading: true + ## Backend Base Class ::: fast_cache.backends.backend.CacheBackend \ No newline at end of file diff --git a/docs/backends.md b/docs/backends.md index 40c6dbc..3771ce9 100644 --- a/docs/backends.md +++ b/docs/backends.md @@ -6,12 +6,13 @@ FastAPI Cachekit supports multiple cache backends, so you can choose the best fi ## Supported Backends -| Backend | Description | Best For | Docs | -|--------------------|--------------------------------------------------|-------------------------|---------------------| -| InMemoryBackend | Stores cache in the app’s memory (LRU support) | Development, testing | [In-Memory](backends/in_memory.md) | -| RedisBackend | Uses Redis for distributed, production caching | Production, scaling | [Redis](backends/redis.md) | -| PostgresBackend | Uses PostgreSQL for persistent SQL-based caching | Data persistence, SQL | [Postgres](backends/postgres.md) | -| MemcachedBackend | Uses Memcached for high-speed distributed caching | High-speed, stateless | [Memcached](backends/memcached.md) | +| Backend | Description | Best For | Docs | +|------------------|--------------------------------------------------|-------------------------|------------------------------------| +| InMemoryBackend | Stores cache in the app’s memory (LRU support) | Development, testing | [In-Memory](backends/in_memory.md) | +| RedisBackend | Uses Redis for distributed, production caching | Production, scaling | [Redis](backends/redis.md) | +| PostgresBackend | Uses PostgreSQL for persistent SQL-based caching | Data persistence, SQL | [Postgres](backends/postgres.md) | +| MemcachedBackend | Uses Memcached for high-speed distributed caching | High-speed, stateless | 
+| MongoDBBackend   | Uses MongoDB for persistent, document-based caching | Persistence, TTL expiry | [MongoDB](backends/mongodb.md)     |
    # This result will be cached in MongoDB for 2 minutes
+- **Multiple backends:** Use in-memory, Redis, Postgres, Memcached, or MongoDB. Swap backends with a single line of code.
+| `MongoDBBackend`   | ✅       | ✅        | `mongodb`     |
### **Redis Backend** -- **pip** - ``` - pip install fastapi-cachekit[redis] - ``` -- **uv** - ``` - uv add fastapi-cachekit[redis] - ``` -- **poetry** - ``` - poetry add fastapi-cachekit -E redis - ``` +- **pip** `pip install fastapi-cachekit[redis]` +- **uv** `uv add fastapi-cachekit[redis]` +- **poetry** `poetry add fastapi-cachekit -E redis` + ### **Postgres Backend** -- **pip** - ``` - pip install fastapi-cachekit[postgres] - ``` -- **uv** - ``` - uv add fastapi-cachekit[postgres] - ``` -- **poetry** - ``` - poetry add fastapi-cachekit -E postgres - ``` +- **pip** `pip install fastapi-cachekit[postgres]` +- **uv** `uv add fastapi-cachekit[postgres]` +- **poetry** `poetry add fastapi-cachekit -E postgres` + ### **Memcached Backend** -- **pip** - ``` - pip install fastapi-cachekit[memcached] - ``` -- **uv** - ``` - uv add fastapi-cachekit[memcached] - ``` -- **poetry** - ``` - poetry add fastapi-cachekit -E memcached - ``` +- **pip** `pip install fastapi-cachekit[memcached]` +- **uv** `uv add fastapi-cachekit[memcached]` +- **poetry** `poetry add fastapi-cachekit -E memcached` + +### **MongoDB Backend** + +- **pip** `pip install fastapi-cachekit[mongodb]` +- **uv** `uv add fastapi-cachekit[mongodb]` +- **poetry** `poetry add fastapi-cachekit -E mongodb` --- ## 🧩 Install All Backends If you want to install all supported backends at once: -- **pip** - ``` - pip install fastapi-cachekit[all] - ``` -- **uv** - ``` - uv add fastapi-cachekit[all] - ``` -- **poetry** - ``` - poetry add fastapi-cachekit -E all - ``` +- **pip** `pip install fastapi-cachekit[all]` + +- **uv** `uv add fastapi-cachekit[all]` + +- **poetry** `poetry add fastapi-cachekit -E all ` --- diff --git a/fast_cache/__init__.py b/fast_cache/__init__.py index f3b033c..6f07430 100644 --- a/fast_cache/__init__.py +++ b/fast_cache/__init__.py @@ -5,8 +5,9 @@ from .backends.memory import InMemoryBackend from .backends.postgres import PostgresBackend from .backends.memcached import MemcachedBackend +from 
import pickle
import re
from datetime import datetime, timedelta, timezone
from typing import Any, Optional, Union

from .backend import CacheBackend


class MongoDBBackend(CacheBackend):
    """
    MongoDB cache backend with both sync and async support.

    Each cache entry is stored as a document with:
        - ``_id``: the namespaced cache key
        - ``value``: the pickled cached value
        - ``expires_at``: UTC datetime at which the entry expires
          (absent for entries that never expire)

    A TTL index on ``expires_at`` lets MongoDB's background monitor delete
    expired documents automatically.  TTL indexes only act on BSON *date*
    values, so ``expires_at`` is stored as a ``datetime`` — storing an epoch
    integer would be silently ignored by the TTL monitor and entries would
    never be purged.  Because the monitor only runs roughly once a minute,
    expiration is also checked in code so stale values are never returned.
    """

    def __init__(
        self,
        uri: str,
        namespace: Optional[str] = "fastapi_cache",
    ) -> None:
        """
        Initialize the MongoDB backend.

        Args:
            uri (str): MongoDB connection URI (should include the database name).
            namespace (Optional[str]): Optional prefix for all cache keys and the
                collection name. Defaults to "fastapi_cache".

        Raises:
            ImportError: If pymongo is not installed.
        """
        try:
            import pymongo
        except ImportError:
            raise ImportError(
                "MongoDBBackend requires 'pymongo>=4.6.0'. "
                "Install with: pip install fastapi-cachekit[mongodb]"
            )
        self._namespace = namespace or "cache"

        # tz_aware=True makes pymongo decode BSON dates as timezone-aware
        # datetimes; otherwise comparing them to datetime.now(timezone.utc)
        # would raise TypeError (naive vs aware).
        self._sync_client = pymongo.MongoClient(uri, tz_aware=True)
        self._sync_db = self._sync_client.get_default_database()
        self._sync_collection = self._sync_db[self._namespace]
        # expireAfterSeconds=0 => delete as soon as expires_at is in the past.
        self._sync_collection.create_index("expires_at", expireAfterSeconds=0)

        # Async client shares the same collection, so the TTL index created
        # above covers async writes as well.
        # NOTE(review): AsyncMongoClient ships with pymongo >= 4.9 — confirm
        # the project's pinned lower bound (currently >=4.6.0) matches.
        self._async_client = pymongo.AsyncMongoClient(uri, tz_aware=True)
        self._async_db = self._async_client.get_default_database()
        self._async_collection = self._async_db[self._namespace]

    def _make_key(self, key: str) -> str:
        """
        Create a namespaced cache key.

        Args:
            key (str): The original cache key.

        Returns:
            str: The namespaced cache key.
        """
        return f"{self._namespace}:{key}"

    @staticmethod
    def _expiry(expire: Optional[Union[int, timedelta]]) -> Optional[datetime]:
        """Convert an ``expire`` argument into an absolute UTC deadline.

        Returns None when the entry should never expire.
        """
        if expire is None:
            return None
        if isinstance(expire, timedelta):
            return datetime.now(timezone.utc) + expire
        return datetime.now(timezone.utc) + timedelta(seconds=expire)

    @staticmethod
    def _is_live(doc: Optional[dict]) -> bool:
        """Return True if *doc* exists and has not passed its deadline.

        Expiration is checked here because MongoDB's TTL monitor may lag
        up to ~60s behind the stored deadline.
        """
        if not doc:
            return False
        expires_at = doc.get("expires_at")
        return expires_at is None or expires_at > datetime.now(timezone.utc)

    def get(self, key: str) -> Optional[Any]:
        """
        Synchronously retrieve a value from the cache.

        Args:
            key (str): The cache key.

        Returns:
            Optional[Any]: The cached value, or None if not found or expired.
        """
        doc = self._sync_collection.find_one({"_id": self._make_key(key)})
        if self._is_live(doc):
            return pickle.loads(doc["value"])
        return None

    def set(
        self,
        key: str,
        value: Any,
        expire: Optional[Union[int, timedelta]] = None,
    ) -> None:
        """
        Synchronously set a value in the cache.

        Args:
            key (str): The cache key.
            value (Any): The value to cache.
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds
                or as timedelta. If None, the entry never expires.
        """
        deadline = self._expiry(expire)
        ops: dict = {"$set": {"value": pickle.dumps(value)}}
        if deadline is not None:
            ops["$set"]["expires_at"] = deadline
        else:
            # Drop any deadline left over from a previous set() with expire,
            # otherwise the stale TTL would still delete this entry.
            ops["$unset"] = {"expires_at": ""}
        self._sync_collection.update_one(
            {"_id": self._make_key(key)}, ops, upsert=True
        )

    def delete(self, key: str) -> None:
        """
        Synchronously delete a value from the cache.

        Args:
            key (str): The cache key.
        """
        self._sync_collection.delete_one({"_id": self._make_key(key)})

    def clear(self) -> None:
        """
        Synchronously clear all values from the namespace.
        """
        # re.escape: the namespace is user-supplied and must not be
        # interpreted as a regex pattern.
        prefix = re.escape(f"{self._namespace}:")
        self._sync_collection.delete_many({"_id": {"$regex": f"^{prefix}"}})

    def has(self, key: str) -> bool:
        """
        Synchronously check if a key exists in the cache.

        Args:
            key (str): The cache key.

        Returns:
            bool: True if the key exists and is not expired, False otherwise.
        """
        doc = self._sync_collection.find_one({"_id": self._make_key(key)})
        return self._is_live(doc)

    async def aget(self, key: str) -> Optional[Any]:
        """
        Asynchronously retrieve a value from the cache.

        Args:
            key (str): The cache key.

        Returns:
            Optional[Any]: The cached value, or None if not found or expired.
        """
        doc = await self._async_collection.find_one({"_id": self._make_key(key)})
        if self._is_live(doc):
            return pickle.loads(doc["value"])
        return None

    async def aset(
        self,
        key: str,
        value: Any,
        expire: Optional[Union[int, timedelta]] = None,
    ) -> None:
        """
        Asynchronously set a value in the cache.

        Args:
            key (str): The cache key.
            value (Any): The value to cache.
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds
                or as timedelta. If None, the entry never expires.
        """
        deadline = self._expiry(expire)
        ops: dict = {"$set": {"value": pickle.dumps(value)}}
        if deadline is not None:
            ops["$set"]["expires_at"] = deadline
        else:
            # Keep sync/async semantics identical: clear any previous TTL.
            ops["$unset"] = {"expires_at": ""}
        await self._async_collection.update_one(
            {"_id": self._make_key(key)}, ops, upsert=True
        )

    async def adelete(self, key: str) -> None:
        """
        Asynchronously delete a value from the cache.

        Args:
            key (str): The cache key.
        """
        await self._async_collection.delete_one({"_id": self._make_key(key)})

    async def aclear(self) -> None:
        """
        Asynchronously clear all values from the namespace.
        """
        prefix = re.escape(f"{self._namespace}:")
        await self._async_collection.delete_many({"_id": {"$regex": f"^{prefix}"}})

    async def ahas(self, key: str) -> bool:
        """
        Asynchronously check if a key exists in the cache.

        Args:
            key (str): The cache key.

        Returns:
            bool: True if the key exists and is not expired, False otherwise.
        """
        doc = await self._async_collection.find_one({"_id": self._make_key(key)})
        return self._is_live(doc)

    def close(self) -> None:
        """
        Close the synchronous MongoDB client.
        """
        self._sync_client.close()

    async def aclose(self) -> None:
        """
        Close the asynchronous MongoDB client.

        The synchronous client is deliberately left alone here: ``close()``
        owns it, and closing it from the async path made the two methods
        overlap in surprising ways.
        """
        await self._async_client.close()
plugins: + - search: + lang: en + separator: '[\s\-\.]+' + pipeline: + - stemmer + - stopWordFilter - mkdocstrings: handlers: python: diff --git a/pyproject.toml b/pyproject.toml index ee37b61..7a087f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ dependencies = [ [project.urls] Homepage = "https://github.com/devbijay/fast-cache" -Documentation = "https://github.com/devbijay/fast-cache#readme" +Documentation = "https://devbijay.github.io/Fast-Cache/" Repository = "https://github.com/devbijay/fast-cache.git" Issues = "https://github.com/devbijay/fast-cache/issues" @@ -63,11 +63,15 @@ memcached = [ "aiomcache>=0.8.1", "pymemcache>=4.0.0" ] +mongodb = [ + "pymongo[snappy,gssapi,srv]>=4.6.0" +] all = [ "redis>=4.2.0", "psycopg[pool]>=3.2.9", "aiomcache>=0.8.1", - "pymemcache>=4.0.0" + "pymemcache>=4.0.0", + "pymongo[snappy,gssapi,srv]>=4.6.0" ] [dependency-groups] diff --git a/tests/conftest.py b/tests/conftest.py index 906c018..e5764b2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,7 @@ import pytest_asyncio from testcontainers.memcached import MemcachedContainer +from testcontainers.mongodb import MongoDbContainer from testcontainers.redis import RedisContainer if sys.platform == "win32": @@ -122,3 +123,15 @@ def memcached_cache(memcached_url): backend = MemcachedBackend(host=host, port=port, namespace="test-ns") yield backend backend.clear() + +@pytest.fixture(scope="session") +def mongo_url(): + with MongoDbContainer(username='test', password='test', dbname='testdb') as container: + db_url = container.get_connection_url() + # Always add authSource to be explicit + if not db_url.endswith('/testdb'): + db_url = f"{db_url}/testdb" + if "authSource" not in db_url: + db_url = f"{db_url}?authSource=admin" + print(f"\n[TEST] MongoDB URL: {db_url}") + yield db_url \ No newline at end of file diff --git a/tests/integration/test_integration_mongodb.py b/tests/integration/test_integration_mongodb.py new file mode 100644 index 
0000000..c2865d4 --- /dev/null +++ b/tests/integration/test_integration_mongodb.py @@ -0,0 +1,154 @@ +import os +import time +import pytest +from fastapi.testclient import TestClient +from testcontainers.mongodb import MongoDbContainer + +from examples.main import app +from fast_cache import MongoDBBackend, cache + + +@pytest.fixture +def client(): + with MongoDbContainer(username='test', password='test', dbname='testdb') as container: + db_url = container.get_connection_url() + if not db_url.endswith('/testdb'): + db_url = f"{db_url}/testdb" + if "authSource" not in db_url: + db_url = f"{db_url}?authSource=admin" + print(f"\n[TEST] MongoDB URL: {db_url}") + backend = MongoDBBackend(db_url, namespace="integration-demo") + cache.init_app(app=app, backend=backend, default_expire=120) + with TestClient(app) as c: + yield c + + + +def test_decorator_async_cache(client): + # First call: not cached + resp1 = client.get("/decorator/async", params={"x": 10}) + assert resp1.status_code == 200 + val1 = resp1.json()["result"] + + # Second call: should be cached + resp2 = client.get("/decorator/async", params={"x": 10}) + assert resp2.status_code == 200 + assert resp2.json()["result"] == val1 + + # Wait for cache to expire + time.sleep(10.1) + resp3 = client.get("/decorator/async", params={"x": 10}) + assert resp3.status_code == 200 + assert resp3.json()["result"] == val1 # Function is deterministic + + +def test_decorator_sync_cache(client): + resp1 = client.get("/decorator/sync", params={"x": 7}) + assert resp1.status_code == 200 + val1 = resp1.json()["result"] + + resp2 = client.get("/decorator/sync", params={"x": 7}) + assert resp2.status_code == 200 + assert resp2.json()["result"] == val1 + + +def test_decorator_custom_key(client): + resp1 = client.get("/decorator/custom", params={"x": 42}) + assert resp1.status_code == 200 + val1 = resp1.json()["custom_key"] + + # Should be cached even if called again + resp2 = client.get("/decorator/custom", params={"x": 42}) + assert 
resp2.status_code == 200 + assert resp2.json()["custom_key"] == val1 + + +def test_decorator_skip_cache(client): + resp1 = client.get("/decorator/skip", params={"x": 3}) + assert resp1.status_code == 200 + val1 = resp1.json()["result"] + + # Should be cached + resp2 = client.get("/decorator/skip", params={"x": 3}) + assert resp2.status_code == 200 + assert resp2.json()["result"] == val1 + + # Skip cache + resp3 = client.get("/decorator/skip", params={"x": 3, "skip_cache": True}) + assert resp3.status_code == 200 + assert resp3.json()["result"] == val1 + + +def test_decorator_pydantic(client): + resp = client.get("/decorator/pydantic", params={"name": "foo", "value": 123}) + assert resp.status_code == 200 + data = resp.json() + assert data["name"] == "foo" + assert data["value"] == 123 + + +def test_di_set_get_has_delete_clear(client): + # Set a value + resp = client.get("/di/set", params={"key": "foo", "value": "bar"}) + assert resp.status_code == 200 + assert resp.json()["set"] is True + + # Get the value + resp = client.get("/di/get", params={"key": "foo"}) + assert resp.status_code == 200 + assert resp.json()["value"] == "bar" + + # Check existence + resp = client.get("/di/has", params={"key": "foo"}) + assert resp.status_code == 200 + assert resp.json()["exists"] is True + + # Delete the key + resp = client.delete("/di/delete", params={"key": "foo"}) + assert resp.status_code == 200 + assert resp.json()["deleted"] is True + + # Should not exist now + resp = client.get("/di/has", params={"key": "foo"}) + assert resp.status_code == 200 + assert resp.json()["exists"] is False + + # Set again and clear all + client.get("/di/set", params={"key": "foo", "value": "bar"}) + resp = client.post("/di/clear") + assert resp.status_code == 200 + assert resp.json()["cleared"] is True + + # Should not exist after clear + resp = client.get("/di/has", params={"key": "foo"}) + assert resp.status_code == 200 + assert resp.json()["exists"] is False + + +def 
import time

import pytest

from fast_cache import MongoDBBackend


@pytest.fixture
def cache(mongo_url):
    """Provide a clean MongoDBBackend per test.

    NOTE: this must be a *synchronous* fixture. pytest-asyncio does not
    resolve async fixtures for synchronous test functions, so the previous
    async fixture handed the sync tests an un-awaited async generator
    instead of a backend instance.
    """
    backend = MongoDBBackend(mongo_url, namespace="my_cache")
    backend.clear()
    yield backend
    backend.clear()
    backend.close()


def test_set_and_get(cache):
    cache.set("foo", "bar")
    assert cache.get("foo") == "bar"


def test_delete(cache):
    cache.set("foo", "bar")
    cache.delete("foo")
    assert cache.get("foo") is None


def test_clear(cache):
    cache.set("foo", "bar")
    cache.set("baz", "qux")
    cache.clear()
    assert cache.get("foo") is None
    assert cache.get("baz") is None


def test_has(cache):
    cache.set("foo", "bar")
    assert cache.has("foo")
    cache.delete("foo")
    assert not cache.has("foo")


def test_expire(cache):
    # Expiration is enforced in code, so a 1s TTL is observable immediately
    # after the deadline even before MongoDB's TTL monitor runs.
    cache.set("foo", "bar", expire=1)
    assert cache.get("foo") == "bar"
    time.sleep(1.1)
    assert cache.get("foo") is None


@pytest.mark.asyncio
async def test_async_set_and_get(cache):
    await cache.aset("foo", "bar")
    assert await cache.aget("foo") == "bar"
    await cache.aclear()
    assert await cache.aget("foo") is None