-
Notifications
You must be signed in to change notification settings - Fork 20
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
13 changed files
with
163 additions
and
62 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,76 +1,95 @@ | ||
import pickle | ||
from typing import Iterable | ||
|
||
from redis import Redis, StrictRedis | ||
from aioredis import Redis, create_redis_pool | ||
|
||
from aiohttp_client_cache.backends import BaseCache, CacheBackend, ResponseOrKey | ||
from aiohttp_client_cache.forge_utils import extend_signature | ||
|
||
DEFAULT_ADDRESS = 'redis://localhost' | ||
|
||
class RedisBackend(CacheBackend):
    """Async Redis cache backend.

    See :py:class:`.CacheBackend` for additional args.
    """

    @extend_signature(CacheBackend.__init__)
    def __init__(self, cache_name: str = 'aiohttp-cache', address: str = DEFAULT_ADDRESS, **kwargs):
        """
        Args:
            cache_name: Redis namespace used for both hash maps
            address: Address of the Redis server, e.g. ``redis://localhost``
        """
        super().__init__(cache_name=cache_name, **kwargs)
        # Responses and redirects live in two separate hashes under the same namespace.
        # NOTE(review): **kwargs is forwarded both to CacheBackend and (via RedisCache)
        # to the redis connection factory — confirm the two argument sets don't collide.
        self.responses = RedisCache(cache_name, 'responses', address=address, **kwargs)
        self.redirects = RedisCache(cache_name, 'redirects', address=address, **kwargs)
|
||
|
||
class RedisCache(BaseCache):
    """An async interface for caching objects in Redis.

    Values are pickled before being written, so any picklable object may be stored.
    Keys are stored as plain strings (hash field names), not pickled.

    Args:
        namespace: Namespace to use
        collection_name: Name of the hash map stored in redis
        address: Address of the Redis server (e.g. ``redis://localhost``)
        connection: An existing connection object to reuse instead of creating a new one
        kwargs: Additional keyword arguments for the redis connection pool

    Note: The hash key name on the redis server will be ``namespace:collection_name``.
    """

    def __init__(
        self,
        namespace: str,
        collection_name: str,
        address: str = None,
        connection: Redis = None,
        **kwargs,
    ):
        self.address = address
        self._connection = connection
        self.connection_kwargs = kwargs
        self.hash_key = f'{namespace}:{collection_name}'

    @staticmethod
    def _unpickle_result(result) -> ResponseOrKey:
        """Deserialize a raw redis value; ``None``/empty replies map to ``None``"""
        # SECURITY: pickle.loads can execute arbitrary code; this is only acceptable
        # because the cache server is assumed to be trusted (local/private)
        return pickle.loads(bytes(result)) if result else None

    async def get_connection(self) -> Redis:
        """Lazily initialize the redis connection pool on first use"""
        if not self._connection:
            self._connection = await create_redis_pool(self.address, **self.connection_kwargs)
        return self._connection

    async def clear(self):
        """Remove all cached items in this collection"""
        connection = await self.get_connection()
        # Deleting the hash key itself clears every field in one atomic operation,
        # avoiding the keys()+HDEL round trip (which could race with concurrent writes)
        await connection.delete(self.hash_key)

    async def contains(self, key: str) -> bool:
        connection = await self.get_connection()
        # HEXISTS replies with an integer; coerce so callers using identity
        # checks (`is True` / `is False`) behave as expected
        return bool(await connection.hexists(self.hash_key, key))

    async def delete(self, key: str):
        connection = await self.get_connection()
        await connection.hdel(self.hash_key, key)

    async def keys(self) -> Iterable[str]:
        connection = await self.get_connection()
        # Field names come back as bytes; decode to str for callers
        return [k.decode() for k in await connection.hkeys(self.hash_key)]

    async def read(self, key: str) -> ResponseOrKey:
        connection = await self.get_connection()
        result = await connection.hget(self.hash_key, key)
        return self._unpickle_result(result)

    async def size(self) -> int:
        connection = await self.get_connection()
        return await connection.hlen(self.hash_key)

    async def values(self) -> Iterable[ResponseOrKey]:
        connection = await self.get_connection()
        return [self._unpickle_result(v) for v in await connection.hvals(self.hash_key)]

    async def write(self, key: str, item: ResponseOrKey):
        connection = await self.get_connection()
        await connection.hset(
            self.hash_key,
            key,
            pickle.dumps(item, protocol=-1),
        )
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,84 @@ | ||
import asyncio | ||
import pytest | ||
from datetime import datetime | ||
|
||
from aioredis import create_redis_pool | ||
|
||
from aiohttp_client_cache.backends.redis import DEFAULT_ADDRESS, RedisBackend, RedisCache | ||
|
||
|
||
def is_db_running():
    """Check whether a Redis server is reachable locally on the default port"""

    async def _ping_server():
        client = await create_redis_pool('redis://localhost')
        await client.info()

    try:
        asyncio.run(_ping_server())
    except OSError:
        # Connection refused / unreachable: no server available
        return False
    else:
        return True
|
||
|
||
# Skip every test in this module unless a local Redis server is reachable
pytestmark = [
    pytest.mark.asyncio,
    pytest.mark.skipif(not is_db_running(), reason='Redis server required for integration tests'),
]

# Values of a few different picklable types to round-trip through the cache
test_data = {'key_1': 'item_1', 'key_2': datetime.now(), 'key_3': 3.141592654}
|
||
|
||
@pytest.fixture(autouse=True, scope='function')
async def cache_client():
    """Provide a fresh, empty RedisCache client for each test function"""
    client = RedisCache('aiohttp-cache', 'responses', 'redis://localhost')
    # Start clean, hand the client to the test, then clean up after it
    await client.clear()
    yield client
    await client.clear()
|
||
|
||
def test_redis_backend():
    """A default-constructed backend should use the default address and namespaced hash keys"""
    backend = RedisBackend()
    assert backend.responses.address == DEFAULT_ADDRESS
    # Also verify the redirects cache got the same address (previously untested)
    assert backend.redirects.address == DEFAULT_ADDRESS
    assert backend.responses.hash_key == 'aiohttp-cache:responses'
    assert backend.redirects.hash_key == 'aiohttp-cache:redirects'
|
||
|
||
async def test_write_read(cache_client):
    """Round-trip each item through write(), contains(), and read()"""
    for key, value in test_data.items():
        await cache_client.write(key, value)
        assert await cache_client.contains(key) is True

    # Everything written should read back equal
    for key, value in test_data.items():
        assert await cache_client.read(key) == value
|
||
|
||
async def test_delete(cache_client):
    """Deleted keys should no longer be reported as present"""
    for key, value in test_data.items():
        await cache_client.write(key, value)

    for key in test_data:
        await cache_client.delete(key)
        assert await cache_client.contains(key) is False
|
||
|
||
async def test_keys_values_size(cache_client):
    """size(), keys(), and values() should reflect everything written"""
    for key, value in test_data.items():
        await cache_client.write(key, value)

    expected_keys = list(test_data.keys())
    expected_values = list(test_data.values())
    assert await cache_client.size() == len(test_data)
    assert await cache_client.keys() == expected_keys
    assert await cache_client.values() == expected_values
|
||
|
||
async def test_clear(cache_client):
    """clear() should leave the cache completely empty"""
    for key, value in test_data.items():
        await cache_client.write(key, value)

    await cache_client.clear()
    # No items, keys, or values should remain
    assert await cache_client.size() == 0
    assert await cache_client.keys() == []
    assert await cache_client.values() == []
File renamed without changes.