Merge branch 'sqlite-tests' into main
JWCook committed Mar 1, 2021
2 parents 61b73a0 + d31d9d0 commit 0cc2844
Showing 10 changed files with 119 additions and 67 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/build.yml
@@ -65,10 +65,10 @@ jobs:
run: flake8 aiohttp_client_cache
- name: Generate code coverage report
run: pytest --cov --cov-report=term --cov-report=html
# - name: Send code coverage report to Coveralls
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# run: coveralls
- name: Send code coverage report to Coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: coveralls
- name: Test Sphinx documentation build
run: make -C docs all
- name: Test package build
1 change: 1 addition & 0 deletions HISTORY.md
@@ -4,6 +4,7 @@
* Refactor SQLite backend to use `aiosqlite` for async cache operations
* Refactor MongoDB backend to use `motor` for async cache operations
* Refactor Redis backend to use `aioredis` for async cache operations
* Add integration tests and `docker-compose` for local test servers

## 0.1.0 (2020-11-14)
* Initial PyPI release
6 changes: 3 additions & 3 deletions README.md
@@ -5,7 +5,7 @@
[![PyPI](https://img.shields.io/pypi/v/aiohttp-client-cache?color=blue)](https://pypi.org/project/aiohttp-client-cache)
[![PyPI - Python Versions](https://img.shields.io/pypi/pyversions/aiohttp-client-cache)](https://pypi.org/project/aiohttp-client-cache)
[![PyPI - Format](https://img.shields.io/pypi/format/aiohttp-client-cache?color=blue)](https://pypi.org/project/aiohttp-client-cache)
<!--- [![Coverage Status](https://coveralls.io/repos/github/JWCook/aiohttp-client-cache/badge.svg?branch=master)](https://coveralls.io/github/JWCook/aiohttp-client-cache?branch=master) --->
[![Coverage Status](https://coveralls.io/repos/github/JWCook/aiohttp-client-cache/badge.svg?branch=main)](https://coveralls.io/github/JWCook/aiohttp-client-cache?branch=main)

See full documentation at https://aiohttp-client-cache.readthedocs.io

@@ -42,7 +42,7 @@ See [Contributing](https://github.com/JWCook/aiohttp-client-cache/blob/main/READ
for setup info for local development.

## Usage example
See the [examples](https://github.com/JWCook/aiohttp-client-cache/blob/master/examples)
See the [examples](https://github.com/JWCook/aiohttp-client-cache/blob/main/examples)
folder for more detailed usage examples.

Here is a simple example using an endpoint that takes 1 second to fetch.
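(The example code itself falls outside this diff hunk. For context, a minimal sketch of the kind of usage the README describes; the endpoint URL and backend choice here are illustrative, not taken from this diff:)

```python
import asyncio

from aiohttp_client_cache import CachedSession, SQLiteBackend

async def main():
    async with CachedSession(cache=SQLiteBackend()) as session:
        # First request hits the slow endpoint (~1 second)
        await session.get('http://httpbin.org/delay/1')
        # Repeat request is served from the cache, near-instantly
        await session.get('http://httpbin.org/delay/1')

asyncio.run(main())
```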
@@ -114,4 +114,4 @@ for the original `requests-cache`!
This project is licensed under the MIT license, with the exception of
[storage backend code](https://github.com/reclosedev/requests-cache/tree/master/requests_cache/backends/storage)
adapted from `requests-cache`, which is licensed under the BSD license
([copy included](https://github.com/JWCook/aiohttp-client-cache/blob/master/requests_cache.md)).
([copy included](https://github.com/JWCook/aiohttp-client-cache/blob/main/requests_cache.md)).
5 changes: 5 additions & 0 deletions aiohttp_client_cache/backends/base.py
@@ -1,4 +1,5 @@
import hashlib
import pickle
from abc import ABCMeta, abstractmethod
from collections import UserDict
from datetime import timedelta
@@ -270,6 +271,10 @@ async def values(self) -> Iterable[ResponseOrKey]:
async def write(self, key: str, item: ResponseOrKey):
"""Write an item to the cache"""

@staticmethod
def unpickle(result):
return pickle.loads(bytes(result)) if result else None

async def pop(self, key: str, default=None) -> ResponseOrKey:
"""Delete an item from the cache, and return the deleted item"""
try:
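The new `BaseCache.unpickle` helper consolidates the None-safe deserialization that the DynamoDB and Redis backends each reimplemented (see their diffs below). A quick sketch of the behavior it guarantees:

```python
import pickle

from aiohttp_client_cache.backends.base import BaseCache

# unpickle is a static method, so it can be exercised directly on the class
stored = pickle.dumps({'status': 200}, protocol=-1)
assert BaseCache.unpickle(stored) == {'status': 200}

# Empty results (cache misses) pass through as None instead of raising
assert BaseCache.unpickle(None) is None
assert BaseCache.unpickle(b'') is None
```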
9 changes: 4 additions & 5 deletions aiohttp_client_cache/backends/dynamodb.py
@@ -97,9 +97,8 @@ def _scan_table(self) -> Dict:
)

@staticmethod
def _unpickle_item(response_item: Dict) -> ResponseOrKey:
value_obj = (response_item or {}).get('value')
return pickle.loads(value_obj.value) if value_obj else None
def unpickle(response_item: Dict) -> ResponseOrKey:
return BaseCache.unpickle((response_item or {}).get('value'))

async def clear(self):
response = self._scan_table()
@@ -123,7 +122,7 @@ async def keys(self) -> Iterable[str]:

async def read(self, key: str) -> ResponseOrKey:
response = self._table.get_item(Key={'namespace': self.namespace, 'key': str(key)})
return self._unpickle_item(response.get('Item'))
return self.unpickle(response.get('Item'))

async def size(self) -> int:
expression_attribute_values = {':Namespace': self.namespace}
@@ -138,7 +137,7 @@ async def size(self) -> int:

async def values(self) -> Iterable[ResponseOrKey]:
response = self._scan_table()
return [self._unpickle_item(item) for item in response.get('Items', [])]
return [self.unpickle(item) for item in response.get('Items', [])]

async def write(self, key: str, item: ResponseOrKey):
item_meta = {
2 changes: 1 addition & 1 deletion aiohttp_client_cache/backends/gridfs.py
@@ -62,7 +62,7 @@ async def read(self, key: str) -> ResponseOrKey:
result = self.fs.find_one({'_id': key})
if result is None:
raise KeyError
return pickle.loads(bytes(result.read()))
return self.unpickle(bytes(result.read()))

async def size(self) -> int:
return self.db['fs.files'].count()
2 changes: 1 addition & 1 deletion aiohttp_client_cache/backends/mongo.py
@@ -75,7 +75,7 @@ class MongoDBPickleCache(MongoDBCache):
"""Same as :py:class:`MongoDBCache`, but pickles values before saving"""

async def read(self, key):
return pickle.loads(bytes(await super().read(key)))
return self.unpickle(bytes(await super().read(key)))

async def write(self, key, item):
await super().write(key, pickle.dumps(item, protocol=-1))
8 changes: 2 additions & 6 deletions aiohttp_client_cache/backends/redis.py
@@ -45,10 +45,6 @@ def __init__(
self.connection_kwargs = kwargs
self.hash_key = f'{namespace}:{collection_name}'

@staticmethod
def _unpickle_result(result):
return pickle.loads(bytes(result)) if result else None

async def get_connection(self):
"""Lazy-initialize redis connection"""
if not self._connection:
@@ -76,15 +72,15 @@ async def keys(self) -> Iterable[str]:
async def read(self, key: str) -> ResponseOrKey:
connection = await self.get_connection()
result = await connection.hget(self.hash_key, key)
return self._unpickle_result(result)
return self.unpickle(result)

async def size(self) -> int:
connection = await self.get_connection()
return await connection.hlen(self.hash_key)

async def values(self) -> Iterable[ResponseOrKey]:
connection = await self.get_connection()
return [self._unpickle_result(v) for v in await connection.hvals(self.hash_key)]
return [self.unpickle(v) for v in await connection.hvals(self.hash_key)]

async def write(self, key: str, item: ResponseOrKey):
connection = await self.get_connection()
73 changes: 26 additions & 47 deletions aiohttp_client_cache/backends/sqlite.py
@@ -28,7 +28,7 @@ class SQLiteBackend(CacheBackend):
def __init__(self, cache_name: str = 'aiohttp-cache', **kwargs):
super().__init__(cache_name=cache_name, **kwargs)
path, ext = splitext(cache_name)
cache_path = f'{path}.{ext or "sqlite"}'
cache_path = f'{path}{ext or ".sqlite"}'

self.responses = SQLitePickleCache(cache_path, 'responses')
self.redirects = SQLiteCache(cache_path, 'redirects')
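The one-character move above fixes a path bug: `os.path.splitext` returns the extension with its leading dot, so the old f-string inserted a second dot. A minimal illustration:

```python
from os.path import splitext

path, ext = splitext('test.db')                     # -> ('test', '.db')
assert f'{path}.{ext or "sqlite"}' == 'test..db'    # old: doubled dot
assert f'{path}{ext or ".sqlite"}' == 'test.db'     # new: extension kept as-is

# Names without an extension still get the default suffix
path, ext = splitext('aiohttp-cache')               # -> ('aiohttp-cache', '')
assert f'{path}{ext or ".sqlite"}' == 'aiohttp-cache.sqlite'
```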
@@ -51,58 +51,35 @@ class SQLiteCache(BaseCache):
def __init__(self, filename: str, table_name: str):
self.filename = filename
self.table_name = table_name
self.can_commit = True # Transactions can be committed if this is set to `True`
self._can_commit = True # Transactions can be committed if this is set to `True`

self._bulk_commit = False
self._initialized = False
self._pending_connection = None
self._connection = None
self._lock = asyncio.Lock()

async def _get_pending_connection(self):
"""Use/create pending connection if doing a bulk commit"""
if not self._pending_connection:
self._pending_connection = await aiosqlite.connect(self.filename)
return self._pending_connection

async def _close_pending_connection(self):
if self._pending_connection:
await self._pending_connection.close()
self._pending_connection = None

async def _init_connection(self, db: aiosqlite.Connection):
"""Create table if this is the first connection opened, and set fast save if specified"""
await db.execute('PRAGMA synchronous = 0;')
if not self._initialized:
await db.execute(
f'CREATE TABLE IF NOT EXISTS `{self.table_name}` (key PRIMARY KEY, value)'
)
self._initialized = True
return db

@asynccontextmanager
async def get_connection(self, autocommit: bool = False) -> AsyncIterator[aiosqlite.Connection]:
async with self._lock:
if self._bulk_commit:
db = await self._get_pending_connection()
else:
db = await aiosqlite.connect(self.filename)
db = self._connection if self._connection else await aiosqlite.connect(self.filename)
try:
yield await self._init_connection(db)
if autocommit and self.can_commit:
yield await self._init_db(db)
if autocommit and self._can_commit:
await db.commit()
finally:
if not self._bulk_commit:
await db.close()

async def commit(self, force: bool = False):
"""
Commits pending transaction if :attr:`can_commit` or `force` is `True`
Args:
force: force commit, ignore :attr:`can_commit`
"""
if (force or self.can_commit) and self._pending_connection:
await self._pending_connection.commit()
async def _init_db(self, db: aiosqlite.Connection):
"""Create table if this is the first connection opened, and set fast save if possible"""
if not self._bulk_commit:
await db.execute('PRAGMA synchronous = 0;')
if not self._initialized:
await db.execute(
f'CREATE TABLE IF NOT EXISTS `{self.table_name}` (key PRIMARY KEY, value)'
)
self._initialized = True
return db

@asynccontextmanager
async def bulk_commit(self):
@@ -111,21 +88,23 @@ async def bulk_commit(self):
Example:
>>> d1 = SQLiteCache('test')
>>> async with d1.bulk_commit():
>>> cache = SQLiteCache('test')
>>> async with cache.bulk_commit():
... for i in range(1000):
... d1[i] = i * 2
... await cache.write(f'key_{i}', str(i * 2))
"""
self._bulk_commit = True
self.can_commit = False
self._can_commit = False
self._connection = await aiosqlite.connect(self.filename)
try:
yield
await self.commit(force=True)
await self._connection.commit()
finally:
self._bulk_commit = False
self.can_commit = True
await self._close_pending_connection()
self._can_commit = True
await self._connection.close()
self._connection = None

async def clear(self):
async with self.get_connection(autocommit=True) as db:
@@ -183,7 +162,7 @@ async def read(self, key: str) -> ResponseOrKey:
async def values(self) -> Iterable[ResponseOrKey]:
async with self.get_connection() as db:
cur = await db.execute(f'select value from `{self.table_name}`')
return [row[0] for row in await cur.fetchall()]
return [self.unpickle(row[0]) for row in await cur.fetchall()]

async def write(self, key, item):
binary_item = sqlite3.Binary(pickle.dumps(item, protocol=-1))
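The net effect of this refactor: instead of a separate "pending" connection plus a `commit()` method with a `force` flag, `bulk_commit` now opens one shared connection that `get_connection` reuses, and commits exactly once on exit. A usage sketch (the filename is illustrative):

```python
import asyncio

from aiohttp_client_cache.backends.sqlite import SQLiteCache

async def main():
    cache = SQLiteCache('demo.sqlite', 'responses')
    async with cache.bulk_commit():
        # Each write reuses the single shared connection; nothing is
        # committed until the block exits
        for i in range(1000):
            await cache.write(f'key_{i}', str(i * 2))
    print(await cache.size())  # -> 1000

asyncio.run(main())
```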
72 changes: 72 additions & 0 deletions test/integration/test_sqlite_backend.py
@@ -0,0 +1,72 @@
import pytest
from datetime import datetime
from tempfile import NamedTemporaryFile

from aiohttp_client_cache.backends.sqlite import SQLiteBackend, SQLitePickleCache

pytestmark = pytest.mark.asyncio
test_data = {'key_1': 'item_1', 'key_2': datetime.now(), 'key_3': 3.141592654}


@pytest.fixture(autouse=True, scope='function')
async def cache_client():
"""Fixture that creates a new db client for each test function"""
with NamedTemporaryFile(suffix='.db') as temp:
cache_client = SQLitePickleCache(temp.name, 'responses')
await cache_client.clear()
yield cache_client
await cache_client.clear()


def test_sqlite_backend():
with NamedTemporaryFile(suffix='.db') as temp:
backend = SQLiteBackend(cache_name=temp.name)
assert backend.responses.filename == temp.name
assert backend.redirects.filename == temp.name


async def test_write_read(cache_client):
# Test write() and contains()
for k, v in test_data.items():
await cache_client.write(k, v)
assert await cache_client.contains(k) is True

# Test read()
for k, v in test_data.items():
assert await cache_client.read(k) == v


async def test_bulk_commit(cache_client):
async with cache_client.bulk_commit():
for i in range(1000):
await cache_client.write(f'key_{i}', str(i * 2))

assert await cache_client.size() == 1000


async def test_delete(cache_client):
for k, v in test_data.items():
await cache_client.write(k, v)

for k in test_data.keys():
await cache_client.delete(k)
assert await cache_client.contains(k) is False


async def test_keys_values_size(cache_client):
for k, v in test_data.items():
await cache_client.write(k, v)

assert await cache_client.size() == len(test_data)
assert await cache_client.keys() == list(test_data.keys())
assert await cache_client.values() == list(test_data.values())


async def test_clear(cache_client):
for k, v in test_data.items():
await cache_client.write(k, v)

await cache_client.clear()
assert await cache_client.size() == 0
assert await cache_client.keys() == []
assert await cache_client.values() == []
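Unlike the Redis, MongoDB, and DynamoDB integration tests that the new `docker-compose` setup supports, these SQLite tests need no external server, so they should run directly with `pytest test/integration/test_sqlite_backend.py` (assuming `pytest-asyncio` is installed, since the module sets `pytestmark = pytest.mark.asyncio`).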
