Skip to content

Commit

Permalink
Working MemcachedCache implementation (#20)
Browse files Browse the repository at this point in the history
  • Loading branch information
argaen committed Oct 14, 2016
1 parent 98ef5a1 commit cfb9006
Show file tree
Hide file tree
Showing 12 changed files with 188 additions and 6 deletions.
1 change: 1 addition & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ script:

services:
- redis-server
- memcached

after_success:
- codecov
2 changes: 1 addition & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ Current supported backends are:

- SimpleMemoryCache
- RedisCache using aioredis_
- MemCache using aiomcache_ IN PROGRESS
- MemCache using aiomcache_


This library aims for simplicity over specialization. It provides a common interface for all caches, which allows storing any python object. The operations supported by all backends are:
Expand Down
3 changes: 2 additions & 1 deletion aiocache/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
from .backends import RedisCache, SimpleMemoryCache
from .backends import RedisCache, SimpleMemoryCache, MemcachedCache
from .utils import cached, multi_cached


__all__ = (
'RedisCache',
'SimpleMemoryCache',
'MemcachedCache',
'cached',
'multi_cached',
)
Expand Down
2 changes: 2 additions & 0 deletions aiocache/backends/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
from .redis import RedisCache
from .memory import SimpleMemoryCache
from .memcached import MemcachedCache


__all__ = (
'RedisCache',
'SimpleMemoryCache',
'MemcachedCache',
)
156 changes: 156 additions & 0 deletions aiocache/backends/memcached.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import asyncio
import aiomcache

from .base import BaseCache


class MemcachedCache(BaseCache):
    """
    Cache backend backed by a memcached server, using aiomcache as the
    client library. Keys are namespaced through ``BaseCache._build_key``
    and encoded to bytes because aiomcache only works with bytes.
    """

    def __init__(self, *args, endpoint=None, port=None, loop=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.endpoint = endpoint or "127.0.0.1"
        self.port = port or 11211
        self._loop = loop or asyncio.get_event_loop()
        self.client = aiomcache.Client(self.endpoint, self.port, loop=self._loop)

    @staticmethod
    def _ensure_bytes(value):
        # aiomcache accepts only bytes payloads; serializers may return str.
        return value if isinstance(value, bytes) else str.encode(value)

    async def get(self, key, default=None, loads_fn=None):
        """
        Get a value from the cache. Returns default if not found.

        :param key: str
        :param default: obj to return when key is not found
        :param loads_fn: callable alternative to use as loads function
        :returns: obj deserialized
        """
        loads = loads_fn or self.serializer.loads
        ns_key = self._build_key(key)

        await self.policy.pre_get(key)

        value = await self.client.get(ns_key)
        if value is None:
            # Cache miss: return default directly instead of the previous
            # ``loads(value) or default``, which also clobbered stored values
            # that deserialize to something falsy (0, '', []) and ran the
            # loads function on None.
            return default

        if isinstance(value, bytes):
            value = bytes.decode(value)
        await self.policy.post_get(key)

        return loads(value)

    async def multi_get(self, keys, loads_fn=None):
        """
        Get multiple values from the cache. The result preserves the order
        of ``keys``; missing keys yield whatever the loads function returns
        for ``None``.

        :param keys: list of str
        :param loads_fn: callable alternative to use as loads function
        :returns: list of deserialized objects
        """
        loads = loads_fn or self.serializer.loads

        for key in keys:
            await self.policy.pre_get(key)

        ns_keys = [self._build_key(key) for key in keys]
        values = await self.client.multi_get(*ns_keys)

        decoded_values = []
        for value in values:
            if isinstance(value, bytes):
                value = bytes.decode(value)
            decoded_values.append(loads(value))

        for key in keys:
            await self.policy.post_get(key)

        return decoded_values

    async def set(self, key, value, ttl=None, dumps_fn=None):
        """
        Stores the value in the given key with ttl if specified

        :param key: str
        :param value: obj
        :param ttl: int the expiration time in seconds; 0 (default) never expires
        :param dumps_fn: callable alternative to use as dumps function
        :returns: True
        """
        dumps = dumps_fn or self.serializer.dumps
        ttl = ttl or 0
        ns_key = self._build_key(key)

        await self.policy.pre_set(key, value)

        ret = await self.client.set(ns_key, self._ensure_bytes(dumps(value)), exptime=ttl)

        await self.policy.post_set(key, value)
        return ret

    async def multi_set(self, pairs, dumps_fn=None):
        """
        Stores multiple values in the given keys.

        :param pairs: list of two element iterables. First is key and second is value
        :param dumps_fn: callable alternative to use as dumps function
        :returns: True
        """
        dumps = dumps_fn or self.serializer.dumps

        for key, value in pairs:
            await self.policy.pre_set(key, value)
            # _ensure_bytes mirrors set()/add(): the previous bare
            # ``str.encode(dumps(value))`` raised TypeError whenever the
            # dumps function returned bytes.
            await self.client.set(self._build_key(key), self._ensure_bytes(dumps(value)))
            await self.policy.post_set(key, value)

        return True

    async def add(self, key, value, ttl=None, dumps_fn=None):
        """
        Stores the value in the given key with ttl if specified. Raises an error if the
        key already exists.

        :param key: str
        :param value: obj
        :param ttl: int the expiration time in seconds; 0 (default) never expires
        :param dumps_fn: callable alternative to use as dumps function
        :returns: True if key is inserted
        :raises: ValueError if key already exists
        """
        dumps = dumps_fn or self.serializer.dumps
        ttl = ttl or 0
        ns_key = self._build_key(key)

        await self.policy.pre_set(key, value)

        ret = await self.client.add(ns_key, self._ensure_bytes(dumps(value)), exptime=ttl)
        if not ret:
            raise ValueError(
                "Key {} already exists, use .set to update the value".format(ns_key))

        await self.policy.post_set(key, value)
        return ret

    async def delete(self, key):
        """
        Deletes the given key.

        :param key: Key to be deleted
        :returns: int number of deleted keys (0 or 1)
        """
        return 1 if await self.client.delete(self._build_key(key)) else 0

    async def exists(self, key):
        """
        Check key exists in the cache.

        :param key: str key to check
        :returns: True if key exists otherwise False
        """
        # memcached's append only succeeds when the key already exists, so
        # appending an empty payload doubles as an existence check without
        # modifying the stored value.
        return await self.client.append(self._build_key(key), b'')

    def _build_key(self, key):
        # aiomcache requires bytes keys; encode the namespaced key.
        ns_key = super()._build_key(key)
        return str.encode(ns_key)
4 changes: 4 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,7 @@ services:
image: redis
ports:
- "6379:6379"
memcached:
image: memcached
ports:
- "11211:11211"
7 changes: 7 additions & 0 deletions docs/backends.rst
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,10 @@ SimpleMemoryCache
.. autoclass:: aiocache.SimpleMemoryCache
:members:
:undoc-members:

MemcachedCache
--------------

.. autoclass:: aiocache.MemcachedCache
:members:
:undoc-members:
4 changes: 2 additions & 2 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,9 @@
# built documents.
#
# The short X.Y version.
version = '0.0.4'
version = '0.0.5'
# The full version, including alpha/beta/rc tags.
release = '0.0.4'
release = '0.0.5'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
aioredis==0.2.8
aiomcache==0.4.0
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

setup(
name="aiocache",
version="0.0.4",
version="0.0.5",
author="Manuel Miranda",
url="https://github.com/argaen/aiocache",
author_email="manu.mirandad@gmail.com",
Expand Down
1 change: 1 addition & 0 deletions tests/integration/backends/test_base_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def loads(x):
@pytest.fixture(params=[
'redis_cache',
'memory_cache',
'memcached_cache',
])
def cache(request):
return request.getfuncargvalue(request.param)
Expand Down
11 changes: 10 additions & 1 deletion tests/integration/conftest.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import pytest

from aiocache import RedisCache, SimpleMemoryCache
from aiocache import RedisCache, SimpleMemoryCache, MemcachedCache


def pytest_namespace():
Expand Down Expand Up @@ -29,3 +29,12 @@ def memory_cache(event_loop):

event_loop.run_until_complete(cache.delete(pytest.KEY))
event_loop.run_until_complete(cache.delete(pytest.KEY_1))


@pytest.fixture
def memcached_cache(event_loop):
    """Yield a MemcachedCache bound to the test event loop, removing the shared test keys on teardown."""
    memcached = MemcachedCache(namespace="test", loop=event_loop)
    yield memcached

    # Teardown: clean up any keys the test may have written.
    for test_key in (pytest.KEY, pytest.KEY_1):
        event_loop.run_until_complete(memcached.delete(test_key))

0 comments on commit cfb9006

Please sign in to comment.