Examples as acceptance tests
argaen committed Oct 30, 2016
1 parent d6e5691 commit 24e5011
Showing 14 changed files with 167 additions and 92 deletions.
1 change: 1 addition & 0 deletions .travis.yml
@@ -11,6 +11,7 @@ script:
- python setup.py develop
- bash examples/run_all.sh
- pytest --cov=aiocache tests/
- pytest examples/

services:
- redis-server
1 change: 1 addition & 0 deletions Makefile
@@ -15,6 +15,7 @@ integration:
test:
docker-compose -f docker-compose.yml up -d
pytest -sv tests
pytest -sv examples
docker-compose -f docker-compose.yml stop

cov:
4 changes: 2 additions & 2 deletions aiocache/backends/memcached.py
@@ -47,7 +47,7 @@ async def set(self, key, value, ttl=0):
:param ttl: int
:returns: True
"""
value = value if isinstance(value, bytes) else str.encode(value)
value = str.encode(value) if isinstance(value, str) else value
return await self.client.set(key, value, exptime=ttl or 0)

async def multi_set(self, pairs, dumps_fn=None):
@@ -58,7 +58,7 @@ async def multi_set(self, pairs, dumps_fn=None):
:returns: True
"""
for key, value in pairs:
value = value if isinstance(value, bytes) else str.encode(value)
value = str.encode(value) if isinstance(value, str) else value
await self.client.set(key, value)

return True
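The reordered expression in both hunks changes which values get coerced: the old form called str.encode on anything that was not bytes, so any non-string, non-bytes value raised TypeError, while the new form only encodes str and passes every other value through untouched. A small illustration of the difference, using hypothetical values that are not part of the diff:

def coerce_old(value):
    # previous behaviour: encode anything that is not bytes
    return value if isinstance(value, bytes) else str.encode(value)

def coerce_new(value):
    # new behaviour: encode only str, leave other values untouched
    return str.encode(value) if isinstance(value, str) else value

assert coerce_new("text") == b"text"
assert coerce_new(b"raw") == b"raw"
assert coerce_new(5) == 5   # passed through; coerce_old(5) would raise TypeError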
24 changes: 12 additions & 12 deletions aiocache/utils.py
@@ -8,25 +8,25 @@
logger = logging.getLogger(__name__)


def cached(*args, ttl=0, key=None, key_attribute=None, backend=None, serializer=None, **kwargs):
def cached(*args, ttl=0, key=None, key_attribute=None, cache=None, serializer=None, **kwargs):
"""
Caches the function's return value into a key generated with module_name, function_name and args.
In some cases you will need to send more args than just the ttl, backend and serializer.
An example would be endpoint and port for the RedisBackend. This extra args will be propagated
to the backend class when instantiating.
In some cases you will need to send more args than just the ttl, cache and serializer.
An example would be endpoint and port for the RedisCache. These extra args will be propagated
to the cache class when instantiating.
:param ttl: int seconds to store the function call. Default is 0
:param key: str value to set as key for the function return. Takes precedence over
key_attribute param.
:param key_attribute: keyword attribute from the function to use as a key. If not passed,
it will use module_name + function_name + args + kwargs
:param backend: backend class to use when calling the ``set``/``get`` operations. Default is
:class:`aiocache.backends.SimpleMemoryCache`
:param cache: cache class to use when calling the ``set``/``get`` operations. Default is
:class:`aiocache.SimpleMemoryCache`
:param serializer: serializer instance to use when calling the ``serialize``/``deserialize``.
Default is :class:`aiocache.serializers.DefaultSerializer`
Default is :class:`aiocache.serializers.DefaultSerializer`
"""
cache = get_default_cache(cache=backend, serializer=serializer, *args, **kwargs)
cache = get_default_cache(cache=cache, serializer=serializer, *args, **kwargs)

def cached_decorator(fn):
async def wrapper(*args, **kwargs):
@@ -42,7 +42,7 @@ async def wrapper(*args, **kwargs):
return cached_decorator


def multi_cached(keys_attribute, backend=None, serializer=None, **kwargs):
def multi_cached(keys_attribute, cache=None, serializer=None, **kwargs):
"""
Only supports functions that return dict-like structures. This decorator caches each key/value
of the dict-like object returned by the function.
@@ -54,12 +54,12 @@ def multi_cached(keys_attribute, backend=None, serializer=None, **kwargs):
:param keys_attribute: str attribute from the function containing an iterable to use
as keys.
:param backend: backend class to use when calling the ``set``/``get`` operations. Default is
:class:`aiocache.backends.SimpleMemoryCache`
:param cache: cache class to use when calling the ``set``/``get`` operations. Default is
:class:`aiocache.SimpleMemoryCache`
:param serializer: serializer instance to use when calling the ``serialize``/``deserialize``.
Default is :class:`aiocache.serializers.DefaultSerializer`
"""
cache = get_default_cache(cache=backend, serializer=serializer, **kwargs)
cache = get_default_cache(cache=cache, serializer=serializer, **kwargs)

def multi_cached_decorator(fn):
async def wrapper(*args, **kwargs):
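The rename from backend to cache also changes how the decorators are invoked. A minimal sketch of the updated keyword in use, assuming cached is imported from aiocache.utils (where the diff defines it); the ttl, key, endpoint and port values are illustrative, not taken from the commit:

import asyncio

from aiocache import RedisCache
from aiocache.serializers import PickleSerializer
from aiocache.utils import cached


# illustrative values; extra kwargs (endpoint, port) are forwarded to the cache class
@cached(ttl=10, key="report", cache=RedisCache, serializer=PickleSerializer(),
        endpoint="127.0.0.1", port=6379)
async def build_report():
    return {"status": "ok"}

Extra keyword arguments such as endpoint and port are propagated to the cache class when it is instantiated, exactly as the updated docstring describes.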
20 changes: 11 additions & 9 deletions examples/cached_decorator.py
@@ -6,18 +6,20 @@
from aiocache.serializers import PickleSerializer

Result = namedtuple('Result', "content, status")
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")


@cached(ttl=10, backend=RedisCache, serializer=PickleSerializer(), port=6379)
async def async_main():
print("ASYNC non cached call...")
await asyncio.sleep(1)
@cached(
ttl=10, cache=RedisCache, key="key", serializer=PickleSerializer(), port=6379, namespace="main")
async def cached_call():
return Result("content", 200)


if __name__ == "__main__":
def test_cached():
loop = asyncio.get_event_loop()
print(loop.run_until_complete(async_main()))
print(loop.run_until_complete(async_main()))
print(loop.run_until_complete(async_main()))
print(loop.run_until_complete(async_main()))
loop.run_until_complete(cached_call())
assert loop.run_until_complete(cache.exists("key")) is True
loop.run_until_complete(cache.delete("key"))

if __name__ == "__main__":
test_cached()
36 changes: 21 additions & 15 deletions examples/config_default_cache.py
@@ -1,3 +1,8 @@
"""
This module demonstrates the usage of the ``config_default_cache`` call. This call sets a default
cache for the whole aiocache package. Once called, all decorators will use the default cache (unless
an alternative cache is set), and it can also be used explicitly through ``aiocache.default_cache``.
"""
import asyncio
import aiocache

@@ -9,24 +14,25 @@

aiocache.config_default_cache()


@cached(ttl=10, key="key")
async def decorator():
return Result("content", 200)


async def global_cache():
print("ASYNC non cached call...")
await aiocache.default_cache.set("key", "value")
await asyncio.sleep(1)
return await aiocache.default_cache.get("key")
obj = await aiocache.default_cache.get("key")

assert obj.content == "content"
assert obj.status == 200

@cached(ttl=10)
async def decorator_example():
print("ASYNC non cached call...")
await asyncio.sleep(1)
return Result("content", 200)

def test_default_cache():
loop = asyncio.get_event_loop()
loop.run_until_complete(decorator())
loop.run_until_complete(global_cache())

loop.run_until_complete(aiocache.default_cache.delete("key"))

if __name__ == "__main__":
loop = asyncio.get_event_loop()
print(loop.run_until_complete(global_cache()))
print(loop.run_until_complete(global_cache()))
print(loop.run_until_complete(decorator_example()))
print(loop.run_until_complete(decorator_example()))
print(loop.run_until_complete(decorator_example()))
test_default_cache()
32 changes: 21 additions & 11 deletions examples/multicached_decorator.py
@@ -10,21 +10,31 @@
}


@multi_cached("ids", backend=RedisCache, namespace="main:")
async def async_main(ids=None):
print("ASYNC non cached call...")
@multi_cached("ids", cache=RedisCache, namespace="main")
async def multi_cached_ids(ids=None):
return {id_: DICT[id_] for id_ in ids}


@multi_cached("keys", backend=RedisCache, namespace="main:")
async def async_second_main(keys=None):
print("ASYNC non cached call...")
@multi_cached("keys", cache=RedisCache, namespace="main")
async def multi_cached_keys(keys=None):
return {id_: DICT[id_] for id_ in keys}


if __name__ == "__main__":
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")


def test_multi_cached():
loop = asyncio.get_event_loop()
print(loop.run_until_complete(async_main(ids=['a', 'b'])))
print(loop.run_until_complete(async_main(ids=['a', 'b'])))
print(loop.run_until_complete(async_second_main(keys=['a', 'b'])))
print(loop.run_until_complete(async_second_main(keys=['a', 'd'])))
loop.run_until_complete(multi_cached_ids(ids=['a', 'b']))
loop.run_until_complete(multi_cached_ids(ids=['a', 'c']))
loop.run_until_complete(multi_cached_keys(keys=['d']))

assert loop.run_until_complete(cache.exists('a'))
assert loop.run_until_complete(cache.exists('b'))
assert loop.run_until_complete(cache.exists('c'))
assert loop.run_until_complete(cache.exists('d'))

loop.run_until_complete(cache.delete("key"))

if __name__ == "__main__":
test_multi_cached()
23 changes: 17 additions & 6 deletions examples/policy.py
@@ -4,15 +4,26 @@
from aiocache.policies import LRUPolicy


async def main():
cache = RedisCache(namespace="main:")
cache.set_policy(LRUPolicy, max_keys=2)
cache = RedisCache(namespace="main")
cache.set_policy(LRUPolicy, max_keys=2)


async def policy():
await cache.set("key", "value")
await cache.set("key_1", "value")
await cache.set("key_2", "value")
print(await cache.exists("key"))

assert await cache.exists("key") is False
assert await cache.exists("key_1") is True
assert await cache.exists("key_2") is True

if __name__ == "__main__":

def test_policy():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_until_complete(policy())
loop.run_until_complete(cache.delete("key_1"))
loop.run_until_complete(cache.delete("key_2"))


if __name__ == "__main__":
test_policy()
22 changes: 14 additions & 8 deletions examples/python_object.py
@@ -6,17 +6,23 @@


MyObject = namedtuple("MyObject", ["x", "y"])
cache = RedisCache(serializer=PickleSerializer(), namespace="main")


async def main():
cache = RedisCache(serializer=PickleSerializer(), namespace="main:")
# This will serialize to pickle and store in redis with bytes format
await cache.set("key", MyObject(x=1, y=2))
# This will retrieve the object and deserialize back to MyObject
async def complex_object():
obj = MyObject(x=1, y=2)
await cache.set("key", obj)
my_object = await cache.get("key")
print("MyObject x={}, y={}".format(my_object.x, my_object.y))

assert my_object.x == 1
assert my_object.y == 2

if __name__ == "__main__":

def test_python_object():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_until_complete(complex_object())
loop.run_until_complete(cache.delete("key"))


if __name__ == "__main__":
test_python_object()
21 changes: 15 additions & 6 deletions examples/serializer_class.py
@@ -11,12 +11,21 @@ def loads(self, value):
return 2


async def main():
cache = RedisCache(serializer=MySerializer(), namespace="main:")
await cache.set("key", "value") # Will use MySerializer.dumps method
print(await cache.get("key")) # Will use MySerializer.loads method
cache = RedisCache(serializer=MySerializer(), namespace="main")


if __name__ == "__main__":
async def serializer():
await cache.set("key", "value")

assert await cache.raw("get", "main:key") == b'1'
assert await cache.get("key") == 2


def test_serializer():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_until_complete(serializer())
loop.run_until_complete(cache.delete("key"))


if __name__ == "__main__":
test_serializer()
31 changes: 24 additions & 7 deletions examples/serializer_function.py
@@ -1,6 +1,7 @@
import asyncio
import json

from marshmallow import Schema, fields
from marshmallow import Schema, fields, post_load
from aiocache import RedisCache


@@ -14,22 +15,38 @@ class MyTypeSchema(Schema):
x = fields.Number()
y = fields.Number()

@post_load
def build_object(self, data):
return MyType(data['x'], data['y'])


def dumps(value):
# Current implementation can't deal directly with dicts so we must cast to string
return MyTypeSchema().dumps(value).data


def loads(value):
return MyTypeSchema().loads(value).data


async def main():
cache = RedisCache(namespace="main:")
cache = RedisCache(namespace="main")


async def serializer_function():
await cache.set("key", MyType(1, 2), dumps_fn=dumps)
print(await cache.get("key", loads_fn=loads))

obj = await cache.get("key", loads_fn=loads)

if __name__ == "__main__":
assert obj.x == 1
assert obj.y == 2
assert json.loads((await cache.get("key"))) == json.loads(('{"y": 2.0, "x": 1.0}'))
assert json.loads(bytes.decode(await cache.raw("get", "main:key"))) == {"y": 2.0, "x": 1.0}


def test_serializer_function():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_until_complete(serializer_function())
loop.run_until_complete(cache.delete("key"))


if __name__ == "__main__":
test_serializer_function()
23 changes: 16 additions & 7 deletions examples/simple_redis.py
@@ -3,14 +3,23 @@
from aiocache import RedisCache


async def main():
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main:")
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")


async def redis():
await cache.set("key", "value")
await cache.set("expire_me", "value", ttl=10) # Key will expire after 10 secs
print(await cache.get("key"))
print(await cache.get("expire_me"))
await cache.set("expire_me", "value", ttl=10)

assert await cache.get("key") == "value"
assert await cache.get("expire_me") == "value"
assert await cache.raw("ttl", "main:expire_me") > 0

if __name__ == "__main__":

def test_redis():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_until_complete(redis())
loop.run_until_complete(cache.delete("key"))
loop.run_until_complete(cache.delete("expire_me"))

if __name__ == "__main__":
test_redis()
3 changes: 3 additions & 0 deletions pytest.ini
@@ -0,0 +1,3 @@
[pytest]
testpaths = tests examples
python_files = test_* examples/*
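With testpaths = tests examples, pytest collects both directories by default, and the examples/* entry in python_files lets the example scripts be collected as test modules even though their filenames do not start with test_; the test_* functions added throughout this commit are then run as acceptance tests. A hypothetical example file following the same pattern (the file name and contents are illustrative, not part of this commit):

# examples/minimal_example.py (hypothetical file)
import asyncio


async def do_work():
    return 42


def test_do_work():
    # collected by pytest because of python_files = test_* examples/*
    loop = asyncio.get_event_loop()
    assert loop.run_until_complete(do_work()) == 42


if __name__ == "__main__":
    test_do_work()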