Merge pull request #3 from festinuz/dev
Dev
festinuz committed Jul 19, 2017
2 parents 0dff3c3 + 259350e commit c698318
Showing 8 changed files with 192 additions and 6 deletions.
1 change: 1 addition & 0 deletions .travis.yml
@@ -14,3 +14,4 @@ deploy:
secure: arx2X7H7MG+G2FQnOAqG5MuI9WhGOlLZcubjnkLgtbxn8fZXZyw8UQV/w8zkfHSmKCF1DPBfhiCyQcJKT6pGJ+a5S6TnG0xIyF8RHTK5ykI9zddVFTgfagu1u2FLy0BPddeVgfNKD26YEyk4vK5Ab3KmUtgcwTPu+/pX4pJ+gzHBv68f7wN4h/zR3XpJC4tleS+PqOnEO1Io4J8L6d1DhatRLa7NE9Em604Mso7EPKwC7IrroMHO2QG/BZZTRG9fApVHwezEf4rZZygvrHfSA4o03wavfqcxh8SNi9M/9hjVXxtqawUhurvHF/HTfrfNtal0dFpEQbnOte0fuEM4Za38ZSb7gerCNXtqjaVq5X/WaOfgXDiZV2lFniR0ZH9wizKji7HL4lCqmunKCEHlcBk5R3PThnnU3wM8QYs1oOk21p+T1cXe1IvyPcuLfe1VHdxT870beTU6sgppEOqyZI2gh/jWJsbQVNP+Yy7JGOAP3usde61xg94MECv2flNFPXWHZ3nVyYAzNb5KMMEaWuGTz5r6NADS+qu6G4iKwwjP4VljgcdMLu8R1qFmKxaCpR0nv7JTRM4V/GnYzLB7hJygRQTmNY4qHTwfBveo4RLbQINibb+FY4qVodcv+mxQvbby2fU486p7nm6MoRBPaSoctBLLkmvc/aKqQb1Jk3k=
on:
tags: true
branch: master
30 changes: 27 additions & 3 deletions README.md
@@ -1,8 +1,23 @@
# Gecaso

![master branch status](https://api.travis-ci.org/festinuz/gecaso.svg?branch=master)

Gecaso provides you with tools that help you create a cache tailored to your specific task.

### Example
### Examples

#### Using default gecaso storages
```python
import time

import gecaso


@gecaso.cached(gecaso.LRUStorage(maxsize=16), ttl=100)
def long_and_boring_function(time_to_sleep):
    time.sleep(time_to_sleep)
    return f'I have slept for {time_to_sleep} second(s)!'
```
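
A quick usage sketch of the decorated function above (repeat calls with the same argument should be served from the LRU cache instead of sleeping again):
```python
print(long_and_boring_function(1))  # sleeps for ~1 second on the first call
print(long_and_boring_function(1))  # the cached result is returned almost instantly
```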

#### Creating new storage to fit your task
Let's say you want to cache the results of some of your functions using Redis. All you need to do is write a simple class that specifies the steps for setting and getting data from Redis.

1) Create a Redis storage:
@@ -22,6 +37,9 @@ class RedisStorage(gecaso.BaseStorage):
    def set(self, key, value, **params):
        self._storage[key] = self.pack(value, **params)

    def remove(self, *keys):
        self._storage.remove(*keys)

redis_storage = RedisStorage('valid_redis_url')
```
2) Set your cache for any function you want:
@@ -47,12 +65,14 @@ Note that, at this time, gecaso only supports Python versions >= 3.5
This function is a wrapper that helps to set up caching for any synchronous or asynchronous function. It takes a single positional argument, which must be an instance of a class inherited from **BaseStorage**. It can also optionally be provided with a keyword argument **loop**, which must be an instance of an event loop. Any keyword arguments provided besides **loop** will be passed to the **set** method of the storage instance whenever it is called.
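
A short illustrative sketch of that contract (the function name here is made up; LocalMemoryStorage and the ttl parameter appear elsewhere in this diff). The extra keyword argument is not consumed by **cached** itself; it is forwarded to the storage's **set** method on every cache write:
```python
import asyncio

import gecaso

storage = gecaso.LocalMemoryStorage()


@gecaso.cached(storage, ttl=60)  # ttl=60 is passed through to storage.set
async def fetch_answer():
    await asyncio.sleep(1)  # stand-in for slow work
    return 42


loop = asyncio.get_event_loop()
print(loop.run_until_complete(fetch_answer()))  # slow first call, result gets cached
print(loop.run_until_complete(fetch_answer()))  # served from the cache
```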

#### 2) "BaseStorage" class
Any storage provided to the "cached" function should be inherited from this class. Base storage has 5 methods.
Any storage provided to the "cached" function should be inherited from this class. Base storage has 6 methods.

* **get(self, key)**: Abstract method that should be overridden. Can be synchronous or asynchronous. MUST raise KeyError if the key is not present in storage. If data was packed using the **pack** method before being stored, it must be unpacked using the **unpack** method.

* **set(self, key, value, \*\*params)**: Abstract method that should be overridden. Can be synchronous or asynchronous. It is recommended to pack the provided value using the **pack** method before storing it.

* **remove(self, \*keys)**: Abstract method that should be overridden. Should delete every key that is passed in the *keys* parameter and exists in storage.

* **pack(self, value, \*\*params)**: Returns a representation of an object with fields named *data* and *params*, as a bytes object.

* **unpack(self, value)**: Unpacks a bytes object that was packed with the **pack** method and returns *tuple(data, params)*.
@@ -75,7 +95,7 @@ class LocalMemoryStorage(gecaso.BaseStorage):
        self._storage = dict()  # Python's dict is a nice basic storage of data
```

2) Override **set** and **get** methods of gecaso.BaseStorage:
2) Override **set**, **get** and **remove** methods of gecaso.BaseStorage:
```python
    def set(self, key, value, ttl=None):  # We don't want any additional parameters besides time to live
        params = dict()
@@ -87,6 +107,10 @@ class LocalMemoryStorage(gecaso.BaseStorage):
        self.data = self._storage[key]  # If the key is not present, this will raise KeyError
        value, params = self.unpack(self._storage[key])  # params can optionally contain ttl
        return self.verified_get(value, **params)  # Using BaseStorage.verified_get to verify ttl

    def remove(self, *keys):
        for key in keys:
            self._storage.pop(key, None)  # Will not throw an error if some of the keys do not exist
```
At this point the get method won't work properly because we call **verified_get** at the end of it. That method tries to call a class method for every parameter it receives, and it will break because we pass it our **ttl** parameter while the verifying function that this parameter should map to does not exist yet.
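
The step that follows (elided here) presumably adds exactly such a verifying method; the LocalMemoryStorage shipped in gecaso/storage.py below defines it like this:
```python
    def vfunc_ttl(self, time_of_death):
        # Called by verified_get with the stored ttl value; returning False makes it raise KeyError
        return time_of_death > time.time()
```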

6 changes: 4 additions & 2 deletions gecaso/__init__.py
@@ -1,5 +1,7 @@
from gecaso.cache import cached
from gecaso.storage import BaseStorage, LocalMemoryStorage
from gecaso.storage import BaseStorage, LocalMemoryStorage, LRUStorage
from gecaso.utils import asyncify


__all__ = ['cached', 'BaseStorage', 'LocalMemoryStorage']
__all__ = [
    'cached', 'BaseStorage', 'LocalMemoryStorage', 'LRUStorage', 'asyncify']
75 changes: 75 additions & 0 deletions gecaso/storage.py
@@ -6,6 +6,9 @@


class BaseStorage(metaclass=abc.ABCMeta):
    """Any storage that is to be passed to the 'cached' wrapper should be
    inherited from this class.
    """
    @abc.abstractmethod
    def get(self, key):
        """Must throw KeyError if key is not found"""
@@ -15,15 +18,29 @@ def get(self, key):
    def set(self, key, value, **params):
        pass

    @abc.abstractmethod
    def remove(self, *keys):
        pass

    def pack(self, value, **params):
        """Packs value and params into an object which is then converted to
        bytes using the pickle library. Used to simplify storage because bytes
        can be stored almost anywhere.
        """
        result = utils.Namespace(value=value, params=params)
        return pickle.dumps(result)

    def unpack(self, value):
        """Unpacks a bytes object packed with the 'pack' method. Returns the
        packed value and parameters.
        """
        result = pickle.loads(value)
        return result.value, result.params

    def verified_get(self, value, **params):
        """Given value and params, returns value if every method derived from
        params (method name 'vfunc_PARAMNAME', argument the value of the
        param) returns True; otherwise raises KeyError."""
        if all([getattr(self, 'vfunc_'+f)(v) for f, v in params.items()]):
            return value
        else:
@@ -44,5 +61,63 @@ def set(self, key, value, ttl=None):
            params['ttl'] = time.time() + ttl
        self._storage[key] = self.pack(value, **params)

    def remove(self, *keys):
        for key in keys:
            self._storage.pop(key, None)

    def vfunc_ttl(self, time_of_death):
        return time_of_death > time.time()


class LRUStorage(BaseStorage):
    """Storage that provides LRU cache functionality when used with the
    'cached' wrapper. If the 'storage' argument is not provided,
    LocalMemoryStorage is used as the default substorage. Any provided
    storage is expected to be inherited from BaseStorage.
    """
    class Node:
        def __init__(self, next_node=None, key=None):
            if next_node:
                self.prev = next_node.prev
                self.next = next_node.prev.next

                self.next.prev = self
                self.prev.next = self
            else:
                self.next = self
                self.prev = self
            self.key = key

        def delete(self):
            self.prev.next = self.next
            self.next.prev = self.prev

    def __init__(self, storage=None, maxsize=128):
        self._storage = storage or LocalMemoryStorage()
        self._nodes = dict()
        self._maxsize = maxsize
        self._head = LRUStorage.Node()  # This empty node will always be last
        self.storage_set = utils.asyncify(self._storage.set)
        self.storage_get = utils.asyncify(self._storage.get)
        self.storage_remove = utils.asyncify(self._storage.remove)

    async def get(self, key):
        node = self._nodes.pop(key)  # Throws KeyError on failure
        node.delete()
        value = await self.storage_get(key)  # Throws KeyError on failure
        self._nodes[key] = LRUStorage.Node(self._head, key)
        self._head = self._nodes[key]
        return value

    async def set(self, key, value, **params):
        if len(self._nodes) > self._maxsize:
            last_node = self._head.prev.prev  # Skipping over the empty node
            await self.remove(last_node.key)
        await self.storage_set(key, value, **params)
        self._nodes[key] = LRUStorage.Node(self._head, key)
        self._head = self._nodes[key]

    async def remove(self, key):
        node = self._nodes.pop(key)
        node.delete()
        await self.storage_remove(key)
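
To illustrate the substorage idea described in the LRUStorage docstring, a minimal usage sketch (the decorated function and its arguments are made up; the classes are the ones added in this commit):

```python
import gecaso

# Keep at most 32 keys; once the limit is exceeded, the least recently used
# entry is evicted from the wrapped LocalMemoryStorage.
lru = gecaso.LRUStorage(gecaso.LocalMemoryStorage(), maxsize=32)


@gecaso.cached(lru, ttl=300)
def expensive_lookup(item_id):
    return item_id * 2  # hypothetical stand-in for a slow computation
```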
4 changes: 4 additions & 0 deletions gecaso/utils.py
@@ -21,6 +21,10 @@ def is_async_function(function):


def asyncify(function):
    """Wraps function. If function is asynchronous, it is not changed.
    If function is synchronous, returns an asynchronous function which calls
    the synchronous function inside. This allows writing code that uses
    sync/async functions in the same manner: as async functions."""
    async def new_function(*args, **kwargs):
        if is_async_function(function):
            return await function(*args, **kwargs)
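
A small sketch of the behaviour described in that docstring (the add function is made up; asyncify itself is exported in gecaso/__init__.py above):

```python
import asyncio

import gecaso


def add(a, b):  # a plain synchronous function
    return a + b


awaitable_add = gecaso.asyncify(add)  # must now be awaited, like a coroutine function

loop = asyncio.get_event_loop()
print(loop.run_until_complete(awaitable_add(2, 3)))  # -> 5
```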
2 changes: 1 addition & 1 deletion setup.py
@@ -1,6 +1,6 @@
from distutils.core import setup

version = '0.1.2'
version = '0.2.0'
url = 'https://github.com/festinuz/gecaso'
download_url = url + '/archive/' + version + '.tar.gz'

4 changes: 4 additions & 0 deletions tests/test_local.py
@@ -21,6 +21,10 @@ async def set(self, key, value, ttl=None):
            params['ttl'] = time.time() + ttl
        self._storage[key] = self.pack(value, **params)

    async def remove(self, *keys):
        for key in keys:
            self._storage.pop(key, None)

    def vfunc_ttl(self, time_of_death):
        return time_of_death > time.time()

76 changes: 76 additions & 0 deletions tests/test_lru.py
@@ -0,0 +1,76 @@
import time
import asyncio
import inspect

import pytest

import gecaso


class LocalAsyncMemoryStorage(gecaso.BaseStorage):
    def __init__(self):
        self._storage = dict()

    async def get(self, key):
        value, params = self.unpack(self._storage[key])
        return self.verified_get(value, **params)

    async def set(self, key, value, ttl=None):
        params = dict()
        if ttl:
            params['ttl'] = time.time() + ttl
        self._storage[key] = self.pack(value, **params)

    async def remove(self, *keys):
        for key in keys:
            self._storage.pop(key, None)

    def vfunc_ttl(self, time_of_death):
        return time_of_death > time.time()


def slow_echo(time_to_sleep):
    time.sleep(time_to_sleep)
    return time_to_sleep


async def slow_async_echo(time_to_sleep):
    await asyncio.sleep(time_to_sleep)
    return time_to_sleep


local_storage = gecaso.LocalMemoryStorage()
local_async_storage = LocalAsyncMemoryStorage()


@pytest.mark.parametrize("storage", [local_storage, local_async_storage])
@pytest.mark.parametrize("echo_func", [slow_echo, slow_async_echo])
def test_lru_cache(storage, echo_func):
    lru_echo = gecaso.cached(gecaso.LRUStorage(storage, maxsize=4))(echo_func)
    start_time = time.time()
    if inspect.iscoroutinefunction(lru_echo):
        loop = asyncio.get_event_loop()
        assert loop.run_until_complete(lru_echo(0.5)) == 0.5
        assert loop.run_until_complete(lru_echo(0.4)) == 0.4
        assert loop.run_until_complete(lru_echo(0.3)) == 0.3
        assert loop.run_until_complete(lru_echo(0.2)) == 0.2
        assert loop.run_until_complete(lru_echo(0.1)) == 0.1
        assert loop.run_until_complete(lru_echo(0.0)) == 0.0
        for i in range(10):
            assert loop.run_until_complete(lru_echo(0.3)) == 0.3
            assert loop.run_until_complete(lru_echo(0.5)) == 0.5
    else:
        assert lru_echo(0.5) == 0.5
        assert lru_echo(0.4) == 0.4
        assert lru_echo(0.3) == 0.3
        assert lru_echo(0.2) == 0.2
        assert lru_echo(0.1) == 0.1
        assert lru_echo(0.0) == 0.0
        for i in range(10):
            assert lru_echo(0.3) == 0.3
            assert lru_echo(0.5) == 0.5
        assert lru_echo(0.01) == 0.01
        assert lru_echo(0.02) == 0.02
        assert lru_echo(0.03) == 0.03
        assert lru_echo(0.04) == 0.04
    assert 2 < time.time() - start_time < 2.5
