/
decorators.py
341 lines (278 loc) · 13 KB
/
decorators.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
import inspect
import functools
import logging
from aiocache import SimpleMemoryCache, caches
from aiocache.base import SENTINEL
from aiocache.lock import RedLock
logger = logging.getLogger(__name__)
class cached:
    """
    Cache the decorated coroutine's return value under a key derived from the
    module name, function name and the call arguments.

    The cache instance is exposed on the decorated function as
    ``<function_name>.cache``. Extra kwargs (e.g. endpoint and port for a
    RedisCache) are forwarded to the cache constructor. A single cache
    instance is created per decorated callable, so adapt the pool size if you
    expect high concurrency of calls to the same function.

    When calling the decorated function, reads and writes from/to the cache
    can be toggled with the ``cache_read`` and ``cache_write`` keyword
    arguments (both enabled by default).

    :param ttl: int seconds to store the function call. Default is None,
        which means no expiration.
    :param key: str explicit cache key. Takes precedence over key_builder.
        When neither is passed, module_name + function_name + args + kwargs
        is used.
    :param key_builder: Callable that builds the key dynamically; it receives
        the function plus the same args and kwargs passed to the function.
    :param cache: cache class to use for the ``set``/``get`` operations.
        Default is ``aiocache.SimpleMemoryCache``.
    :param serializer: serializer instance for ``dumps``/``loads``. If None,
        the cache backend's default is used.
    :param plugins: list of plugins to use when calling the cmd hooks.
        Default is pulled from the cache class being used.
    :param alias: str alias to load the config from. When given, the other
        config parameters are ignored and the same aliased cache is reused on
        every call. For a per-function cache, pass the parameters explicitly
        instead of an alias.
    :param noself: bool; when decorating a class function, ``self`` is part
        of the generated key by default, so identical calls on different
        instances use different keys. Pass noself=True to ignore it.
    """

    def __init__(
        self,
        ttl=SENTINEL,
        key=None,
        key_builder=None,
        cache=SimpleMemoryCache,
        serializer=None,
        plugins=None,
        alias=None,
        noself=False,
        **kwargs
    ):
        self.ttl = ttl
        self.key = key
        self.key_builder = key_builder
        self.noself = noself
        self.alias = alias
        # Resolved lazily, at decoration time (see __call__).
        self.cache = None
        self._cache = cache
        self._serializer = serializer
        self._plugins = plugins
        self._kwargs = kwargs

    def __call__(self, f):
        # Bind the concrete cache instance once, when the function is decorated.
        if self.alias:
            self.cache = caches.get(self.alias)
        else:
            self.cache = _get_cache(
                cache=self._cache,
                serializer=self._serializer,
                plugins=self._plugins,
                **self._kwargs,
            )

        @functools.wraps(f)
        async def wrapper(*args, **kwargs):
            return await self.decorator(f, *args, **kwargs)

        wrapper.cache = self.cache
        return wrapper

    async def decorator(self, f, *args, cache_read=True, cache_write=True, **kwargs):
        """Serve from the cache when possible; otherwise call ``f`` and store the result."""
        key = self.get_cache_key(f, args, kwargs)

        if cache_read:
            hit = await self.get_from_cache(key)
            # NOTE: a cached value of None is indistinguishable from a miss,
            # so None results are always recomputed.
            if hit is not None:
                return hit

        result = await f(*args, **kwargs)

        if cache_write:
            await self.set_in_cache(key, result)
        return result

    def get_cache_key(self, f, args, kwargs):
        """Return the explicit key, the built key, or the default derived key."""
        if self.key:
            return self.key
        if self.key_builder:
            return self.key_builder(f, *args, **kwargs)
        return self._key_from_args(f, args, kwargs)

    def _key_from_args(self, func, args, kwargs):
        # Sort kwargs so the key is stable regardless of call-site ordering.
        positional = args[1:] if self.noself else args
        return "".join(
            (
                func.__module__ or "",
                func.__name__,
                str(positional),
                str(sorted(kwargs.items())),
            )
        )

    async def get_from_cache(self, key):
        """Fetch ``key``; any backend error is logged and treated as a miss."""
        try:
            return await self.cache.get(key)
        except Exception:
            logger.exception("Couldn't retrieve %s, unexpected error", key)
            return None

    async def set_in_cache(self, key, value):
        """Store ``value`` under ``key`` (best effort: backend errors are only logged)."""
        try:
            await self.cache.set(key, value, ttl=self.ttl)
        except Exception:
            logger.exception("Couldn't set %s in key %s, unexpected error", value, key)
class cached_stampede(cached):
    """
    Like :class:`cached`, but wraps the function call in a distributed lock
    (RedLock) so that concurrent callers missing the cache do not all
    recompute the same value (cache stampede).

    Extra kwargs (e.g. endpoint and port for a RedisCache) are forwarded to
    the cache constructor. Only one cache instance is created per decorated
    function; adapt the pool size if you expect high concurrency of calls to
    the same function.

    :param lease: int seconds the lock is held to avoid stampede effects.
        If 0 or None, no locking happens (default is 2). redis and memory
        backends support float ttls.
    :param ttl: int seconds to store the function call. Default is None,
        which means no expiration.
    :param key: str value to set as key for the function return. Takes
        precedence over key_from_attr. When neither is passed, module_name +
        function_name + args + kwargs is used.
    :param key_from_attr: str arg or kwarg name from the function to use as
        a key.
    :param cache: cache class for the ``set``/``get`` operations. Default is
        ``aiocache.SimpleMemoryCache``.
    :param serializer: serializer instance for ``dumps``/``loads``. Default
        is JsonSerializer.
    :param plugins: list of plugins to use when calling the cmd hooks.
        Default is pulled from the cache class being used.
    :param alias: str alias to load the config from. When given, the other
        config parameters are ignored. A new cache is created every time.
    :param noself: bool; when decorating a class function, ``self`` is part
        of the generated key by default. Pass noself=True to ignore it.
    """

    def __init__(self, lease=2, **kwargs):
        super().__init__(**kwargs)
        self.lease = lease

    async def decorator(self, f, *args, **kwargs):
        key = self.get_cache_key(f, args, kwargs)

        # Fast path: value already cached, no locking needed.
        hit = await self.get_from_cache(key)
        if hit is not None:
            return hit

        async with RedLock(self.cache, key, self.lease):
            # Re-check inside the lock: another caller may have filled the
            # cache while we were waiting for it.
            hit = await self.get_from_cache(key)
            if hit is not None:
                return hit
            result = await f(*args, **kwargs)
            await self.set_in_cache(key, result)
        return result
def _get_cache(cache=SimpleMemoryCache, serializer=None, plugins=None, **cache_kwargs):
    """Instantiate the given cache class with serializer, plugins and any extra kwargs."""
    instance = cache(serializer=serializer, plugins=plugins, **cache_kwargs)
    return instance
def _get_args_dict(func, args, kwargs):
defaults = {
arg_name: arg.default
for arg_name, arg in inspect.signature(func).parameters.items()
if arg.default is not inspect._empty # TODO: bug prone..
}
args_names = func.__code__.co_varnames[: func.__code__.co_argcount]
return {**defaults, **dict(zip(args_names, args)), **kwargs}
class multi_cached:
    """
    Only supports functions that return dict-like structures. This decorator caches each key/value
    of the dict-like object returned by the function.
    The cache is available in the function object as ``<function_name>.cache``.
    If key_builder is passed, before storing the key, it will be transformed according to the output
    of the function.
    If the attribute specified to be the key is an empty list, the cache will be ignored and
    the function will be called as expected.
    Only one cache instance is created per decorated function. If you expect high concurrency
    of calls to the same function, you should adapt the pool size as needed.
    When calling the decorated function, the reads and writes from/to the cache can be controlled
    with the parameters ``cache_read`` and ``cache_write`` (both are enabled by default).
    :param keys_from_attr: arg or kwarg name from the function containing an iterable to use
        as keys to index in the cache.
    :param key_builder: Callable that allows to change the format of the keys before storing.
        Receives the key the function and same args and kwargs as the called function.
    :param ttl: int seconds to store the keys. Default is 0 which means no expiration.
    :param cache: cache class to use when calling the ``multi_set``/``multi_get`` operations.
        Default is ``aiocache.SimpleMemoryCache``.
    :param serializer: serializer instance to use when calling the ``dumps``/``loads``.
        If its None, default one from the cache backend is used.
    :param plugins: plugins to use when calling the cmd hooks
        Default is pulled from the cache class being used.
    :param alias: str specifying the alias to load the config from. If alias is passed, other config
        parameters are ignored. Same cache identified by alias is used on every call. If you need
        a per function cache, specify the parameters explicitly without using alias.
    """

    def __init__(
        self,
        keys_from_attr,
        key_builder=None,
        ttl=SENTINEL,
        cache=SimpleMemoryCache,
        serializer=None,
        plugins=None,
        alias=None,
        **kwargs
    ):
        self.keys_from_attr = keys_from_attr
        # Default key_builder is the identity transform on the key.
        self.key_builder = key_builder or (lambda key, f, *args, **kwargs: key)
        self.ttl = ttl
        self.alias = alias
        # Resolved lazily at decoration time (see __call__).
        self.cache = None
        self._cache = cache
        self._serializer = serializer
        self._plugins = plugins
        self._kwargs = kwargs

    def __call__(self, f):
        # Bind the concrete cache instance once, when the function is decorated.
        if self.alias:
            self.cache = caches.get(self.alias)
        else:
            self.cache = _get_cache(
                cache=self._cache,
                serializer=self._serializer,
                plugins=self._plugins,
                **self._kwargs
            )

        @functools.wraps(f)
        async def wrapper(*args, **kwargs):
            return await self.decorator(f, *args, **kwargs)

        wrapper.cache = self.cache
        return wrapper

    async def decorator(self, f, *args, cache_read=True, cache_write=True, **kwargs):
        """Serve cached keys, call ``f`` only for the missing ones, merge and store the result."""
        missing_keys = []
        partial = {}
        keys, new_args, args_index = self.get_cache_keys(f, args, kwargs)
        if cache_read:
            values = await self.get_from_cache(*keys)
            # Split the requested keys into cache hits (partial) and misses.
            for key, value in zip(keys, values):
                if value is None:
                    missing_keys.append(key)
                else:
                    partial[key] = value
            # Full hit: every requested key was found, skip calling f entirely.
            if values and None not in values:
                return partial
        else:
            # Reads disabled: treat every key as missing.
            missing_keys = list(keys)
        # Rewrite the keys argument so f only computes the missing entries,
        # whether it was passed positionally or as a keyword.
        if args_index > -1:
            new_args[args_index] = missing_keys
        else:
            kwargs[self.keys_from_attr] = missing_keys
        result = await f(*new_args, **kwargs)
        # Merge the cache hits back into the freshly computed dict.
        result.update(partial)
        if cache_write:
            await self.set_in_cache(result, f, args, kwargs)
        return result

    def get_cache_keys(self, f, args, kwargs):
        """Return (built keys, mutable copy of args, index of the keys arg or -1)."""
        args_dict = _get_args_dict(f, args, kwargs)
        keys = args_dict[self.keys_from_attr] or []
        keys = [self.key_builder(key, f, *args, **kwargs) for key in keys]

        # Locate where the keys iterable was passed so decorator() can
        # substitute the missing-keys subset in the same position.
        args_names = f.__code__.co_varnames[: f.__code__.co_argcount]
        new_args = list(args)
        keys_index = -1
        if self.keys_from_attr in args_names and self.keys_from_attr not in kwargs:
            keys_index = args_names.index(self.keys_from_attr)
            new_args[keys_index] = keys
        return keys, new_args, keys_index

    async def get_from_cache(self, *keys):
        """multi_get the keys; backend errors are logged and reported as all misses."""
        if not keys:
            return []
        try:
            values = await self.cache.multi_get(keys)
            return values
        except Exception:
            logger.exception("Couldn't retrieve %s, unexpected error", keys)
            return [None] * len(keys)

    async def set_in_cache(self, result, fn, fn_args, fn_kwargs):
        """multi_set every key/value of ``result`` (best effort: errors are only logged)."""
        try:
            await self.cache.multi_set(
                [(self.key_builder(k, fn, *fn_args, **fn_kwargs), v) for k, v in result.items()],
                ttl=self.ttl,
            )
        except Exception:
            logger.exception("Couldn't set %s, unexpected error", result)