Skip to content

Commit

Permalink
Fixed remaining issues
Browse files Browse the repository at this point in the history
  • Loading branch information
syrusakbary committed Jan 8, 2020
1 parent 989e013 commit 5c0fed0
Showing 1 changed file with 15 additions and 19 deletions.
34 changes: 15 additions & 19 deletions aiodataloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,19 +46,15 @@ def __init__(self, batch_load_fn=None, batch=None, max_batch_size=None,
if cache is not None:
self.cache = cache

if get_cache_key is not None:
self.get_cache_key = get_cache_key
self.get_cache_key = get_cache_key or (lambda x: x)

self._cache = cache_map if cache_map is not None else {}
self._queue = [] # type: List[Loader]

def get_cache_key(self, key): # type: ignore
return key

def load(self, key=None):
'''
"""
Loads a key, returning a `Future` for the value represented by that key.
'''
"""
if key is None:
raise TypeError((
'The loader.load() function must be called with a value, '
Expand Down Expand Up @@ -100,7 +96,7 @@ def do_resolve_reject(self, key, future):
dispatch_queue(self)

def load_many(self, keys):
'''
"""
Loads multiple keys, returning a list of values
>>> a, b = await my_loader.load_many([ 'a', 'b' ])
Expand All @@ -111,7 +107,7 @@ def load_many(self, keys):
>>> my_loader.load('a'),
>>> my_loader.load('b')
>>> )
'''
"""
if not isinstance(keys, Iterable):
raise TypeError((
'The loader.load_many() function must be called with Iterable<key> '
Expand All @@ -121,28 +117,28 @@ def load_many(self, keys):
return gather(*[self.load(key) for key in keys])

def clear(self, key):
    """Remove the cached entry for *key*, if one exists.

    The key is first normalized through ``get_cache_key`` so it matches
    the key under which the value was originally stored. Missing keys
    are ignored. Returns the loader itself for method chaining.
    """
    self._cache.pop(self.get_cache_key(key), None)
    return self

def clear_all(self):
    """Drop every cached entry.

    Intended for events that invalidate an unknown subset of this
    loader's cache, where wiping everything is the only safe option.
    The cache mapping is cleared in place (it may be shared via
    ``cache_map``). Returns the loader itself for method chaining.
    """
    self._cache.clear()
    return self

def prime(self, key, value):
'''
"""
Adds the provied key and value to the cache. If the key already exists, no
change is made. Returns itself for method chaining.
'''
"""
cache_key = self.get_cache_key(key)

# Only add the key if it does not already exist.
Expand Down Expand Up @@ -172,10 +168,10 @@ def get_chunks(iterable_obj, chunk_size=1):


def dispatch_queue(loader):
'''
"""
Given the current state of a Loader instance, perform a batch load
from its current queue.
'''
"""
# Take the current loader queue, replacing it with an empty queue.
queue = loader._queue
loader._queue = []
Expand Down Expand Up @@ -247,10 +243,10 @@ async def dispatch_queue_batch(loader, queue):


def failed_dispatch(loader, queue, error):
    """Reject every queued load after a batch-level dispatch failure.

    When the batch load itself raises, the failure is not per-key, so
    the individual results must not stay cached — but each pending
    future still has to be resolved with the error so callers do not
    hang forever.
    """
    for item in queue:
        loader.clear(item.key)
        item.future.set_exception(error)

0 comments on commit 5c0fed0

Please sign in to comment.