
Commit

add an lru cache from Raymond Hettinger
Jeff Balogh committed Aug 10, 2011
1 parent da1d4d9 commit df3da25
Showing 1 changed file with 86 additions and 0 deletions.

lib/lru_cache.py
@@ -0,0 +1,86 @@
# http://code.activestate.com/recipes/498245-lru-and-lfu-cache-decorators/
# by Raymond Hettinger.
import collections
import functools
from itertools import ifilterfalse


class Counter(dict):
    'Mapping where default values are zero'
    def __missing__(self, key):
        return 0


def lru_cache(maxsize=100):
    '''Least-recently-used cache decorator.
    Arguments to the cached function must be hashable.
    Cache performance statistics stored in f.hits and f.misses.
    Clear the cache with f.clear().
    http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    '''
    maxqueue = maxsize * 10
    def decorating_function(user_function,
            len=len, iter=iter, tuple=tuple, sorted=sorted, KeyError=KeyError):
        cache = {}                   # mapping of args to results
        queue = collections.deque()  # order that keys have been used
        refcount = Counter()         # times each key is in the queue
        sentinel = object()          # marker for looping around the queue
        kwd_mark = object()          # separate positional and keyword args

        # lookup optimizations (ugly but fast)
        queue_append, queue_popleft = queue.append, queue.popleft
        queue_appendleft, queue_pop = queue.appendleft, queue.pop

        @functools.wraps(user_function)
        def wrapper(*args, **kwds):
            # cache key records both positional and keyword args
            key = args
            if kwds:
                key += (kwd_mark,) + tuple(sorted(kwds.items()))
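            # (sorting kwds.items() makes the cache key independent of keyword
            # order, and the kwd_mark object separates the positional part of
            # the key from the keyword part so the two cannot collide)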

            # record recent use of this key
            queue_append(key)
            refcount[key] += 1

            # get cache entry or compute if not found
            try:
                result = cache[key]
                wrapper.hits += 1
            except KeyError:
                result = user_function(*args, **kwds)
                cache[key] = result
                wrapper.misses += 1

                # purge least recently used cache entry
                if len(cache) > maxsize:
                    key = queue_popleft()
                    refcount[key] -= 1
                    while refcount[key]:
                        key = queue_popleft()
                        refcount[key] -= 1
                    del cache[key], refcount[key]

            # periodically compact the queue by eliminating duplicate keys
            # while preserving order of most recent access
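            # (the sentinel marks where the old queue ends: entries are popped
            # from the most-recently-used end and pushed back onto the other
            # end, so only the newest occurrence of each key is kept and the
            # relative order of the surviving keys is unchanged)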
            if len(queue) > maxqueue:
                refcount.clear()
                queue_appendleft(sentinel)
                for key in ifilterfalse(refcount.__contains__,
                                        iter(queue_pop, sentinel)):
                    queue_appendleft(key)
                    refcount[key] = 1

            return result

        def clear():
            cache.clear()
            queue.clear()
            refcount.clear()
            wrapper.hits = wrapper.misses = 0

        wrapper.hits = wrapper.misses = 0
        wrapper.clear = clear
        return wrapper
    return decorating_function
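
For reference, a minimal usage sketch (not part of the diff above), assuming Python 2 (the recipe imports itertools.ifilterfalse, which Python 3 renames to filterfalse) and assuming the file is importable as lru_cache; the fib function is purely illustrative:

from lru_cache import lru_cache

@lru_cache(maxsize=100)
def fib(n):
    # naive recursion; the decorator memoizes repeated subproblems
    if n < 2:
        return n
    return fib(n - 1) + fib(n - 2)

print fib(30)                  # each distinct argument is computed once
print fib.hits, fib.misses     # counters maintained by the wrapper
fib.clear()                    # empty the cache and reset the counters

Since the file lives in lib/, the exact import path depends on how lib/ ends up on sys.path in this project.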
