Skip to content

Commit 2d34bd7

Browse files
author
Jeff Balogh
committed
merging byid into redis
2 parents de9ed74 + 4209189 commit 2d34bd7

File tree

2 files changed

+46
-1
lines changed

2 files changed

+46
-1
lines changed

caching/base.py

Lines changed: 38 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
from django.db.models.sql import query
99
from django.utils import encoding
1010

11-
from .invalidation import invalidator, flush_key, make_key
11+
from .invalidation import invalidator, flush_key, make_key, byid
1212

1313

1414
class NullHandler(logging.Handler):
@@ -24,6 +24,7 @@ def emit(self, record):
2424
FOREVER = 0
2525
NO_CACHE = -1
2626
CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '')
27+
FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False)
2728

2829
scheme, _, _ = parse_backend_uri(settings.CACHE_BACKEND)
2930
cache.scheme = scheme
@@ -142,8 +143,44 @@ def iterator(self):
142143
query_string = self.query_key()
143144
except query.EmptyResultSet:
144145
return iterator()
146+
if FETCH_BY_ID:
147+
iterator = self.fetch_by_id
145148
return iter(CacheMachine(query_string, iterator, self.timeout))
146149

150+
def fetch_by_id(self):
    """
    Run two queries to get objects: one for the ids, one for id__in=ids.

    After getting ids from the first query we can try cache.get_many to
    reuse objects we've already seen.  Then we fetch the remaining items
    from the db, and put those in the cache.  This prevents cache
    duplication.

    Yields model instances in the order produced by the original query.
    """
    # Include columns from extra since they could be used in the query's
    # order_by.
    vals = self.values_list('pk', *self.query.extra.keys())
    pks = [val[0] for val in vals]
    # Map each object's byid cache key back to its pk so cache misses
    # can be translated into a pk list for the follow-up query.
    keys = dict((byid(self.model._cache_key(pk)), pk) for pk in pks)
    # Drop None values so backends that return placeholder Nones from
    # get_many still count those entries as misses.
    cached = dict((k, v) for k, v in cache.get_many(keys).items()
                  if v is not None)

    missed = [pk for key, pk in keys.items() if key not in cached]
    # Clear out the default ordering since we order based on the query.
    others = self.model.objects.filter(pk__in=missed).order_by()
    # NOTE(review): presumably no_cache() stops the inner query from
    # being cached recursively by a caching manager -- confirm against
    # the queryset's no_cache implementation.
    if hasattr(others, 'no_cache'):
        others = others.no_cache()
    if self.query.select_related:
        others.dup_select_related(self)

    # Put the fetched objects back in cache.
    new = dict((byid(o), o) for o in others)
    cache.set_many(new)

    # Use pks to return the objects in the correct order.
    # (Python 2: dict.values() returns lists, so + concatenates them.)
    objects = dict((o.pk, o) for o in cached.values() + new.values())
    for pk in pks:
        yield objects[pk]
183+
147184
def count(self):
148185
timeout = getattr(settings, 'CACHE_COUNT_TIMEOUT', None)
149186
super_count = super(CachingQuerySet, self).count

caching/invalidation.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515

1616

1717
CACHE_PREFIX = getattr(settings, 'CACHE_PREFIX', '')
18+
FETCH_BY_ID = getattr(settings, 'FETCH_BY_ID', False)
1819
FLUSH = CACHE_PREFIX + ':flush:'
1920

2021
log = logging.getLogger('caching.invalidation')
@@ -39,6 +40,11 @@ def flush_key(obj):
3940
return FLUSH + make_key(key, with_locale=False)
4041

4142

43+
def byid(obj):
    """Return the ``byid:`` cache key for *obj*.

    *obj* may be a model instance (its ``_cache_key`` is derived from its
    pk) or an already-built cache-key string, which is used as-is.
    """
    if isinstance(obj, basestring):
        key = obj
    else:
        key = obj._cache_key(obj.pk)
    return make_key('byid:' + key)
46+
47+
4248
def safe_redis(return_type):
4349
"""
4450
Decorator to catch and log any redis errors.
@@ -91,6 +97,8 @@ def cache_objects(self, objects, query_key, query_flush):
9197
for key in map(flush_key, obj._cache_keys()):
9298
if key != obj_flush:
9399
flush_lists[key].add(obj_flush)
100+
if FETCH_BY_ID:
101+
flush_lists[key].append(byid(obj))
94102
self.add_to_flush_list(flush_lists)
95103

96104
def find_flush_lists(self, keys):

0 commit comments

Comments
 (0)