Commit e204eac

fixed bug that caused the creation of thousands of duplicated CacheKey records
dotsbb committed Jan 18, 2012
1 parent ca9c886 commit e204eac
Showing 1 changed file with 2 additions and 2 deletions.
menus/menu_pool.py (2 additions, 2 deletions)
@@ -84,7 +84,7 @@ def clear(self, site_id=None, language=None, all=False):
             cache_keys = CacheKey.objects.get_keys()
         else:
             cache_keys = CacheKey.objects.get_keys(site_id, language)
-        to_be_deleted = [obj.key for obj in cache_keys]
+        to_be_deleted = cache_keys.distinct().values_list('key', flat=True)
         cache.delete_many(to_be_deleted)
         cache_keys.delete()

@@ -140,7 +140,7 @@ def _build_nodes(self, request, site_id):
         # the database. It's still cheaper than recomputing every time!
         # This way we can selectively invalidate per-site and per-language,
         # since the cache is shared but the keys aren't
-        CacheKey.objects.create(key=key, language=lang, site=site_id)
+        CacheKey.objects.get_or_create(key=key, language=lang, site=site_id)
         return final_nodes
 
     def apply_modifiers(self, nodes, request, namespace=None, root_id=None, post_cut=False, breadcrumb=False):
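
For the second hunk, create() inserts a new CacheKey row on every menu rebuild, whereas get_or_create() first looks for a row with the same field values and only inserts when none exists, returning an (object, created) tuple. A minimal sketch of the behavioural difference, assuming the same CacheKey model; the key, language and site values are hypothetical:

# Sketch only, not part of the commit. Field values are made up for illustration.
from menus.models import CacheKey

# Before the fix: every rebuild added another identical row, which is how
# thousands of duplicated CacheKey records piled up.
# CacheKey.objects.create(key='menu_nodes_en_1', language='en', site=1)

# After the fix: an existing row is reused, so repeated rebuilds keep one row per key.
obj, created = CacheKey.objects.get_or_create(key='menu_nodes_en_1', language='en', site=1)
if created:
    print('inserted a new CacheKey row')
else:
    print('reused the existing CacheKey row')

Note that get_or_create() issues a SELECT followed by an INSERT, so two requests racing on the same key can still each insert a row unless the database enforces uniqueness; the change stops the steady accumulation of duplicates rather than guaranteeing strict uniqueness.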
