Permalink
Browse files

refactor cache objects, add methods to RedisCache for removing things…

… from the cache
  • Loading branch information...
1 parent a60f022 commit a6d564c72fda7cabd53b8c21f4770b025ad1627b @ryanmark ryanmark committed May 16, 2013
Showing with 187 additions and 67 deletions.
  1. +1 −0 .gitignore
  2. +134 −44 p2p/cache.py
  3. +51 −22 p2p/tests.py
  4. +1 −1 setup.py
View
@@ -4,3 +4,4 @@ dist
*.pyc
.DS*
*.egg
+dump.rdb
View
@@ -32,104 +32,91 @@ def __init__(self, prefix='p2p'):
def get_content_item(self, slug=None, id=None, query=None):
self.content_items_gets += 1
- if slug:
- key = "_".join([self.prefix, 'content_item',
- slug,
- self.query_to_key(query)])
- elif id:
- key = "_".join([self.prefix, 'content_item',
- str(id),
- self.query_to_key(query)])
- else:
+ if slug is None and id is None:
raise TypeError("get_content_item() takes either a slug or "
"id keyword argument")
+
+ if id:
+ lookup_key = self.make_key('content_item', str(id), 'lookup')
+ slug = self.get(lookup_key)
+ if slug is None:
+ return None
+
+ key = self.make_key('content_item', slug, self.query_to_key(query))
ret = self.get(key)
if ret:
self.content_items_hits += 1
return ret
def save_content_item(self, content_item, query=None):
- key = "_".join([self.prefix, 'content_item',
- content_item['slug'],
- self.query_to_key(query)])
+ # save the actual data
+ key = self.make_key(
+ 'content_item', content_item['slug'], self.query_to_key(query))
self.set(key, content_item)
- key = "_".join([self.prefix, 'content_item',
- str(content_item['id']),
- self.query_to_key(query)])
- self.set(key, content_item)
+ # save a reference. Since we might need to lookup by id,
+ # we'll save a simple item that tells us the slug for that id
+ lookup_key = self.make_key(
+ 'content_item', str(content_item['id']), 'lookup')
+ self.set(lookup_key, content_item['slug'])
- def get_collection(self, slug=None, id=None, query=None):
+ def get_collection(self, slug, query=None):
self.collections_gets += 1
- if slug:
- key = "_".join([self.prefix, 'collection',
- slug,
- self.query_to_key(query)])
- elif id:
- key = "_".join([self.prefix, 'collection',
- str(id), self.query_to_key(query)])
- else:
- raise TypeError("get_collection() takes either a slug or id keyword argument")
+ key = self.make_key('collection', slug, self.query_to_key(query))
ret = self.get(key)
if ret:
self.collections_hits += 1
return ret
def save_collection(self, collection, query=None):
- key = "_".join([self.prefix, 'collection',
- collection['code'],
- self.query_to_key(query)])
- self.set(key, collection)
-
- key = "_".join([self.prefix, 'collection',
- str(collection['id']),
- self.query_to_key(query)])
+ key = self.make_key(
+ 'collection', collection['code'], self.query_to_key(query))
self.set(key, collection)
def get_collection_layout(self, slug, query=None):
self.collection_layouts_gets += 1
- key = "_".join([self.prefix, 'collection_layout',
- slug, self.query_to_key(query)])
+ key = self.make_key(
+ 'collection_layout', slug, self.query_to_key(query))
ret = self.get(key)
if ret:
ret['code'] = slug
self.collection_layouts_hits += 1
return ret
def save_collection_layout(self, collection_layout, query=None):
- key = "_".join([self.prefix, 'collection_layout',
- collection_layout['code'],
- self.query_to_key(query)])
+ key = self.make_key('collection_layout',
+ collection_layout['code'],
+ self.query_to_key(query))
self.set(key, collection_layout)
def get_section(self, path):
self.sections_gets += 1
- key = "_".join([self.prefix, 'section', path])
+ key = self.make_key('section', path)
ret = self.get(key)
if ret:
self.sections_hits += 1
return ret
def save_section(self, path, section):
- key = "_".join([self.prefix, 'section', path])
+ key = self.make_key('section', path)
self.set(key, section)
def get_section_configs(self, path):
self.section_configs_gets += 1
- key = "_".join([self.prefix, 'section_configs', path])
+ key = self.make_key('section_configs', path)
ret = self.get(key)
if ret:
self.section_configs_hits += 1
return ret
def save_section_configs(self, path, section):
- key = "_".join([self.prefix, 'section_configs', path])
+ key = self.make_key('section_configs', path)
self.set(key, section)
def get_stats(self):
@@ -147,20 +134,39 @@ def get_stats(self):
}
def get(self, key):
+ """
+ Get data from a cache key
+ """
raise NotImplementedError()
def set(self, key, data):
+ """
+ Save data to a cache key
+ """
raise NotImplementedError()
def clear(self):
+ """
+ Clear the entire cache
+ """
raise NotImplementedError()
def query_to_key(self, query):
+ """
+ Take a query in the form of a dictionary and turn it into something
+ that can be used in a cache key
+ """
if query is None:
return ''
return utils.dict_to_qs(query)
+ def make_key(self, *args):
+ """
+ Take any number of arguments and return a key string
+ """
+ return '_'.join([self.prefix] + list(args))
+
class DictionaryCache(BaseCache):
"""
@@ -246,14 +252,98 @@ def __init__(self, prefix='p2p', host='localhost', port=6379, db=0):
self.prefix = prefix
self.r = redis.StrictRedis(host=host, port=port, db=db)
def remove_content_item(self, slug=None, id=None):
    """
    Remove all cached copies of a content item, plus its id->slug
    lookup key.

    Takes either a ``slug`` or an ``id`` keyword argument. Returns
    True if anything was deleted, False if the item was not cached.
    Raises TypeError when neither argument is given.
    """
    # make sure we have arguments
    if slug is None and id is None:
        raise TypeError("remove_content_item() takes either a slug or "
                        "id keyword argument")

    # If we got an id, we need to look up the slug. Use an explicit
    # None check so a falsy id (e.g. 0) still triggers the lookup
    # instead of falling through with slug left as None.
    if id is not None:
        lookup_key = self.make_key('content_item', str(id), 'lookup')
        slug = self.get(lookup_key)
        # Couldn't find the slug so bail
        if slug is None:
            return False

    # construct a redis key query to get the keys for all copies of
    # this content item in the cache (one key per distinct query)
    key_query = self.make_key('content_item', slug, '*')
    matching_keys = self.r.keys(key_query)

    # if we don't have any keys, bail
    if not matching_keys:
        return False

    if id is None:
        # we need to grab a copy of the content item in order to
        # retrieve the id. We need the id to remove the lookup key.
        content_item = self.get(matching_keys[0])
        id = content_item['id']
        lookup_key = self.make_key('content_item', str(id), 'lookup')

    # add the lookup key to our list of keys, then delete them all
    # in a single round trip
    matching_keys.append(lookup_key)
    self.r.delete(*matching_keys)
    return True
+
def remove_collection(self, slug):
    """
    Remove every cached copy of this collection.

    Returns True if anything was deleted, False when the collection
    was not in the cache.
    """
    # One cached copy exists per distinct query string, so match
    # them all with a wildcard on the query segment of the key.
    pattern = self.make_key('collection', slug, '*')
    keys = self.r.keys(pattern)

    # nothing cached under this slug
    if not keys:
        return False

    self.r.delete(*keys)
    return True
+
def remove_collection_layout(self, slug):
    """
    Remove all instances of this collection layout from the cache.

    Returns True if anything was deleted, False when the layout was
    not in the cache.
    """
    # construct a redis key query to get the keys for all copies of
    # this collection layout in the cache.
    # Bug fix: the key prefix must be 'collection_layout' -- matching
    # on 'collection' would delete the cached collections themselves
    # instead of the layout entries (see save_collection_layout).
    key_query = self.make_key('collection_layout', slug, '*')
    matching_keys = self.r.keys(key_query)

    # if we don't have any keys, bail
    if not matching_keys:
        return False

    # delete them all in one round trip
    self.r.delete(*matching_keys)
    return True
+
def remove_section(self, path):
    """
    Remove this section from the cache.

    Returns True if a cached entry was deleted, False otherwise.
    """
    # Sections are stored under a single key with no query variants
    # (see save_section), so a direct delete is sufficient. redis
    # DELETE returns the number of keys removed.
    return bool(self.r.delete(self.make_key('section', path)))
+
def remove_section_configs(self, path):
    """
    Remove the cached configs for this section.

    Returns True if a cached entry was deleted, False otherwise.
    """
    # Section configs are stored under a single key with no query
    # variants (see save_section_configs), so a direct delete is
    # sufficient. redis DELETE returns the number of keys removed.
    return bool(self.r.delete(self.make_key('section_configs', path)))
+
def get(self, key):
ret = self.r.get(key)
return pickle.loads(ret) if ret else None
def set(self, key, data):
self.r.set(key, pickle.dumps(data))
- def clean(self):
+ def clear(self):
self.r.flushdb()
except ImportError, e:
View
@@ -103,26 +103,6 @@ def test_many_multi_items(self):
for k in self.content_item_keys:
self.assertIn(k, data[0].keys())
- # TODO: test redis cache
- def test_cache(self):
- # Get a list of availabe classes to test
- test_backends = ('DictionaryCache', 'DjangoCache')
- cache_backends = list()
- for backend in test_backends:
- if hasattr(cache, backend):
- cache_backends.append(getattr(cache, backend))
-
- content_item_ids = [
- 58253183, 56809651, 56810874, 56811192, 58253247]
-
- for cls in cache_backends:
- self.p2p.cache = cls()
- data = self.p2p.get_multi_content_items(ids=content_item_ids)
- data = self.p2p.get_content_item(self.content_item_slug)
- stats = self.p2p.cache.get_stats()
- self.assertEqual(stats['content_item_gets'], 6)
- self.assertEqual(stats['content_item_hits'], 1)
-
def test_fancy_collection(self):
data = self.p2p.get_fancy_collection(
self.collection_slug, with_collection=True)
@@ -190,6 +170,7 @@ def test_auth(self):
def test_get_section(self):
data = self.p2p.get_section('/news/local/breaking')
+ self.assertEqual(type(data), dict)
#pp.pprint(data)
@@ -245,9 +226,9 @@ def test_publish_story(self):
# Create photo
photo = self.p2p.create_content_item(photo_data)
- self.assertIn('multimedia', photo)
+ self.assertIn('photo', photo)
self.assertEqual(
- photo['multimedia']['slug'], photo_data['slug'])
+ photo['photo']['slug'], photo_data['slug'])
# Add photo as related item to the article
self.assertEqual(
@@ -277,6 +258,54 @@ def test_publish_story(self):
article_data['slug']))
+class TestP2PCache(unittest.TestCase):
+ def setUp(self):
+ self.content_item_slug = 'chi-na-lorem-a'
+ self.collection_slug = 'chi_na_lorem'
+ self.p2p = get_connection()
+ self.p2p.debug = True
+ self.maxDiff = None
+
+ def test_cache(self):
+        # Get a list of available classes to test
+ test_backends = ('DictionaryCache', 'DjangoCache')
+ cache_backends = list()
+ for backend in test_backends:
+ if hasattr(cache, backend):
+ cache_backends.append(getattr(cache, backend))
+
+ content_item_ids = [
+ 58253183, 56809651, 56810874, 56811192, 58253247]
+
+ for cls in cache_backends:
+ self.p2p.cache = cls()
+ data = self.p2p.get_multi_content_items(ids=content_item_ids)
+ data = self.p2p.get_content_item(self.content_item_slug)
+ stats = self.p2p.cache.get_stats()
+ self.assertEqual(stats['content_item_gets'], 6)
+ self.assertEqual(stats['content_item_hits'], 1)
+
+ #@unittest.skip("Beware, will delete everything from redis")
+ def test_redis_cache(self):
+ content_item_ids = [
+ 58253183, 56809651, 56810874, 56811192, 58253247]
+
+ self.p2p.cache = cache.RedisCache()
+ self.p2p.cache.clear()
+ data = self.p2p.get_multi_content_items(ids=content_item_ids)
+ data = self.p2p.get_content_item(self.content_item_slug)
+ stats = self.p2p.cache.get_stats()
+ self.assertEqual(stats['content_item_gets'], 6)
+ self.assertEqual(stats['content_item_hits'], 1)
+
+ removed = self.p2p.cache.remove_content_item(self.content_item_slug)
+ data = self.p2p.get_content_item(self.content_item_slug)
+ stats = self.p2p.cache.get_stats()
+ self.assertTrue(removed)
+ self.assertEqual(stats['content_item_gets'], 7)
+ self.assertEqual(stats['content_item_hits'], 1)
+
+
if __name__ == '__main__':
import logging
logging.basicConfig()
View
@@ -4,7 +4,7 @@
setup(
name="p2p",
- version="1.3.0",
+ version="1.3.1",
packages=find_packages(),
install_requires=["python-dateutil",

0 comments on commit a6d564c

Please sign in to comment.