Refactored cache from django/core/cache.py into django/core/cache package, with each backend getting a separate module. This keeps things cleaner and uses less memory, because the backend module is only loaded if it's needed.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@2378 bcc190cf-cafb-0310-a4f2-bffc1f526a37
1 parent 9a74e89 commit c5073320a77058206f9b4b79420586531764aa26 @adrianholovaty committed Feb 24, 2006
django/core/cache/__init__.py
@@ -0,0 +1,54 @@
+"""
+Caching framework.
+
+This package defines a set of cache backends that all conform to a simple API.
+In a nutshell, a cache is a set of values -- which can be any object that
+may be pickled -- identified by string keys. For the complete API, see
+the abstract BaseCache class in django.core.cache.backends.base.
+
+Client code should not access a cache backend directly; instead it should
+either use the "cache" variable made available here, or it should use the
+get_cache() function made available here. get_cache() takes a backend URI
+(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
+cache class.
+
+See docs/cache.txt for information on the public API.
+"""
+
+from cgi import parse_qsl
+from django.conf import settings
+from django.core.cache.backends.base import InvalidCacheBackendError
+
+BACKENDS = {
+ # name for use in settings file --> name of module in "backends" directory
+ 'memcached': 'memcached',
+ 'simple': 'simple',
+ 'locmem': 'locmem',
+ 'file': 'filebased',
+ 'db': 'db',
+ 'dummy': 'dummy',
+}
+
+def get_cache(backend_uri):
+ if backend_uri.find(':') == -1:
+ raise InvalidCacheBackendError, "Backend URI must start with scheme://"
+ scheme, rest = backend_uri.split(':', 1)
+ if not rest.startswith('//'):
+ raise InvalidCacheBackendError, "Backend URI must start with scheme://"
+ if scheme not in BACKENDS:
+ raise InvalidCacheBackendError, "%r is not a valid cache backend" % scheme
+
+ host = rest[2:]
+ qpos = rest.find('?')
+ if qpos != -1:
+ params = dict(parse_qsl(rest[qpos+1:]))
+ host = rest[2:qpos]
+ else:
+ params = {}
+ if host.endswith('/'):
+ host = host[:-1]
+
+ cache_class = getattr(__import__('django.core.cache.backends.%s' % BACKENDS[scheme], '', '', ['']), 'CacheClass')
+ return cache_class(host, params)
+
+cache = get_cache(settings.CACHE_BACKEND)
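
For illustration, a minimal usage sketch of the API this module exposes. The locmem URI is just an example, and importing the module-level "cache" assumes CACHE_BACKEND is defined in settings; neither line is part of the commit.

from django.core.cache import cache, get_cache

# The module-level "cache" object is built once from settings.CACHE_BACKEND.
cache.set('greeting', 'hello', 30)    # keep for 30 seconds
print cache.get('greeting')           # 'hello', or None once it expires

# An explicit backend instance can be built from any URI whose scheme
# appears in the BACKENDS dict above.
local = get_cache('locmem://')
local.set('answer', 42)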
django/core/cache/backends/base.py
@@ -0,0 +1,56 @@
+"Base Cache class."
+
+from django.core.exceptions import ImproperlyConfigured
+
+class InvalidCacheBackendError(ImproperlyConfigured):
+ pass
+
+class BaseCache:
+ def __init__(self, params):
+ timeout = params.get('timeout', 300)
+ try:
+ timeout = int(timeout)
+ except (ValueError, TypeError):
+ timeout = 300
+ self.default_timeout = timeout
+
+ def get(self, key, default=None):
+ """
+ Fetch a given key from the cache. If the key does not exist, return
+ default, which itself defaults to None.
+ """
+ raise NotImplementedError
+
+ def set(self, key, value, timeout=None):
+ """
+ Set a value in the cache. If timeout is given, that timeout will be
+ used for the key; otherwise the default cache timeout will be used.
+ """
+ raise NotImplementedError
+
+ def delete(self, key):
+ """
+ Delete a key from the cache, failing silently.
+ """
+ raise NotImplementedError
+
+ def get_many(self, keys):
+ """
+ Fetch a bunch of keys from the cache. For certain backends (memcached,
+ pgsql) this can be *much* faster when fetching multiple values.
+
+ Returns a dict mapping each key in keys to its value. If the given
+ key is missing, it will be missing from the response dict.
+ """
+ d = {}
+ for k in keys:
+ val = self.get(k)
+ if val is not None:
+ d[k] = val
+ return d
+
+ def has_key(self, key):
+ """
+ Returns True if the key is in the cache and has not expired.
+ """
+ return self.get(key) is not None
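
To illustrate the contract, here is a hypothetical dictionary-backed backend (not part of this commit, and deliberately ignoring timeouts). Only get(), set() and delete() need to be filled in, since get_many() and has_key() fall back to get(); a real backend would also need an entry in the BACKENDS dict to be reachable through get_cache().

from django.core.cache.backends.base import BaseCache

class DictCache(BaseCache):
    "Hypothetical minimal backend: a plain dict, no expiry, not thread-safe."
    def __init__(self, host, params):
        BaseCache.__init__(self, params)
        self._data = {}

    def get(self, key, default=None):
        return self._data.get(key, default)

    def set(self, key, value, timeout=None):
        self._data[key] = value

    def delete(self, key):
        try:
            del self._data[key]
        except KeyError:
            pass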
django/core/cache/backends/db.py
@@ -0,0 +1,82 @@
+"Database cache backend."
+
+from django.core.cache.backends.base import BaseCache
+from django.core.db import db, DatabaseError
+import base64, time
+from datetime import datetime
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+class CacheClass(BaseCache):
+ def __init__(self, table, params):
+ BaseCache.__init__(self, params)
+ self._table = table
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ def get(self, key, default=None):
+ cursor = db.cursor()
+ cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
+ row = cursor.fetchone()
+ if row is None:
+ return default
+ now = datetime.now()
+ if row[2] < now:
+ cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
+ db.commit()
+ return default
+ return pickle.loads(base64.decodestring(row[1]))
+
+ def set(self, key, value, timeout=None):
+ if timeout is None:
+ timeout = self.default_timeout
+ cursor = db.cursor()
+ cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+ num = cursor.fetchone()[0]
+ now = datetime.now().replace(microsecond=0)
+ exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
+ if num > self._max_entries:
+ self._cull(cursor, now)
+ encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
+ cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s" % self._table, [key])
+ try:
+ if cursor.fetchone():
+ cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table, [encoded, str(exp), key])
+ else:
+ cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table, [key, encoded, str(exp)])
+ except DatabaseError:
+ # To be threadsafe, updates/inserts are allowed to fail silently
+ pass
+ else:
+ db.commit()
+
+ def delete(self, key):
+ cursor = db.cursor()
+ cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
+ db.commit()
+
+ def has_key(self, key):
+ cursor = db.cursor()
+ cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s" % self._table, [key])
+ return cursor.fetchone() is not None
+
+ def _cull(self, cursor, now):
+ if self._cull_frequency == 0:
+ cursor.execute("DELETE FROM %s" % self._table)
+ else:
+ cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table, [str(now)])
+ cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+ num = cursor.fetchone()[0]
+ if num > self._max_entries:
+ cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
+ cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
django/core/cache/backends/dummy.py
@@ -0,0 +1,22 @@
+"Dummy cache backend"
+
+from django.core.cache.backends.base import BaseCache
+
+class CacheClass(BaseCache):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def get(self, *args, **kwargs):
+ pass
+
+ def set(self, *args, **kwargs):
+ pass
+
+ def delete(self, *args, **kwargs):
+ pass
+
+ def get_many(self, *args, **kwargs):
+ pass
+
+ def has_key(self, *args, **kwargs):
+ return False
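
A short, hypothetical illustration of where the dummy backend fits: switching caching off (e.g. in development) without touching code that talks to the cache API.

# Illustrative settings value only -- every cache operation becomes a no-op:
# set() stores nothing, get() returns None and has_key() returns False.
CACHE_BACKEND = 'dummy://'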
django/core/cache/backends/filebased.py
@@ -0,0 +1,80 @@
+"File-based cache backend"
+
+from django.core.cache.backends.simple import CacheClass as SimpleCacheClass
+import os, time, urllib
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+class CacheClass(SimpleCacheClass):
+ def __init__(self, dir, params):
+ self._dir = dir
+ if not os.path.exists(self._dir):
+ self._createdir()
+ SimpleCacheClass.__init__(self, dir, params)
+ del self._cache
+ del self._expire_info
+
+ def get(self, key, default=None):
+ fname = self._key_to_file(key)
+ try:
+ f = open(fname, 'rb')
+ exp = pickle.load(f)
+ now = time.time()
+ if exp < now:
+ f.close()
+ os.remove(fname)
+ else:
+ return pickle.load(f)
+ except (IOError, OSError, EOFError, pickle.PickleError):
+ pass
+ return default
+
+ def set(self, key, value, timeout=None):
+ fname = self._key_to_file(key)
+ if timeout is None:
+ timeout = self.default_timeout
+ try:
+ filelist = os.listdir(self._dir)
+ except (IOError, OSError):
+ self._createdir()
+ filelist = []
+ if len(filelist) > self._max_entries:
+ self._cull(filelist)
+ try:
+ f = open(fname, 'wb')
+ now = time.time()
+ pickle.dump(now + timeout, f, 2)
+ pickle.dump(value, f, 2)
+ except (IOError, OSError):
+ pass
+
+ def delete(self, key):
+ try:
+ os.remove(self._key_to_file(key))
+ except (IOError, OSError):
+ pass
+
+ def has_key(self, key):
+ return os.path.exists(self._key_to_file(key))
+
+ def _cull(self, filelist):
+ if self._cull_frequency == 0:
+ doomed = filelist
+ else:
+ doomed = [k for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]
+ for fname in doomed:
+ try:
+ os.remove(os.path.join(self._dir, fname))
+ except (IOError, OSError):
+ pass
+
+ def _createdir(self):
+ try:
+ os.makedirs(self._dir)
+ except OSError:
+ raise EnvironmentError, "Cache directory '%s' does not exist and could not be created" % self._dir
+
+ def _key_to_file(self, key):
+ return os.path.join(self._dir, urllib.quote_plus(key))
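
A hedged usage sketch for the file backend; the directory path is an arbitrary example. Each key is stored as one file in the given directory, named with urllib.quote_plus(key), and the directory is created on first use if it does not exist.

from django.core.cache import get_cache

cache = get_cache('file:///var/tmp/django_cache?timeout=120')
cache.set('rendered/front-page', '<html>...</html>')   # stored as rendered%2Ffront-page
print cache.get('rendered/front-page')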
django/core/cache/backends/locmem.py
@@ -0,0 +1,51 @@
+"Thread-safe in-memory cache backend."
+
+from django.core.cache.backends.simple import CacheClass as SimpleCacheClass
+from django.utils.synch import RWLock
+import copy, time
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+class CacheClass(SimpleCacheClass):
+ def __init__(self, host, params):
+ SimpleCacheClass.__init__(self, host, params)
+ self._lock = RWLock()
+
+ def get(self, key, default=None):
+ should_delete = False
+ self._lock.reader_enters()
+ try:
+ now = time.time()
+ exp = self._expire_info.get(key)
+ if exp is None:
+ return default
+ elif exp < now:
+ should_delete = True
+ else:
+ return copy.deepcopy(self._cache[key])
+ finally:
+ self._lock.reader_leaves()
+ if should_delete:
+ self._lock.writer_enters()
+ try:
+ del self._cache[key]
+ del self._expire_info[key]
+ return default
+ finally:
+ self._lock.writer_leaves()
+
+ def set(self, key, value, timeout=None):
+ self._lock.writer_enters()
+ try:
+ SimpleCacheClass.set(self, key, value, timeout)
+ finally:
+ self._lock.writer_leaves()
+
+ def delete(self, key):
+ self._lock.writer_enters()
+ try:
+ SimpleCacheClass.delete(self, key)
+ finally:
+ self._lock.writer_leaves()
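
A hedged sketch of the in-memory backend (storage itself goes through the simple backend, whose hunk is not shown here). Note that get() returns a deep copy, so mutating the returned object does not change what is cached.

from django.core.cache import get_cache

cache = get_cache('locmem://')           # per-process, thread-safe store
cache.set('tags', ['django', 'cache'])

tags = cache.get('tags')
tags.append('local change only')         # the cached list is untouched
print cache.get('tags')                  # ['django', 'cache']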
django/core/cache/backends/memcached.py
@@ -0,0 +1,29 @@
+"Memcached cache backend"
+
+from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
+
+try:
+ import memcache
+except ImportError:
+ raise InvalidCacheBackendError, "Memcached cache backend requires the 'memcache' library"
+
+class CacheClass(BaseCache):
+ def __init__(self, server, params):
+ BaseCache.__init__(self, params)
+ self._cache = memcache.Client(server.split(';'))
+
+ def get(self, key, default=None):
+ val = self._cache.get(key)
+ if val is None:
+ return default
+ else:
+ return val
+
+ def set(self, key, value, timeout=0):
+ self._cache.set(key, value, timeout)
+
+ def delete(self, key):
+ self._cache.delete(key)
+
+ def get_many(self, keys):
+ return self._cache.get_multi(keys)
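
A hedged usage sketch, using the URI form from the package docstring; it assumes the external 'memcache' client library is installed and a memcached server is listening on the given address.

from django.core.cache import get_cache

# Several servers may be listed, separated by semicolons.
cache = get_cache('memcached://127.0.0.1:11211/')
cache.set('session-abc123', {'user_id': 4}, 300)          # 300-second timeout
print cache.get_many(['session-abc123', 'no-such-key'])   # only existing keys are returned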