
Commit

Made the database cache backend, which bypasses the ORM, compatible with time zone support.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@17119 bcc190cf-cafb-0310-a4f2-bffc1f526a37
aaugustin committed Nov 20, 2011
1 parent f6ee168 commit 7075e93
Showing 3 changed files with 59 additions and 18 deletions.

django/core/cache/backends/db.py: 52 additions, 15 deletions

@@ -8,8 +8,10 @@
 except ImportError:
     import pickle

+from django.conf import settings
 from django.core.cache.backends.base import BaseCache
 from django.db import connections, router, transaction, DatabaseError
+from django.utils import timezone


 class Options(object):
@@ -38,22 +40,35 @@ class CacheEntry(object):
             self.cache_model_class = CacheEntry

 class DatabaseCache(BaseDatabaseCache):
+
+    # This class uses cursors provided by the database connection. This means
+    # it reads expiration values as aware or naive datetimes depending on the
+    # value of USE_TZ. They must be compared to aware or naive representations
+    # of "now" respectively.
+
+    # But it bypasses the ORM for write operations. As a consequence, aware
+    # datetimes aren't made naive for databases that don't support time zones.
+    # We work around this problem by always using naive datetimes when writing
+    # expiration values, in UTC when USE_TZ = True and in local time otherwise.
+
     def get(self, key, default=None, version=None):
         key = self.make_key(key, version=version)
         self.validate_key(key)
         db = router.db_for_read(self.cache_model_class)
         table = connections[db].ops.quote_name(self._table)
         cursor = connections[db].cursor()

-        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % table, [key])
+        cursor.execute("SELECT cache_key, value, expires FROM %s "
+                       "WHERE cache_key = %%s" % table, [key])
         row = cursor.fetchone()
         if row is None:
             return default
-        now = datetime.now()
+        now = timezone.now()
         if row[2] < now:
             db = router.db_for_write(self.cache_model_class)
             cursor = connections[db].cursor()
-            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+            cursor.execute("DELETE FROM %s "
+                           "WHERE cache_key = %%s" % table, [key])
             transaction.commit_unless_managed(using=db)
             return default
         value = connections[db].ops.process_clob(row[1])
@@ -78,20 +93,28 @@ def _base_set(self, mode, key, value, timeout=None):

         cursor.execute("SELECT COUNT(*) FROM %s" % table)
         num = cursor.fetchone()[0]
-        now = datetime.now().replace(microsecond=0)
-        exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
+        now = timezone.now()
+        now = now.replace(microsecond=0)
+        if settings.USE_TZ:
+            exp = datetime.utcfromtimestamp(time.time() + timeout)
+        else:
+            exp = datetime.fromtimestamp(time.time() + timeout)
+        exp = exp.replace(microsecond=0)
         if num > self._max_entries:
             self._cull(db, cursor, now)
         encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
-        cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % table, [key])
+        cursor.execute("SELECT cache_key, expires FROM %s "
+                       "WHERE cache_key = %%s" % table, [key])
         try:
             result = cursor.fetchone()
             if result and (mode == 'set' or
                            (mode == 'add' and result[1] < now)):
-                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % table,
+                cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
+                               "WHERE cache_key = %%s" % table,
                                [encoded, connections[db].ops.value_to_db_datetime(exp), key])
             else:
-                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % table,
+                cursor.execute("INSERT INTO %s (cache_key, value, expires) "
+                               "VALUES (%%s, %%s, %%s)" % table,
                                [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
         except DatabaseError:
             # To be threadsafe, updates/inserts are allowed to fail silently
@@ -120,15 +143,22 @@ def has_key(self, key, version=None):
         table = connections[db].ops.quote_name(self._table)
         cursor = connections[db].cursor()

-        now = datetime.now().replace(microsecond=0)
-        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % table,
+        if settings.USE_TZ:
+            now = datetime.utcnow()
+        else:
+            now = datetime.now()
+        now = now.replace(microsecond=0)
+        cursor.execute("SELECT cache_key FROM %s "
+                       "WHERE cache_key = %%s and expires > %%s" % table,
                        [key, connections[db].ops.value_to_db_datetime(now)])
         return cursor.fetchone() is not None

     def _cull(self, db, cursor, now):
         if self._cull_frequency == 0:
             self.clear()
         else:
+            # When USE_TZ is True, 'now' will be an aware datetime in UTC.
+            now = now.replace(tzinfo=None)
             table = connections[db].ops.quote_name(self._table)
             cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                            [connections[db].ops.value_to_db_datetime(now)])
@@ -137,12 +167,19 @@ def _cull(self, db, cursor, now):
             if num > self._max_entries:
                 cull_num = num / self._cull_frequency
                 if connections[db].vendor == 'oracle':
-                    # Special case for Oracle because it doesn't support LIMIT + OFFSET
-                    cursor.execute("SELECT cache_key FROM (SELECT ROW_NUMBER() OVER (ORDER BY cache_key) AS counter, cache_key FROM %s) WHERE counter > %%s AND COUNTER <= %%s" % table, [cull_num, cull_num + 1])
+                    # Oracle doesn't support LIMIT + OFFSET
+                    cursor.execute("""SELECT cache_key FROM
+  (SELECT ROW_NUMBER() OVER (ORDER BY cache_key) AS counter, cache_key FROM %s)
+  WHERE counter > %%s AND COUNTER <= %%s""" % table, [cull_num, cull_num + 1])
                 else:
-                    # This isn't standard SQL, it's likely to break with some non officially supported databases
-                    cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % table, [cull_num])
-                    cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % table, [cursor.fetchone()[0]])
+                    # This isn't standard SQL, it's likely to break
+                    # with some non officially supported databases
+                    cursor.execute("SELECT cache_key FROM %s "
+                                   "ORDER BY cache_key "
+                                   "LIMIT 1 OFFSET %%s" % table, [cull_num])
+                    cursor.execute("DELETE FROM %s "
+                                   "WHERE cache_key < %%s" % table,
+                                   [cursor.fetchone()[0]])

     def clear(self):
         db = router.db_for_write(self.cache_model_class)
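
The comment block added to DatabaseCache is the heart of the change: reads go through the database cursor and come back aware or naive depending on USE_TZ, while writes bypass the ORM and therefore always store naive expiration timestamps, in UTC when USE_TZ = True and in local time otherwise. Below is a minimal standalone sketch of that write-side rule; the helper name and the use_tz argument are illustrative (the backend reads settings.USE_TZ instead), but the two datetime calls mirror the patched _base_set:

    import time
    from datetime import datetime

    def naive_expiry(timeout, use_tz):
        # Aware datetimes can't be written through raw SQL to databases that
        # lack time zone support, so the stored value is always naive:
        # UTC when time zone support is on, local time otherwise.
        if use_tz:
            exp = datetime.utcfromtimestamp(time.time() + timeout)
        else:
            exp = datetime.fromtimestamp(time.time() + timeout)
        return exp.replace(microsecond=0)

    # naive_expiry(300, use_tz=True)  -> naive UTC datetime 5 minutes from now
    # naive_expiry(300, use_tz=False) -> naive local datetime 5 minutes from now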
tests/regressiontests/cache/models.py: 2 additions, 2 deletions

@@ -1,11 +1,11 @@
-from datetime import datetime
+from django.utils import timezone

 from django.db import models


 def expensive_calculation():
     expensive_calculation.num_runs += 1
-    return datetime.now()
+    return timezone.now()

 class Poll(models.Model):
     question = models.CharField(max_length=200)
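
The test model swaps datetime.now() for timezone.now() so that expensive_calculation() returns an aware datetime whenever USE_TZ = True. As a rough illustration of that contract only (not Django's actual implementation, and assuming a configured settings module), timezone.now() behaves like:

    from datetime import datetime

    from django.conf import settings
    from django.utils import timezone

    def now_like_timezone_now():
        # Aware UTC datetime when USE_TZ = True, naive local datetime otherwise.
        if settings.USE_TZ:
            return datetime.utcnow().replace(tzinfo=timezone.utc)
        return datetime.now()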
tests/regressiontests/cache/tests.py: 5 additions, 1 deletion

@@ -746,7 +746,7 @@ def custom_key_func(key, key_prefix, version):
     return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])


-class DBCacheTests(unittest.TestCase, BaseCacheTests):
+class DBCacheTests(BaseCacheTests, TestCase):
     backend_name = 'django.core.cache.backends.db.DatabaseCache'

     def setUp(self):
@@ -763,6 +763,7 @@ def tearDown(self):
         from django.db import connection
         cursor = connection.cursor()
         cursor.execute('DROP TABLE %s' % connection.ops.quote_name(self._table_name))
+        connection.commit()

     def test_cull(self):
         self.perform_cull_test(50, 29)
@@ -776,6 +777,9 @@ def test_old_initialization(self):
         self.perform_cull_test(50, 18)


+DBCacheWithTimeZoneTests = override_settings(USE_TZ=True)(DBCacheTests)
+
+
 class DBCacheRouter(object):
     """A router that puts the cache table on the 'other' database."""

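
Finally, DBCacheWithTimeZoneTests = override_settings(USE_TZ=True)(DBCacheTests) reruns the entire DBCacheTests suite with time zone support switched on. Here is the same pattern on a made-up test case, assuming a configured Django test environment (ExampleCacheTests and its test are hypothetical):

    from django.conf import settings
    from django.test import TestCase
    from django.test.utils import override_settings

    class ExampleCacheTests(TestCase):
        # Hypothetical stand-in for DBCacheTests.
        def test_sees_use_tz(self):
            self.assertIn(settings.USE_TZ, (True, False))

    # Decorating the class and binding the result to a second name yields a
    # test case whose tests all run with USE_TZ = True, mirroring
    # DBCacheWithTimeZoneTests above.
    ExampleCacheTestsWithTZ = override_settings(USE_TZ=True)(ExampleCacheTests)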
