Made the database cache backend, which bypasses the ORM, compatible with time zone support.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@17119 bcc190cf-cafb-0310-a4f2-bffc1f526a37
1 parent f6ee168 · commit 7075e932565d3a28d53ff014f0e33ce46df53496 · aaugustin committed
Showing with 59 additions and 18 deletions.
  1. +52 −15 django/core/cache/backends/db.py
  2. +2 −2 tests/regressiontests/cache/models.py
  3. +5 −1 tests/regressiontests/cache/tests.py
67 django/core/cache/backends/db.py
@@ -8,8 +8,10 @@
 except ImportError:
     import pickle
+from django.conf import settings
 from django.core.cache.backends.base import BaseCache
 from django.db import connections, router, transaction, DatabaseError
+from django.utils import timezone
 class Options(object):
@@ -38,6 +40,17 @@ class CacheEntry(object):
         self.cache_model_class = CacheEntry
 class DatabaseCache(BaseDatabaseCache):
+
+    # This class uses cursors provided by the database connection. This means
+    # it reads expiration values as aware or naive datetimes depending on the
+    # value of USE_TZ. They must be compared to aware or naive representations
+    # of "now" respectively.
+
+    # But it bypasses the ORM for write operations. As a consequence, aware
+    # datetimes aren't made naive for databases that don't support time zones.
+    # We work around this problem by always using naive datetimes when writing
+    # expiration values, in UTC when USE_TZ = True and in local time otherwise.
+
     def get(self, key, default=None, version=None):
         key = self.make_key(key, version=version)
         self.validate_key(key)
@@ -45,15 +58,17 @@ def get(self, key, default=None, version=None):
         table = connections[db].ops.quote_name(self._table)
         cursor = connections[db].cursor()
-        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % table, [key])
+        cursor.execute("SELECT cache_key, value, expires FROM %s "
+                       "WHERE cache_key = %%s" % table, [key])
         row = cursor.fetchone()
         if row is None:
             return default
-        now = datetime.now()
+        now = timezone.now()
         if row[2] < now:
             db = router.db_for_write(self.cache_model_class)
             cursor = connections[db].cursor()
-            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+            cursor.execute("DELETE FROM %s "
+                           "WHERE cache_key = %%s" % table, [key])
             transaction.commit_unless_managed(using=db)
             return default
         value = connections[db].ops.process_clob(row[1])
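
The read path above works because timezone.now() adapts to USE_TZ. The following standalone sketch (not part of the commit; sketch_now is a hypothetical stand-in for django.utils.timezone.now) shows why the comparison with row[2] stays consistent in both modes:

from datetime import datetime
from django.utils.timezone import utc   # tzinfo object shipped with Django 1.4+

def sketch_now(use_tz):
    # Aware UTC datetime when USE_TZ is on, naive local datetime otherwise,
    # which is what the cursor returns for the "expires" column in each mode.
    if use_tz:
        return datetime.utcnow().replace(tzinfo=utc)
    return datetime.now()

print(sketch_now(True).tzinfo)    # UTC  -> aware, comparable with aware rows
print(sketch_now(False).tzinfo)   # None -> naive, comparable with naive rows
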
@@ -78,20 +93,28 @@ def _base_set(self, mode, key, value, timeout=None):
         cursor.execute("SELECT COUNT(*) FROM %s" % table)
         num = cursor.fetchone()[0]
-        now = datetime.now().replace(microsecond=0)
-        exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
+        now = timezone.now()
+        now = now.replace(microsecond=0)
+        if settings.USE_TZ:
+            exp = datetime.utcfromtimestamp(time.time() + timeout)
+        else:
+            exp = datetime.fromtimestamp(time.time() + timeout)
+        exp = exp.replace(microsecond=0)
         if num > self._max_entries:
             self._cull(db, cursor, now)
         encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
-        cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % table, [key])
+        cursor.execute("SELECT cache_key, expires FROM %s "
+                       "WHERE cache_key = %%s" % table, [key])
         try:
             result = cursor.fetchone()
             if result and (mode == 'set' or
                     (mode == 'add' and result[1] < now)):
-                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % table,
+                cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
+                               "WHERE cache_key = %%s" % table,
                                [encoded, connections[db].ops.value_to_db_datetime(exp), key])
             else:
-                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % table,
+                cursor.execute("INSERT INTO %s (cache_key, value, expires) "
+                               "VALUES (%%s, %%s, %%s)" % table,
                                [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
         except DatabaseError:
             # To be threadsafe, updates/inserts are allowed to fail silently
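
Note on the write path: expiration timestamps are always stored as naive datetimes, in UTC when USE_TZ = True and in local time otherwise. A minimal sketch of that computation (sketch_expiry is a hypothetical helper, not the backend's API):

import time
from datetime import datetime

def sketch_expiry(timeout, use_tz):
    # Always return a *naive* datetime so databases without time zone support
    # never receive an aware value: UTC if use_tz, local time otherwise.
    if use_tz:
        exp = datetime.utcfromtimestamp(time.time() + timeout)
    else:
        exp = datetime.fromtimestamp(time.time() + timeout)
    return exp.replace(microsecond=0)

print(sketch_expiry(300, use_tz=True))    # e.g. 2011-11-20 16:05:00 (naive UTC)
print(sketch_expiry(300, use_tz=False))   # the same instant in naive local time
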
@@ -120,8 +143,13 @@ def has_key(self, key, version=None):
         table = connections[db].ops.quote_name(self._table)
         cursor = connections[db].cursor()
-        now = datetime.now().replace(microsecond=0)
-        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % table,
+        if settings.USE_TZ:
+            now = datetime.utcnow()
+        else:
+            now = datetime.now()
+        now = now.replace(microsecond=0)
+        cursor.execute("SELECT cache_key FROM %s "
+                       "WHERE cache_key = %%s and expires > %%s" % table,
                        [key, connections[db].ops.value_to_db_datetime(now)])
         return cursor.fetchone() is not None
@@ -129,6 +157,8 @@ def _cull(self, db, cursor, now):
         if self._cull_frequency == 0:
             self.clear()
         else:
+            # When USE_TZ is True, 'now' will be an aware datetime in UTC.
+            now = now.replace(tzinfo=None)
             table = connections[db].ops.quote_name(self._table)
             cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                            [connections[db].ops.value_to_db_datetime(now)])
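
Note: when USE_TZ is True, _cull receives an aware UTC datetime and strips the tzinfo so the value matches the naive expiration timestamps written by _base_set. A quick standalone illustration (assuming only that django.utils.timezone.utc is importable):

from datetime import datetime
from django.utils.timezone import utc

aware_now = datetime.utcnow().replace(tzinfo=utc)   # what the caller passes under USE_TZ
naive_now = aware_now.replace(tzinfo=None)          # what gets compared to stored rows

print(aware_now.tzinfo)                    # UTC
print(naive_now.tzinfo)                    # None
print(aware_now.hour == naive_now.hour)    # True: only tzinfo is dropped, not the clock value
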
@@ -137,12 +167,19 @@ def _cull(self, db, cursor, now):
             if num > self._max_entries:
                 cull_num = num / self._cull_frequency
                 if connections[db].vendor == 'oracle':
-                    # Special case for Oracle because it doesn't support LIMIT + OFFSET
-                    cursor.execute("SELECT cache_key FROM (SELECT ROW_NUMBER() OVER (ORDER BY cache_key) AS counter, cache_key FROM %s) WHERE counter > %%s AND COUNTER <= %%s" % table, [cull_num, cull_num + 1])
+                    # Oracle doesn't support LIMIT + OFFSET
+                    cursor.execute("""SELECT cache_key FROM
+(SELECT ROW_NUMBER() OVER (ORDER BY cache_key) AS counter, cache_key FROM %s)
+WHERE counter > %%s AND COUNTER <= %%s""" % table, [cull_num, cull_num + 1])
                 else:
-                    # This isn't standard SQL, it's likely to break with some non officially supported databases
-                    cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % table, [cull_num])
-                cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % table, [cursor.fetchone()[0]])
+                    # This isn't standard SQL, it's likely to break
+                    # with some non officially supported databases
+                    cursor.execute("SELECT cache_key FROM %s "
+                                   "ORDER BY cache_key "
+                                   "LIMIT 1 OFFSET %%s" % table, [cull_num])
+                cursor.execute("DELETE FROM %s "
+                               "WHERE cache_key < %%s" % table,
+                               [cursor.fetchone()[0]])
     def clear(self):
         db = router.db_for_write(self.cache_model_class)
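
For reference, the culling strategy itself is unchanged: select the cache_key at offset cull_num (via LIMIT/OFFSET, or the ROW_NUMBER() subquery on Oracle) and delete every key that sorts before it. A hedged standalone illustration of that strategy using sqlite3 and a made-up table name:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE cache_table (cache_key TEXT PRIMARY KEY)")
conn.executemany("INSERT INTO cache_table VALUES (?)",
                 [("key%02d" % i,) for i in range(10)])

cull_num = 10 // 3   # number of entries divided by the cull frequency
row = conn.execute("SELECT cache_key FROM cache_table "
                   "ORDER BY cache_key LIMIT 1 OFFSET ?", [cull_num]).fetchone()
conn.execute("DELETE FROM cache_table WHERE cache_key < ?", [row[0]])

print(conn.execute("SELECT COUNT(*) FROM cache_table").fetchone()[0])   # 7 keys remain
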
4 tests/regressiontests/cache/models.py
@@ -1,11 +1,11 @@
-from datetime import datetime
+from django.utils import timezone
 from django.db import models
 def expensive_calculation():
     expensive_calculation.num_runs += 1
-    return datetime.now()
+    return timezone.now()
 class Poll(models.Model):
     question = models.CharField(max_length=200)
6 tests/regressiontests/cache/tests.py
@@ -746,7 +746,7 @@ def custom_key_func(key, key_prefix, version):
     return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
-class DBCacheTests(unittest.TestCase, BaseCacheTests):
+class DBCacheTests(BaseCacheTests, TestCase):
     backend_name = 'django.core.cache.backends.db.DatabaseCache'
     def setUp(self):
@@ -763,6 +763,7 @@ def tearDown(self):
         from django.db import connection
         cursor = connection.cursor()
         cursor.execute('DROP TABLE %s' % connection.ops.quote_name(self._table_name))
+        connection.commit()
     def test_cull(self):
         self.perform_cull_test(50, 29)
@@ -776,6 +777,9 @@ def test_old_initialization(self):
         self.perform_cull_test(50, 18)
+DBCacheWithTimeZoneTests = override_settings(USE_TZ=True)(DBCacheTests)
+
+
 class DBCacheRouter(object):
     """A router that puts the cache table on the 'other' database."""
