-
Notifications
You must be signed in to change notification settings - Fork 31
/
local_sync_cache.py
259 lines (201 loc) · 9.44 KB
/
local_sync_cache.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
# coding: utf-8
"""
Local sync cache
~~~~~~~~~~~~~~~~
The Problem:
If you use a normal dict for cache some values, you can't clear it in
a multi-threaded environment, because you have no access to the dict in
other threads.
The Solution:
Use LocalSyncCache() as a cache dict. If dict.clear() is called, the dict
in all threads will be cleared.
How it works:
* Every cache dict memorizes its own creation/reset time.
* in dict.clear() the reset time would be saved
into django cache (to share it with all threads)
* On every request the LocalSyncCacheMiddleware calls every existing cache dict
in the current thread to look into the shared django cache, to see if they
are outdated or not. If they are outdated, the dict will be cleared.
usage
~~~~~
Add LocalSyncCacheMiddleware to settings, e.g:
---------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
...
'django_tools.local_sync_cache.LocalSyncCacheMiddleware.LocalSyncCacheMiddleware',
...
)
---------------------------------------------------------------------------
Create a cache dict with a id.
Use it in a model, e.g.:
---------------------------------------------------------------------------
from django.db import models
from django_tools.local_sync_cache.local_sync_cache import LocalSyncCache
class PageTree(models.Model):
parent = models.ForeignKey("self", null=True, blank=True)
slug = models.SlugField()
_url_cache = LocalSyncCache(id="PageTree_absolute_url") # <<<---
def get_absolute_url(self):
if self.pk in self._url_cache:
return self._url_cache[self.pk]
if self.parent:
parent_url = self.parent.get_absolute_url()
url = parent_url + self.slug + "/"
else:
url = "/" + self.slug + "/"
self._url_cache[self.pk] = url
return url
def save(self, *args, **kwargs):
self._url_cache.clear() # Clean the local url cache dict
return super(PageTree, self).save(*args, **kwargs)
---------------------------------------------------------------------------
logging
~~~~~~~
To enable logging, add this to your settings, e.g.:
import logging
logger = logging.getLogger("django_tools.local_sync_cache")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.FileHandler("local_sync_cache.log"))
:copyleft: 2011-2015 by the django-tools team, see AUTHORS for more details.
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
from __future__ import absolute_import, division, print_function
import logging
import sys
import time
import datetime
from django.conf import settings
from django.core.cache import caches
logger = logging.getLogger("django_tools.local_sync_cache")
LOCAL_SYNC_CACHE_BACKEND = getattr(settings, "LOCAL_SYNC_CACHE_BACKEND", "local_sync_cache")
def _get_cache():
    """
    Return the shared django cache object.

    Prefer the backend named by LOCAL_SYNC_CACHE_BACKEND; if it is not
    configured in settings.CACHES, fall back to the 'default' backend.
    """
    if LOCAL_SYNC_CACHE_BACKEND in settings.CACHES:
        cache_name = LOCAL_SYNC_CACHE_BACKEND
    else:
        # TODO: Not needed in django v1.4: https://code.djangoproject.com/ticket/16410
        msg = "You should define a '%s' cache in your settings.CACHES (use default cache)" % LOCAL_SYNC_CACHE_BACKEND
        logger.critical(msg)
        cache_name = "default"  # fallback to default cache entry

    # Warn about backends that cannot actually share state between threads/processes.
    backend = settings.CACHES[cache_name]["BACKEND"]
    if "dummy" in backend or "locmem" in backend:
        msg = "You should use Memcache, FileBasedCache or DatabaseCache and not: %s" % backend
        logger.critical(msg)

    django_cache = caches[cache_name]
    logger.debug("Use django '%s' cache: %r" % (cache_name, django_cache))
    return django_cache
class LocalSyncCache(dict):
    """
    A cache dict that can be cleared across all threads/processes.

    clear() empties the local dict and stores the reset timestamp in the
    shared django cache. check_state() (normally called once per request,
    e.g. by LocalSyncCacheMiddleware) compares the shared timestamp with
    the local one and empties the local dict if it is outdated.
    """
    # Counts how often __init__ was used per id - should always be 1!
    INIT_COUNTER = {}

    # Stores all existing instances, used in middleware to call check_state()
    CACHES = []

    # Store the last reset times secondary in this local thread.
    _OWN_RESET_TIMES = {}

    def __init__(self, id=None, unique_ids=True):
        """
        :param id: unique identifier, also used as key in the shared django cache.
        :param unique_ids: if True, log an error when the id was already used.
        :raises AssertionError: if no id was given.
        """
        if id is None:
            raise AssertionError("LocalSyncCache must take a id as argument.")

        if unique_ids:
            # Only log (don't raise): a duplicate id breaks cross-thread sync
            # semantics but should not crash startup.
            for existing_cache in self.CACHES:
                if id == existing_cache.id:
                    logger.error(
                        "ID %r was already used! It must be unique! (Existing ids are: %s)" % (
                            id, repr([i.id for i in self.CACHES])
                        )
                    )

        self.id = id
        self.django_cache = _get_cache()
        self.last_reset = time.time()  # Save last creation/reset time

        self.CACHES.append(self)

        if self.id not in self.INIT_COUNTER:
            self.INIT_COUNTER[self.id] = 1
        else:
            logger.error("Error: __init__ for %s was called too often!" % self.id)
            self.INIT_COUNTER[self.id] += 1

        self.request_counter = 0  # Counts how often check_state called (normally called one time per request)
        self.own_clear_counter = 0  # Counts how often clear called in this thread
        self.ext_clear_counter = 0  # Counts how often clears from external thread
        logger.debug("%r __init__" % id)

    def check_state(self):
        """
        Check if we are out-dated or not.
        Should be called at the start of a request. e.g.: by middleware
        """
        self.request_counter += 1
        global_update_time = self.django_cache.get(self.id)
        if global_update_time is None:
            if self.id in self._OWN_RESET_TIMES:
                # clear() was called in the past in this thread and it
                # is not stored in the django cache -> resave it
                logger.info("Resave %r last reset time in cache" % self.id)
                self.django_cache.set(self.id, self._OWN_RESET_TIMES[self.id])
        elif self.last_reset < global_update_time:
            # We have out-dated data -> reset dict
            self.ext_clear_counter += 1
            logger.info("%r out-dated data -> reset (global_update_time: %r - self.last_reset: %r)" % (self.id, global_update_time, self.last_reset))
            dict.clear(self)
            self.last_reset = time.time()

            if self.id in self._OWN_RESET_TIMES:
                # In this thread clear() was called in the past and now in
                # a other thread clear() was called.
                del self._OWN_RESET_TIMES[self.id]
                logger.debug("remove %r from _OWN_RESET_TIMES" % self.id)

    def clear(self):
        """
        Must be called from the process/thread which changes the data.
        * Clear the dict
        * Save clear time in django cache and in self._OWN_RESET_TIMES
        """
        self.own_clear_counter += 1
        dict.clear(self)
        self.last_reset = time.time()
        self.django_cache.set(self.id, self.last_reset)
        logger.info("%r - dict.clear - Set global_update_time to %r" % (self.id, self.last_reset))

        # Save reset time in this thread for re-adding it to cache in check_state()
        self._OWN_RESET_TIMES[self.id] = self.last_reset

        # Check if cache worked (detects dummy/broken backends early)
        cached_value = self.django_cache.get(self.id)
        if cached_value != self.last_reset:
            logger.error("Cache seems not to work: %r != %r" % (cached_value, self.last_reset))

    @staticmethod
    def get_cache_information():
        """
        Return a list of dicts with debug information about every
        registered LocalSyncCache instance.
        """
        cache_information = []
        django_cache = _get_cache()
        for instance in LocalSyncCache.CACHES:
            try:
                instance_size = sys.getsizeof(instance)  # New in version 2.6
            except (AttributeError, TypeError):  # PyPy raised a TypeError
                instance_size = None

            id = instance.id
            cleared = id in LocalSyncCache._OWN_RESET_TIMES

            global_update_time = django_cache.get(id)
            last_reset_datetime = datetime.datetime.fromtimestamp(instance.last_reset)

            if global_update_time:
                global_update_datetime = datetime.datetime.fromtimestamp(global_update_time)
            else:
                global_update_datetime = None

            cache_information.append({
                "instance": instance,
                "length": len(instance),
                "size": instance_size,
                "cleared": cleared,
                "global_update_time": global_update_time,
                "global_update_datetime": global_update_datetime,
                "last_reset_datetime": last_reset_datetime,
                "init_counter": LocalSyncCache.INIT_COUNTER[id],
            })
        return cache_information

    @staticmethod
    def pformat_cache_information():
        """
        Return get_cache_information() rendered as a human-readable string
        (one section per instance, key/value pairs right-aligned).
        """
        output = []
        attributes = ("id", "request_counter", "own_clear_counter", "ext_clear_counter")

        cache_information = LocalSyncCache.get_cache_information()
        for item in cache_information:
            output.append(" -" * 40)
            instance = item["instance"]
            for attr in attributes:
                output.append("%22s: %s" % (attr, getattr(instance, attr)))
            for key, value in item.items():
                output.append("%22s: %r" % (key, value))
        return "\n".join(output)