Skip to content

Commit

Permalink
Merge pull request #738 from untitaker/master
Browse files Browse the repository at this point in the history
Add has() to caches
  • Loading branch information
untitaker committed Jun 14, 2015
2 parents d4e8b3f + 148b646 commit 768370b
Show file tree
Hide file tree
Showing 3 changed files with 54 additions and 0 deletions.
1 change: 1 addition & 0 deletions CHANGES
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ Version 0.11
an issue with redis setex (issue ``#550``)
- Werkzeug now assumes ``UTF-8`` as filesystem encoding on Unix if Python
detected it as ASCII.
- New optional ``has`` method on caches.

Version 0.10.5
--------------
Expand Down
10 changes: 10 additions & 0 deletions tests/contrib/test_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,16 @@ def test_generic_timeout(self, c, fast_sleep):
fast_sleep(timeout + 1)
assert c.get('foo') is None

def test_generic_has(self, c):
assert c.has('foo') in (False, 0)
assert c.has('spam') in (False, 0)
assert c.set('foo', 'bar')
assert c.has('foo') in (True, 1)
assert c.has('spam') in (False, 0)
c.delete('foo')
assert c.has('foo') in (False, 0)
assert c.has('spam') in (False, 0)


class TestSimpleCache(CacheTests):

Expand Down
43 changes: 43 additions & 0 deletions werkzeug/contrib/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,21 @@ def delete_many(self, *keys):
"""
return all(self.delete(key) for key in keys)

def has(self, key):
    """Checks if a key exists in the cache without returning it.  This
    is a cheap operation that bypasses loading the actual data on the
    backend.

    This method is optional and may not be implemented on all caches.

    :param key: the key to check
    :raises NotImplementedError: if the backend has no efficient way to
        answer without fully loading the key's data.
    """
    # Interpolate the concrete class name so the error names the backend
    # that lacks `has` (previously the '%s' placeholder was never filled
    # in and appeared literally in the message).
    raise NotImplementedError(
        '%s doesn\'t have an efficient implementation of `has`. That '
        'means it is impossible to check whether a key exists without '
        'fully loading the key\'s data. Consider using `self.get` '
        'explicitly if you don\'t care about performance.'
        % self.__class__.__name__
    )

def clear(self):
"""Clears the cache. Keep in mind that not all caches support
completely clearing the cache.
Expand Down Expand Up @@ -309,6 +324,12 @@ def add(self, key, value, timeout=None):
def delete(self, key):
    """Drop *key* from the in-memory cache.

    :return: ``True`` if an entry existed and was removed, ``False``
             otherwise.
    """
    previous = self._cache.pop(key, None)
    return previous is not None

def has(self, key):
    """Cheap existence check: look up the entry and compare its stored
    expiry against the current time without touching the cached value
    itself.  An expiry of ``0`` means the entry never expires.

    :param key: the key to check
    """
    entry = self._cache.get(key)
    if entry is None:
        return False
    expires = entry[0]
    return expires == 0 or expires > time()

_test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match

Expand Down Expand Up @@ -441,6 +462,12 @@ def delete_many(self, *keys):
new_keys.append(key)
return self._client.delete_multi(new_keys)

def has(self, key):
    """Cheaply test for *key* by appending an empty string to it.

    Memcached's ``append`` only succeeds when the key already exists,
    so its return value doubles as an existence check without having to
    transfer the stored value.  Keys memcached cannot represent are
    reported as absent.

    :param key: the key to check
    """
    key = self._normalize_key(key)
    if not _test_memcached_key(key):
        return False
    return self._client.append(key, '')

def clear(self):
return self._client.flush_all()

Expand Down Expand Up @@ -619,6 +646,9 @@ def delete_many(self, *keys):
keys = [self.key_prefix + key for key in keys]
return self._client.delete(*keys)

def has(self, key):
    """Check for *key* via the Redis ``EXISTS`` command.

    :param key: the key to check; the configured ``key_prefix`` is
                prepended before querying Redis.
    """
    prefixed = self.key_prefix + key
    return self._client.exists(prefixed)

def clear(self):
status = False
if self.key_prefix:
Expand Down Expand Up @@ -750,3 +780,16 @@ def delete(self, key):
return False
else:
return True

def has(self, key):
    """Cheaply check whether *key* has a non-expired entry on disk.

    Only the pickled expiry timestamp at the head of the cache file is
    read; the stored value is never deserialized.  A stale file is
    removed as a side effect, and any I/O or unpickling problem is
    treated as "not cached".

    :param key: the key to check
    """
    path = self._get_filename(key)
    try:
        with open(path, 'rb') as handle:
            expires = pickle.load(handle)
            if expires == 0 or expires >= time():
                return True
            # Stale entry: clean it up while we are here.
            os.remove(path)
            return False
    except (IOError, OSError, pickle.PickleError):
        return False

0 comments on commit 768370b

Please sign in to comment.