Skip to content
Permalink
Browse files
Add pymemcache backend
Added support for the pymemcache backend, using the
``"dogpile.cache.pymemcache"`` backend identifier. Pull request courtesy
Moisés Guimarães de Medeiros.

Also includes some refinements to two of the most commonly failing
"timing intensive" tests so that they are more reliable.

Co-authored-by: Mike Bayer <mike_mp@zzzcomputing.com>
Fixes: #134

Closes: #197
Pull-request: #197
Pull-request-sha: 29ec25a

Change-Id: Id73faeb9a7262683921ea14555a29288c3183efd
  • Loading branch information
moisesguimaraes authored and zzzeek committed Jan 25, 2021
1 parent ec352d5 commit a2e25bc743a48d91705d944e52aa667ebff10943
Show file tree
Hide file tree
Showing 6 changed files with 177 additions and 10 deletions.
@@ -0,0 +1,11 @@
.. change::
:tags: feature, memcached
:tickets: 134

Added support for the pymemcache backend, using the
``"dogpile.cache.pymemcache"`` backend identifier. Pull request courtesy
Moisés Guimarães de Medeiros.

.. seealso::

:class:`.PyMemcacheBackend`
@@ -24,6 +24,11 @@
"dogpile.cache.backends.memcached",
"MemcachedBackend",
)
# Register the pymemcache backend under the "dogpile.cache.pymemcache"
# identifier; the backend class is imported lazily from the named module
# only when the backend is actually configured.
register_backend(
    "dogpile.cache.pymemcache",
    "dogpile.cache.backends.memcached",
    "PyMemcacheBackend",
)
register_backend(
    "dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend"
)
@@ -21,16 +21,19 @@
import memcache
import pylibmc
import bmemcached
import pymemcache
else:
# delayed import
memcache = None
pylibmc = None
bmemcached = None
pymemcache = None

# Public API of this module, including the new PyMemcacheBackend.
__all__ = (
    "GenericMemcachedBackend",
    "MemcachedBackend",
    "PylibmcBackend",
    "PyMemcacheBackend",
    "BMemcachedBackend",
    "MemcachedLock",
)
@@ -192,7 +195,11 @@ def get(self, key):

def get_multi(self, keys):
    """Return a list of cached values for ``keys``, in the same order.

    A key that is absent from the cache, or whose driver-returned value
    is ``None`` (how some memcached clients report a miss), yields
    ``NO_VALUE`` in the corresponding position.
    """
    values = self.client.get_multi(keys)
    # Normalize both "key absent" and "value is None" to NO_VALUE so
    # callers see a single, consistent miss marker.  (The earlier form,
    # ``NO_VALUE if key not in values else values[key]``, leaked raw
    # ``None`` values through as cache hits.)
    return [
        NO_VALUE if val is None else val
        for val in [values.get(key, NO_VALUE) for key in keys]
    ]

def set(self, key, value):
    """Store ``value`` under ``key``, forwarding the backend's configured
    set arguments (e.g. expiration/TTL options) to the client."""
    self.client.set(key, value, **self.set_arguments)
@@ -421,3 +428,96 @@ def delete_multi(self, keys):
"""python-binary-memcached api does not implements delete_multi"""
for key in keys:
self.delete(key)


# Module-level handle for the delayed import performed in
# PyMemcacheBackend._imports(); remains None until that backend is used.
pymemcache = None

class PyMemcacheBackend(GenericMemcachedBackend):
    """A backend for the
    `pymemcache <https://github.com/pinterest/pymemcache>`_
    memcached client.

    A comprehensive, fast, pure Python memcached client.

    .. versionadded:: 1.1.2

    pymemcache supports the following features:

    * Complete implementation of the memcached text protocol.
    * Configurable timeouts for socket connect and send/recv calls.
    * Access to the "noreply" flag, which can significantly increase
      the speed of writes.
    * Flexible, simple approach to serialization and deserialization.
    * The (optional) ability to treat network and memcached errors as
      cache misses.

    dogpile.cache uses the ``HashClient`` from pymemcache in order to reduce
    API differences when compared to other memcached client drivers. In short,
    this allows the user to provide a single server or a list of memcached
    servers.

    The ``serde`` param defaults to ``pymemcache.serde.pickle_serde`` as the
    legacy ``serde`` would always convert the stored data to binary.

    The ``default_noreply`` param defaults to False, otherwise the add command
    would always return True causing the mutex not to work.

    SSL/TLS is a security layer on end-to-end communication.
    It provides the following benefits:

    * Encryption: Data is encrypted on the wire between
      the Memcached client and server.
    * Authentication: Optionally, both server and client
      authenticate each other.
    * Integrity: Data is not tampered with or altered when
      transmitted between client and server.

    A typical configuration using ``tls_context``::

        import ssl
        from dogpile.cache import make_region

        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")

        region = make_region().configure(
            'dogpile.cache.pymemcache',
            expiration_time = 3600,
            arguments = {
                'url':["127.0.0.1"],
                'tls_context':ctx,
            }
        )

    For advanced ways to configure TLS creating a more complex
    tls_context visit https://docs.python.org/3/library/ssl.html

    Arguments which can be passed to the ``arguments``
    dictionary include:

    :param tls_context: optional TLS context, will be used for
     TLS connections.

    :param serde: optional "serde". Defaults to
     ``pymemcache.serde.pickle_serde``.

    :param default_noreply: Defaults to False.

    """

    def __init__(self, arguments):
        # NOTE(review): the ``pymemcache.serde`` access below relies on the
        # base __init__ having already called _imports() to populate the
        # module-level ``pymemcache`` global — TODO confirm against
        # GenericMemcachedBackend, which is not visible in this chunk.
        super().__init__(arguments)

        # Serializer/deserializer pair; pickle_serde round-trips Python
        # objects rather than storing everything as binary.
        self.serde = arguments.get("serde", pymemcache.serde.pickle_serde)
        # False so that ``add`` reports whether the key already existed,
        # which the distributed mutex depends on (see class docstring).
        self.default_noreply = arguments.get("default_noreply", False)
        # Optional ssl context; None means plain (non-TLS) connections.
        self.tls_context = arguments.get("tls_context", None)

    def _imports(self):
        # Delayed import: only require pymemcache when this backend is
        # actually configured.
        global pymemcache
        import pymemcache

    def _create_client(self):
        # HashClient accepts either a single server or a list of servers,
        # matching the url handling of the other memcached backends.
        return pymemcache.client.hash.HashClient(
            self.url,
            serde=self.serde,
            default_noreply=self.default_noreply,
            tls_context=self.tls_context,
        )
@@ -245,7 +245,17 @@ def f():

@pytest.mark.time_intensive
def test_threaded_get_multi(self):
"""This test is testing that when we get inside the "creator" for
a certain key, there are no other "creators" running at all for
that key.
With "distributed" locks, this is not 100% the case.
"""
reg = self._region(config_args={"expiration_time": 0.25})
backend_mutex = reg.backend.get_mutex("some_key")
is_custom_mutex = backend_mutex is not None

locks = dict((str(i), Lock()) for i in range(11))

canary = collections.defaultdict(list)
@@ -290,8 +300,12 @@ def f():
t.join()

assert sum([len(v) for v in canary.values()]) > 10
for l in canary.values():
assert False not in l

# for non-custom mutex, check that we never had two creators
# running at once
if not is_custom_mutex:
for l in canary.values():
assert False not in l

def test_region_delete(self):
reg = self._region()
@@ -309,19 +323,24 @@ def test_region_expire(self):
# with very slow processing missing a timeout, as is often the
# case with this particular test

reg = self._region(config_args={"expiration_time": 0.75})
expire_time = 1.00

reg = self._region(config_args={"expiration_time": expire_time})
counter = itertools.count(1)

def creator():
return "some value %d" % next(counter)

eq_(reg.get_or_create("some key", creator), "some value 1")
time.sleep(0.85)
time.sleep(expire_time + (0.2 * expire_time))
# expiration is definitely hit
eq_(reg.get("some key", ignore_expiration=True), "some value 1")
post_expiration = reg.get("some key", ignore_expiration=True)
if post_expiration is not NO_VALUE:
eq_(post_expiration, "some value 1")

eq_(reg.get_or_create("some key", creator), "some value 2")

# this line needs to run less than .75 sec before the previous
# this line needs to run less than expire_time sec before the previous
# two or it hits the expiration
eq_(reg.get("some key"), "some value 2")

@@ -141,9 +141,6 @@ class BMemcachedDistributedWithTimeoutTest(
):
backend = "dogpile.cache.bmemcached"

def test_threaded_get_multi(self):
    # Unconditional skip: this test is known to fail with the
    # bmemcached driver at the moment.
    pytest.skip("failing on bmemcached right now")


class BMemcachedTLSTest(_NonDistributedTLSMemcachedTest):
    """Runs the TLS memcached suite against the bmemcached backend."""

    backend = "dogpile.cache.bmemcached"
@@ -169,6 +166,40 @@ class BMemcachedSerializerTest(
backend = "dogpile.cache.bmemcached"


class PyMemcacheTest(_NonDistributedMemcachedTest):
    """Runs the generic non-distributed memcached suite against pymemcache."""

    backend = "dogpile.cache.pymemcache"


class PyMemcacheDistributedWithTimeoutTest(
    _DistributedMemcachedWithTimeoutTest
):
    """Distributed-lock-with-timeout suite for the pymemcache backend."""

    backend = "dogpile.cache.pymemcache"


class PyMemcacheTLSTest(_NonDistributedTLSMemcachedTest):
    """TLS connection suite for the pymemcache backend."""

    backend = "dogpile.cache.pymemcache"


class PyMemcacheDistributedTest(_DistributedMemcachedTest):
    """Distributed (shared-lock) suite for the pymemcache backend."""

    backend = "dogpile.cache.pymemcache"


class PyMemcacheDistributedMutexTest(_DistributedMemcachedMutexTest):
    """Distributed mutex suite for the pymemcache backend."""

    backend = "dogpile.cache.pymemcache"


class PyMemcacheDistributedMutexWithTimeoutTest(
    _DistributedMemcachedMutexWithTimeoutTest
):
    """Distributed mutex-with-timeout suite for the pymemcache backend."""

    backend = "dogpile.cache.pymemcache"


class PyMemcacheSerializerTest(
    _GenericSerializerTest, _NonDistributedMemcachedTest
):
    """Serializer round-trip suite for the pymemcache backend."""

    backend = "dogpile.cache.pymemcache"


class MemcachedTest(_NonDistributedMemcachedTest):
    """Runs the non-distributed suite against the python-memcached backend."""

    backend = "dogpile.cache.memcached"

@@ -38,6 +38,7 @@ deps=
{memcached}: python-memcached
{memcached}: python-binary-memcached>=0.29.0
{memcached}: pifpaf>=2.5.0
{memcached}: pymemcache>=3.1.0
{redis}: redis
{redis}: pifpaf
{redis_sentinel}: redis

0 comments on commit a2e25bc

Please sign in to comment.