
bug 1431497: Migrate from memcache to redis (#4870)
Combine the two cache configurations (local memory and memcached) into a single cache configuration based on Redis. This also removes the memcached_hashring library, which doesn't support Python 3.
MatonAnthony authored and jwhitlock committed Aug 2, 2018
1 parent ab1ed87 commit 7ed89da
Showing 18 changed files with 61 additions and 87 deletions.
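
For reference, the consolidated setup leaves a single `default` cache alias backed by django-redis, so application code only needs Django's standard cache API. Below is a minimal sketch of the idea, not the exact settings: the timeout, key prefix, and environment handling are simplified stand-ins for the `CACHE_COUNT_TIMEOUT`, `CACHE_PREFIX`, and `config()` values in `kuma/settings/common.py` further down.

```python
import os

# settings.py (sketch): one Redis-backed alias replaces locmem + memcached.
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': os.environ.get('REDIS_CACHE_SERVER',
                                   'redis://127.0.0.1:6379/0'),
        'TIMEOUT': 60 * 60,    # stand-in for CACHE_COUNT_TIMEOUT * 60
        'KEY_PREFIX': 'kuma',  # stand-in for CACHE_PREFIX
    }
}
```

Call sites then drop the `kuma.core.cache.memcache` import and use `from django.core.cache import cache`, which now talks to Redis transparently, as the per-file changes below show.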
1 change: 1 addition & 0 deletions .travis.yml
@@ -12,6 +12,7 @@ services:
- docker
- memcached
- mysql
- redis
env:
global:
- DATABASE_URL=mysql://root:@127.0.0.1:3306/kuma
8 changes: 4 additions & 4 deletions docker-compose.test.yml
@@ -33,11 +33,11 @@ services:
- ./:/app
command: sh -c "urlwait && py.test --nomigrations kuma --ignore=kuma/search"
depends_on:
- memcached
- mysql
- redis
environment:
- DATABASE_URL=mysql://root:kuma@mysql:3306/developer_mozilla_org
- MEMCACHE_SERVERS=memcached:11211
- REDIS_CACHE_SERVER=redis://redis:6379/3
- PYTHONDONTWRITEBYTECODE=1
- URLWAIT_TIMEOUT=300

@@ -48,12 +48,12 @@ services:
- ./:/app
command: sh -c "urlwait && py.test --nomigrations --junit-xml=/app/test_results/django.xml kuma"
depends_on:
- memcached
- redis
- mysql
- elasticsearch
environment:
- DATABASE_URL=mysql://root:kuma@mysql:3306/developer_mozilla_org
- ES_URLS=elasticsearch:9200
- MEMCACHE_SERVERS=memcached:11211
- REDIS_CACHE_SERVER=redis://redis:6379/3
- PYTHONDONTWRITEBYTECODE=1
- URLWAIT_TIMEOUT=300
4 changes: 1 addition & 3 deletions docker-compose.yml
@@ -7,7 +7,6 @@ services:
volumes:
- ./:/app:z
depends_on:
- memcached
- mysql
- elasticsearch
- redis
@@ -28,7 +27,7 @@ services:
- ES_URLS=elasticsearch:9200
- INTERACTIVE_EXAMPLES_BASE=${INTERACTIVE_EXAMPLES_BASE:-https://interactive-examples.mdn.mozilla.net}
- KUMASCRIPT_URL_TEMPLATE=http://kumascript:9080/docs/{path}
- MEMCACHE_SERVERS=memcached:11211
- REDIS_CACHE_SERVER=redis://redis:6379/3
- PROTOCOL=http://
- SESSION_COOKIE_SECURE=False
- SITE_URL=http://localhost:8000
@@ -51,7 +50,6 @@
<<: *worker
command: gunicorn -w 4 --bind 0.0.0.0:8000 --access-logfile=- --timeout=120 kuma.wsgi:application
depends_on:
- memcached
- mysql
- elasticsearch
- redis
4 changes: 0 additions & 4 deletions kuma/core/cache.py

This file was deleted.

8 changes: 4 additions & 4 deletions kuma/core/tasks.py
@@ -1,10 +1,10 @@
from celery.task import task
from constance import config
from django.contrib.sessions.models import Session
from django.core.cache import cache
from django.db import connection
from django.utils import timezone

from .cache import memcache
from .decorators import skip_in_maintenance_mode
from .models import IPBan

@@ -28,7 +28,7 @@ def clean_sessions():
logger = clean_sessions.get_logger()
chunk_size = config.SESSION_CLEANUP_CHUNK_SIZE

if memcache.add(LOCK_ID, now.strftime('%c'), LOCK_EXPIRE):
if cache.add(LOCK_ID, now.strftime('%c'), LOCK_EXPIRE):
total_count = get_expired_sessions(now).count()
delete_count = 0
logger.info('Deleting the %s of %s oldest expired sessions' %
@@ -44,13 +44,13 @@ def clean_sessions():
""", [chunk_size])
finally:
logger.info('Deleted %s expired sessions' % delete_count)
memcache.delete(LOCK_ID)
cache.delete(LOCK_ID)
expired_sessions = get_expired_sessions(now)
if expired_sessions.exists():
clean_sessions.apply_async()
else:
logger.error('The clean_sessions task is already running since %s' %
memcache.get(LOCK_ID))
cache.get(LOCK_ID))


@task
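
The lock in `clean_sessions` above relies on `cache.add()`, which stores a value only when the key is absent and reports whether it did, so exactly one concurrent worker gets past the check (django-redis implements this as an atomic set-if-not-exists). A standalone sketch of the pattern, with hypothetical key and timeout values:

```python
from django.core.cache import cache

LOCK_ID = 'clean-sessions-lock'  # hypothetical lock key
LOCK_EXPIRE = 60 * 10            # a stale lock expires on its own after 10 minutes


def run_exclusively():
    # add() returns False when the key already exists, so only the first
    # caller acquires the lock; everyone else bails out immediately.
    if not cache.add(LOCK_ID, 'locked', LOCK_EXPIRE):
        return  # another worker holds the lock
    try:
        ...  # the actual task body goes here
    finally:
        cache.delete(LOCK_ID)  # always release so the next run can acquire it
```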
3 changes: 0 additions & 3 deletions kuma/core/tests/__init__.py
@@ -7,8 +7,6 @@
from django.test import TestCase
from django.utils.translation import trans_real

from ..cache import memcache


def assert_no_cache_header(response):
assert 'max-age=0' in response['Cache-Control']
@@ -40,7 +38,6 @@ def _pre_setup(self):

# Clean the slate.
cache.clear()
memcache.clear()

trans_real.deactivate()
trans_real._translations = {} # Django fails to clear this cache.
16 changes: 8 additions & 8 deletions kuma/core/utils.py
@@ -14,6 +14,7 @@
from babel import dates, localedata
from celery import chain, chord
from django.conf import settings
from django.core.cache import cache
from django.core.paginator import EmptyPage, InvalidPage, Paginator
from django.http import QueryDict
from django.shortcuts import _get_queryset
@@ -26,7 +27,6 @@
from six.moves.urllib.parse import parse_qsl, urlsplit, urlunsplit
from taggit.utils import split_strip

from .cache import memcache
from .exceptions import DateTimeFormatError


@@ -132,16 +132,16 @@ def generate_filename_and_delete_previous(ffile, name, before_delete=None):
return new_filename


class MemcacheLockException(Exception):
class CacheLockException(Exception):
pass


class MemcacheLock(object):
class CacheLock(object):
def __init__(self, key, attempts=1, expires=60 * 60 * 3):
self.key = 'lock_%s' % key
self.attempts = attempts
self.expires = expires
self.cache = memcache
self.cache = cache

def locked(self):
return bool(self.cache.get(self.key))
@@ -159,13 +159,13 @@ def acquire(self):
logging.debug('Sleeping for %s while trying to acquire key %s',
sleep_time, self.key)
time.sleep(sleep_time)
raise MemcacheLockException('Could not acquire lock for %s' % self.key)
raise CacheLockException('Could not acquire lock for %s' % self.key)

def release(self):
self.cache.delete(self.key)


def memcache_lock(prefix, expires=60 * 60):
def cache_lock(prefix, expires=60 * 60):
"""
Decorator that only allows one instance of the same command to run
at a time.
@@ -174,14 +174,14 @@ def decorator(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
name = '_'.join((prefix, func.__name__) + args)
lock = MemcacheLock(name, expires=expires)
lock = CacheLock(name, expires=expires)
if lock.locked():
log.warning('Lock %s locked; ignoring call.' % name)
return
try:
# Try to acquire the lock without blocking.
lock.acquire()
except MemcacheLockException:
except CacheLockException:
log.warning('Aborting %s; lock acquisition failed.' % name)
return
else:
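
Besides the `cache_lock` decorator, the renamed `CacheLock` class can be used directly when code needs explicit acquire/release control. A minimal usage sketch; the lock name, attempts, and expiry below are illustrative, not values from the codebase:

```python
from kuma.core.utils import CacheLock, CacheLockException

# Illustrative values: retry up to 3 times, let the lock expire after 5 minutes.
lock = CacheLock('rebuild-index', attempts=3, expires=60 * 5)

try:
    lock.acquire()           # retries with short sleeps, then raises
except CacheLockException:
    print('another process holds the lock; skipping this run')
else:
    try:
        ...                  # exclusive work goes here
    finally:
        lock.release()       # deletes the lock key for the next caller
```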
4 changes: 2 additions & 2 deletions kuma/feeder/management/commands/update_feeds.py
@@ -3,7 +3,7 @@

from django.core.management.base import BaseCommand

from kuma.core.utils import memcache_lock
from kuma.core.utils import cache_lock
from kuma.feeder.utils import update_feeds


@@ -16,7 +16,7 @@ def add_arguments(self, parser):
help='Fetch even disabled feeds.',
action='store_true')

@memcache_lock('kuma_feeder')
@cache_lock('kuma_feeder')
def handle(self, *args, **options):
"""
Locked command handler to avoid running this command more than once
2 changes: 1 addition & 1 deletion kuma/health/views.py
@@ -19,7 +19,7 @@ def liveness(request):
"""
A successful response from this endpoint simply proves
that Django is up and running. It doesn't mean that its
supporting services (like MySQL, memcached, Celery) can
supporting services (like MySQL, Redis, Celery) can
be successfully used from within this service.
"""
return HttpResponse(status=204)
14 changes: 7 additions & 7 deletions kuma/landing/tests/test_views.py
@@ -1,18 +1,18 @@
import mock
import pytest
from django.core.cache import cache
from django.utils.six.moves.urllib.parse import urlparse
from ratelimit.exceptions import Ratelimited

from kuma.core.cache import memcache
from kuma.core.tests import assert_no_cache_header, assert_shared_cache_header
from kuma.core.urlresolvers import reverse


@pytest.fixture()
def cleared_memcache():
memcache.clear()
yield memcache
memcache.clear()
def cleared_cache():
cache.clear()
yield cache
cache.clear()


def test_contribute_json(client, db):
@@ -28,12 +28,12 @@ def test_home(client, db):
assert_shared_cache_header(response)


def test_home_community_stats(client, db, cleared_memcache):
def test_home_community_stats(client, db, cleared_cache):
stats = {
'contributors': 'so many, like more than 10,000',
'locales': 'lots, maybe fifty'
}
memcache.set('community_stats', stats)
cache.set('community_stats', stats)
response = client.get(reverse('home'), follow=True)
assert response.status_code == 200
assert_shared_cache_header(response)
4 changes: 2 additions & 2 deletions kuma/landing/views.py
@@ -1,12 +1,12 @@
from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponse
from django.shortcuts import redirect, render
from django.views import static
from django.views.decorators.cache import never_cache
from django.views.generic import RedirectView
from ratelimit.decorators import ratelimit

from kuma.core.cache import memcache
from kuma.core.decorators import shared_cache_control
from kuma.feeder.models import Bundle
from kuma.feeder.sections import SECTION_HACKS
@@ -27,7 +27,7 @@ def home(request):
"""Home page."""
updates = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])

community_stats = memcache.get('community_stats')
community_stats = cache.get('community_stats')

if not community_stats:
community_stats = {'contributors': 5453, 'locales': 36}
31 changes: 10 additions & 21 deletions kuma/settings/common.py
@@ -107,21 +107,15 @@ def parse_conn_max_age(value):

CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': CACHE_COUNT_TIMEOUT,
'KEY_PREFIX': CACHE_PREFIX,
},
'memcache': {
'BACKEND': 'memcached_hashring.backend.MemcachedHashRingCache',
'BACKEND': 'django_redis.cache.RedisCache',
'TIMEOUT': CACHE_COUNT_TIMEOUT * 60,
'KEY_PREFIX': CACHE_PREFIX,
'LOCATION': config('MEMCACHE_SERVERS',
default='127.0.0.1:11211',
cast=Csv()),
},
'LOCATION': config('REDIS_CACHE_SERVER',
default='127.0.0.1:6379'),
}
}

CACHEBACK_CACHE_ALIAS = 'memcache'
CACHEBACK_CACHE_ALIAS = 'default'

# Email
vars().update(config('EMAIL_URL',
@@ -1195,18 +1189,13 @@ def pipeline_one_scss(slug, **kwargs):

if MAINTENANCE_MODE:
# In maintenance mode, we're going to avoid using the database, and
# use Celery's default beat-scheduler as well as memcached for storing
# use Celery's default beat-scheduler as well as Redis for storing
# any results. In both normal and maintenance mode we use djcelery's
# loader (see djcelery.setup_loader() above) so we, among other things,
# acquire the Celery settings from among Django's settings.
CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
DEFAULT_CELERY_RESULT_BACKEND = (
'cache+memcached://' + ';'.join(
config('MEMCACHE_SERVERS',
default='127.0.0.1:11211',
cast=Csv())
)
)
DEFAULT_CELERY_RESULT_BACKEND = CACHES['default']['LOCATION']

else:
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
DEFAULT_CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
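
Because `CACHES['default']['LOCATION']` is now a `redis://` URL, it can be handed to Celery unchanged in maintenance mode; Celery accepts `redis://` result backends natively, so the old `cache+memcached://` string assembled from `MEMCACHE_SERVERS` is no longer needed. A sketch of the effective values, reusing the URL from `docker-compose.yml` above (`DEFAULT_CELERY_RESULT_BACKEND` is the project's own setting name, presumably consumed elsewhere in the settings as the Celery result backend default):

```python
# Abbreviated: only the piece of CACHES that matters here.
CACHES = {'default': {'LOCATION': 'redis://redis:6379/3'}}

DEFAULT_CELERY_RESULT_BACKEND = CACHES['default']['LOCATION']
assert DEFAULT_CELERY_RESULT_BACKEND == 'redis://redis:6379/3'
```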
@@ -1330,7 +1319,7 @@ def pipeline_one_scss(slug, **kwargs):
if MAINTENANCE_MODE else
'constance.backends.database.DatabaseBackend')
# must be an entry in the CACHES setting!
CONSTANCE_DATABASE_CACHE_BACKEND = 'memcache'
CONSTANCE_DATABASE_CACHE_BACKEND = 'default'

# Settings and defaults controllable by Constance in admin
CONSTANCE_CONFIG = dict(
Expand Down Expand Up @@ -1637,7 +1626,7 @@ def get_user_url(user):

# django-ratelimit
RATELIMIT_ENABLE = config('RATELIMIT_ENABLE', default=True, cast=bool)
RATELIMIT_USE_CACHE = config('RATELIMIT_USE_CACHE', default='memcache')
RATELIMIT_USE_CACHE = config('RATELIMIT_USE_CACHE', default='default')
RATELIMIT_VIEW = 'kuma.core.views.rate_limited'

# Caching constants for the Cache-Control header.
2 changes: 1 addition & 1 deletion kuma/settings/prod.py
@@ -14,7 +14,7 @@
)

# Cache
CACHES['memcache']['TIMEOUT'] = 60 * 60 * 24
CACHES['default']['TIMEOUT'] = 60 * 60 * 24

MEDIA_URL = config('MEDIA_URL', default='https://developer.cdn.mozilla.net/media/')

