Skip to content

Commit

Permalink
Merge pull request #1872 from SEED-platform/1871-redis-pw-upgrade-celery
Browse files Browse the repository at this point in the history
Enable Redis passwords
  • Loading branch information
nllong committed May 8, 2019
2 parents bc0c8da + de7631c commit e0aace1
Show file tree
Hide file tree
Showing 9 changed files with 122 additions and 60 deletions.
14 changes: 1 addition & 13 deletions config/settings/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,23 +202,11 @@

APPEND_SLASH = True

CELERY_WORKER_MAX_TASKS_PER_CHILD = 1

# Default queue
CELERY_TASK_DEFAULT_QUEUE = 'seed-common'
CELERY_TASK_QUEUES = (
Queue(
CELERY_TASK_DEFAULT_QUEUE,
Exchange(CELERY_TASK_DEFAULT_QUEUE),
routing_key=CELERY_TASK_DEFAULT_QUEUE
),
)

# Register our custom JSON serializer so we can serialize datetime objects in celery.
register('seed_json', CeleryDatetimeSerializer.seed_dumps,
CeleryDatetimeSerializer.seed_loads,
content_type='application/json', content_encoding='utf-8')

CELERY_WORKER_MAX_TASKS_PER_CHILD = 1
CELERY_ACCEPT_CONTENT = ['seed_json']
CELERY_TASK_SERIALIZER = 'seed_json'
CELERY_RESULT_SERIALIZER = 'seed_json'
Expand Down
20 changes: 0 additions & 20 deletions config/settings/dev.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,15 +31,6 @@

MIDDLEWARE = ('seed.utils.nocache.DisableClientSideCachingMiddleware',) + MIDDLEWARE

CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': "127.0.0.1:6379",
'OPTIONS': {'DB': 1},
'TIMEOUT': 300
}
}

LOGGING = {
'version': 1,
'disable_existing_loggers': False,
Expand Down Expand Up @@ -74,17 +65,6 @@
},
}

CELERY_BROKER_URL = 'redis://127.0.0.1:6379/1'
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_DEFAULT_QUEUE = 'seed-dev'
CELERY_QUEUES = (
Queue(
CELERY_TASK_DEFAULT_QUEUE,
Exchange(CELERY_TASK_DEFAULT_QUEUE),
routing_key=CELERY_TASK_DEFAULT_QUEUE
),
)

REQUIRE_UNIQUE_EMAIL = False

ALLOWED_HOSTS = ['*']
Expand Down
44 changes: 33 additions & 11 deletions config/settings/docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@

# The optional vars will set the SERVER_EMAIL information as needed
OPTIONAL_ENV_VARS = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SES_REGION_NAME',
'AWS_SES_REGION_ENDPOINT', 'SERVER_EMAIL', 'SENTRY_JS_DSN', 'SENTRY_RAVEN_DSN']
'AWS_SES_REGION_ENDPOINT', 'SERVER_EMAIL', 'SENTRY_JS_DSN', 'SENTRY_RAVEN_DSN',
'REDIS_PASSWORD']

for loc in ENV_VARS + OPTIONAL_ENV_VARS:
locals()[loc] = os.environ.get(loc)
Expand Down Expand Up @@ -51,18 +52,39 @@
}

# Redis / Celery config
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': "db-redis:6379",
'OPTIONS': {'DB': 1},
'TIMEOUT': 300
if 'REDIS_PASSWORD' in os.environ:
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': "db-redis:6379",
'OPTIONS': {
'DB': 1,
'PASSWORD': REDIS_PASSWORD,
},
'TIMEOUT': 300
}
}
}
CELERY_BROKER_URL = 'redis://:%s@%s/%s' % (
CACHES['default']['OPTIONS']['PASSWORD'],
CACHES['default']['LOCATION'],
CACHES['default']['OPTIONS']['DB']
)
else:
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': "db-redis:6379",
'OPTIONS': {
'DB': 1
},
'TIMEOUT': 300
}
}
CELERY_BROKER_URL = 'redis://%s/%s' % (
CACHES['default']['LOCATION'], CACHES['default']['OPTIONS']['DB']
)

CELERY_BROKER_TRANSPORT = 'redis'
CELERY_BROKER_URL = 'redis://%s/%s' % (
CACHES['default']['LOCATION'], CACHES['default']['OPTIONS']['DB']
)
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_TASK_DEFAULT_QUEUE = 'seed-docker'
CELERY_TASK_QUEUES = (
Expand Down
46 changes: 35 additions & 11 deletions config/settings/local_untracked.py.dist
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,7 @@ import os

from kombu import Exchange, Queue


# ============================ General settings and flags ===========================
# ============================ General settings and flags ============================
COMPRESS_ENABLED = False
DOMAIN_URLCONFS = {'default': 'config.urls'}
DEBUG = True # Set to False if this is being used in production mode. If this is set as false, then
Expand All @@ -37,7 +36,7 @@ MAPQUEST_API_KEY = os.environ.get('MAPQUEST_API_KEY', 'a-mapquest-api-key')
#SECRET_KEY = 'default-your-secret-key-here'

# MapQuest API key for testing only - A valid key is only needed when refreshing VCR cassettes.
# Keys for app users are attached to each organization.
# Keys for app users are attached to each organization.
TESTING_MAPQUEST_API_KEY = os.environ.get('TESTING_MAPQUEST_API_KEY', '<your_key_here>')

# email through SES (django-ses)
Expand Down Expand Up @@ -81,9 +80,7 @@ DATABASES = {
}
}

# Redis cache config.
# If using AWS ElastiCache redis, the LOCATION setting looks something like:
# 'xx-yy-zzrr0aax9a.ntmprk.0001.usw2.cache.amazonaws.com:6379'
# =============================== Celery/Redis Cache Settings (No Password) =========
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
Expand All @@ -93,11 +90,10 @@ CACHES = {
}
}


# =============================== Celery Settings ===================================
# redis celery/message broker config. If using AWS, then the URL will look like the following:
# 'redis://xx-yy-zzrr0aax9a.ntmprk.0001.usw2.cache.amazonaws.com:6379/1'
CELERY_BROKER_URL = 'redis://%s/%s' % (CACHES['default']['LOCATION'], CACHES['default']['OPTIONS']['DB'])
CELERY_BROKER_URL = 'redis://%s/%s' % (
CACHES['default']['LOCATION'], CACHES['default']['OPTIONS']['DB']
)
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_TASK_DEFAULT_QUEUE = 'seed-local'
CELERY_TASK_QUEUES = (
Queue(
Expand All @@ -107,6 +103,34 @@ CELERY_TASK_QUEUES = (
),
)

# =============================== Celery/Redis Cache Settings (w/Password) =========
#CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': 'your-cache-url:your-cache-port',
# 'OPTIONS': {
# 'DB': 1,
# 'PASSWORD': 'your-redis-password',
# },
# 'TIMEOUT': 300
# }
#}
#
#CELERY_BROKER_URL = 'redis://:%s@%s/%s' % (
# CACHES['default']['OPTIONS']['PASSWORD'],
# CACHES['default']['LOCATION'],
# CACHES['default']['OPTIONS']['DB']
#)
#CELERY_RESULT_BACKEND = CELERY_BROKER_URL
#CELERY_TASK_DEFAULT_QUEUE = 'seed-local'
#CELERY_TASK_QUEUES = (
# Queue(
# CELERY_TASK_DEFAULT_QUEUE,
# Exchange(CELERY_TASK_DEFAULT_QUEUE),
# routing_key=CELERY_TASK_DEFAULT_QUEUE
# ),
#)


# =================================== Logging =======================================
LOGGING = {
Expand Down
1 change: 1 addition & 0 deletions docker-compose.local.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ services:
- POSTGRES_DB
- POSTGRES_USER
- POSTGRES_PASSWORD
# - REDIS_PASSWORD
- SEED_ADMIN_USER
- SEED_ADMIN_PASSWORD
- SEED_ADMIN_ORG
Expand Down
2 changes: 2 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ services:
- POSTGRES_DB=seed
- POSTGRES_USER=seed
- POSTGRES_PASSWORD=super-secret-password
# - REDIS_PASSWORD=optional-need-to-configure-redis
- SEED_ADMIN_USER=user@seed-platform.org
- SEED_ADMIN_PASSWORD=super-secret-password
- SEED_ADMIN_ORG=default
Expand All @@ -54,6 +55,7 @@ services:
- POSTGRES_DB=seed
- POSTGRES_USER=seed
- POSTGRES_PASSWORD=super-secret-password
# - REDIS_PASSWORD=optional-need-to-configure-redis
- SECRET_KEY=ARQV8qGuJKH8sGnBf6ZeEdJQRKLTUhsvEcp8qG9X9sCPXvGLhdxqnNXpZcy6HEyf
- DJANGO_SETTINGS_MODULE=config.settings.docker
- NUMBER_OF_WORKERS
Expand Down
45 changes: 45 additions & 0 deletions docs/source/migrations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,51 @@ Migrations

Django handles the migration of the database very well; however, there are various changes to SEED that may require some custom (manual) migrations. The migration documentation includes the required changes based on deployment and development for each release.

Version Develop
---------------

In order to support Redis passwords, the configuration of the Redis/Celery settings changed a bit.
You will need to add the following to your local_untracked.py configuration file. If you are using
Docker then you will not need to do this.

.. code-block:: python

    CELERY_RESULT_BACKEND = CELERY_BROKER_URL

If you are using a password, then in your local_untracked.py configuration, add the password to
the CACHES configuration option. Your final configuration should look like the following in your
local_untracked.py file:

.. code-block:: python

    CACHES = {
        'default': {
            'BACKEND': 'redis_cache.cache.RedisCache',
            'LOCATION': "127.0.0.1:6379",
            'OPTIONS': {
                'DB': 1,
                'PASSWORD': 'password',
            },
            'TIMEOUT': 300
        }
    }

    CELERY_BROKER_URL = 'redis://:%s@%s/%s' % (
        CACHES['default']['OPTIONS']['PASSWORD'],
        CACHES['default']['LOCATION'],
        CACHES['default']['OPTIONS']['DB']
    )
    CELERY_RESULT_BACKEND = CELERY_BROKER_URL
    CELERY_TASK_DEFAULT_QUEUE = 'seed-local'
    CELERY_TASK_QUEUES = (
        Queue(
            CELERY_TASK_DEFAULT_QUEUE,
            Exchange(CELERY_TASK_DEFAULT_QUEUE),
            routing_key=CELERY_TASK_DEFAULT_QUEUE
        ),
    )

Version 2.6.0
------------------

Expand Down
6 changes: 2 additions & 4 deletions requirements/base.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,12 @@ modeltranslation==0.25
psycopg2-binary==2.7.5

# background process management
kombu==4.2.2.post1 # 4.4 breaks celery without upgrading py-redis (redis below)
celery==4.2.1
django-redis-cache==1.7.1
celery==4.3.0
django-redis-cache==2.0.0
django_compressor==2.2
django-compressor-autoprefixer==0.1.0
django-extensions==1.9.6
django-libsass==0.7
redis==2.10.6

# Time zones support
pytz==2018.7
Expand Down
4 changes: 3 additions & 1 deletion seed/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@
:copyright (c) 2014 - 2019, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Department of Energy) and contributors. All rights reserved. # NOQA
:author
"""
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # NOQA

__all__ = ('celery_app',)

0 comments on commit e0aace1

Please sign in to comment.