Skip to content

Commit

Permalink
OpenConceptLab/ocl_issues#927 Redis clustering, fixing celery config
Browse files — browse the repository at this point in the history
  • Loading branch information
rkorytkowski committed Aug 23, 2023
1 parent dcbe83a commit ef1ff77
Show file tree
Hide file tree
Showing 4 changed files with 33 additions and 25 deletions.
6 changes: 6 additions & 0 deletions core/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
# Importing the Celery application here guarantees it is loaded whenever
# Django starts, so @shared_task decorators bind to this app instance.
from .celery import app as celery_app

__all__ = ('celery_app',)

# Version metadata: semantic API version plus a build qualifier.
API_VERSION = '2.3.44'
API_BUILD = 'dev'
VERSION = f'{API_VERSION}-{API_BUILD}'
Expand Down
22 changes: 20 additions & 2 deletions core/celery.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,29 @@
import os
from datetime import timedelta

from celery import Celery
from celery.schedules import crontab
from django.conf import settings

# Django settings must be resolvable before the Celery app reads them.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")

app = Celery('core')

# celery-once backend: deduplicates tasks guarded by QueueOnce.
# Configured here (not via settings.CELERY_ONCE) per ocl_issues#927.
app.conf.ONCE = {
    'backend': 'core.common.backends.QueueOnceRedisBackend',
}

# Periodic task schedule for celery beat (moved here from settings.py).
app.conf.CELERYBEAT_SCHEDULE = {
    'healthcheck-every-minute': {
        'task': 'core.common.tasks.beat_healthcheck',
        'schedule': timedelta(seconds=60),
    },
    'first-of-every-month': {
        'task': 'core.common.tasks.monthly_usage_report',
        'schedule': crontab(1, 0, day_of_month='1'),  # 00:01 on the 1st
    },
    'vacuum-and-analyze-db': {
        'task': 'core.common.tasks.vacuum_and_analyze_db',
        'schedule': crontab(0, 1),  # Run at 1 am
    },
}

# Load every CELERY_*-prefixed Django setting, then register tasks from all
# installed apps.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
26 changes: 5 additions & 21 deletions core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@
'CONNECTION_POOL_KWARGS': {
'max_connections': 100,
'retry': Retry(ExponentialBackoff(cap=10, base=0.5), 10),
'health_check_interval': 5
'health_check_interval': 0 # Handled by Redis TCP keepalive
}
}
if REDIS_SENTINELS:
Expand Down Expand Up @@ -382,7 +382,7 @@
# Transport options for the Redis result backend: short socket timeouts,
# retry on timeout, and no application-level health checks (the Redis TCP
# keepalive covers liveness).
CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {
    'redis_socket_connect_timeout': 5,
    'redis_socket_timeout': 5,
    'redis_backend_health_check_interval': 0,  # Handled by Redis TCP keepalive
    'redis_retry_on_timeout': True,
}

Expand All @@ -398,14 +398,13 @@
CELERY_RESULT_EXTENDED = True
CELERY_RESULT_EXPIRES = 259200  # 72 hours
# Broker and result backend share the same Redis URL.
CELERY_BROKER_URL = CELERY_RESULT_BACKEND
CELERY_BROKER_POOL_LIMIT = 100  # should be adjusted considering the number of threads
CELERY_BROKER_CONNECTION_TIMEOUT = 5.0
CELERY_BROKER_CONNECTION_RETRY = True
CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True
CELERY_BROKER_CONNECTION_MAX_RETRIES = 10
CELERY_BROKER_CHANNEL_ERROR_RETRY = True
CELERY_BROKER_HEARTBEAT = None  # Handled by Redis tcp keepalive

CELERY_TASK_PUBLISH_RETRY = True
CELERY_TASK_PUBLISH_RETRY_POLICY = {
Expand All @@ -421,21 +420,6 @@
'backend': 'core.common.backends.QueueOnceRedisBackend',
'settings': {}
}
# NOTE: the celery beat schedule is defined in core/celery.py
# (app.conf.CELERYBEAT_SCHEDULE); it was removed from settings as part of the
# Redis-clustering celery config fix (ocl_issues#927).
CELERYBEAT_HEALTHCHECK_KEY = 'celery_beat_healthcheck'
ELASTICSEARCH_DSL_PARALLEL = True
ELASTICSEARCH_DSL_AUTO_REFRESH = True
Expand Down
4 changes: 2 additions & 2 deletions start_celery_worker.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@ set -e

./wait_for_it.sh ${REDIS_HOST}:${REDIS_PORT} -t 0

# Short 8-character id keeps worker node names readable.
UUID=$(cat /proc/sys/kernel/random/uuid | cut -c1-8)
CELERY_WORKER_NAME=${CELERY_WORKER_NAME:-""}
CELERY_WORKER_NAME_WITH_UUID="${CELERY_WORKER_NAME}-${UUID}"

# Record the full worker name on disk (presumably consumed by other tooling,
# e.g. shutdown/monitoring scripts — verify against callers).
echo "$CELERY_WORKER_NAME_WITH_UUID" > "/temp/celery-worker-$CELERY_WORKER_NAME.tmp"

# -E emits task events; heartbeat/gossip/mingle are disabled (liveness is
# handled by Redis TCP keepalive, reducing broker chatter when clustered).
celery -A core.celery worker -n $CELERY_WORKER_NAME_WITH_UUID --loglevel=INFO "$@" -E --without-heartbeat --without-gossip --without-mingle

0 comments on commit ef1ff77

Please sign in to comment.