View
374 Changelog

Large diffs are not rendered by default.

Oops, something went wrong.
View
@@ -7,7 +7,7 @@ include TODO
include setup.cfg
include setup.py
recursive-include t *.py
recursive-include t *.py *.rst
recursive-include docs *
recursive-include extra/bash-completion *
recursive-include extra/centos *
View
@@ -1,10 +1,10 @@
.. image:: http://docs.celeryproject.org/en/latest/_images/celery-banner-small.png
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp|
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
:Version: 4.1.0 (latentcall)
:Version: 4.2.0rc2 (latentcall)
:Web: http://celeryproject.org/
:Download: https://pypi.python.org/pypi/celery/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
:Keywords: task, queue, job, async, rabbitmq, amqp, redis,
python, distributed, actors
@@ -188,8 +188,8 @@ database connections at ``fork``.
.. _`web2py`: http://web2py.com/
.. _`Bottle`: https://bottlepy.org/
.. _`Pyramid`: http://docs.pylonsproject.org/en/latest/docs/pyramid.html
.. _`pyramid_celery`: https://pypi.python.org/pypi/pyramid_celery/
.. _`celery-pylons`: https://pypi.python.org/pypi/celery-pylons
.. _`pyramid_celery`: https://pypi.org/project/pyramid_celery/
.. _`celery-pylons`: https://pypi.org/project/celery-pylons/
.. _`web2py-celery`: https://code.google.com/p/web2py-celery/
.. _`Tornado`: http://www.tornadoweb.org/
.. _`tornado-celery`: https://github.com/mher/tornado-celery/
@@ -323,7 +323,7 @@ Downloading and installing from source
Download the latest version of Celery from PyPI:
https://pypi.python.org/pypi/celery/
https://pypi.org/project/celery/
You can install it by doing the following:
@@ -406,12 +406,16 @@ Wiki
https://wiki.github.com/celery/celery/
Credits
=======
.. _contributing-short:
Contributing
============
Contributors
------------
Development of `celery` happens at GitHub: https://github.com/celery/celery
This project exists thanks to all the people who contribute. Development of
`celery` happens at GitHub: https://github.com/celery/celery
You're highly encouraged to participate in the development
of `celery`. If you don't like GitHub (for some reason) you're welcome
@@ -423,6 +427,36 @@ documentation.
.. _`Contributing to Celery`:
http://docs.celeryproject.org/en/master/contributing.html
|oc-contributors|
.. |oc-contributors| image:: https://opencollective.com/celery/contributors.svg?width=890&button=false
:target: graphs/contributors
Backers
-------
Thank you to all our backers! 🙏 [`Become a backer`_]
.. _`Become a backer`: https://opencollective.com/celery#backer
|oc-backers|
.. |oc-backers| image:: https://opencollective.com/celery/backers.svg?width=890
:target: https://opencollective.com/celery#backers
Sponsors
--------
Support this project by becoming a sponsor. Your logo will show up here with a
link to your website. [`Become a sponsor`_]
.. _`Become a sponsor`: https://opencollective.com/celery#sponsor
|oc-sponsors|
.. |oc-sponsors| image:: https://opencollective.com/celery/sponsor/0/avatar.svg
:target: https://opencollective.com/celery/sponsor/0/website
.. _license:
License
@@ -446,13 +480,20 @@ file in the top distribution directory for the full license text.
.. |wheel| image:: https://img.shields.io/pypi/wheel/celery.svg
:alt: Celery can be installed via wheel
:target: https://pypi.python.org/pypi/celery/
:target: https://pypi.org/project/celery/
.. |pyversion| image:: https://img.shields.io/pypi/pyversions/celery.svg
:alt: Supported Python versions.
:target: https://pypi.python.org/pypi/celery/
:target: https://pypi.org/project/celery/
.. |pyimp| image:: https://img.shields.io/pypi/implementation/celery.svg
:alt: Support Python implementations.
:target: https://pypi.python.org/pypi/celery/
:target: https://pypi.org/project/celery/
.. |ocbackerbadge| image:: https://opencollective.com/celery/backers/badge.svg
:alt: Backers on Open Collective
:target: #backers
.. |ocsponsorbadge| image:: https://opencollective.com/celery/sponsors/badge.svg
:alt: Sponsors on Open Collective
:target: #sponsors
View
@@ -12,9 +12,9 @@
import sys
from collections import namedtuple
SERIES = 'latentcall'
SERIES = 'windowlicker'
__version__ = '4.2.0'
__version__ = '4.2.0rc2'
__author__ = 'Ask Solem'
__contact__ = 'ask@celeryproject.org'
__homepage__ = 'http://celeryproject.org'
View
@@ -328,7 +328,8 @@ def as_task_v2(self, task_id, name, args=None, kwargs=None,
expires = maybe_make_aware(
now + timedelta(seconds=expires), tz=timezone,
)
eta = eta and eta.isoformat()
if not isinstance(eta, string_t):
eta = eta and eta.isoformat()
# If we retry a task `expires` will already be ISO8601-formatted.
if not isinstance(expires, string_t):
expires = expires and expires.isoformat()
View
@@ -38,7 +38,6 @@
# We take __repr__ very seriously around here ;)
R_BOUND_TASK = '<class {0.__name__} of {app}{flags}>'
R_UNBOUND_TASK = '<unbound {0.__name__}{flags}>'
R_SELF_TASK = '<@task {0.name} bound to other {0.__self__}>'
R_INSTANCE = '<@task: {0.name} of {app}{flags}>'
#: Here for backwards compatibility as tasks no longer use a custom meta-class.
@@ -161,9 +160,6 @@ class Task(object):
#: Request class used, or the qualified name of one.
Request = 'celery.worker.request:Request'
#: This is the instance bound to if the task is a method of a class.
__self__ = None
#: The application instance associated with this task class.
_app = None
@@ -377,9 +373,6 @@ def __call__(self, *args, **kwargs):
_task_stack.push(self)
self.push_request(args=args, kwargs=kwargs)
try:
# add self if this is a bound task
if self.__self__ is not None:
return self.run(self.__self__, *args, **kwargs)
return self.run(*args, **kwargs)
finally:
self.pop_request()
@@ -525,11 +518,11 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
with denied_join_result():
return self.apply(args, kwargs, task_id=task_id or uuid(),
link=link, link_error=link_error, **options)
# add 'self' if this is a "task_method".
if self.__self__ is not None:
args = args if isinstance(args, tuple) else tuple(args or ())
args = (self.__self__,) + args
shadow = shadow or self.shadow_name(args, kwargs, options)
if self.__v2_compat__:
shadow = shadow or self.shadow_name(self(), args, kwargs, options)
else:
shadow = shadow or self.shadow_name(args, kwargs, options)
preopts = self._get_exec_options()
options = dict(preopts, **options) if options else preopts
@@ -713,9 +706,6 @@ def apply(self, args=None, kwargs=None,
app = self._get_app()
args = args or ()
# add 'self' if this is a bound method.
if self.__self__ is not None:
args = (self.__self__,) + tuple(args)
kwargs = kwargs or {}
task_id = task_id or uuid()
retries = retries or 0
@@ -979,7 +969,7 @@ def pop_request(self):
def __repr__(self):
"""``repr(task)``."""
return _reprtask(self, R_SELF_TASK if self.__self__ else R_INSTANCE)
return _reprtask(self, R_INSTANCE)
def _get_request(self):
"""Get current request object."""
View
@@ -24,7 +24,7 @@
except ImportError: # pragma: no cover
raise ImproperlyConfigured(
'The database result backend requires SQLAlchemy to be installed.'
'See https://pypi.python.org/pypi/SQLAlchemy')
'See https://pypi.org/project/SQLAlchemy/')
logger = logging.getLogger(__name__)
View
@@ -58,6 +58,20 @@ def __init__(self, *args, **kwargs):
self._decode_result = self.backend.decode_result
self.subscribed_to = set()
def on_after_fork(self):
self.backend.client.connection_pool.reset()
if self._pubsub is not None:
self._pubsub.close()
super(ResultConsumer, self).on_after_fork()
def _maybe_cancel_ready_task(self, meta):
if meta['status'] in states.READY_STATES:
self.cancel_for(meta['task_id'])
def on_state_change(self, meta, message):
super(ResultConsumer, self).on_state_change(meta, message)
self._maybe_cancel_ready_task(meta)
def start(self, initial_task_id, **kwargs):
self._pubsub = self.backend.client.pubsub(
ignore_subscribe_messages=True,
View
@@ -24,7 +24,7 @@
from celery._state import current_app
from celery.five import python_2_unicode_compatible
from celery.local import try_import
from celery.result import GroupResult
from celery.result import GroupResult, allow_join_result
from celery.utils import abstract
from celery.utils.functional import _regen
from celery.utils.functional import chunks as _chunks
@@ -554,7 +554,8 @@ def apply_async(self, args=(), kwargs={}, **options):
# python is best at unpacking kwargs, so .run is here to do that.
app = self.app
if app.conf.task_always_eager:
return self.apply(args, kwargs, **options)
with allow_join_result():
return self.apply(args, kwargs, **options)
return self.run(args, kwargs, app=app, **(
dict(self.options, **options) if options else self.options))
View
@@ -32,6 +32,7 @@
from __future__ import absolute_import, unicode_literals
from celery.app.task import BaseTask
from celery.local import PromiseProxy
from sphinx.domains.python import PyModulelevel
from sphinx.ext.autodoc import FunctionDocumenter
@@ -68,11 +69,13 @@ def document_members(self, all_members=False):
def check_module(self):
# Normally checks if *self.object* is really defined in the module
# given by *self.modname*. But since functions decorated with the @task
# decorator are instances living in the celery.local module we're
# checking for that and simply agree to document those then.
# decorator are instances living in the celery.local, we have to check
# the wrapped function instead.
modname = self.get_attr(self.object, '__module__', None)
if modname and modname == 'celery.local':
return True
wrapped = getattr(self.object, '__wrapped__', None)
if wrapped and getattr(wrapped, '__module__') == self.modname:
return True
return super(TaskDocumenter, self).check_module()
@@ -83,11 +86,26 @@ def get_signature_prefix(self, sig):
return self.env.config.celery_task_prefix
def autodoc_skip_member_handler(app, what, name, obj, skip, options):
"""Handler for autodoc-skip-member event."""
# Celery tasks created with the @task decorator have the property
# that *obj.__doc__* and *obj.__class__.__doc__* are equal, which
# trips up the logic in sphinx.ext.autodoc that is supposed to
# suppress repetition of class documentation in an instance of the
# class. This overrides that behavior.
if isinstance(obj, BaseTask) and getattr(obj, '__wrapped__'):
if skip and isinstance(obj, PromiseProxy):
return False
return None
def setup(app):
"""Setup Sphinx extension."""
app.setup_extension('sphinx.ext.autodoc')
app.add_autodocumenter(TaskDocumenter)
app.add_directive_to_domain('py', 'task', TaskDirective)
app.add_config_value('celery_task_prefix', '(task)', True)
app.connect('autodoc-skip-member', autodoc_skip_member_handler)
return {
'parallel_read_safe': True
View
@@ -9,6 +9,7 @@
from kombu.utils.functional import retry_over_time
from celery import states
from celery.exceptions import TimeoutError
from celery.five import items
from celery.result import ResultSet
@@ -145,6 +146,31 @@ def assert_received(self, ids, interval=0.5,
self.is_accepted, ids, interval=interval, desc=desc, **policy
)
def assert_result_tasks_in_progress_or_completed(
self,
async_results,
interval=0.5,
desc='waiting for tasks to be started or completed',
**policy
):
return self.assert_task_state_from_result(
self.is_result_task_in_progress,
async_results,
interval=interval, desc=desc, **policy
)
def assert_task_state_from_result(self, fun, results,
interval=0.5, **policy):
return self.wait_for(
partial(self.true_or_raise, fun, results, timeout=interval),
(Sentinel,), **policy
)
@staticmethod
def is_result_task_in_progress(results, **kwargs):
possible_states = (states.STARTED, states.SUCCESS)
return all(result.state in possible_states for result in results)
def assert_task_worker_state(self, fun, ids, interval=0.5, **policy):
return self.wait_for(
partial(self.true_or_raise, fun, ids, timeout=interval),
View
@@ -102,7 +102,7 @@ def _start_worker_thread(app,
setup_app_for_worker(app, loglevel, logfile)
assert 'celery.ping' in app.tasks
# Make sure we can connect to the broker
with app.connection() as conn:
with app.connection(hostname=os.environ.get('TEST_BROKER')) as conn:
conn.default_channel.queue_declare
worker = WorkController(
View
@@ -791,6 +791,7 @@ def check_privileges(accept_content):
uid=uid, euid=euid, gid=gid, egid=egid,
), file=sys.stderr)
finally:
sys.stderr.flush()
os._exit(1)
warnings.warn(RuntimeWarning(ROOT_DISCOURAGED.format(
uid=uid, euid=euid, gid=gid, egid=egid,
View
@@ -487,7 +487,6 @@ def add(self, result):
self._on_full.add(result)
def _on_ready(self):
self.backend.remove_pending_result(self)
if self.backend.is_async:
self._cache = [r.get() for r in self.results]
self.on_ready()
@@ -845,6 +844,10 @@ def __init__(self, id=None, results=None, parent=None, **kwargs):
self.parent = parent
ResultSet.__init__(self, results, **kwargs)
def _on_ready(self):
self.backend.remove_pending_result(self)
ResultSet._on_ready(self)
def save(self, backend=None):
"""Save group-result for later retrieval using :meth:`restore`.
View
@@ -34,7 +34,7 @@ def _make_id(target): # pragma: no cover
# see Issue #2475
return target
if hasattr(target, '__func__'):
return (id(target.__self__), id(target.__func__))
return id(target.__func__)
return id(target)
@@ -182,15 +182,6 @@ def _connect_signal(self, receiver, sender, weak, dispatch_uid):
if weak:
ref = weakref.ref
receiver_object = receiver
# Check for bound methods
try:
receiver.__self__
receiver.__func__
except AttributeError:
pass
else:
ref = WeakMethod
receiver_object = receiver.__self__
if PY3:
receiver = ref(receiver)
weakref.finalize(receiver_object, self._remove_receiver)
View
@@ -167,7 +167,7 @@ def _format_chars(val, maxlen):
if isinstance(val, bytes): # pragma: no cover
return _format_binary_bytes(val, maxlen)
else:
return "'{0}'".format(truncate(val, maxlen))
return "'{0}'".format(truncate(val, maxlen).replace("'", "\\'"))
def _repr(obj):
View
@@ -74,6 +74,7 @@ def run(self):
self.on_crash('{0!r} crashed: {1!r}', self.name, exc)
self._set_stopped()
finally:
sys.stderr.flush()
os._exit(1) # exiting by normal means won't work
finally:
self._set_stopped()
View
@@ -91,6 +91,7 @@ def run(self):
pass
except Exception as exc:
logger.error('Thread Timer crashed: %r', exc, exc_info=True)
sys.stderr.flush()
os._exit(1)
def stop(self):
View
@@ -0,0 +1 @@
CELERY_USER=developer
View
@@ -0,0 +1,79 @@
FROM debian:jessie
# Force UTF-8 for Python I/O so pip logs and test output render correctly.
ENV PYTHONIOENCODING UTF-8
# Pypy is installed from a package manager because it takes so long to build.
RUN apt-get update && apt-get install -y \
build-essential \
curl \
git \
libbz2-dev \
libcurl4-openssl-dev \
libmemcached-dev \
libncurses5-dev \
libreadline-dev \
libsqlite3-dev \
libssl-dev \
pkg-config \
pypy \
wget \
zlib1g-dev
# Setup variables. Even though changing these may cause unnecessary invalidation of
# unrelated elements, grouping them together makes the Dockerfile read better.
ENV PROVISIONING /provisioning
# This is provisioned from .env
ARG CELERY_USER=developer
# Check for mandatory build arguments
RUN : "${CELERY_USER:?CELERY_USER build argument needs to be set and non-empty.}"
ENV HOME /home/$CELERY_USER
ENV PATH="$HOME/.pyenv/bin:$PATH"
# Copy and run setup scripts
WORKDIR $PROVISIONING
COPY docker/scripts/install-couchbase.sh .
# Scripts will lose their executable flags on copy. To avoid the extra instructions
# we call the shell directly.
RUN sh install-couchbase.sh
COPY docker/scripts/create-linux-user.sh .
RUN sh create-linux-user.sh
# Swap to the celery user so packages and celery are not installed as root.
USER $CELERY_USER
COPY docker/scripts/install-pyenv.sh .
RUN sh install-pyenv.sh
# Install celery
WORKDIR $HOME
# --chown=1000:1000 matches the uid/gid created by create-linux-user.sh.
COPY --chown=1000:1000 requirements $HOME/requirements
COPY --chown=1000:1000 docker/entrypoint /entrypoint
RUN chmod gu+x /entrypoint
# Define the local pyenvs
RUN pyenv local python2.7 python3.4 python3.5 python3.6
# Setup one celery environment for basic development use
RUN pyenv exec pip install \
-r requirements/default.txt \
-r requirements/pkgutils.txt \
-r requirements/test.txt \
-r requirements/test-ci-base.txt \
-r requirements/test-integration.txt
COPY --chown=1000:1000 MANIFEST.in Makefile setup.py setup.cfg tox.ini $HOME/
COPY --chown=1000:1000 docs $HOME/docs
COPY --chown=1000:1000 t $HOME/t
COPY --chown=1000:1000 celery $HOME/celery
# Install the checked-out celery source in editable mode.
RUN pyenv exec pip install -e .
# the compiled files from earlier steps will cause py.test to fail with
# an ImportMismatchError
RUN make clean-pyc
# Setup the entrypoint, this ensures pyenv is initialized when a container is started.
ENTRYPOINT ["/entrypoint"]
View
@@ -0,0 +1,36 @@
version: '2'
services:
celery:
build:
context: ..
dockerfile: docker/Dockerfile
args:
CELERY_USER:
environment:
TEST_BROKER: pyamqp://rabbit:5672
TEST_BACKEND: redis://redis
PYTHONUNBUFFERED: 1
PYTHONDONTWRITEBYTECODE: 1
REDIS_HOST: redis
WORKER_LOGLEVEL: DEBUG
tty: true
volumes:
- ../celery:/home/$CELERY_USER/celery
# Because pytest fails when it encounters files from alternative python compilations,
# __pycache__ and pyc files, PYTHONDONTWRITEBYTECODE must be
# set on the host as well or py.test will throw configuration errors.
# - ../t:/home/$CELERY_USER/t
depends_on:
- rabbit
- redis
- dynamodb
rabbit:
image: rabbitmq:3.7.3
redis:
image: redis:3.2.11
dynamodb:
image: dwmkerr/dynamodb:38
View
@@ -0,0 +1,4 @@
#!/bin/bash
# Container entrypoint: initialize pyenv shims and virtualenv hooks for this
# shell session, then replace the shell with the requested command so signals
# are delivered directly to it.
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"
exec "$@"
View
@@ -0,0 +1,3 @@
#!/bin/sh
# Create an unprivileged group and user (uid/gid 1000) named $CELERY_USER so
# later build steps and the running container don't operate as root.
addgroup --gid 1000 $CELERY_USER
adduser --system --disabled-password --uid 1000 --gid 1000 $CELERY_USER
View
@@ -0,0 +1,5 @@
#!/bin/sh
# Download and install the prebuilt libcouchbase C client (core runtime plus
# dev headers) needed to build the Python couchbase bindings on Debian jessie.
wget http://packages.couchbase.com/clients/c/libcouchbase-2.8.4_jessie_amd64.tar
tar -vxf libcouchbase-2.8.4_jessie_amd64.tar
dpkg -i libcouchbase-2.8.4_jessie_amd64/libcouchbase2-core_2.8.4-1_amd64.deb
dpkg -i libcouchbase-2.8.4_jessie_amd64/libcouchbase-dev_2.8.4-1_amd64.deb
View
@@ -0,0 +1,13 @@
#!/bin/sh
# For managing all the local python installations for testing, use pyenv
# NOTE(review): piping curl straight to bash runs an unpinned upstream
# installer — assumes the pyenv-installer repo is trusted.
curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash
# To enable testing versions like 3.4.8 as 3.4 in tox, we need to alias
# pyenv python versions
git clone https://github.com/s1341/pyenv-alias.git $(pyenv root)/plugins/pyenv-alias
# Python versions to test against
VERSION_ALIAS="python2.7" pyenv install 2.7.14
VERSION_ALIAS="python3.4" pyenv install 3.4.8
VERSION_ALIAS="python3.5" pyenv install 3.5.5
VERSION_ALIAS="python3.6" pyenv install 3.6.4
View
@@ -8,6 +8,15 @@ This document contains change notes for bugfix releases in the 3.1.x series
(Cipater), please see :ref:`whatsnew-3.1` for an overview of what's
new in Celery 3.1.
.. _version-3.1.26:
3.1.26
======
:release-date: 2018-03-23 16:00 IST
:release-by: Omer Katz
- Fixed a crash caused by tasks cycling between Celery 3 and Celery 4 workers.
.. _version-3.1.25:
3.1.25
View
@@ -0,0 +1,326 @@
.. _changelog-4.1:
================
Change history
================
.. _version-4.1.0:
4.1.0
=====
:release-date: 2017-07-25 00:00 PM PST
:release-by: Omer Katz
- **Configuration**: CELERY_SEND_EVENTS instead of CELERYD_SEND_EVENTS for 3.1.x compatibility (#3997)
Contributed by **abhinav nilaratna**.
- **App**: Restore behavior so Broadcast queues work. (#3934)
Contributed by **Patrick Cloke**.
- **Sphinx**: Make appstr use standard format (#4134) (#4139)
Contributed by **Preston Moore**.
- **App**: Make id, name always accessible from logging.Formatter via extra (#3994)
Contributed by **Yoichi NAKAYAMA**.
- **Worker**: Add worker_shutting_down signal (#3998)
Contributed by **Daniel Huang**.
- **PyPy**: Support PyPy version 5.8.0 (#4128)
Contributed by **Omer Katz**.
- **Results**: Elasticsearch: Fix serializing keys (#3924)
Contributed by :github_user:`staticfox`.
- **Canvas**: Deserialize all tasks in a chain (#4015)
Contributed by :github_user:`fcoelho`.
- **Systemd**: Recover loglevel for ExecStart in systemd config (#4023)
Contributed by **Yoichi NAKAYAMA**.
- **Sphinx**: Use the Sphinx add_directive_to_domain API. (#4037)
Contributed by **Patrick Cloke**.
- **App**: Pass properties to before_task_publish signal (#4035)
Contributed by **Javier Domingo Cansino**.
- **Results**: Add SSL option for Redis backends (#3831)
Contributed by **Chris Kuehl**.
- **Beat**: celery.schedule.crontab: fix reduce (#3826) (#3827)
Contributed by **Taylor C. Richberger**.
- **State**: Fix celery issues when using flower REST API
Contributed by **Thierry RAMORASOAVINA**.
- **Results**: Elasticsearch: Fix serializing document id.
Contributed by **Acey9**.
- **Beat**: Make shallow copy of schedules dictionary
Contributed by **Brian May**.
- **Beat**: Populate heap when periodic tasks are changed
Contributed by **Wojciech Żywno**.
- **Task**: Allow class methods to define tasks (#3952)
Contributed by **georgepsarakis**.
- **Platforms**: Always return boolean value when checking if signal is supported (#3962).
Contributed by **Jian Yu**.
- **Canvas**: Avoid duplicating chains in chords (#3779)
Contributed by **Ryan Hiebert**.
- **Canvas**: Lookup task only if list has items (#3847)
Contributed by **Marc Gibbons**.
- **Results**: Allow unicode message for exception raised in task (#3903)
Contributed by **George Psarakis**.
- **Python3**: Support for Python 3.6 (#3904, #3903, #3736)
Contributed by **Jon Dufresne**, **George Psarakis**, **Asif Saifuddin Auvi**, **Omer Katz**.
- **App**: Fix retried tasks with expirations (#3790)
Contributed by **Brendan MacDonell**.
- Fixes items format route in docs (#3875)
Contributed by **Slam**.
- **Utils**: Fix maybe_make_aware (#3850)
Contributed by **Taylor C. Richberger**.
- **Task**: Fix task ETA issues when timezone is defined in configuration (#3867)
Contributed by **George Psarakis**.
- **Concurrency**: Consumer does not shutdown properly when embedded in gevent application (#3746)
Contributed by **Arcadiy Ivanov**.
- **Canvas**: Fix #3725: Task replaced with group does not complete (#3731)
Contributed by **Morgan Doocy**.
- **Task**: Correct order in chains with replaced tasks (#3730)
Contributed by **Morgan Doocy**.
- **Result**: Enable synchronous execution of sub-tasks (#3696)
Contributed by **shalev67**.
- **Task**: Fix request context for blocking task apply (added hostname) (#3716)
Contributed by **Marat Sharafutdinov**.
- **Utils**: Fix task argument handling (#3678) (#3693)
Contributed by **Roman Sichny**.
- **Beat**: Provide a transparent method to update the Scheduler heap (#3721)
Contributed by **Alejandro Pernin**.
- **Beat**: Specify default value for pidfile option of celery beat. (#3722)
Contributed by **Arnaud Rocher**.
- **Results**: Elasticsearch: Stop generating a new field every time when a new result is being put (#3708)
Contributed by **Mike Chen**.
- **Requirements**
- Now depends on :ref:`Kombu 4.1.0 <kombu:version-4.1.0>`.
- **Results**: Elasticsearch now reuses fields when new results are added.
Contributed by **Mike Chen**.
- **Results**: Fixed MongoDB integration when using binary encodings
(Issue #3575).
Contributed by **Andrew de Quincey**.
- **Worker**: Making missing ``*args`` and ``**kwargs`` in Task protocol 1
return empty value in protocol 2 (Issue #3687).
Contributed by **Roman Sichny**.
- **App**: Fixed :exc:`TypeError` in AMQP when using deprecated signal
(Issue #3707).
Contributed by :github_user:`michael-k`.
- **Beat**: Added a transparent method to update the scheduler heap.
Contributed by **Alejandro Pernin**.
- **Task**: Fixed handling of tasks with keyword arguments on Python 3
(Issue #3657).
Contributed by **Roman Sichny**.
- **Task**: Fixed request context for blocking task apply by adding missing
hostname attribute.
Contributed by **Marat Sharafutdinov**.
- **Task**: Added option to run subtasks synchronously with
``disable_sync_subtasks`` argument.
Contributed by :github_user:`shalev67`.
- **App**: Fixed chaining of replaced tasks (Issue #3726).
Contributed by **Morgan Doocy**.
- **Canvas**: Fixed bug where replaced tasks with groups were not completing
(Issue #3725).
Contributed by **Morgan Doocy**.
- **Worker**: Fixed problem where consumer does not shutdown properly when
embedded in a gevent application (Issue #3745).
Contributed by **Arcadiy Ivanov**.
- **Results**: Added support for using AWS DynamoDB as a result backend (#3736).
Contributed by **George Psarakis**.
- **Testing**: Added caching on pip installs.
Contributed by :github_user:`orf`.
- **Worker**: Prevent consuming queue before ready on startup (Issue #3620).
Contributed by **Alan Hamlett**.
- **App**: Fixed task ETA issues when timezone is defined in configuration
(Issue #3753).
Contributed by **George Psarakis**.
- **Utils**: ``maybe_make_aware`` should not modify datetime when it is
already timezone-aware (Issue #3849).
Contributed by **Taylor C. Richberger**.
- **App**: Fixed retrying tasks with expirations (Issue #3734).
Contributed by **Brendan MacDonell**.
- **Results**: Allow unicode message for exceptions raised in task
(Issue #3858).
Contributed by :github_user:`staticfox`.
- **Canvas**: Fixed :exc:`IndexError` raised when chord has an empty header.
Contributed by **Marc Gibbons**.
- **Canvas**: Avoid duplicating chains in chords (Issue #3771).
Contributed by **Ryan Hiebert** and **George Psarakis**.
- **Utils**: Allow class methods to define tasks (Issue #3863).
Contributed by **George Psarakis**.
- **Beat**: Populate heap when periodic tasks are changed.
Contributed by :github_user:`wzywno` and **Brian May**.
- **Results**: Added support for Elasticsearch backend options settings.
Contributed by :github_user:`Acey9`.
- **Events**: Ensure ``Task.as_dict()`` works when not all information about
task is available.
Contributed by :github_user:`tramora`.
- **Schedules**: Fixed pickled crontab schedules to restore properly (Issue #3826).
Contributed by **Taylor C. Richberger**.
- **Results**: Added SSL option for redis backends (Issue #3830).
Contributed by **Chris Kuehl**.
- Documentation and examples improvements by:
- **Bruno Alla**
- **Jamie Alessio**
- **Vivek Anand**
- **Peter Bittner**
- **Kalle Bronsen**
- **Jon Dufresne**
- **James Michael DuPont**
- **Sergey Fursov**
- **Samuel Dion-Girardeau**
- **Daniel Hahler**
- **Mike Helmick**
- **Marc Hörsken**
- **Christopher Hoskin**
- **Daniel Huang**
- **Primož Kerin**
- **Michal Kuffa**
- **Simon Legner**
- **Anthony Lukach**
- **Ed Morley**
- **Jay McGrath**
- **Rico Moorman**
- **Viraj Navkal**
- **Ross Patterson**
- **Dmytro Petruk**
- **Luke Plant**
- **Eric Poelke**
- **Salvatore Rinchiera**
- **Arnaud Rocher**
- **Kirill Romanov**
- **Simon Schmidt**
- **Tamer Sherif**
- **YuLun Shih**
- **Ask Solem**
- **Tom 'Biwaa' Riat**
- **Arthur Vigil**
- **Joey Wilhelm**
- **Jian Yu**
- **YuLun Shih**
- **Arthur Vigil**
- **Joey Wilhelm**
- :github_user:`baixuexue123`
- :github_user:`bronsen`
- :github_user:`michael-k`
- :github_user:`orf`
- :github_user:`3lnc`
View
@@ -13,7 +13,11 @@ version please visit :ref:`changelog`.
.. toctree::
:maxdepth: 2
whatsnew-4.1
changelog-4.1
whatsnew-4.0
changelog-4.0
whatsnew-3.1
changelog-3.1
whatsnew-3.0
changelog-3.0
View
File renamed without changes.
View
File renamed without changes.
View
@@ -0,0 +1,258 @@
.. _whatsnew-4.1:
===========================================
What's new in Celery 4.1 (latentcall)
===========================================
:Author: Omer Katz (``omer.drow at gmail.com``)
.. sidebar:: Change history
What's new documents describe the changes in major versions,
we also have a :ref:`changelog` that lists the changes in bugfix
releases (0.0.x), while older series are archived under the :ref:`history`
section.
Celery is a simple, flexible, and reliable distributed system to
process vast amounts of messages, while providing operations with
the tools required to maintain such a system.
It's a task queue with focus on real-time processing, while also
supporting task scheduling.
Celery has a large and diverse community of users and contributors,
you should come join us :ref:`on IRC <irc-channel>`
or :ref:`our mailing-list <mailing-list>`.
To read more about Celery you should go read the :ref:`introduction <intro>`.
While this version is backward compatible with previous versions
it's important that you read the following section.
This version is officially supported on CPython 2.7, 3.4, 3.5 & 3.6
and is also supported on PyPy.
.. _`website`: http://celeryproject.org/
.. topic:: Table of Contents
Make sure you read the important notes before upgrading to this version.
.. contents::
:local:
:depth: 2
Preface
=======
The 4.1.0 release continues to improve our efforts to provide you with
the best task execution platform for Python.
This release is mainly a bug fix release, ironing out some issues and regressions
found in Celery 4.0.0.
We added official support for Python 3.6 and PyPy 5.8.0.
This is the first time we release without Ask Solem as an active contributor.
We'd like to thank him for his hard work in creating and maintaining Celery over the years.
Since Ask Solem was not involved there were a few kinks in the release process
which we promise to resolve in the next release.
This document was missing when we did release Celery 4.1.0.
Also, we did not update the release codename as we should have.
We apologize for the inconvenience.
For the time being, I, Omer Katz will be the release manager.
Thank you for your support!
*— Omer Katz*
Wall of Contributors
--------------------
Acey <huiwang.e@gmail.com>
Acey9 <huiwang.e@gmail.com>
Alan Hamlett <alanhamlett@users.noreply.github.com>
Alan Justino da Silva <alan.justino@yahoo.com.br>
Alejandro Pernin <ale.pernin@gmail.com>
Alli <alzeih@users.noreply.github.com>
Andreas Pelme <andreas@pelme.se>
Andrew de Quincey <adq@lidskialf.net>
Anthony Lukach <anthonylukach@gmail.com>
Arcadiy Ivanov <arcadiy@ivanov.biz>
Arnaud Rocher <cailloumajor@users.noreply.github.com>
Arthur Vigil <ahvigil@mail.sfsu.edu>
Asif Saifuddin Auvi <auvipy@users.noreply.github.com>
Ask Solem <ask@celeryproject.org>
BLAGA Razvan-Paul <razvan.paul.blaga@gmail.com>
Brendan MacDonell <macdonellba@gmail.com>
Brian Luan <jznight@gmail.com>
Brian May <brian@linuxpenguins.xyz>
Bruno Alla <browniebroke@users.noreply.github.com>
Chris Kuehl <chris@techxonline.net>
Christian <github@penpal4u.net>
Christopher Hoskin <mans0954@users.noreply.github.com>
Daniel Hahler <github@thequod.de>
Daniel Huang <dxhuang@gmail.com>
Derek Harland <donkopotamus@users.noreply.github.com>
Dmytro Petruk <bavaria95@gmail.com>
Ed Morley <edmorley@users.noreply.github.com>
Eric Poelke <epoelke@gmail.com>
Felipe <fcoelho@users.noreply.github.com>
François Voron <fvoron@gmail.com>
GDR! <gdr@gdr.name>
George Psarakis <giwrgos.psarakis@gmail.com>
J Alan Brogan <jalanb@users.noreply.github.com>
James Michael DuPont <JamesMikeDuPont@gmail.com>
Jamie Alessio <jamie@stoic.net>
Javier Domingo Cansino <javierdo1@gmail.com>
Jay McGrath <jaymcgrath@users.noreply.github.com>
Jian Yu <askingyj@gmail.com>
Joey Wilhelm <tarkatronic@gmail.com>
Jon Dufresne <jon.dufresne@gmail.com>
Kalle Bronsen <bronsen@nrrd.de>
Kirill Romanov <djaler1@gmail.com>
Laurent Peuch <cortex@worlddomination.be>
Luke Plant <L.Plant.98@cantab.net>
Marat Sharafutdinov <decaz89@gmail.com>
Marc Gibbons <marc_gibbons@rogers.com>
Marc Hörsken <mback2k@users.noreply.github.com>
Michael <michael-k@users.noreply.github.com>
Michael Howitz <mh@gocept.com>
Michal Kuffa <beezz@users.noreply.github.com>
Mike Chen <yi.chen.it@gmail.com>
Mike Helmick <michaelhelmick@users.noreply.github.com>
Morgan Doocy <morgan@doocy.net>
Moussa Taifi <moutai10@gmail.com>
Omer Katz <omer.drow@gmail.com>
Patrick Cloke <clokep@users.noreply.github.com>
Peter Bittner <django@bittner.it>
Preston Moore <prestonkmoore@gmail.com>
Primož Kerin <kerin.primoz@gmail.com>
Pysaoke <pysaoke@gmail.com>
Rick Wargo <rickwargo@users.noreply.github.com>
Rico Moorman <rico.moorman@gmail.com>
Roman Sichny <roman@sichnyi.com>
Ross Patterson <me@rpatterson.net>
Ryan Hiebert <ryan@ryanhiebert.com>
Rémi Marenco <remi.marenco@gmail.com>
Salvatore Rinchiera <srinchiera@college.harvard.edu>
Samuel Dion-Girardeau <samuel.diongirardeau@gmail.com>
Sergey Fursov <GeyseR85@gmail.com>
Simon Legner <Simon.Legner@gmail.com>
Simon Schmidt <schmidt.simon@gmail.com>
Slam <3lnc.slam@gmail.com>
Static <staticfox@staticfox.net>
Steffen Allner <sa@gocept.com>
Steven <rh0dium@users.noreply.github.com>
Steven Johns <duoi@users.noreply.github.com>
Tamer Sherif <tamer.sherif@flyingelephantlab.com>
Tao Qingyun <845767657@qq.com>
Tayfun Sen <totayfun@gmail.com>
Taylor C. Richberger <taywee@gmx.com>
Thierry RAMORASOAVINA <thierry.ramorasoavina@orange.com>
Tom 'Biwaa' Riat <riat.tom@gmail.com>
Viktor Holmqvist <viktorholmqvist@gmail.com>
Viraj <vnavkal0@gmail.com>
Vivek Anand <vivekanand1101@users.noreply.github.com>
Will <paradox41@users.noreply.github.com>
Wojciech Żywno <w.zywno@gmail.com>
Yoichi NAKAYAMA <yoichi.nakayama@gmail.com>
YuLun Shih <shih@yulun.me>
Yuhannaa <yuhannaa@gmail.com>
abhinav nilaratna <anilaratna2@bloomberg.net>
aydin <adigeaydin@gmail.com>
csfeathers <csfeathers@users.noreply.github.com>
georgepsarakis <giwrgos.psarakis@gmail.com>
orf <tom@tomforb.es>
shalev67 <shalev67@gmail.com>
sww <sww@users.noreply.github.com>
tnir <tnir@users.noreply.github.com>
何翔宇(Sean Ho) <h1x2y3awalm@gmail.com>
.. note::
This wall was automatically generated from git history,
so sadly it doesn't include the people who help with more important
things like answering mailing-list questions.
.. _v410-important:
Important Notes
===============
Added support for Python 3.6 & PyPy 5.8.0
-----------------------------------------
We now run our unit test suite and integration test suite on Python 3.6.x
and PyPy 5.8.0.
We expect newer versions of PyPy to work but unfortunately we do not have the
resources to test PyPy with those versions.
The supported Python Versions are:
- CPython 2.7
- CPython 3.4
- CPython 3.5
- CPython 3.6
- PyPy 5.8 (``pypy2``)
.. _v410-news:
News
====
Result Backends
---------------
New DynamoDB Results Backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We added a new results backend for those of you who are using DynamoDB.
If you are interested in using this results backend, refer to :ref:`conf-dynamodb-result-backend` for more information.
Elasticsearch
~~~~~~~~~~~~~
The Elasticsearch results backend is now more robust and configurable.
See :ref:`conf-elasticsearch-result-backend` for more information
about the new configuration options.
Redis
~~~~~
The Redis results backend can now use TLS to encrypt the communication with the
Redis database server.
See :ref:`conf-redis-result-backend`.
MongoDB
~~~~~~~
The MongoDB results backend can now handle binary-encoded task results.
This was a regression from 4.0.0 which resulted in a problem using serializers
such as MsgPack or Pickle in conjunction with the MongoDB results backend.
Periodic Tasks
--------------
The task schedule now updates automatically when new tasks are added.
Now if you use the Django database scheduler, you can add and remove tasks from the schedule without restarting Celery beat.
Tasks
-----
The ``disable_sync_subtasks`` argument was added to allow users to override disabling
synchronous subtasks.
See :ref:`task-synchronous-subtasks`
Canvas
------
Multiple bugs were resolved resulting in a much smoother experience when using Canvas.
View
@@ -120,7 +120,7 @@ Downloading and installing from source
Download the latest version of Celery from PyPI:
https://pypi.python.org/pypi/celery/
https://pypi.org/project/celery/
You can install it by doing the following,:
View
@@ -1,6 +1,6 @@
:Version: 4.1.0 (latentcall)
:Version: 4.2.0rc2 (latentcall)
:Web: http://celeryproject.org/
:Download: https://pypi.python.org/pypi/celery/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
:Keywords: task, queue, job, async, rabbitmq, amqp, redis,
python, distributed, actors
@@ -184,8 +184,8 @@ database connections at ``fork``.
.. _`web2py`: http://web2py.com/
.. _`Bottle`: https://bottlepy.org/
.. _`Pyramid`: http://docs.pylonsproject.org/en/latest/docs/pyramid.html
.. _`pyramid_celery`: https://pypi.python.org/pypi/pyramid_celery/
.. _`celery-pylons`: https://pypi.python.org/pypi/celery-pylons
.. _`pyramid_celery`: https://pypi.org/project/pyramid_celery/
.. _`celery-pylons`: https://pypi.org/project/celery-pylons/
.. _`web2py-celery`: https://code.google.com/p/web2py-celery/
.. _`Tornado`: http://www.tornadoweb.org/
.. _`tornado-celery`: https://github.com/mher/tornado-celery/
View
@@ -48,8 +48,7 @@ Contents
tutorials/index
faq
changelog
whatsnew-4.0
whatsnew-3.1
whatsnew-4.2
reference/index
internals/index
history/index
@@ -62,4 +61,3 @@ Indices and tables
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
View
@@ -62,19 +62,19 @@ Users of the 2.4 series should upgrade to 2.4.4:
* ``pip install -U celery``, or
* ``easy_install -U celery``, or
* https://pypi.python.org/pypi/celery/2.4.4
* https://pypi.org/project/celery/2.4.4/
Users of the 2.3 series should upgrade to 2.3.4:
* ``pip install -U celery==2.3.4``, or
* ``easy_install -U celery==2.3.4``, or
* https://pypi.python.org/pypi/celery/2.3.4
* https://pypi.org/project/celery/2.3.4/
Users of the 2.2 series should upgrade to 2.2.8:
* ``pip install -U celery==2.2.8``, or
* ``easy_install -U celery==2.2.8``, or
* https://pypi.python.org/pypi/celery/2.2.8
* https://pypi.org/project/celery/2.2.8/
The 2.1 series is no longer being maintained, so we urge users
of that series to upgrade to a more recent version.
View
@@ -69,13 +69,13 @@ Or you can upgrade to a more recent version:
* ``pip install -U celery``, or
* ``easy_install -U celery``, or
* https://pypi.python.org/pypi/celery/3.1.13
* https://pypi.org/project/celery/3.1.13/
- Users of the 3.0 series should upgrade to 3.0.25:
* ``pip install -U celery==3.0.25``, or
* ``easy_install -U celery==3.0.25``, or
* https://pypi.python.org/pypi/celery/3.0.25
* https://pypi.org/project/celery/3.0.25/
Distribution package maintainers are urged to provide their users
with updated packages.
View
@@ -21,12 +21,12 @@
.. |wheel| image:: https://img.shields.io/pypi/wheel/celery.svg
:alt: Celery can be installed via wheel
:target: https://pypi.python.org/pypi/celery/
:target: https://pypi.org/project/celery/
.. |pyversion| image:: https://img.shields.io/pypi/pyversions/celery.svg
:alt: Supported Python versions.
:target: https://pypi.python.org/pypi/celery/
:target: https://pypi.org/project/celery/
.. |pyimp| image:: https://img.shields.io/pypi/implementation/celery.svg
:alt: Support Python implementations.
:target: https://pypi.python.org/pypi/celery/
:target: https://pypi.org/project/celery/
View
@@ -355,7 +355,7 @@ Here's some examples:
.. code-block:: pycon
>>> res = (add.si(2, 2) | add.si(4, 4) | add.s(8, 8))()
>>> res = (add.si(2, 2) | add.si(4, 4) | add.si(8, 8))()
>>> res.get()
16
View
@@ -182,8 +182,9 @@ A white-list of content-types/serializers to allow.
If a message is received that's not in this list then
the message will be discarded with an error.
By default any content type is enabled, including pickle and yaml,
so make sure untrusted parties don't have access to your broker.
By default only json is enabled but any content type can be added,
including pickle and yaml; when this is the case make sure
untrusted parties don't have access to your broker.
See :ref:`guide-security` for more.
Example::
@@ -227,7 +228,7 @@ The timezone value can be any time zone supported by the :pypi:`pytz`
library.
If not set the UTC timezone is used. For backwards compatibility
there's also a :setting:`enable_utc` setting, and this is set
there's also a :setting:`enable_utc` setting, and when this is set
to false the system local timezone is used instead.
.. _conf-tasks:
@@ -1929,6 +1930,13 @@ The default timeout in seconds before we give up establishing a connection
to the AMQP server. This setting is disabled when using
gevent.
.. note::
The broker connection timeout only applies to a worker attempting to
connect to the broker. It does not apply to a producer sending a task;
see :setting:`broker_transport_options` for how to provide a timeout for
that situation.
.. setting:: broker_connection_retry
``broker_connection_retry``
View
@@ -36,7 +36,7 @@ An example time zone could be `Europe/London`:
timezone = 'Europe/London'
This setting must be added to your app, either by configuration it directly
This setting must be added to your app, either by configuring it directly
using (``app.conf.timezone = 'Europe/London'``), or by adding
it to your configuration module if you have set one up using
``app.config_from_object``. See :ref:`celerytut-configuration` for
View
@@ -574,6 +574,8 @@ You can disable the argument checking for any task by setting its
>>> add.delay(8)
<AsyncResult: f59d71ca-1549-43e0-be41-4e8821a83c0c>
.. _task-hiding-sensitive-information:
Hiding sensitive information in arguments
-----------------------------------------
@@ -746,7 +748,7 @@ If you want to automatically retry on any error, simply use:
def x():
...
.. versionadded:: 4.1
.. versionadded:: 4.2
If your tasks depend on another service, like making a request to an API,
then it's a good idea to use `exponential backoff`_ to avoid overwhelming the
@@ -1498,6 +1500,7 @@ Handlers
The return value of this handler is ignored.
.. _task-requests-and-custom-requests:
Requests and custom requests
----------------------------
View
@@ -0,0 +1,273 @@
.. _whatsnew-4.2:
===========================================
What's new in Celery 4.2 (windowlicker)
===========================================
:Author: Omer Katz (``omer.drow at gmail.com``)
.. sidebar:: Change history
What's new documents describe the changes in major versions,
we also have a :ref:`changelog` that lists the changes in bugfix
releases (0.0.x), while older series are archived under the :ref:`history`
section.
Celery is a simple, flexible, and reliable distributed system to
process vast amounts of messages, while providing operations with
the tools required to maintain such a system.
It's a task queue with focus on real-time processing, while also
supporting task scheduling.
Celery has a large and diverse community of users and contributors,
you should come join us :ref:`on IRC <irc-channel>`
or :ref:`our mailing-list <mailing-list>`.
To read more about Celery you should go read the :ref:`introduction <intro>`.
While this version is backward compatible with previous versions
it's important that you read the following section.
This version is officially supported on CPython 2.7, 3.4, 3.5 & 3.6
and is also supported on PyPy.
.. _`website`: http://celeryproject.org/
.. topic:: Table of Contents
Make sure you read the important notes before upgrading to this version.
.. contents::
:local:
:depth: 2
Preface
=======
The 4.2.0 release continues to improve our efforts to provide you with
the best task execution platform for Python.
This release is mainly a bug fix release, ironing out some issues and regressions
found in Celery 4.0.0.
Traditionally, releases were named after `Autechre <https://en.wikipedia.org/wiki/Autechre>`_'s track names.
This release continues this tradition in a slightly different way.
Each major version of Celery will use a different artist's track names as codenames.
From now on, the 4.x series will be codenamed after `Aphex Twin <https://en.wikipedia.org/wiki/Aphex_Twin>`_'s track names.
This release is codenamed after his very famous track, `Windowlicker <https://youtu.be/UBS4Gi1y_nc?t=4m>`_.
Thank you for your support!
*— Omer Katz*
Wall of Contributors
--------------------
Alejandro Varas <alej0varas@gmail.com>
Alex Garel <alex@garel.org>
Alex Hill <alex@hill.net.au>
Alex Zaitsev <azaitsev@gmail.com>
Alexander Ovechkin <frostoov@gmail.com>
Andrew Wong <argsno@gmail.com>
Anton <anton.gladkov@gmail.com>
Anton Gladkov <atn18@yandex-team.ru>
Armenak Baburyan <kanemra@gmail.com>
Asif Saifuddin Auvi <auvipy@users.noreply.github.com>
BR <b.rabiega@gmail.com>
Ben Welsh <ben.welsh@gmail.com>
Bohdan Rybak <bohdan.rybak@gmail.com>
Chris Mitchell <chris.mit7@gmail.com>
DDevine <devine@ddevnet.net>
Dan Wilson <danjwilson@gmail.com>
David Baumgold <david@davidbaumgold.com>
David Davis <daviddavis@users.noreply.github.com>
Denis Podlesniy <Haos616@Gmail.com>
Denis Shirokov <dan@rexuni.com>
Fengyuan Chen <cfy1990@gmail.com>
GDR! <gdr@gdr.name>
Geoffrey Bauduin <bauduin.geo@gmail.com>
George Psarakis <giwrgos.psarakis@gmail.com>
Harry Moreno <morenoh149@gmail.com>
Huang Huang <mozillazg101@gmail.com>
Igor Kasianov <super.hang.glider@gmail.com>
JJ <jairojair@gmail.com>
Jackie Leng <Jackie.Leng@nelen-schuurmans.nl>
James M. Allen <james.m.allen@gmail.com>
Javier Martin Montull <javier.martin.montull@cern.ch>
John Arnold <johnar@microsoft.com>
Jon Dufresne <jon.dufresne@gmail.com>
Jozef <knaperek@users.noreply.github.com>
Kevin Gu <guqi@reyagroup.com>
Kxrr <Hi@Kxrr.Us>
Leo Singer <leo.singer@ligo.org>
Mads Jensen <mje@inducks.org>
Manuel Vázquez Acosta <mvaled@users.noreply.github.com>
Marcelo Da Cruz Pinto <Marcelo_DaCruzPinto@McAfee.com>
Marco Schweighauser <marco@mailrelay.ch>
Markus Kaiserswerth <github@sensun.org>
Matt Davis <matteius@gmail.com>
Michael <michael-k@users.noreply.github.com>
Michael Peake <michaeljpeake@icloud.com>
Mikołaj <mikolevy1@gmail.com>
Misha Wolfson <myw@users.noreply.github.com>
Nick Eaket <4418194+neaket360pi@users.noreply.github.com>
Nicolas Mota <nicolas_mota@live.com>
Nicholas Pilon <npilon@gmail.com>
Omer Katz <omer.drow@gmail.com>
Patrick Cloke <clokep@users.noreply.github.com>
Patrick Zhang <patdujour@gmail.com>
Paulo <PauloPeres@users.noreply.github.com>
Rachel Johnson <racheljohnson457@gmail.com>
Raphaël Riel <raphael.riel@gmail.com>
Russell Keith-Magee <russell@keith-magee.com>
Ryan Guest <ryanguest@gmail.com>
Ryan P Kilby <rpkilby@ncsu.edu>
Régis B <github@behmo.com>
Sammie S. Taunton <diemuzi@gmail.com>
Samuel Dion-Girardeau <samueldg@users.noreply.github.com>
Scott Cooper <scttcper@gmail.com>
Sergi Almacellas Abellana <sergi@koolpi.com>
Sergio Fernandez <ElAutoestopista@users.noreply.github.com>
Shitikanth <golu3990@gmail.com>
Theodore Dubois <tbodt@users.noreply.github.com>
Thijs Triemstra <info@collab.nl>
Tobias Kunze <rixx@cutebit.de>
Vincent Barbaresi <vbarbaresi@users.noreply.github.com>
Vinod Chandru <vinod.chandru@gmail.com>
Wido den Hollander <wido@widodh.nl>
Xavier Hardy <xavierhardy@users.noreply.github.com>
anentropic <ego@anentropic.com>
arpanshah29 <ashah29@stanford.edu>
dmollerm <d.moller.m@gmail.com>
hclihn <23141651+hclihn@users.noreply.github.com>
jess <jessachandler@gmail.com>
lead2gold <caronc@users.noreply.github.com>
mariia-zelenova <32500603+mariia-zelenova@users.noreply.github.com>
martialp <martialp@users.noreply.github.com>
mperice <mperice@users.noreply.github.com>
pachewise <pachewise@users.noreply.github.com>
partizan <serg.partizan@gmail.com>
y0ngdi <36658095+y0ngdi@users.noreply.github.com>
.. note::
This wall was automatically generated from git history,
so sadly it doesn't include the people who help with more important
things like answering mailing-list questions.
.. _v420-important:
Important Notes
===============
Supported Python Versions
-------------------------
The supported Python Versions are:
- CPython 2.7
- CPython 3.4
- CPython 3.5
- CPython 3.6
- PyPy 5.8 (``pypy2``)
.. _v420-news:
News
====
Result Backends
---------------
New Redis Sentinel Results Backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Redis Sentinel provides high availability for Redis.
A new result backend supporting it was added.
Cassandra Results Backend
~~~~~~~~~~~~~~~~~~~~~~~~~
A new `cassandra_options` configuration option was introduced in order to configure
the cassandra client.
See :ref:`conf-cassandra-result-backend` for more information.
DynamoDB Results Backend
~~~~~~~~~~~~~~~~~~~~~~~~
A new `dynamodb_endpoint_url` configuration option was introduced in order
to point the result backend to a local endpoint during development or testing.
See :ref:`conf-dynamodb-result-backend` for more information.
Python 2/3 Compatibility Fixes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Both the CouchDB and the Consul result backends accepted byte strings without decoding them to Unicode first.
This is now no longer the case.
Canvas
------
Multiple bugs were resolved resulting in a much smoother experience when using Canvas.
Tasks
-----
Bound Tasks as Error Callbacks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We fixed a regression that occurred when bound tasks were used as error callbacks.
This used to work in Celery 3.x but raised an exception in 4.x until this release.
In both 4.0 and 4.1 the following code wouldn't work:
.. code-block:: python
@app.task(name="raise_exception", bind=True)
def raise_exception(self):
raise Exception("Bad things happened")
@app.task(name="handle_task_exception", bind=True)
def handle_task_exception(self):
print("Exception detected")
subtask = raise_exception.subtask()
subtask.apply_async(link_error=handle_task_exception.s())
Task Representation
~~~~~~~~~~~~~~~~~~~
- Shadowing task names now works as expected.
The shadowed name is properly presented in flower, the logs and the traces.
- `argsrepr` and `kwargsrepr` were previously not used even if specified.
They now work as expected. See :ref:`task-hiding-sensitive-information` for more information.
Custom Requests
~~~~~~~~~~~~~~~
We now allow tasks to use custom :class:`request <celery.worker.request.Request>` classes
for custom task classes.
See :ref:`task-requests-and-custom-requests` for more information.
Retries with Exponential Backoff
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Retries can now be performed with exponential backoffs to avoid overwhelming
external services with requests.
See :ref:`task-autoretry` for more information.
Sphinx Extension
----------------
Tasks were supposed to be automatically documented when Sphinx's Autodoc was used.
The code that would have allowed automatic documentation had a few bugs which are now fixed.
Also, the extension is now documented properly. See :ref:`sphinx` for more information.
View

This file was deleted.

Oops, something went wrong.
View
@@ -24,6 +24,11 @@ def _inner(*args, **kwargs):
return _inner
def get_redis_connection():
from redis import StrictRedis
return StrictRedis(host=os.environ.get('REDIS_HOST'))
@pytest.fixture(scope='session')
def celery_config():
return {
View
@@ -7,6 +7,8 @@
from celery.exceptions import SoftTimeLimitExceeded
from celery.utils.log import get_task_logger
from .conftest import get_redis_connection
logger = get_task_logger(__name__)
@@ -21,6 +23,13 @@ def add(x, y):
return x + y
@shared_task
def chain_add(x, y):
(
add.s(x, x) | add.s(y)
).apply_async()
@shared_task
def delayed_sum(numbers, pause_time=1):
"""Sum the iterable of numbers."""
@@ -115,17 +124,15 @@ def retry_once(self):
@shared_task
def redis_echo(message):
"""Task that appends the message to a redis list"""
from redis import StrictRedis
redis_connection = StrictRedis()
redis_connection = get_redis_connection()
redis_connection.rpush('redis-echo', message)
@shared_task(bind=True)
def second_order_replace1(self, state=False):
from redis import StrictRedis
redis_connection = StrictRedis()
redis_connection = get_redis_connection()
if not state:
redis_connection.rpush('redis-echo', 'In A')
new_task = chain(second_order_replace2.s(),
@@ -137,9 +144,7 @@ def second_order_replace1(self, state=False):
@shared_task(bind=True)
def second_order_replace2(self, state=False):
from redis import StrictRedis
redis_connection = StrictRedis()
redis_connection = get_redis_connection()
if not state:
redis_connection.rpush('redis-echo', 'In B')
new_task = chain(redis_echo.s("In/Out C"),
View
@@ -1,16 +1,14 @@
from __future__ import absolute_import, unicode_literals
from datetime import datetime, timedelta
from time import sleep
import pytest
from redis import StrictRedis
from celery import chain, chord, group
from celery.exceptions import TimeoutError
from celery.result import AsyncResult, GroupResult
from celery.result import AsyncResult, GroupResult, ResultSet
from .conftest import flaky
from .conftest import flaky, get_redis_connection
from .tasks import (add, add_chord_to_chord, add_replaced, add_to_all,
add_to_all_to_chord, collect_ids, delayed_sum,
delayed_sum_with_soft_guard, identity, ids, print_unicode,
@@ -60,13 +58,24 @@ def test_chain_inside_group_receives_arguments(self, manager):
res = c()
assert res.get(timeout=TIMEOUT) == [14, 14]
@flaky
def test_eager_chain_inside_task(self, manager):
from .tasks import chain_add
prev = chain_add.app.conf.task_always_eager
chain_add.app.conf.task_always_eager = True
chain_add.apply_async(args=(4, 8), throw=True).get()
chain_add.app.conf.task_always_eager = prev
@flaky
def test_group_chord_group_chain(self, manager):
from celery.five import bytes_if_py2
if not manager.app.conf.result_backend.startswith('redis'):
raise pytest.skip('Requires redis result backend.')
redis_connection = StrictRedis()
redis_connection = get_redis_connection()
redis_connection.delete('redis-echo')
before = group(redis_echo.si('before {}'.format(i)) for i in range(3))
connect = redis_echo.si('connect')
@@ -94,7 +103,7 @@ def test_second_order_replace(self, manager):
if not manager.app.conf.result_backend.startswith('redis'):
raise pytest.skip('Requires redis result backend.')
redis_connection = StrictRedis()
redis_connection = get_redis_connection()
redis_connection.delete('redis-echo')
result = second_order_replace1.delay()
@@ -156,7 +165,6 @@ def test_chord_soft_timeout_recuperation(self, manager):
result = c(delayed_sum.s(pause_time=0)).get()
assert result == 3
@pytest.mark.xfail()
def test_chain_error_handler_with_eta(self, manager):
try:
manager.app.backend.ensure_chords_allowed()
@@ -176,6 +184,16 @@ def test_chain_error_handler_with_eta(self, manager):
assert result == 10
class test_result_set:
@flaky
def test_result_set(self, manager):
assert manager.inspect().ping()
rs = ResultSet([add.delay(1, 1), add.delay(2, 2)])
assert rs.get(timeout=TIMEOUT) == [2, 4]
class test_group:
@flaky
@@ -237,23 +255,54 @@ def assert_ids(r, expected_value, expected_root_id, expected_parent_id):
class test_chord:
@staticmethod
def _get_active_redis_channels(client):
return client.execute_command('PUBSUB CHANNELS')
@flaky
def test_redis_subscribed_channels_leak(self, manager):
if not manager.app.conf.result_backend.startswith('redis'):
raise pytest.skip('Requires redis result backend.')
redis_client = StrictRedis()
async_result = chord([add.s(5, 6), add.s(6, 7)])(delayed_sum.s())
for _ in range(TIMEOUT):
if async_result.state == 'STARTED':
break
sleep(0.2)
channels_before = \
len(redis_client.execute_command('PUBSUB CHANNELS'))
assert async_result.get(timeout=TIMEOUT) == 24
channels_after = \
len(redis_client.execute_command('PUBSUB CHANNELS'))
assert channels_after < channels_before
redis_client = get_redis_connection()
manager.app.backend.result_consumer.on_after_fork()
initial_channels = self._get_active_redis_channels(redis_client)
initial_channels_count = len(initial_channels)
total_chords = 10
async_results = [
chord([add.s(5, 6), add.s(6, 7)])(delayed_sum.s())
for _ in range(total_chords)
]
manager.assert_result_tasks_in_progress_or_completed(async_results)
channels_before = self._get_active_redis_channels(redis_client)
channels_before_count = len(channels_before)
assert set(channels_before) != set(initial_channels)
assert channels_before_count > initial_channels_count
# The total number of active Redis channels at this point
# is the number of chord header tasks multiplied by the
# total chord tasks, plus the initial channels
# (existing from previous tests).
chord_header_task_count = 2
assert channels_before_count == \
chord_header_task_count * total_chords + initial_channels_count
result_values = [
result.get(timeout=TIMEOUT)
for result in async_results
]
assert result_values == [24] * total_chords
channels_after = self._get_active_redis_channels(redis_client)
channels_after_count = len(channels_after)
assert channels_after_count == initial_channels_count
assert set(channels_after) == set(initial_channels)
@flaky
def test_replaced_nested_chord(self, manager):
View
@@ -359,6 +359,14 @@ def test_expires_to_datetime(self):
assert m.headers['expires'] == (
now + timedelta(seconds=30)).isoformat()
def test_eta_to_datetime(self):
eta = datetime.utcnow()
now = to_utc(datetime.utcnow()).astimezone(self.app.timezone)
m = self.app.amqp.as_task_v2(
uuid(), 'foo', eta=eta,
)
assert m.headers['eta'] == eta.isoformat()
def test_callbacks_errbacks_chord(self):
@self.app.task
View
@@ -494,24 +494,6 @@ def _inner(*args, **kwargs):
i.annotate()
i.annotate()
def test_apply_async_has__self__(self):
@self.app.task(__self__='hello', shared=False)
def aawsX(x, y):
pass
with pytest.raises(TypeError):
aawsX.apply_async(())
with pytest.raises(TypeError):
aawsX.apply_async((2,))
with patch('celery.app.amqp.AMQP.create_task_message') as create:
with patch('celery.app.amqp.AMQP.send_task_message') as send:
create.return_value = Mock(), Mock(), Mock(), Mock()
aawsX.apply_async((4, 5))
args = create.call_args[0][2]
assert args, ('hello', 4 == 5)
send.assert_called()
def test_apply_async_adds_children(self):
from celery._state import _task_stack
View
@@ -134,6 +134,54 @@ class sentinel(object):
Sentinel = Sentinel
class test_RedisResultConsumer:
def get_backend(self):
from celery.backends.redis import RedisBackend
class _RedisBackend(RedisBackend):
redis = redis
return _RedisBackend(app=self.app)
def get_consumer(self):
return self.get_backend().result_consumer
@patch('celery.backends.async.BaseResultConsumer.on_after_fork')
def test_on_after_fork(self, parent_method):
consumer = self.get_consumer()
consumer.start('none')
consumer.on_after_fork()
parent_method.assert_called_once()
consumer.backend.client.connection_pool.reset.assert_called_once()
consumer._pubsub.close.assert_called_once()
# PubSub instance not initialized - exception would be raised
# when calling .close()
consumer._pubsub = None
parent_method.reset_mock()
consumer.backend.client.connection_pool.reset.reset_mock()
consumer.on_after_fork()
parent_method.assert_called_once()
consumer.backend.client.connection_pool.reset.assert_called_once()
@patch('celery.backends.redis.ResultConsumer.cancel_for')
@patch('celery.backends.async.BaseResultConsumer.on_state_change')
def test_on_state_change(self, parent_method, cancel_for):
consumer = self.get_consumer()
meta = {'task_id': 'testing', 'status': states.SUCCESS}
message = 'hello'
consumer.on_state_change(meta, message)
parent_method.assert_called_once_with(meta, message)
cancel_for.assert_called_once_with(meta['task_id'])
# Does not call cancel_for for other states
meta = {'task_id': 'testing2', 'status': states.PENDING}
parent_method.reset_mock()
cancel_for.reset_mock()
consumer.on_state_change(meta, message)
parent_method.assert_called_once_with(meta, message)
cancel_for.assert_not_called()
class test_RedisBackend:
def get_backend(self):
from celery.backends.redis import RedisBackend
View
@@ -0,0 +1,9 @@
from __future__ import absolute_import, unicode_literals
import os
import sys
extensions = ['celery.contrib.sphinx']
autodoc_default_flags = ['members']
sys.path.insert(0, os.path.abspath('.'))
View
@@ -0,0 +1 @@
.. automodule:: foo
View
@@ -0,0 +1,11 @@
from __future__ import absolute_import, unicode_literals
from celery import Celery
from xyzzy import plugh # noqa
app = Celery()
@app.task
def bar():
"""This task has a docstring!"""
View
@@ -0,0 +1,10 @@
from __future__ import absolute_import, unicode_literals
from celery import Celery
app = Celery()
@app.task
def plugh():
"""This task is in a different module!"""
View
@@ -0,0 +1,20 @@
from __future__ import absolute_import, unicode_literals
import pkg_resources
import pytest
try:
sphinx_build = pkg_resources.load_entry_point(
'sphinx', 'console_scripts', 'sphinx-build')
except pkg_resources.DistributionNotFound:
sphinx_build = None
@pytest.mark.skipif(sphinx_build is None, reason='Sphinx is not installed')
def test_sphinx(tmpdir):
srcdir = pkg_resources.resource_filename(__name__, 'proj')
sphinx_build([srcdir, str(tmpdir)])
with open(tmpdir / 'contents.html', 'r') as f:
contents = f.read()
assert 'This task has a docstring!' in contents
assert 'This task is in a different module!' not in contents
View
@@ -411,6 +411,26 @@ def test_always_eager(self):
self.app.conf.task_always_eager = True
assert ~(self.add.s(4, 4) | self.add.s(8)) == 16
def test_chain_always_eager(self):
self.app.conf.task_always_eager = True
from celery import _state
from celery import result
fixture_task_join_will_block = _state.task_join_will_block
try:
_state.task_join_will_block = _state.orig_task_join_will_block
result.task_join_will_block = _state.orig_task_join_will_block
@self.app.task(shared=False)
def chain_add():
return (self.add.s(4, 4) | self.add.s(8)).apply_async()
r = chain_add.apply_async(throw=True).get()
assert r.get() == 16
finally:
_state.task_join_will_block = fixture_task_join_will_block
result.task_join_will_block = fixture_task_join_will_block
def test_apply(self):
x = chain(self.add.s(4, 4), self.add.s(8), self.add.s(10))
res = x.apply()
View
@@ -13,6 +13,7 @@
from celery.exceptions import Ignore, ImproperlyConfigured, Retry
from celery.five import items, range, string_t
from celery.result import EagerResult
from celery.task.base import Task as OldTask
from celery.utils.time import parse_iso8601
try:
@@ -394,6 +395,42 @@ def shadowed():
self.app.send_task = old_send_task
def test_shadow_name_old_task_class(self):
def shadow_name(task, args, kwargs, options):
return 'fooxyz'
@self.app.task(base=OldTask, shadow_name=shadow_name)
def shadowed():
pass
old_send_task = self.app.send_task
self.app.send_task = Mock()
shadowed.delay()
self.app.send_task.assert_called_once_with(ANY, ANY, ANY,
compression=ANY,
delivery_mode=ANY,
exchange=ANY,
expires=ANY,
immediate=ANY,
link=ANY,
link_error=ANY,
mandatory=ANY,
priority=ANY,
producer=ANY,
queue=ANY,
result_cls=ANY,
routing_key=ANY,
serializer=ANY,
soft_time_limit=ANY,
task_id=ANY,
task_type=ANY,
time_limit=ANY,
shadow='fooxyz')
self.app.send_task = old_send_task
def test_typing__disabled(self):
@self.app.task(typing=False)
def add(x, y, kw=1):
@@ -646,16 +683,6 @@ def test_repr_v2_compat(self):
self.mytask.__v2_compat__ = True
assert 'v2 compatible' in repr(self.mytask)
def test_apply_with_self(self):
@self.app.task(__self__=42, shared=False)
def tawself(self):
return self
assert tawself.apply().get() == 42
assert tawself() == 42
def test_context_get(self):
self.mytask.push_request()
try:
View
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import ast
import re
import struct
from decimal import Decimal
@@ -186,6 +187,10 @@ def test_same_as_repr(self, value):
native = old_repr(value)
assert saferepr(value) == native
def test_single_quote(self):
val = {"foo's": "bar's"}
assert ast.literal_eval(saferepr(val)) == val
@skip.if_python3()
def test_bytes_with_unicode(self):
class X(object):
View
@@ -34,6 +34,7 @@ commands =
integration: py.test -xsv t/integration
setenv =
WORKER_LOGLEVEL = INFO
PYTHONIOENCODING = UTF-8
rabbitmq: TEST_BROKER=pyamqp://
rabbitmq: TEST_BACKEND=rpc