From f5d19afedbf5bffa19bcea8f04da26dd37678a03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 11:43:38 +0200 Subject: [PATCH 01/70] Bump github/codeql-action from 2 to 3 (#8725) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 65e0f6c8ca5..a1dcabfe893 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -41,7 +41,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -52,7 +52,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. 
# 📚 https://git.io/JvXDl @@ -66,4 +66,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 From d1350f9f065ca8f0b5113ccc5cfa1d6dd1c46a88 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 17 Dec 2023 22:38:02 +0200 Subject: [PATCH 02/70] Fixed multiprocessing integration tests not running on Mac (#8727) --- t/integration/test_tasks.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 5dc5c955358..223827c2784 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,6 +1,8 @@ import logging +import platform import time from datetime import datetime, timedelta +from multiprocessing import set_start_method from time import perf_counter, sleep from uuid import uuid4 @@ -29,6 +31,16 @@ def flaky(fn): return _timeout(_flaky(fn)) +def set_multiprocessing_start_method(): + """Set multiprocessing start method to 'fork' if not on Linux.""" + if platform.system() != 'Linux': + try: + set_start_method('fork') + except RuntimeError: + # The method is already set + pass + + class test_class_based_tasks: @flaky @@ -89,6 +101,8 @@ def test_basic_task(self, manager): @flaky def test_multiprocess_producer(self, manager): """Testing multiple processes calling tasks.""" + set_multiprocessing_start_method() + from multiprocessing import Pool pool = Pool(20) ret = pool.map(_producer, range(120)) @@ -97,6 +111,8 @@ def test_multiprocess_producer(self, manager): @flaky def test_multithread_producer(self, manager): """Testing multiple threads calling tasks.""" + set_multiprocessing_start_method() + from multiprocessing.pool import ThreadPool pool = ThreadPool(20) ret = pool.map(_producer, range(120)) From 20cdf5e616fe971480f2853384b9e9c2ccf28831 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 18 Dec 2023 19:22:12 +0200 Subject: [PATCH 03/70] Added make docker-docs (#8729) * Changed docs 
service port to a less common value * Added make docker-docs * Added CI workflow for building the docs * Improved error msg if make docker-docs fails * Increased timeout from 10s -> 60s * Reduced docker-docs CI workflow timeout from 60m -> 5m * Improved UI --- .github/workflows/docker.yml | 10 +++++++++- Makefile | 11 +++++++++++ docker/docker-compose.yml | 2 +- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 6b2c67ca5a4..bc39a2bd3b1 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -27,4 +27,12 @@ jobs: steps: - uses: actions/checkout@v4 - name: Build Docker container - run: make docker-build \ No newline at end of file + run: make docker-build + + docker-docs: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - name: Build Documentation + run: make docker-docs diff --git a/Makefile b/Makefile index e380095c094..5342986415c 100644 --- a/Makefile +++ b/Makefile @@ -59,6 +59,7 @@ help: @echo " docker-lint - Run tox -e lint on docker container." @echo " docker-unit-tests - Run unit tests on docker container, use '-- -k ' for specific test run." @echo " docker-bash - Get a bash shell inside the container." + @echo " docker-docs - Build documentation with docker." clean: clean-docs clean-pyc clean-build @@ -197,6 +198,16 @@ docker-integration-tests: docker-bash: @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery bash +.PHONY: docker-docs +docker-docs: + @docker-compose -f docker/docker-compose.yml up --build -d docs + @echo "Waiting 60 seconds for docs service to build the documentation inside the container..." + @timeout 60 sh -c 'until docker logs $$(docker-compose -f docker/docker-compose.yml ps -q docs) 2>&1 | \ + grep "build succeeded"; do sleep 1; done' || \ + (echo "Error! 
- run manually: docker compose -f ./docker/docker-compose.yml up --build docs"; \ + docker-compose -f docker/docker-compose.yml logs --tail=50 docs; false) + @docker-compose -f docker/docker-compose.yml down + .PHONY: catch-all %: catch-all @: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index c37501f1dc0..221e6ddb3ef 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -46,5 +46,5 @@ services: volumes: - ../docs:/docs:z ports: - - "7000:7000" + - "7001:7000" command: /start-docs \ No newline at end of file From 04e361509e00dc07a22c09971fc835b84d47fb65 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 19:59:45 +0200 Subject: [PATCH 04/70] [pre-commit.ci] pre-commit autoupdate (#8730) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pycqa/isort: 5.13.0 → 5.13.2](https://github.com/pycqa/isort/compare/5.13.0...5.13.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7800429fae..10b034c957a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: - id: mixed-line-ending - repo: https://github.com/pycqa/isort - rev: 5.13.0 + rev: 5.13.2 hooks: - id: isort From 7a27725cc9bd8d6e7b930a748e854f2d00379d47 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 19 Dec 2023 20:10:25 +0100 Subject: [PATCH 05/70] Fix DeprecationWarning: datetime.datetime.utcnow() (#8726) > lib/python3.12/site-packages/celery/app/base.py:940: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
now_in_utc = to_utc(datetime.utcnow()) --- celery/app/base.py | 3 ++- celery/backends/mongodb.py | 4 ++-- celery/fixups/django.py | 4 ++-- celery/loaders/base.py | 4 ++-- celery/security/certificate.py | 2 +- celery/utils/time.py | 2 +- celery/worker/worker.py | 6 ++--- t/integration/test_canvas.py | 4 ++-- t/integration/test_inspect.py | 4 ++-- t/integration/test_security.py | 2 +- t/integration/test_tasks.py | 6 ++--- t/unit/app/test_amqp.py | 8 +++---- t/unit/app/test_app.py | 5 +++-- t/unit/app/test_beat.py | 10 ++++----- t/unit/app/test_exceptions.py | 6 ++--- t/unit/app/test_schedules.py | 14 ++++++------ t/unit/backends/test_arangodb.py | 2 +- t/unit/security/test_certificate.py | 4 ++-- t/unit/utils/test_serialization.py | 8 +++---- t/unit/utils/test_time.py | 34 ++++++++++++++--------------- t/unit/worker/test_request.py | 8 +++---- 21 files changed, 71 insertions(+), 69 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 75eee027bb7..78012936e5e 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -6,6 +6,7 @@ import warnings from collections import UserDict, defaultdict, deque from datetime import datetime +from datetime import timezone as datetime_timezone from operator import attrgetter from click.exceptions import Exit @@ -937,7 +938,7 @@ def prepare_config(self, c): def now(self): """Return the current time and date as a datetime.""" - now_in_utc = to_utc(datetime.utcnow()) + now_in_utc = to_utc(datetime.now(datetime_timezone.utc)) return now_in_utc.astimezone(self.timezone) def select_queues(self, queues=None): diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py index c64fe380807..1789f6cf0b0 100644 --- a/celery/backends/mongodb.py +++ b/celery/backends/mongodb.py @@ -1,5 +1,5 @@ """MongoDB result store backend.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from kombu.exceptions import EncodeError from kombu.utils.objects import cached_property @@ -228,7 
+228,7 @@ def _save_group(self, group_id, result): meta = { '_id': group_id, 'result': self.encode([i.id for i in result]), - 'date_done': datetime.utcnow(), + 'date_done': datetime.now(timezone.utc), } self.group_collection.replace_one({'_id': group_id}, meta, upsert=True) return result diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 473c3b676b4..adc26db08f8 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -2,7 +2,7 @@ import os import sys import warnings -from datetime import datetime +from datetime import datetime, timezone from importlib import import_module from typing import IO, TYPE_CHECKING, Any, List, Optional, cast @@ -100,7 +100,7 @@ def on_worker_init(self, **kwargs: Any) -> None: self.worker_fixup.install() def now(self, utc: bool = False) -> datetime: - return datetime.utcnow() if utc else self._now() + return datetime.now(timezone.utc) if utc else self._now() def autodiscover_tasks(self) -> List[str]: from django.apps import apps diff --git a/celery/loaders/base.py b/celery/loaders/base.py index 8ac3e5b50e9..01e84254710 100644 --- a/celery/loaders/base.py +++ b/celery/loaders/base.py @@ -3,7 +3,7 @@ import os import re import sys -from datetime import datetime +from datetime import datetime, timezone from kombu.utils import json from kombu.utils.objects import cached_property @@ -62,7 +62,7 @@ def __init__(self, app, **kwargs): def now(self, utc=True): if utc: - return datetime.utcnow() + return datetime.now(timezone.utc) return datetime.now() def on_task_init(self, task_id, task): diff --git a/celery/security/certificate.py b/celery/security/certificate.py index 80398b39f6d..2691904d432 100644 --- a/celery/security/certificate.py +++ b/celery/security/certificate.py @@ -43,7 +43,7 @@ def __init__(self, cert: str) -> None: def has_expired(self) -> bool: """Check if the certificate has expired.""" - return datetime.datetime.utcnow() >= self._cert.not_valid_after + return 
datetime.datetime.now(datetime.timezone.utc) >= self._cert.not_valid_after def get_pubkey(self) -> ( DSAPublicKey | EllipticCurvePublicKey | Ed448PublicKey | Ed25519PublicKey | RSAPublicKey diff --git a/celery/utils/time.py b/celery/utils/time.py index ba94d7951b1..c8fd0959336 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -217,7 +217,7 @@ def remaining( Returns: ~datetime.timedelta: Remaining time. """ - now = now or datetime.utcnow() + now = now or datetime.now(datetime_timezone.utc) if str( start.tzinfo) == str( now.tzinfo) and now.utcoffset() != start.utcoffset(): diff --git a/celery/worker/worker.py b/celery/worker/worker.py index 04f8c30e10d..28609d9d8c5 100644 --- a/celery/worker/worker.py +++ b/celery/worker/worker.py @@ -14,7 +14,7 @@ import os import sys -from datetime import datetime +from datetime import datetime, timezone from billiard import cpu_count from kombu.utils.compat import detect_environment @@ -89,7 +89,7 @@ class Blueprint(bootsteps.Blueprint): def __init__(self, app=None, hostname=None, **kwargs): self.app = app or self.app self.hostname = default_nodename(hostname) - self.startup_time = datetime.utcnow() + self.startup_time = datetime.now(timezone.utc) self.app.loader.init_worker() self.on_before_init(**kwargs) self.setup_defaults(**kwargs) @@ -293,7 +293,7 @@ def _maybe_reload_module(self, module, force_reload=False, reloader=None): return reload_from_cwd(sys.modules[module], reloader) def info(self): - uptime = datetime.utcnow() - self.startup_time + uptime = datetime.now(timezone.utc) - self.startup_time return {'total': self.state.total_count, 'pid': os.getpid(), 'clock': str(self.app.clock), diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 5673c5e60c2..b5f88016f82 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2,7 +2,7 @@ import re import tempfile import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone 
from time import monotonic, sleep import pytest @@ -366,7 +366,7 @@ def test_chain_error_handler_with_eta(self, manager): except NotImplementedError as e: raise pytest.skip(e.args[0]) - eta = datetime.utcnow() + timedelta(seconds=10) + eta = datetime.now(timezone.utc) + timedelta(seconds=10) c = chain( group( add.s(1, 2), diff --git a/t/integration/test_inspect.py b/t/integration/test_inspect.py index 501cf178d36..c6c4b2af814 100644 --- a/t/integration/test_inspect.py +++ b/t/integration/test_inspect.py @@ -1,6 +1,6 @@ import os import re -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from time import sleep from unittest.mock import ANY @@ -126,7 +126,7 @@ def test_active(self, inspect): @flaky def test_scheduled(self, inspect): """Tests listing scheduled tasks""" - exec_time = datetime.utcnow() + timedelta(seconds=5) + exec_time = datetime.now(timezone.utc) + timedelta(seconds=5) res = add.apply_async([1, 2], {'z': 3}, eta=exec_time) ret = inspect.scheduled() assert len(ret) == 1 diff --git a/t/integration/test_security.py b/t/integration/test_security.py index a6ec3e4a552..36400940439 100644 --- a/t/integration/test_security.py +++ b/t/integration/test_security.py @@ -74,7 +74,7 @@ def gen_private_key(self): def gen_certificate(self, key, common_name, issuer=None, sign_key=None): """generate a certificate with cryptography""" - now = datetime.datetime.utcnow() + now = datetime.datetime.now(datetime.timezone.utc) certificate = x509.CertificateBuilder().subject_name( x509.Name([ diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 223827c2784..10a41f407e0 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,7 +1,7 @@ import logging import platform import time -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from multiprocessing import set_start_method from time import perf_counter, sleep from uuid import uuid4 @@ -154,7 
+154,7 @@ def test_expired(self, manager): for _ in range(4): sleeping.delay(2) # Execute task with expiration at now + 1 sec - result = add.apply_async((1, 1), expires=datetime.utcnow() + timedelta(seconds=1)) + result = add.apply_async((1, 1), expires=datetime.now(timezone.utc) + timedelta(seconds=1)) with pytest.raises(celery.exceptions.TaskRevokedError): result.get() assert result.status == 'REVOKED' @@ -180,7 +180,7 @@ def test_eta(self, manager): start = perf_counter() # Schedule task to be executed at time now + 3 seconds - result = add.apply_async((2, 2), eta=datetime.utcnow() + timedelta(seconds=3)) + result = add.apply_async((2, 2), eta=datetime.now(timezone.utc) + timedelta(seconds=3)) sleep(1) assert result.status == 'PENDING' assert result.ready() is False diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index 070002d43f4..acbeecea08a 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from unittest.mock import Mock, patch import pytest @@ -349,14 +349,14 @@ def test_raises_if_kwargs_is_not_mapping(self): self.app.amqp.as_task_v2(uuid(), 'foo', kwargs=(1, 2, 3)) def test_countdown_to_eta(self): - now = to_utc(datetime.utcnow()).astimezone(self.app.timezone) + now = to_utc(datetime.now(timezone.utc)).astimezone(self.app.timezone) m = self.app.amqp.as_task_v2( uuid(), 'foo', countdown=10, now=now, ) assert m.headers['eta'] == (now + timedelta(seconds=10)).isoformat() def test_expires_to_datetime(self): - now = to_utc(datetime.utcnow()).astimezone(self.app.timezone) + now = to_utc(datetime.now(timezone.utc)).astimezone(self.app.timezone) m = self.app.amqp.as_task_v2( uuid(), 'foo', expires=30, now=now, ) @@ -364,7 +364,7 @@ def test_expires_to_datetime(self): now + timedelta(seconds=30)).isoformat() def test_eta_to_datetime(self): - eta = datetime.utcnow() + eta = datetime.now(timezone.utc) m = 
self.app.amqp.as_task_v2( uuid(), 'foo', eta=eta, ) diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 8f307ebbf0c..4c92f475d42 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -6,6 +6,7 @@ import uuid from copy import deepcopy from datetime import datetime, timedelta +from datetime import timezone as datetime_timezone from pickle import dumps, loads from unittest.mock import Mock, patch @@ -85,7 +86,7 @@ def test_now(self): tz_utc = timezone.get_timezone('UTC') tz_us_eastern = timezone.get_timezone(timezone_setting_value) - now = to_utc(datetime.utcnow()) + now = to_utc(datetime.now(datetime_timezone.utc)) app_now = self.app.now() assert app_now.tzinfo is tz_utc @@ -101,7 +102,7 @@ def test_now(self): assert app_now.tzinfo == tz_us_eastern - diff = to_utc(datetime.utcnow()) - localize(app_now, tz_utc) + diff = to_utc(datetime.now(datetime_timezone.utc)) - localize(app_now, tz_utc) assert diff <= timedelta(seconds=1) # Verify that timezone setting overrides enable_utc=on setting diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 082aeb3a5ef..6b113df426e 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -1,6 +1,6 @@ import errno import sys -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pickle import dumps, loads from unittest.mock import Mock, call, patch @@ -863,17 +863,17 @@ class test_schedule: def test_maybe_make_aware(self): x = schedule(10, app=self.app) x.utc_enabled = True - d = x.maybe_make_aware(datetime.utcnow()) + d = x.maybe_make_aware(datetime.now(timezone.utc)) assert d.tzinfo x.utc_enabled = False - d2 = x.maybe_make_aware(datetime.utcnow()) + d2 = x.maybe_make_aware(datetime.now(timezone.utc)) assert d2.tzinfo def test_to_local(self): x = schedule(10, app=self.app) x.utc_enabled = True - d = x.to_local(datetime.utcnow()) + d = x.to_local(datetime.utcnow()) # datetime.utcnow() is deprecated in Python 3.12 assert d.tzinfo is 
None x.utc_enabled = False - d = x.to_local(datetime.utcnow()) + d = x.to_local(datetime.now(timezone.utc)) assert d.tzinfo diff --git a/t/unit/app/test_exceptions.py b/t/unit/app/test_exceptions.py index b881be4c028..4013c22b0da 100644 --- a/t/unit/app/test_exceptions.py +++ b/t/unit/app/test_exceptions.py @@ -1,5 +1,5 @@ import pickle -from datetime import datetime +from datetime import datetime, timezone from celery.exceptions import Reject, Retry @@ -7,11 +7,11 @@ class test_Retry: def test_when_datetime(self): - x = Retry('foo', KeyError(), when=datetime.utcnow()) + x = Retry('foo', KeyError(), when=datetime.now(timezone.utc)) assert x.humanize() def test_pickleable(self): - x = Retry('foo', KeyError(), when=datetime.utcnow()) + x = Retry('foo', KeyError(), when=datetime.now(timezone.utc)) y = pickle.loads(pickle.dumps(x)) assert x.message == y.message assert repr(x.exc) == repr(y.exc) diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 1f4d5fdd85a..e5a7bfb7bdd 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -1,7 +1,7 @@ import sys import time from contextlib import contextmanager -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pickle import dumps, loads from unittest import TestCase from unittest.mock import Mock @@ -50,17 +50,17 @@ def test_repr(self): def test_is_due(self): self.s.remaining_estimate = Mock(name='rem') self.s.remaining_estimate.return_value = timedelta(seconds=0) - assert self.s.is_due(datetime.utcnow()).is_due + assert self.s.is_due(datetime.now(timezone.utc)).is_due def test_is_due__not_due(self): self.s.remaining_estimate = Mock(name='rem') self.s.remaining_estimate.return_value = timedelta(hours=10) - assert not self.s.is_due(datetime.utcnow()).is_due + assert not self.s.is_due(datetime.now(timezone.utc)).is_due def test_remaining_estimate(self): self.s.cal = Mock(name='cal') - self.s.cal.next_rising().datetime.return_value = 
datetime.utcnow() - self.s.remaining_estimate(datetime.utcnow()) + self.s.cal.next_rising().datetime.return_value = datetime.now(timezone.utc) + self.s.remaining_estimate(datetime.now(timezone.utc)) def test_coordinates(self): with pytest.raises(ValueError): @@ -82,7 +82,7 @@ def test_event_uses_center(self): s.method = s._methods[ev] s.is_center = s._use_center_l[ev] try: - s.remaining_estimate(datetime.utcnow()) + s.remaining_estimate(datetime.now(timezone.utc)) except TypeError: pytest.fail( f"{s.method} was called with 'use_center' which is not a " @@ -108,7 +108,7 @@ def test_pickle(self): # This is needed for test_crontab_parser because datetime.utcnow doesn't pickle # in python 2 def utcnow(): - return datetime.utcnow() + return datetime.now(timezone.utc) class test_crontab_parser: diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py index 8e86f09b67c..dd1232e0d77 100644 --- a/t/unit/backends/test_arangodb.py +++ b/t/unit/backends/test_arangodb.py @@ -210,7 +210,7 @@ def test_backend_cleanup(self): self.backend.cleanup() self.backend.db.AQLQuery.assert_not_called() - now = datetime.datetime.utcnow() + now = datetime.datetime.now(datetime.timezone.utc) self.backend.app.now = Mock(return_value=now) self.backend.expires = 86400 expected_checkpoint = (now - self.backend.expires_delta).isoformat() diff --git a/t/unit/security/test_certificate.py b/t/unit/security/test_certificate.py index 241527f82df..68b05fa03ee 100644 --- a/t/unit/security/test_certificate.py +++ b/t/unit/security/test_certificate.py @@ -40,7 +40,7 @@ def test_has_expired_mock(self): x = Certificate(CERT1) x._cert = Mock(name='cert') - time_after = datetime.datetime.utcnow() + datetime.timedelta(days=-1) + time_after = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=-1) x._cert.not_valid_after = time_after assert x.has_expired() is True @@ -49,7 +49,7 @@ def test_has_not_expired_mock(self): x = Certificate(CERT1) x._cert = Mock(name='cert') - 
time_after = datetime.datetime.utcnow() + datetime.timedelta(days=1) + time_after = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1) x._cert.not_valid_after = time_after assert x.has_expired() is False diff --git a/t/unit/utils/test_serialization.py b/t/unit/utils/test_serialization.py index 9e762d5e8af..5ae68e4f89b 100644 --- a/t/unit/utils/test_serialization.py +++ b/t/unit/utils/test_serialization.py @@ -1,7 +1,7 @@ import json import pickle import sys -from datetime import date, datetime, time, timedelta +from datetime import date, datetime, time, timedelta, timezone from unittest.mock import Mock import pytest @@ -67,9 +67,9 @@ class test_jsonify: Queue('foo'), ['foo', 'bar', 'baz'], {'foo': 'bar'}, - datetime.utcnow(), - datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")), - datetime.utcnow().replace(microsecond=0), + datetime.now(timezone.utc), + datetime.now(timezone.utc).replace(tzinfo=ZoneInfo("UTC")), + datetime.now(timezone.utc).replace(microsecond=0), date(2012, 1, 1), time(hour=1, minute=30), time(hour=1, minute=30, microsecond=3), diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 80d5db973a1..6b955e096e9 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -48,7 +48,7 @@ def test_daylight(self, patching): class test_iso8601: def test_parse_with_timezone(self): - d = datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")) + d = datetime.now(_timezone.utc).replace(tzinfo=ZoneInfo("UTC")) assert parse_iso8601(d.isoformat()) == d # 2013-06-07T20:12:51.775877+00:00 iso = d.isoformat() @@ -124,7 +124,7 @@ def test_maybe_timedelta(arg, expected): def test_remaining(): # Relative - remaining(datetime.utcnow(), timedelta(hours=1), relative=True) + remaining(datetime.now(_timezone.utc), timedelta(hours=1), relative=True) """ The upcoming cases check whether the next run is calculated correctly @@ -188,38 +188,38 @@ def test_tz_or_local(self): assert timezone.tz_or_local(timezone.utc) def 
test_to_local(self): - assert timezone.to_local(make_aware(datetime.utcnow(), timezone.utc)) - assert timezone.to_local(datetime.utcnow()) + assert timezone.to_local(make_aware(datetime.now(_timezone.utc), timezone.utc)) + assert timezone.to_local(datetime.now(_timezone.utc)) def test_to_local_fallback(self): assert timezone.to_local_fallback( - make_aware(datetime.utcnow(), timezone.utc)) - assert timezone.to_local_fallback(datetime.utcnow()) + make_aware(datetime.now(_timezone.utc), timezone.utc)) + assert timezone.to_local_fallback(datetime.now(_timezone.utc)) class test_make_aware: def test_standard_tz(self): tz = tzinfo() - wtz = make_aware(datetime.utcnow(), tz) + wtz = make_aware(datetime.now(_timezone.utc), tz) assert wtz.tzinfo == tz def test_tz_when_zoneinfo(self): tz = ZoneInfo('US/Eastern') - wtz = make_aware(datetime.utcnow(), tz) + wtz = make_aware(datetime.now(_timezone.utc), tz) assert wtz.tzinfo == tz def test_maybe_make_aware(self): - aware = datetime.utcnow().replace(tzinfo=timezone.utc) + aware = datetime.now(_timezone.utc).replace(tzinfo=timezone.utc) assert maybe_make_aware(aware) - naive = datetime.utcnow() + naive = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 assert maybe_make_aware(naive) assert maybe_make_aware(naive).tzinfo is ZoneInfo("UTC") tz = ZoneInfo('US/Eastern') - eastern = datetime.utcnow().replace(tzinfo=tz) + eastern = datetime.now(_timezone.utc).replace(tzinfo=tz) assert maybe_make_aware(eastern).tzinfo is tz - utcnow = datetime.utcnow() + utcnow = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 assert maybe_make_aware(utcnow, 'UTC').tzinfo is ZoneInfo("UTC") @@ -232,17 +232,17 @@ def utcoffset(self, dt): return None # Mock no utcoffset specified tz = tzz() - assert localize(make_aware(datetime.utcnow(), tz), tz) + assert localize(make_aware(datetime.now(_timezone.utc), tz), tz) @patch('dateutil.tz.datetime_ambiguous') def test_when_zoneinfo(self, datetime_ambiguous_mock): 
datetime_ambiguous_mock.return_value = False tz = ZoneInfo("US/Eastern") - assert localize(make_aware(datetime.utcnow(), tz), tz) + assert localize(make_aware(datetime.now(_timezone.utc), tz), tz) datetime_ambiguous_mock.return_value = True tz2 = ZoneInfo("US/Eastern") - assert localize(make_aware(datetime.utcnow(), tz2), tz2) + assert localize(make_aware(datetime.now(_timezone.utc), tz2), tz2) @patch('dateutil.tz.datetime_ambiguous') def test_when_is_ambiguous(self, datetime_ambiguous_mock): @@ -256,11 +256,11 @@ def is_ambiguous(self, dt): datetime_ambiguous_mock.return_value = False tz = tzz() - assert localize(make_aware(datetime.utcnow(), tz), tz) + assert localize(make_aware(datetime.now(_timezone.utc), tz), tz) datetime_ambiguous_mock.return_value = True tz2 = tzz() - assert localize(make_aware(datetime.utcnow(), tz2), tz2) + assert localize(make_aware(datetime.now(_timezone.utc), tz2), tz2) def test_localize_changes_utc_dt(self): now_utc_time = datetime.now(tz=ZoneInfo("UTC")) diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index 342e7092b1a..44408599dc7 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -2,7 +2,7 @@ import os import signal import socket -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from time import monotonic, time from unittest.mock import Mock, patch @@ -537,7 +537,7 @@ def test_cancel__task_reserved(self): def test_revoked_expires_expired(self): job = self.get_request(self.mytask.s(1, f='x').set( - expires=datetime.utcnow() - timedelta(days=1) + expires=datetime.now(timezone.utc) - timedelta(days=1) )) with self.assert_signal_called( task_revoked, sender=job.task, request=job._context, @@ -549,7 +549,7 @@ def test_revoked_expires_expired(self): def test_revoked_expires_not_expired(self): job = self.xRequest( - expires=datetime.utcnow() + timedelta(days=1), + expires=datetime.now(timezone.utc) + timedelta(days=1), ) job.revoked() 
assert job.id not in revoked @@ -558,7 +558,7 @@ def test_revoked_expires_not_expired(self): def test_revoked_expires_ignore_result(self): self.mytask.ignore_result = True job = self.xRequest( - expires=datetime.utcnow() - timedelta(days=1), + expires=datetime.now(timezone.utc) - timedelta(days=1), ) job.revoked() assert job.id in revoked From 7861fd4ebfa840a06102f7c2e95720bb84a13c63 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Dec 2023 23:01:05 +0200 Subject: [PATCH 06/70] [pre-commit.ci] pre-commit autoupdate (#8740) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.7.1 → v1.8.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.1...v1.8.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10b034c957a..8e681020401 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.1 + rev: v1.8.0 hooks: - id: mypy pass_filenames: false From 40d38a835ade91676f1ef3d1be24f9e698a76086 Mon Sep 17 00:00:00 2001 From: Viicos <65306057+Viicos@users.noreply.github.com> Date: Thu, 28 Dec 2023 11:26:45 +0100 Subject: [PATCH 07/70] Remove `new` adjective in docs --- docs/userguide/periodic-tasks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst index b55799d2fe6..1928b1f9ac3 100644 --- a/docs/userguide/periodic-tasks.rst +++ b/docs/userguide/periodic-tasks.rst @@ -50,7 +50,7 @@ schedule manually. .. 
admonition:: Django Users - Celery recommends and is compatible with the new ``USE_TZ`` setting introduced + Celery recommends and is compatible with the ``USE_TZ`` setting introduced in Django 1.4. For Django users the time zone specified in the ``TIME_ZONE`` setting From 34a951b93a43499a1d96a9ca3ab4c71ac2550150 Mon Sep 17 00:00:00 2001 From: Emile Date: Wed, 3 Jan 2024 15:30:42 +0100 Subject: [PATCH 08/70] add type annotation (#8747) --- celery/utils/sysinfo.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/celery/utils/sysinfo.py b/celery/utils/sysinfo.py index 57425dd8173..52fc45e5474 100644 --- a/celery/utils/sysinfo.py +++ b/celery/utils/sysinfo.py @@ -1,4 +1,6 @@ """System information utilities.""" +from __future__ import annotations + import os from math import ceil @@ -9,16 +11,16 @@ if hasattr(os, 'getloadavg'): - def _load_average(): + def _load_average() -> tuple[float, ...]: return tuple(ceil(l * 1e2) / 1e2 for l in os.getloadavg()) else: # pragma: no cover # Windows doesn't have getloadavg - def _load_average(): - return (0.0, 0.0, 0.0) + def _load_average() -> tuple[float, ...]: + return 0.0, 0.0, 0.0, -def load_average(): +def load_average() -> tuple[float, ...]: """Return system load average as a triple.""" return _load_average() @@ -26,23 +28,23 @@ def load_average(): class df: """Disk information.""" - def __init__(self, path): + def __init__(self, path: str | bytes | os.PathLike) -> None: self.path = path @property - def total_blocks(self): + def total_blocks(self) -> float: return self.stat.f_blocks * self.stat.f_frsize / 1024 @property - def available(self): + def available(self) -> float: return self.stat.f_bavail * self.stat.f_frsize / 1024 @property - def capacity(self): + def capacity(self) -> int: avail = self.stat.f_bavail used = self.stat.f_blocks - self.stat.f_bfree return int(ceil(used * 100.0 / (used + avail) + 0.5)) @cached_property - def stat(self): + def stat(self) -> os.statvfs_result: 
return os.statvfs(os.path.abspath(self.path)) From be61f8f311b3cdc08c7957cf5b9df9a808a25686 Mon Sep 17 00:00:00 2001 From: Emile Date: Wed, 3 Jan 2024 19:19:46 +0100 Subject: [PATCH 09/70] add type annotation (#8750) --- celery/utils/iso8601.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index 74aff491a69..33176576b7f 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -50,7 +50,7 @@ ) -def parse_iso8601(datestring): +def parse_iso8601(datestring: str) -> str: """Parse and convert ISO-8601 string to datetime.""" warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat or dateutil.parser.isoparse") m = ISO8601_REGEX.match(datestring) From 12a59f821fb8c5c857bedfb4832e1d72f345e6a1 Mon Sep 17 00:00:00 2001 From: Emile Date: Thu, 4 Jan 2024 16:59:19 +0100 Subject: [PATCH 10/70] Change type annotation to celery/utils/iso8601.py (#8752) * add type annotation * change type annotation --- celery/utils/iso8601.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index 33176576b7f..f878bec59e1 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -50,7 +50,7 @@ ) -def parse_iso8601(datestring: str) -> str: +def parse_iso8601(datestring: str) -> datetime: """Parse and convert ISO-8601 string to datetime.""" warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat or dateutil.parser.isoparse") m = ISO8601_REGEX.match(datestring) From 516e332f21a630baee001e7d9f57bca8b8fd902b Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 5 Jan 2024 13:30:29 +0100 Subject: [PATCH 11/70] Update test deps --- requirements/test.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index be7af014b73..35991da4076 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.4.3 +pytest==7.4.4 
pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 @@ -7,8 +7,8 @@ pytest-order==1.2.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.7.1; platform_python_implementation=="CPython" -pre-commit==3.5.0 +mypy==1.8.0; platform_python_implementation=="CPython" +pre-commit==3.6.0 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From 950711074dda320864ebc831727df35a34933876 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 5 Jan 2024 13:34:11 +0100 Subject: [PATCH 12/70] Update requirements/test.txt --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 35991da4076..ad4f6ae5c95 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ boto3>=1.26.143 moto>=4.1.11 # typing extensions mypy==1.8.0; platform_python_implementation=="CPython" -pre-commit==3.6.0 +pre-commit==3.5.0 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From f9573974351b7f2d3106d1d0cf349b6b27fb1ed1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 6 Jan 2024 20:10:42 +0200 Subject: [PATCH 13/70] Mark flaky: test_asyncresult_get_cancels_subscription() (#8757) --- t/integration/test_tasks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 10a41f407e0..6ce6b509c7e 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -540,6 +540,7 @@ def test_asyncresult_forget_cancels_subscription(self, manager): new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] assert new_channels == [] + @flaky def test_asyncresult_get_cancels_subscription(self, manager): channels_before_test = get_active_redis_channels() From 232acf9ffb768e0ea614dc6bb0150f9983b6ff85 Mon Sep 17 00:00:00 2001 From: Emile Date: Sun, 7 Jan 2024 16:04:13 +0100 Subject: [PATCH 14/70] change _read_as_base64 (b64encode returns bytes) 
(#8759) --- celery/utils/term.py | 5 ++--- t/unit/utils/test_term.py | 17 ++++++++++++++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/celery/utils/term.py b/celery/utils/term.py index a2eff996333..850abffe0f7 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -1,6 +1,5 @@ """Terminals and colors.""" import base64 -import codecs import os import platform import sys @@ -166,9 +165,9 @@ def supports_images(): def _read_as_base64(path): - with codecs.open(path, mode='rb') as fh: + with open(path, mode='rb') as fh: encoded = base64.b64encode(fh.read()) - return encoded if isinstance(encoded, str) else encoded.decode('ascii') + return encoded.decode('ascii') def imgcat(path, inline=1, preserve_aspect_ratio=0, **kwargs): diff --git a/t/unit/utils/test_term.py b/t/unit/utils/test_term.py index 1a599b57d8c..2261b59f8e3 100644 --- a/t/unit/utils/test_term.py +++ b/t/unit/utils/test_term.py @@ -1,8 +1,11 @@ +from base64 import b64encode +from tempfile import NamedTemporaryFile + import pytest import t.skip from celery.utils import term -from celery.utils.term import colored, fg +from celery.utils.term import _read_as_base64, colored, fg @t.skip.if_win32 @@ -55,3 +58,15 @@ def test_more_unicode(self): c2 = colored().blue('ƒƒz') c3 = c._add(c, c2) assert c3 == '\x1b[1;31m\xe5foo\x1b[0m\x1b[1;34m\u0192\u0192z\x1b[0m' + + def test_read_as_base64(self): + test_data = b"The quick brown fox jumps over the lazy dog" + with NamedTemporaryFile(mode='wb') as temp_file: + temp_file.write(test_data) + temp_file.seek(0) + temp_file_path = temp_file.name + + result = _read_as_base64(temp_file_path) + expected_result = b64encode(test_data).decode('ascii') + + assert result == expected_result From e1d3df4c49abe9c8e3e5bc15e7c6ac5b1f609301 Mon Sep 17 00:00:00 2001 From: Emile Date: Sun, 7 Jan 2024 16:09:20 +0100 Subject: [PATCH 15/70] Replace string concatenation with fstring (#8760) --- celery/utils/term.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 
deletions(-) diff --git a/celery/utils/term.py b/celery/utils/term.py index 850abffe0f7..700a80e84a5 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -56,7 +56,7 @@ def __init__(self, *s, **kwargs): } def _add(self, a, b): - return str(a) + str(b) + return f"{a}{b}" def _fold_no_color(self, a, b): try: @@ -68,7 +68,7 @@ def _fold_no_color(self, a, b): except AttributeError: B = str(b) - return ''.join((str(A), str(B))) + return f"{A}{B}" def no_color(self): if self.s: @@ -79,13 +79,13 @@ def embed(self): prefix = '' if self.enabled: prefix = self.op - return ''.join((str(prefix), str(reduce(self._add, self.s)))) + return f"{prefix}{reduce(self._add, self.s)}" def __str__(self): suffix = '' if self.enabled: suffix = RESET_SEQ - return str(''.join((self.embed(), str(suffix)))) + return f"{self.embed()}{suffix}" def node(self, s, op): return self.__class__(enabled=self.enabled, op=op, *s) @@ -157,7 +157,7 @@ def reset(self, *s): return self.node(s or [''], RESET_SEQ) def __add__(self, other): - return str(self) + str(other) + return f"{self}{other}" def supports_images(): From 9ac848f2cdfcbdcf6562accf2cb6f1eff7791dd5 Mon Sep 17 00:00:00 2001 From: Emile Date: Sun, 7 Jan 2024 18:10:14 +0100 Subject: [PATCH 16/70] add type annotation (#8755) --- celery/utils/term.py | 84 +++++++++++++++++++++++--------------------- 1 file changed, 44 insertions(+), 40 deletions(-) diff --git a/celery/utils/term.py b/celery/utils/term.py index 700a80e84a5..53236ad549d 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -1,4 +1,6 @@ """Terminals and colors.""" +from __future__ import annotations + import base64 import os import platform @@ -7,6 +9,8 @@ __all__ = ('colored',) +from typing import Any + BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8) OP_SEQ = '\033[%dm' RESET_SEQ = '\033[0m' @@ -25,7 +29,7 @@ _IMG_POST = '\a\033\\' if TERM_IS_SCREEN else '\a' -def fg(s): +def fg(s: int) -> str: return COLOR_SEQ % s @@ -40,11 +44,11 @@ class 
colored: ... c.green('dog '))) """ - def __init__(self, *s, **kwargs): - self.s = s - self.enabled = not IS_WINDOWS and kwargs.get('enabled', True) - self.op = kwargs.get('op', '') - self.names = { + def __init__(self, *s: object, **kwargs: Any) -> None: + self.s: tuple[object, ...] = s + self.enabled: bool = not IS_WINDOWS and kwargs.get('enabled', True) + self.op: str = kwargs.get('op', '') + self.names: dict[str, Any] = { 'black': self.black, 'red': self.red, 'green': self.green, @@ -55,10 +59,10 @@ def __init__(self, *s, **kwargs): 'white': self.white, } - def _add(self, a, b): + def _add(self, a: object, b: object) -> str: return f"{a}{b}" - def _fold_no_color(self, a, b): + def _fold_no_color(self, a: Any, b: Any) -> str: try: A = a.no_color() except AttributeError: @@ -70,107 +74,107 @@ def _fold_no_color(self, a, b): return f"{A}{B}" - def no_color(self): + def no_color(self) -> str: if self.s: return str(reduce(self._fold_no_color, self.s)) return '' - def embed(self): + def embed(self) -> str: prefix = '' if self.enabled: prefix = self.op return f"{prefix}{reduce(self._add, self.s)}" - def __str__(self): + def __str__(self) -> str: suffix = '' if self.enabled: suffix = RESET_SEQ return f"{self.embed()}{suffix}" - def node(self, s, op): + def node(self, s: tuple[object, ...], op: str) -> colored: return self.__class__(enabled=self.enabled, op=op, *s) - def black(self, *s): + def black(self, *s: object) -> colored: return self.node(s, fg(30 + BLACK)) - def red(self, *s): + def red(self, *s: object) -> colored: return self.node(s, fg(30 + RED)) - def green(self, *s): + def green(self, *s: object) -> colored: return self.node(s, fg(30 + GREEN)) - def yellow(self, *s): + def yellow(self, *s: object) -> colored: return self.node(s, fg(30 + YELLOW)) - def blue(self, *s): + def blue(self, *s: object) -> colored: return self.node(s, fg(30 + BLUE)) - def magenta(self, *s): + def magenta(self, *s: object) -> colored: return self.node(s, fg(30 + MAGENTA)) - def 
cyan(self, *s): + def cyan(self, *s: object) -> colored: return self.node(s, fg(30 + CYAN)) - def white(self, *s): + def white(self, *s: object) -> colored: return self.node(s, fg(30 + WHITE)) - def __repr__(self): + def __repr__(self) -> str: return repr(self.no_color()) - def bold(self, *s): + def bold(self, *s: object) -> colored: return self.node(s, OP_SEQ % 1) - def underline(self, *s): + def underline(self, *s: object) -> colored: return self.node(s, OP_SEQ % 4) - def blink(self, *s): + def blink(self, *s: object) -> colored: return self.node(s, OP_SEQ % 5) - def reverse(self, *s): + def reverse(self, *s: object) -> colored: return self.node(s, OP_SEQ % 7) - def bright(self, *s): + def bright(self, *s: object) -> colored: return self.node(s, OP_SEQ % 8) - def ired(self, *s): + def ired(self, *s: object) -> colored: return self.node(s, fg(40 + RED)) - def igreen(self, *s): + def igreen(self, *s: object) -> colored: return self.node(s, fg(40 + GREEN)) - def iyellow(self, *s): + def iyellow(self, *s: object) -> colored: return self.node(s, fg(40 + YELLOW)) - def iblue(self, *s): + def iblue(self, *s: colored) -> colored: return self.node(s, fg(40 + BLUE)) - def imagenta(self, *s): + def imagenta(self, *s: object) -> colored: return self.node(s, fg(40 + MAGENTA)) - def icyan(self, *s): + def icyan(self, *s: object) -> colored: return self.node(s, fg(40 + CYAN)) - def iwhite(self, *s): + def iwhite(self, *s: object) -> colored: return self.node(s, fg(40 + WHITE)) - def reset(self, *s): - return self.node(s or [''], RESET_SEQ) + def reset(self, *s: object) -> colored: + return self.node(s or ('',), RESET_SEQ) - def __add__(self, other): + def __add__(self, other: object) -> str: return f"{self}{other}" -def supports_images(): - return sys.stdin.isatty() and ITERM_PROFILE +def supports_images() -> bool: + return sys.stdin.isatty() and ITERM_PROFILE is not None -def _read_as_base64(path): +def _read_as_base64(path: str) -> str: with open(path, mode='rb') as fh: 
encoded = base64.b64encode(fh.read()) return encoded.decode('ascii') -def imgcat(path, inline=1, preserve_aspect_ratio=0, **kwargs): +def imgcat(path: str, inline: int = 1, preserve_aspect_ratio: int = 0, **kwargs: Any) -> str: return '\n%s1337;File=inline=%d;preserveAspectRatio=%d:%s%s' % ( _IMG_PRE, inline, preserve_aspect_ratio, _read_as_base64(path), _IMG_POST) From 851b897d38e7715ba64827c714aa5ec468b88bb0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 7 Jan 2024 23:49:13 +0200 Subject: [PATCH 17/70] Skipping test_tasks::test_task_accepted - Test fails randomly (non-deterministic) (#8761) --- t/integration/test_tasks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 6ce6b509c7e..87587119b15 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -418,7 +418,8 @@ def test_fail_with_unpickleable_exception(self, manager): assert result.status == 'FAILURE' - @flaky + # Requires investigation why it randomly succeeds/fails + @pytest.mark.skip(reason="Randomly fails") def test_task_accepted(self, manager, sleep=1): r1 = sleeping.delay(sleep) sleeping.delay(sleep) From 1c8e3f998bf4927f42a48d1649fd3c64cb1f3131 Mon Sep 17 00:00:00 2001 From: robotrapta <79607467+robotrapta@users.noreply.github.com> Date: Sun, 7 Jan 2024 16:58:21 -0800 Subject: [PATCH 18/70] Updated concurrency docs page. (#8753) * First draft of updated concurrency docs page. * Wordsmithing a bit. * Removing link to better external documentation. --- docs/userguide/concurrency/index.rst | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/userguide/concurrency/index.rst b/docs/userguide/concurrency/index.rst index 75faac8e98d..d0355fdfb80 100644 --- a/docs/userguide/concurrency/index.rst +++ b/docs/userguide/concurrency/index.rst @@ -7,8 +7,36 @@ :Release: |version| :Date: |today| +Concurrency in Celery enables the parallel execution of tasks. 
The default +model, `prefork`, is well-suited for many scenarios and generally recommended +for most users. In fact, switching to another mode will silently disable +certain features like `soft_timeout` and `max_tasks_per_child`. + +This page gives a quick overview of the available options which you can pick +between using the `--pool` option when starting the worker. + +Overview of Concurrency Options +------------------------------- + +- `prefork`: The default option, ideal for CPU-bound tasks and most use cases. + It is robust and recommended unless there's a specific need for another model. +- `eventlet` and `gevent`: Designed for IO-bound tasks, these models use + greenlets for high concurrency. Note that certain features, like `soft_timeout`, + are not available in these modes. These have detailed documentation pages + linked below. +- `solo`: Executes tasks sequentially in the main thread. +- `threads`: Utilizes threading for concurrency, available if the + `concurrent.futures` module is present. +- `custom`: Enables specifying a custom worker pool implementation through + environment variables. + .. toctree:: :maxdepth: 2 eventlet gevent + +.. note:: + While alternative models like `eventlet` and `gevent` are available, they + may lack certain features compared to `prefork`. We recommend `prefork` as + the starting point unless specific requirements dictate otherwise. 
From a06707f71d45e7c06e2fcf5439651ead39bc346e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 8 Jan 2024 16:55:07 +0200 Subject: [PATCH 19/70] Changed pyup -> dependabot for updating dependencies (#8764) --- .github/dependabot.yml | 4 ++++ .pyup.yml | 5 ----- 2 files changed, 4 insertions(+), 5 deletions(-) delete mode 100644 .pyup.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 123014908be..47a31bc9d65 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,3 +4,7 @@ updates: directory: "/" schedule: interval: "daily" + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" diff --git a/.pyup.yml b/.pyup.yml deleted file mode 100644 index 0218aef3410..00000000000 --- a/.pyup.yml +++ /dev/null @@ -1,5 +0,0 @@ -# autogenerated pyup.io config file -# see https://pyup.io/docs/configuration/ for all available options - -schedule: "every week" -update: all From 3b4ab9ff7c5efc70f41a6437fe570e3eb11a7088 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 18:11:20 +0200 Subject: [PATCH 20/70] Bump isort from 5.12.0 to 5.13.2 (#8772) Bumps [isort](https://github.com/pycqa/isort) from 5.12.0 to 5.13.2. - [Release notes](https://github.com/pycqa/isort/releases) - [Changelog](https://github.com/PyCQA/isort/blob/main/CHANGELOG.md) - [Commits](https://github.com/pycqa/isort/compare/5.12.0...5.13.2) --- updated-dependencies: - dependency-name: isort dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 441d81a3230..fae13c00951 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -2,4 +2,4 @@ git+https://github.com/celery/py-amqp.git git+https://github.com/celery/kombu.git git+https://github.com/celery/billiard.git vine>=5.0.0 -isort==5.12.0 +isort==5.13.2 From 7d1eb9adc3d178e016eda59ec05fa51472344d69 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 18:12:54 +0200 Subject: [PATCH 21/70] Update elasticsearch requirement from <=8.11.0 to <=8.11.1 (#8775) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.11.1) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 50764cdfb64..af927f70d11 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.11.0 +elasticsearch<=8.11.1 elastic-transport<=8.10.0 From 45dbe1cf9c98c4f0dff08a61e1067d680f6d5339 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 18:13:59 +0200 Subject: [PATCH 22/70] Bump sphinx-click from 4.4.0 to 5.1.0 (#8774) Bumps [sphinx-click](https://github.com/click-contrib/sphinx-click) from 4.4.0 to 5.1.0. - [Release notes](https://github.com/click-contrib/sphinx-click/releases) - [Commits](https://github.com/click-contrib/sphinx-click/compare/4.4.0...5.1.0) --- updated-dependencies: - dependency-name: sphinx-click dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index fac534b02cf..2596004d021 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery>=2.0.0 Sphinx==5.3.0 sphinx-testing~=1.0.1 -sphinx-click==4.4.0 +sphinx-click==5.1.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From cd6738bb8663ac31dc37f033538f923250fbd266 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 19:06:02 +0200 Subject: [PATCH 23/70] Bump python-memcached from 1.59 to 1.61 (#8776) Bumps [python-memcached](https://github.com/linsomniac/python-memcached) from 1.59 to 1.61. - [Release notes](https://github.com/linsomniac/python-memcached/releases) - [Changelog](https://github.com/linsomniac/python-memcached/blob/master/ChangeLog) - [Commits](https://github.com/linsomniac/python-memcached/compare/1.59...1.61) --- updated-dependencies: - dependency-name: python-memcached dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/pymemcache.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pymemcache.txt b/requirements/extras/pymemcache.txt index 24743088b93..6429f34b9f5 100644 --- a/requirements/extras/pymemcache.txt +++ b/requirements/extras/pymemcache.txt @@ -1 +1 @@ -python-memcached==1.59 +python-memcached==1.61 From cf9785bd4fe5d1a26163b7721fd3bf4696b1e56a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 20:00:28 +0200 Subject: [PATCH 24/70] [pre-commit.ci] pre-commit autoupdate (#8778) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 6.1.0 → 7.0.0](https://github.com/PyCQA/flake8/compare/6.1.0...7.0.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8e681020401..66653ceaa63 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ repos: args: ["--py38-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + rev: 7.0.0 hooks: - id: flake8 From 6a2720e4f7847fa501928754babbac62a12b3fc7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 01:56:30 +0200 Subject: [PATCH 25/70] Update elastic-transport requirement from <=8.10.0 to <=8.11.0 (#8780) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. 
- [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.11.0) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index af927f70d11..696c6ce76cc 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.11.1 -elastic-transport<=8.10.0 +elastic-transport<=8.11.0 From dc49ec2a95da14ae3449491a4aa1e799b1415375 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 11 Jan 2024 20:33:58 +0200 Subject: [PATCH 26/70] python-memcached==1.61 -> python-memcached>=1.61 (#8787) --- requirements/extras/pymemcache.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pymemcache.txt b/requirements/extras/pymemcache.txt index 6429f34b9f5..ffa124846aa 100644 --- a/requirements/extras/pymemcache.txt +++ b/requirements/extras/pymemcache.txt @@ -1 +1 @@ -python-memcached==1.61 +python-memcached>=1.61 From fa1d98c2a86bf6a3d7987b85253a6a2fb9b90f74 Mon Sep 17 00:00:00 2001 From: Adam Weiss Date: Sun, 14 Jan 2024 14:30:58 -0500 Subject: [PATCH 27/70] Remove usage of utcnow (#8791) * Remove usage of utcnow * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/backends/base.py | 4 +- celery/backends/database/models.py | 8 +-- celery/backends/elasticsearch.py | 4 +- celery/utils/time.py | 2 
+- t/unit/app/test_beat.py | 7 +- t/unit/backends/test_elasticsearch.py | 98 +++++++++++++-------------- t/unit/backends/test_mongodb.py | 5 +- t/unit/utils/test_time.py | 4 +- 8 files changed, 69 insertions(+), 63 deletions(-) diff --git a/celery/backends/base.py b/celery/backends/base.py index 4216c3b343e..f7d62c3dbe4 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -9,7 +9,7 @@ import time import warnings from collections import namedtuple -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from functools import partial from weakref import WeakValueDictionary @@ -460,7 +460,7 @@ def _get_result_meta(self, result, state, traceback, request, format_date=True, encode=False): if state in self.READY_STATES: - date_done = datetime.utcnow() + date_done = datetime.now(timezone.utc) if format_date: date_done = date_done.isoformat() else: diff --git a/celery/backends/database/models.py b/celery/backends/database/models.py index 1c766b51ca4..a5df8f4d341 100644 --- a/celery/backends/database/models.py +++ b/celery/backends/database/models.py @@ -1,5 +1,5 @@ """Database models used by the SQLAlchemy result store backend.""" -from datetime import datetime +from datetime import datetime, timezone import sqlalchemy as sa from sqlalchemy.types import PickleType @@ -22,8 +22,8 @@ class Task(ResultModelBase): task_id = sa.Column(sa.String(155), unique=True) status = sa.Column(sa.String(50), default=states.PENDING) result = sa.Column(PickleType, nullable=True) - date_done = sa.Column(sa.DateTime, default=datetime.utcnow, - onupdate=datetime.utcnow, nullable=True) + date_done = sa.Column(sa.DateTime, default=datetime.now(timezone.utc), + onupdate=datetime.now(timezone.utc), nullable=True) traceback = sa.Column(sa.Text, nullable=True) def __init__(self, task_id): @@ -84,7 +84,7 @@ class TaskSet(ResultModelBase): autoincrement=True, primary_key=True) taskset_id = sa.Column(sa.String(155), unique=True) result = 
sa.Column(PickleType, nullable=True) - date_done = sa.Column(sa.DateTime, default=datetime.utcnow, + date_done = sa.Column(sa.DateTime, default=datetime.now(timezone.utc), nullable=True) def __init__(self, taskset_id, result): diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py index cb4ca4da0fd..a97869bef52 100644 --- a/celery/backends/elasticsearch.py +++ b/celery/backends/elasticsearch.py @@ -1,5 +1,5 @@ """Elasticsearch result store backend.""" -from datetime import datetime +from datetime import datetime, timezone from kombu.utils.encoding import bytes_to_str from kombu.utils.url import _parse_url @@ -129,7 +129,7 @@ def _set_with_state(self, key, value, state): body = { 'result': value, '@timestamp': '{}Z'.format( - datetime.utcnow().isoformat()[:-3] + datetime.now(timezone.utc).isoformat()[:-9] ), } try: diff --git a/celery/utils/time.py b/celery/utils/time.py index c8fd0959336..d27615cc10e 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -212,7 +212,7 @@ def remaining( using :func:`delta_resolution` (i.e., rounded to the resolution of `ends_in`). now (Callable): Function returning the current time and date. - Defaults to :func:`datetime.utcnow`. + Defaults to :func:`datetime.now(timezone.utc)`. Returns: ~datetime.timedelta: Remaining time. 
diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 6b113df426e..fa163bb931e 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -156,7 +156,10 @@ def is_due(self, *args, **kwargs): class mocked_schedule(schedule): - def __init__(self, is_due, next_run_at, nowfun=datetime.utcnow): + def now_func(): + return datetime.now(timezone.utc) + + def __init__(self, is_due, next_run_at, nowfun=now_func): self._is_due = is_due self._next_run_at = next_run_at self.run_every = timedelta(seconds=1) @@ -872,7 +875,7 @@ def test_maybe_make_aware(self): def test_to_local(self): x = schedule(10, app=self.app) x.utc_enabled = True - d = x.to_local(datetime.utcnow()) # datetime.utcnow() is deprecated in Python 3.12 + d = x.to_local(datetime.now()) assert d.tzinfo is None x.utc_enabled = False d = x.to_local(datetime.now(timezone.utc)) diff --git a/t/unit/backends/test_elasticsearch.py b/t/unit/backends/test_elasticsearch.py index a53fe512984..a465cbcf501 100644 --- a/t/unit/backends/test_elasticsearch.py +++ b/t/unit/backends/test_elasticsearch.py @@ -1,4 +1,4 @@ -import datetime +from datetime import datetime, timezone from unittest.mock import Mock, call, patch, sentinel import pytest @@ -150,8 +150,8 @@ def test_backend_by_url(self, url='elasticsearch://localhost:9200/index'): @patch('celery.backends.elasticsearch.datetime') def test_index_conflict(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -178,20 +178,20 @@ def test_index_conflict(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': 
expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_with_doctype(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -219,21 +219,21 @@ def test_index_conflict_with_doctype(self, datetime_mock): id=sentinel.task_id, index=x.index, doc_type=x.doc_type, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, doc_type=x.doc_type, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_without_state(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -260,13 +260,13 @@ def test_index_conflict_without_state(self, datetime_mock): x._server.index.assert_called_once_with( 
id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @@ -277,8 +277,8 @@ def test_index_conflict_with_ready_state_on_backend_without_state(self, datetime so it cannot protect overriding a ready state by any other state. As a result, server.update will be called no matter what. """ - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -305,20 +305,20 @@ def test_index_conflict_with_ready_state_on_backend_without_state(self, datetime x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_with_existing_success(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - 
datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -347,15 +347,15 @@ def test_index_conflict_with_existing_success(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_not_called() @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_with_existing_ready_state(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -382,7 +382,7 @@ def test_index_conflict_with_existing_ready_state(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_not_called() @@ -390,11 +390,11 @@ def test_index_conflict_with_existing_ready_state(self, datetime_mock): @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = 
datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry x_server_get_side_effect = [ @@ -455,7 +455,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -464,7 +464,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -476,7 +476,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): body={ 'doc': { 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' } }, params={'if_seq_no': 2, 'if_primary_term': 1} @@ -487,7 +487,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): body={ 'doc': { 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' } }, params={'if_seq_no': 3, 'if_primary_term': 1} @@ -501,11 +501,11 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 
24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry try: @@ -550,7 +550,7 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -559,7 +559,7 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -572,11 +572,11 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_index_conflicting_document_removed_not_throwing(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry try: @@ -618,7 +618,7 @@ def 
test_backend_index_conflicting_document_removed_not_throwing(self, base_date index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -627,7 +627,7 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -640,11 +640,11 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt # self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry # try: @@ -685,7 +685,7 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ) @@ -695,7 +695,7 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, body={ 'doc': { 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' } }, 
params={'if_primary_term': 1, 'if_seq_no': 2} diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py index 6f74b42125f..9ae340ee149 100644 --- a/t/unit/backends/test_mongodb.py +++ b/t/unit/backends/test_mongodb.py @@ -563,7 +563,10 @@ def test_cleanup(self, mock_get_database): mock_database.__getitem__ = Mock(name='MD.__getitem__') mock_database.__getitem__.return_value = mock_collection - self.backend.app.now = datetime.datetime.utcnow + def now_func(): + return datetime.datetime.now(datetime.timezone.utc) + + self.backend.app.now = now_func self.backend.cleanup() mock_get_database.assert_called_once_with() diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 6b955e096e9..621769252a9 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -212,14 +212,14 @@ def test_tz_when_zoneinfo(self): def test_maybe_make_aware(self): aware = datetime.now(_timezone.utc).replace(tzinfo=timezone.utc) assert maybe_make_aware(aware) - naive = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 + naive = datetime.now() assert maybe_make_aware(naive) assert maybe_make_aware(naive).tzinfo is ZoneInfo("UTC") tz = ZoneInfo('US/Eastern') eastern = datetime.now(_timezone.utc).replace(tzinfo=tz) assert maybe_make_aware(eastern).tzinfo is tz - utcnow = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 + utcnow = datetime.now() assert maybe_make_aware(utcnow, 'UTC').tzinfo is ZoneInfo("UTC") From 9ed121d3d514a084247f4e29fbe7a7aa8b2d441a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Oct 2023 22:56:58 +0300 Subject: [PATCH 28/70] Pytest Celery Integration (#8241) * Added initial/sanity smoke tests * Allow using all integration tests tasks in the smoke tests environment, in addition to smoke tests specific tasks (to reuse existing tests tasks) * Added xdist support to smoke tests only * Added CI workflow for building the smoke tests dockerfiles * Added new tox env to clean resources & 
remains from the smoke tests: tox -e clean --- .github/workflows/docker.yml | 32 ++++++++++++ .github/workflows/python-package.yml | 39 +++++++++++++++ .gitignore | 1 + requirements/extras/pytest.txt | 3 ++ requirements/test-tmp_for_dev.txt | 3 ++ requirements/test.txt | 3 +- t/integration/conftest.py | 7 +-- t/integration/tasks.py | 50 +++++++++++-------- t/smoke/__init__.py | 0 t/smoke/conftest.py | 16 ++++++ t/smoke/signals.py | 26 ++++++++++ t/smoke/tasks.py | 15 ++++++ t/smoke/test_canvas.py | 73 ++++++++++++++++++++++++++++ t/smoke/test_consumer.py | 55 +++++++++++++++++++++ t/smoke/test_control.py | 7 +++ t/smoke/test_failover.py | 41 ++++++++++++++++ t/smoke/test_signals.py | 54 ++++++++++++++++++++ t/smoke/workers/__init__.py | 0 t/smoke/workers/dev.py | 66 +++++++++++++++++++++++++ t/smoke/workers/docker/dev | 34 +++++++++++++ t/smoke/workers/docker/pypi | 33 +++++++++++++ t/smoke/workers/latest.py | 51 +++++++++++++++++++ t/smoke/workers/legacy.py | 55 +++++++++++++++++++++ tox.ini | 18 +++++++ 24 files changed, 654 insertions(+), 28 deletions(-) create mode 100644 requirements/test-tmp_for_dev.txt create mode 100644 t/smoke/__init__.py create mode 100644 t/smoke/conftest.py create mode 100644 t/smoke/signals.py create mode 100644 t/smoke/tasks.py create mode 100644 t/smoke/test_canvas.py create mode 100644 t/smoke/test_consumer.py create mode 100644 t/smoke/test_control.py create mode 100644 t/smoke/test_failover.py create mode 100644 t/smoke/test_signals.py create mode 100644 t/smoke/workers/__init__.py create mode 100644 t/smoke/workers/dev.py create mode 100644 t/smoke/workers/docker/dev create mode 100644 t/smoke/workers/docker/pypi create mode 100644 t/smoke/workers/latest.py create mode 100644 t/smoke/workers/legacy.py diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index bc39a2bd3b1..65dd0914029 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -36,3 +36,35 @@ jobs: - uses: 
actions/checkout@v4 - name: Build Documentation run: make docker-docs + + smoke-tests_dev: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - name: "Build smoke tests container: dev" + run: docker build -f t/smoke/workers/docker/dev . + + smoke-tests_latest: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - name: "Build smoke tests container: latest" + run: docker build -f t/smoke/workers/docker/pypi . + + smoke-tests_pypi: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - name: "Build smoke tests container: pypi" + run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="5" . + + smoke-tests_legacy: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - name: "Build smoke tests container: legacy" + run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="4" . diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 41bdf04ea3d..04c363a818c 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -119,3 +119,42 @@ jobs: run: > tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + + Smoke: + # needs: + # - Integration + # if: needs.Integration.result == 'success' + # timeout-minutes: 240 + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox 
tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 30 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 60 --rerun-except AssertionError -n auto diff --git a/.gitignore b/.gitignore index d892eca06e5..02c9965790a 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,4 @@ integration-tests-config.json [0-9]* statefilename.* dump.rdb +.env diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 6daa4ff1249..0d178f4a462 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1,4 @@ pytest-celery==0.0.0 +# pytest-celery==1.0.0a1 +# git+https://github.com/celery/pytest-celery.git +# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@celery_integration#egg=pytest-celery \ No newline at end of file diff --git a/requirements/test-tmp_for_dev.txt b/requirements/test-tmp_for_dev.txt new file mode 100644 index 00000000000..326c2e82e07 --- /dev/null +++ b/requirements/test-tmp_for_dev.txt @@ -0,0 +1,3 @@ +# -e ../pytest-celery +git+https://github.com/celery/pytest-celery.git +# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@BRANCH_NAME#egg=pytest-celery \ No newline at end of file diff --git a/requirements/test.txt b/requirements/test.txt index ad4f6ae5c95..2b26eef5e9f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,6 @@ pytest==7.4.4 -pytest-celery==0.0.0 +# pytest-celery==1.0.0a1 +pytest-rerunfailures==12.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 pytest-click==1.1.0 diff --git a/t/integration/conftest.py b/t/integration/conftest.py index 550bd5d37ba..1707e3ca324 100644 --- a/t/integration/conftest.py +++ b/t/integration/conftest.py @@ -8,6 +8,7 @@ # that installs the pytest plugin into the setuptools registry. 
from celery.contrib.pytest import celery_app, celery_session_worker from celery.contrib.testing.manager import Manager +from t.integration.tasks import get_redis_connection TEST_BROKER = os.environ.get('TEST_BROKER', 'pyamqp://') TEST_BACKEND = os.environ.get('TEST_BACKEND', 'redis://') @@ -17,15 +18,9 @@ 'celery_app', 'celery_session_worker', 'get_active_redis_channels', - 'get_redis_connection', ) -def get_redis_connection(): - from redis import StrictRedis - return StrictRedis(host=os.environ.get('REDIS_HOST')) - - def get_active_redis_channels(): return get_redis_connection().execute_command('PUBSUB CHANNELS') diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 24dedbce29c..038b137f823 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -1,12 +1,18 @@ +import os from collections.abc import Iterable from time import sleep from celery import Signature, Task, chain, chord, group, shared_task -from celery.canvas import StampingVisitor, signature +from celery.canvas import signature from celery.exceptions import SoftTimeLimitExceeded from celery.utils.log import get_task_logger -from .conftest import get_redis_connection + +def get_redis_connection(): + from redis import StrictRedis + + return StrictRedis(host=os.environ.get("REDIS_HOST")) + logger = get_task_logger(__name__) @@ -455,28 +461,30 @@ def errback_new_style(request, exc, tb): return request.id -class StampOnReplace(StampingVisitor): - stamp = {'StampOnReplace': 'This is the replaced task'} +try: + from celery.canvas import StampingVisitor - def on_signature(self, sig, **headers) -> dict: - return self.stamp + class StampOnReplace(StampingVisitor): + stamp = {'StampOnReplace': 'This is the replaced task'} + def on_signature(self, sig, **headers) -> dict: + return self.stamp -class StampedTaskOnReplace(Task): - """Custom task for stamping on replace""" + class StampedTaskOnReplace(Task): + """Custom task for stamping on replace""" - def on_replace(self, sig): - 
sig.stamp(StampOnReplace()) - return super().on_replace(sig) - - -@shared_task -def replaced_with_me(): - return True + def on_replace(self, sig): + sig.stamp(StampOnReplace()) + return super().on_replace(sig) + @shared_task + def replaced_with_me(): + return True -@shared_task(bind=True, base=StampedTaskOnReplace) -def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): - if replace_with is None: - replace_with = replaced_with_me.s() - self.replace(signature(replace_with)) + @shared_task(bind=True, base=StampedTaskOnReplace) + def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): + if replace_with is None: + replace_with = replaced_with_me.s() + self.replace(signature(replace_with)) +except ImportError: + pass diff --git a/t/smoke/__init__.py b/t/smoke/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py new file mode 100644 index 00000000000..3b9b8e3c7ca --- /dev/null +++ b/t/smoke/conftest.py @@ -0,0 +1,16 @@ +import pytest + +from t.smoke.workers.dev import * # noqa +from t.smoke.workers.latest import * # noqa +from t.smoke.workers.legacy import * # noqa + + +@pytest.fixture +def default_worker_tasks() -> set: + from t.integration import tasks as integration_tests_tasks + from t.smoke import tasks as smoke_tests_tasks + + yield { + integration_tests_tasks, + smoke_tests_tasks, + } diff --git a/t/smoke/signals.py b/t/smoke/signals.py new file mode 100644 index 00000000000..298c12e17d3 --- /dev/null +++ b/t/smoke/signals.py @@ -0,0 +1,26 @@ +from celery.signals import worker_init, worker_process_init, worker_process_shutdown, worker_ready, worker_shutdown + + +@worker_init.connect +def worker_init_handler(sender, **kwargs): # type: ignore + print("worker_init_handler") + + +@worker_process_init.connect +def worker_process_init_handler(sender, **kwargs): # type: ignore + print("worker_process_init_handler") + + +@worker_process_shutdown.connect +def 
worker_process_shutdown_handler(sender, pid, exitcode, **kwargs): # type: ignore + print("worker_process_shutdown_handler") + + +@worker_ready.connect +def worker_ready_handler(sender, **kwargs): # type: ignore + print("worker_ready_handler") + + +@worker_shutdown.connect +def worker_shutdown_handler(sender, **kwargs): # type: ignore + print("worker_shutdown_handler") diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py new file mode 100644 index 00000000000..ad316d7347f --- /dev/null +++ b/t/smoke/tasks.py @@ -0,0 +1,15 @@ +from time import sleep + +import celery.utils +from celery import shared_task +from t.integration.tasks import * # noqa + + +@shared_task +def noop(*args, **kwargs) -> None: + return celery.utils.noop(*args, **kwargs) + + +@shared_task +def long_running_task(seconds: float = 1) -> None: + sleep(seconds) diff --git a/t/smoke/test_canvas.py b/t/smoke/test_canvas.py new file mode 100644 index 00000000000..965ac5e3179 --- /dev/null +++ b/t/smoke/test_canvas.py @@ -0,0 +1,73 @@ +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup + +from celery.canvas import chain, chord, group, signature +from t.smoke.tasks import add, identity + + +class test_signature: + def test_sanity(self, celery_setup: CeleryTestSetup): + sig = signature(identity, args=("test_signature",), queue=celery_setup.worker.worker_queue) + assert sig.delay().get(timeout=RESULT_TIMEOUT) == "test_signature" + + +class test_group: + def test_sanity(self, celery_setup: CeleryTestSetup): + sig = group( + group(add.si(1, 1), add.si(2, 2)), + group([add.si(1, 1), add.si(2, 2)]), + group(s for s in [add.si(1, 1), add.si(2, 2)]), + ) + res = sig.apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == [2, 4, 2, 4, 2, 4] + + +class test_chain: + def test_sanity(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + sig = chain( + identity.si("chain_task1").set(queue=queue), + 
identity.si("chain_task2").set(queue=queue), + ) | identity.si("test_chain").set(queue=queue) + res = sig.apply_async() + assert res.get(timeout=RESULT_TIMEOUT) == "test_chain" + + +class test_chord: + def test_sanity(self, celery_setup: CeleryTestSetup): + if not celery_setup.chords_allowed(): + pytest.skip("Chords are not supported") + + upgraded_chord = signature( + group( + identity.si("header_task1"), + identity.si("header_task2"), + ) + | identity.si("body_task"), + queue=celery_setup.worker.worker_queue, + ) + + sig = group( + [ + upgraded_chord, + chord( + group( + identity.si("header_task3"), + identity.si("header_task4"), + ), + identity.si("body_task"), + ), + chord( + ( + sig + for sig in [ + identity.si("header_task5"), + identity.si("header_task6"), + ] + ), + identity.si("body_task"), + ), + ] + ) + res = sig.apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == ["body_task"] * 3 diff --git a/t/smoke/test_consumer.py b/t/smoke/test_consumer.py new file mode 100644 index 00000000000..0e0f09dbf33 --- /dev/null +++ b/t/smoke/test_consumer.py @@ -0,0 +1,55 @@ +import pytest +from pytest_celery import CeleryTestSetup, RedisTestBroker + +from celery import Celery +from celery.canvas import group +from t.smoke.tasks import long_running_task + +WORKER_PREFETCH_MULTIPLIER = 2 +WORKER_CONCURRENCY = 5 +MAX_PREFETCH = WORKER_PREFETCH_MULTIPLIER * WORKER_CONCURRENCY + + +@pytest.fixture +def default_worker_app(default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER + app.conf.worker_concurrency = WORKER_CONCURRENCY + yield app + + +class test_consumer: + @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) + def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): + sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) + 
sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + + expected_reduced_prefetch = max( + WORKER_PREFETCH_MULTIPLIER, MAX_PREFETCH - expected_running_tasks_count * WORKER_PREFETCH_MULTIPLIER + ) + + expected_prefetch_reduce_message = ( + f"Temporarily reducing the prefetch count to {expected_reduced_prefetch} " + f"to avoid over-fetching since {expected_running_tasks_count} tasks are currently being processed." + ) + celery_setup.worker.wait_for_log(expected_prefetch_reduce_message) + + expected_prefetch_restore_message = ( + f"The prefetch count will be gradually restored to {MAX_PREFETCH} " f"as the tasks complete processing." + ) + celery_setup.worker.wait_for_log(expected_prefetch_restore_message) + + def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real bug in Redis broker") + + expected_running_tasks_count = MAX_PREFETCH+1 + sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) + sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + expected_prefetch_restore_message = ( + f"Resuming normal operations following a restart.\n" + f"Prefetch count has been restored to the maximum of {MAX_PREFETCH}" + ) + celery_setup.worker.wait_for_log(expected_prefetch_restore_message) diff --git a/t/smoke/test_control.py b/t/smoke/test_control.py new file mode 100644 index 00000000000..97ed8b9fe69 --- /dev/null +++ b/t/smoke/test_control.py @@ -0,0 +1,7 @@ +from pytest_celery import CeleryTestSetup + + +class test_control: + def test_sanity(self, celery_setup: CeleryTestSetup): + r = celery_setup.app.control.ping() + assert all([all([res["ok"] == "pong" for _, res in response.items()]) for response in r]) diff --git a/t/smoke/test_failover.py b/t/smoke/test_failover.py new file mode 100644 index 00000000000..65d24ba5f63 --- /dev/null +++ b/t/smoke/test_failover.py @@ -0,0 +1,41 @@ 
+import pytest +from pytest_celery import (RABBITMQ_CONTAINER_TIMEOUT, RESULT_TIMEOUT, CeleryBrokerCluster, CeleryTestSetup, + RabbitMQContainer, RabbitMQTestBroker) +from pytest_docker_tools import container, fxtr + +from t.smoke.tasks import identity + +failover_broker = container( + image="{default_rabbitmq_broker_image}", + ports=fxtr("default_rabbitmq_broker_ports"), + environment=fxtr("default_rabbitmq_broker_env"), + network="{default_pytest_celery_network.name}", + wrapper_class=RabbitMQContainer, + timeout=RABBITMQ_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def failover_rabbitmq_broker(failover_broker: RabbitMQContainer) -> RabbitMQTestBroker: + broker = RabbitMQTestBroker(failover_broker) + yield broker + broker.teardown() + + +@pytest.fixture +def celery_broker_cluster( + celery_rabbitmq_broker: RabbitMQTestBroker, + failover_rabbitmq_broker: RabbitMQTestBroker, +) -> CeleryBrokerCluster: + cluster = CeleryBrokerCluster(celery_rabbitmq_broker, failover_rabbitmq_broker) + yield cluster + cluster.teardown() + + +class test_failover: + def test_sanity(self, celery_setup: CeleryTestSetup): + assert len(celery_setup.broker_cluster) > 1 + celery_setup.broker.kill() + expected = "test_broker_failover" + res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == expected diff --git a/t/smoke/test_signals.py b/t/smoke/test_signals.py new file mode 100644 index 00000000000..c3b6210eb2b --- /dev/null +++ b/t/smoke/test_signals.py @@ -0,0 +1,54 @@ +import pytest +from pytest_celery import CeleryTestSetup + +from celery.signals import after_task_publish, before_task_publish +from t.smoke.tasks import noop + + +@pytest.fixture +def default_worker_signals(default_worker_signals: set) -> set: + from t.smoke import signals + + default_worker_signals.add(signals) + yield default_worker_signals + + +class test_signals: + @pytest.mark.parametrize( + "log, control", + [ + ("worker_init_handler", None), + 
("worker_process_init_handler", None), + ("worker_ready_handler", None), + ("worker_process_shutdown_handler", "shutdown"), + ("worker_shutdown_handler", "shutdown"), + ], + ) + def test_sanity(self, celery_setup: CeleryTestSetup, log: str, control: str): + if control: + celery_setup.app.control.broadcast(control) + celery_setup.worker.wait_for_log(log) + + +class test_before_task_publish: + def test_sanity(self, celery_setup: CeleryTestSetup): + @before_task_publish.connect + def before_task_publish_handler(*args, **kwargs): + nonlocal signal_was_called + signal_was_called = True + + signal_was_called = False + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + assert signal_was_called is True + + +class test_after_task_publish: + def test_sanity(self, celery_setup: CeleryTestSetup): + @after_task_publish.connect + def after_task_publish_handler(*args, **kwargs): + nonlocal signal_was_called + signal_was_called = True + + signal_was_called = False + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + assert signal_was_called is True diff --git a/t/smoke/workers/__init__.py b/t/smoke/workers/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py new file mode 100644 index 00000000000..13901729240 --- /dev/null +++ b/t/smoke/workers/dev.py @@ -0,0 +1,66 @@ +import os +from typing import Any, Type + +import pytest +from pytest_celery import CeleryWorkerContainer, defaults +from pytest_docker_tools import build, container, fxtr + +import celery + + +class SmokeWorkerContainer(CeleryWorkerContainer): + @property + def client(self) -> Any: + return self + + @classmethod + def version(cls) -> str: + return celery.__version__ + + @classmethod + def log_level(cls) -> str: + return "INFO" + + @classmethod + def worker_name(cls) -> str: + return "smoke_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "smoke_tests_queue" + + +celery_dev_worker_image = build( + 
path=".", + dockerfile="t/smoke/workers/docker/dev", + tag="t/smoke/worker:dev", + buildargs=SmokeWorkerContainer.buildargs(), +) + + +default_worker_container = container( + image="{celery_dev_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=SmokeWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def default_worker_container_cls() -> Type[CeleryWorkerContainer]: + return SmokeWorkerContainer + + +@pytest.fixture(scope="session") +def default_worker_container_session_cls() -> Type[CeleryWorkerContainer]: + return SmokeWorkerContainer diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev new file mode 100644 index 00000000000..ee1709835e3 --- /dev/null +++ b/t/smoke/workers/docker/dev @@ -0,0 +1,34 @@ +FROM python:3.11-bookworm + +# Create a user to run the worker +RUN adduser --disabled-password --gecos "" test_user + +# Install system dependencies +RUN apt-get update && apt-get install -y build-essential + +# Set arguments +ARG CELERY_LOG_LEVEL=INFO +ARG CELERY_WORKER_NAME=celery_dev_worker +ARG CELERY_WORKER_QUEUE=celery +ENV LOG_LEVEL=$CELERY_LOG_LEVEL +ENV WORKER_NAME=$CELERY_WORKER_NAME +ENV WORKER_QUEUE=$CELERY_WORKER_QUEUE + +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +# Install celery from source +WORKDIR /celery + +COPY --chown=test_user:test_user . 
/celery +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir -e /celery[redis,memcache,pymemcache] + +# The workdir must be /app +WORKDIR /app + +# Switch to the test_user +USER test_user + +# Start the celery worker +CMD celery -A app worker --loglevel=$LOG_LEVEL -n $WORKER_NAME@%h -Q $WORKER_QUEUE diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi new file mode 100644 index 00000000000..85d51dadf9a --- /dev/null +++ b/t/smoke/workers/docker/pypi @@ -0,0 +1,33 @@ +FROM python:3.10-bookworm + +# Create a user to run the worker +RUN adduser --disabled-password --gecos "" test_user + +# Install system dependencies +RUN apt-get update && apt-get install -y build-essential + +# Set arguments +ARG CELERY_VERSION="" +ARG CELERY_LOG_LEVEL=INFO +ARG CELERY_WORKER_NAME=celery_tests_worker +ARG CELERY_WORKER_QUEUE=celery +ENV PIP_VERSION=$CELERY_VERSION +ENV LOG_LEVEL=$CELERY_LOG_LEVEL +ENV WORKER_NAME=$CELERY_WORKER_NAME +ENV WORKER_QUEUE=$CELERY_WORKER_QUEUE + +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +# Install Python dependencies +RUN pip install --no-cache-dir --upgrade pip \ + && pip install --no-cache-dir celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} + +# The workdir must be /app +WORKDIR /app + +# Switch to the test_user +USER test_user + +# Start the celery worker +CMD celery -A app worker --loglevel=$LOG_LEVEL -n $WORKER_NAME@%h -Q $WORKER_QUEUE diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py new file mode 100644 index 00000000000..da18ceb602e --- /dev/null +++ b/t/smoke/workers/latest.py @@ -0,0 +1,51 @@ +from typing import Any + +import pytest +from pytest_celery import CeleryTestWorker, CeleryWorkerContainer, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery + + +class CeleryLatestWorkerContainer(CeleryWorkerContainer): + @property + def client(self) -> Any: + return self + + @classmethod + def log_level(cls) -> 
str: + return "INFO" + + @classmethod + def worker_name(cls) -> str: + return "celery_latest_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "celery_latest_tests_queue" + + +celery_latest_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/pypi", + tag="t/smoke/worker:latest", + buildargs=CeleryLatestWorkerContainer.buildargs(), +) + + +celery_latest_worker_container = container( + image="{celery_latest_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, + wrapper_class=CeleryLatestWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_latest_worker( + celery_latest_worker_container: CeleryLatestWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + yield CeleryTestWorker(celery_latest_worker_container, app=celery_setup_app) diff --git a/t/smoke/workers/legacy.py b/t/smoke/workers/legacy.py new file mode 100644 index 00000000000..0fb1f419bb6 --- /dev/null +++ b/t/smoke/workers/legacy.py @@ -0,0 +1,55 @@ +from typing import Any + +import pytest +from pytest_celery import CeleryTestWorker, CeleryWorkerContainer, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery + + +class CeleryLegacyWorkerContainer(CeleryWorkerContainer): + @property + def client(self) -> Any: + return self + + @classmethod + def version(cls) -> str: + return "4.4.7" # Last version of 4.x + + @classmethod + def log_level(cls) -> str: + return "INFO" + + @classmethod + def worker_name(cls) -> str: + return "celery4_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "celery4_tests_queue" + + +celery_legacy_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/pypi", + tag="t/smoke/worker:legacy", + buildargs=CeleryLegacyWorkerContainer.buildargs(), +) + + 
+celery_legacy_worker_container = container( + image="{celery_legacy_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, + wrapper_class=CeleryLegacyWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_legacy_worker( + celery_legacy_worker_container: CeleryLegacyWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + yield CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) diff --git a/tox.ini b/tox.ini index 806b3d977ee..cc5087b3e03 100644 --- a/tox.ini +++ b/tox.ini @@ -4,6 +4,7 @@ requires = envlist = {3.8,3.9,3.10,3.11,3.12,pypy3}-unit {3.8,3.9,3.10,3.11,3.12,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} + {3.8,3.9,3.10,3.11,3.12,pypy3}-smoke flake8 apicheck @@ -28,6 +29,7 @@ passenv = deps= -r{toxinidir}/requirements/test.txt + -r{toxinidir}/requirements/test-tmp_for_dev.txt -r{toxinidir}/requirements/pkgutils.txt 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt @@ -35,6 +37,7 @@ deps= pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt + smoke: pytest-xdist==3.3.1 linkcheck,apicheck,configcheck: -r{toxinidir}/requirements/docs.txt lint: pre-commit @@ -43,11 +46,14 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} + smoke: pytest -xsv t/smoke {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null WORKER_LOGLEVEL = INFO PYTHONIOENCODING = UTF-8 + PYTHONUNBUFFERED = 1 + PYTHONDONTWRITEBYTECODE = 1 cache: TEST_BROKER=redis:// cache: TEST_BACKEND=cache+pylibmc:// @@ -113,3 +119,15 @@ commands = [testenv:lint] commands 
= pre-commit {posargs:run --all-files --show-diff-on-failure} + +[testenv:clean] +allowlist_externals = bash +commands_pre = + pip install cleanpy +commands = + python -m cleanpy . + bash -c 'files=$(find . -name "*.coverage*" -type f); if [ -n "$files" ]; then echo "Removed coverage file(s):"; echo "$files" | tr " " "\n"; rm $files; fi' + bash -c 'containers=$(docker ps -aq --filter label=creator=pytest-docker-tools); if [ -n "$containers" ]; then echo "Removed Docker container(s):"; docker rm -f $containers; fi' + bash -c 'networks=$(docker network ls --filter name=pytest- -q); if [ -n "$networks" ]; then echo "Removed Docker network(s):"; docker network rm $networks; fi' + bash -c 'volumes=$(docker volume ls --filter name=pytest- -q); if [ -n "$volumes" ]; then echo "Removed Docker volume(s):"; docker volume rm $volumes; fi' + From af898ac41fe1b2491f93ad0e4258dfe06f2d3f2a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 20 Oct 2023 20:48:09 +0300 Subject: [PATCH 29/70] Bugfix in test_prefetch_count_restored() and other enhancements (#8580) * Fixed bug in test: test_prefetch_count_restored() * Changed all smoke tests workers log level from INFO to DEBUG * Changed usage of wait_for_log() -> assert_log_exists() --- t/smoke/test_consumer.py | 15 ++++++--------- t/smoke/workers/dev.py | 2 +- t/smoke/workers/latest.py | 2 +- t/smoke/workers/legacy.py | 2 +- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/t/smoke/test_consumer.py b/t/smoke/test_consumer.py index 0e0f09dbf33..168711bc101 100644 --- a/t/smoke/test_consumer.py +++ b/t/smoke/test_consumer.py @@ -1,5 +1,5 @@ import pytest -from pytest_celery import CeleryTestSetup, RedisTestBroker +from pytest_celery import CeleryTestSetup from celery import Celery from celery.canvas import group @@ -33,18 +33,15 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r f"Temporarily reducing the prefetch count to {expected_reduced_prefetch} " f"to avoid over-fetching since 
{expected_running_tasks_count} tasks are currently being processed." ) - celery_setup.worker.wait_for_log(expected_prefetch_reduce_message) + celery_setup.worker.assert_log_exists(expected_prefetch_reduce_message) expected_prefetch_restore_message = ( - f"The prefetch count will be gradually restored to {MAX_PREFETCH} " f"as the tasks complete processing." + f"The prefetch count will be gradually restored to {MAX_PREFETCH} as the tasks complete processing." ) - celery_setup.worker.wait_for_log(expected_prefetch_restore_message) + celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real bug in Redis broker") - - expected_running_tasks_count = MAX_PREFETCH+1 + expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -52,4 +49,4 @@ def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): f"Resuming normal operations following a restart.\n" f"Prefetch count has been restored to the maximum of {MAX_PREFETCH}" ) - celery_setup.worker.wait_for_log(expected_prefetch_restore_message) + celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index 13901729240..14afe4435af 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -19,7 +19,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "INFO" + return "DEBUG" @classmethod def worker_name(cls) -> str: diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py index da18ceb602e..46ced3f34cd 100644 --- a/t/smoke/workers/latest.py +++ b/t/smoke/workers/latest.py @@ -14,7 +14,7 @@ def client(self) -> Any: @classmethod def log_level(cls) -> str: - 
return "INFO" + return "DEBUG" @classmethod def worker_name(cls) -> str: diff --git a/t/smoke/workers/legacy.py b/t/smoke/workers/legacy.py index 0fb1f419bb6..9aefc89bcd2 100644 --- a/t/smoke/workers/legacy.py +++ b/t/smoke/workers/legacy.py @@ -18,7 +18,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "INFO" + return "DEBUG" @classmethod def worker_name(cls) -> str: From dd9699556aee4ecbb8e6659d9e28a0741ab9433f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 1 Nov 2023 01:10:47 +0200 Subject: [PATCH 30/70] * Added t/smoke/test_tasks.py (#8599) * Added auto-session redis:latest container to smoke tests --- t/integration/tasks.py | 13 ++++++++----- t/smoke/conftest.py | 22 ++++++++++++++++++++++ t/smoke/tasks.py | 14 ++++++++++++-- t/smoke/test_tasks.py | 29 +++++++++++++++++++++++++++++ 4 files changed, 71 insertions(+), 7 deletions(-) create mode 100644 t/smoke/test_tasks.py diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 038b137f823..b863c0739c7 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -11,7 +11,9 @@ def get_redis_connection(): from redis import StrictRedis - return StrictRedis(host=os.environ.get("REDIS_HOST")) + host = os.environ.get("REDIS_HOST", "localhost") + port = os.environ.get("REDIS_PORT", 6379) + return StrictRedis(host=host, port=port) logger = get_task_logger(__name__) @@ -461,6 +463,11 @@ def errback_new_style(request, exc, tb): return request.id +@shared_task +def replaced_with_me(): + return True + + try: from celery.canvas import StampingVisitor @@ -477,10 +484,6 @@ def on_replace(self, sig): sig.stamp(StampOnReplace()) return super().on_replace(sig) - @shared_task - def replaced_with_me(): - return True - @shared_task(bind=True, base=StampedTaskOnReplace) def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): if replace_with is None: diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 3b9b8e3c7ca..14954053654 100644 --- 
a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -1,4 +1,8 @@ +import os + import pytest +from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer +from pytest_docker_tools import container, fetch, network from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa @@ -14,3 +18,21 @@ def default_worker_tasks() -> set: integration_tests_tasks, smoke_tests_tasks, } + + +redis_image = fetch(repository=REDIS_IMAGE) +redis_test_container_network = network(scope="session") +redis_test_container: RedisContainer = container( + image="{redis_image.id}", + scope="session", + ports=REDIS_PORTS, + environment=REDIS_ENV, + network="{redis_test_container_network.name}", + wrapper_class=RedisContainer, + timeout=REDIS_CONTAINER_TIMEOUT, +) + + +@pytest.fixture(scope="session", autouse=True) +def set_redis_test_container(redis_test_container: RedisContainer): + os.environ["REDIS_PORT"] = str(redis_test_container.port) diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index ad316d7347f..edeb9a33b70 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,8 +1,10 @@ from time import sleep import celery.utils -from celery import shared_task +from celery import Task, shared_task, signature +from celery.canvas import Signature from t.integration.tasks import * # noqa +from t.integration.tasks import replaced_with_me @shared_task @@ -11,5 +13,13 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task(seconds: float = 1) -> None: +def long_running_task(seconds: float = 1) -> bool: sleep(seconds) + return True + + +@shared_task(bind=True) +def replace_with_task(self: Task, replace_with: Signature = None): + if replace_with is None: + replace_with = replaced_with_me.s() + self.replace(signature(replace_with)) diff --git a/t/smoke/test_tasks.py b/t/smoke/test_tasks.py new file mode 100644 index 00000000000..289a537da9b --- /dev/null +++ b/t/smoke/test_tasks.py @@ -0,0 +1,29 @@ +import pytest +from 
pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster + +from celery import signature +from t.integration.tasks import add, identity +from t.smoke.tasks import replace_with_task + + +class test_replace: + @pytest.fixture + def celery_worker_cluster( + self, + celery_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + ) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + yield cluster + cluster.teardown() + + def test_sanity(self, celery_setup: CeleryTestSetup): + queues = [w.worker_queue for w in celery_setup.worker_cluster] + assert len(queues) == 2 + assert queues[0] != queues[1] + replace_with = signature(identity, args=(40,), queue=queues[1]) + sig1 = replace_with_task.s(replace_with) + sig2 = add.s(2).set(queue=queues[1]) + c = sig1 | sig2 + r = c.apply_async(queue=queues[0]) + assert r.get(timeout=RESULT_TIMEOUT) == 42 From 99690613c4c1744890b34611ea5052c896412799 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 23 Nov 2023 12:58:33 +0200 Subject: [PATCH 31/70] Hotfix + New smoke tests (#8664) * Changed smoke tests workers log level to INFO * Hotfix in t/smoke/tasks.py * Fixed missing teardown() call in latest & legacy workers in the smoke tests * Prefetch count smoke tests * Added t/smoke/test_control.py::test_shutdown_exit_with_zero() * Trigger CI tests on PR to smoke_tests branch. To be removed before merge to main! 
--- .github/workflows/python-package.yml | 2 +- t/smoke/tasks.py | 2 +- t/smoke/test_consumer.py | 54 ++++++++++++++++++++++++++-- t/smoke/test_control.py | 6 ++++ t/smoke/workers/dev.py | 2 +- t/smoke/workers/latest.py | 6 ++-- t/smoke/workers/legacy.py | 6 ++-- 7 files changed, 68 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 04c363a818c..88945263ab0 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -12,7 +12,7 @@ on: - '.github/workflows/python-package.yml' - '**.toml' pull_request: - branches: [ 'main'] + branches: [ 'main', 'smoke_tests' ] paths: - '**.py' - '**.txt' diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index edeb9a33b70..99ef9eb4751 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -22,4 +22,4 @@ def long_running_task(seconds: float = 1) -> bool: def replace_with_task(self: Task, replace_with: Signature = None): if replace_with is None: replace_with = replaced_with_me.s() - self.replace(signature(replace_with)) + return self.replace(signature(replace_with)) diff --git a/t/smoke/test_consumer.py b/t/smoke/test_consumer.py index 168711bc101..04da3a1cdc7 100644 --- a/t/smoke/test_consumer.py +++ b/t/smoke/test_consumer.py @@ -1,9 +1,9 @@ import pytest -from pytest_celery import CeleryTestSetup +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, RedisTestBroker from celery import Celery from celery.canvas import group -from t.smoke.tasks import long_running_task +from t.smoke.tasks import long_running_task, noop WORKER_PREFETCH_MULTIPLIER = 2 WORKER_CONCURRENCY = 5 @@ -18,7 +18,13 @@ def default_worker_app(default_worker_app: Celery) -> Celery: yield app -class test_consumer: +class test_worker_enable_prefetch_count_reduction_true: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_enable_prefetch_count_reduction = True + yield app + 
@pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) @@ -50,3 +56,45 @@ def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): f"Prefetch count has been restored to the maximum of {MAX_PREFETCH}" ) celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) + + class test_cancel_tasks_on_connection_loss: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = 2 + app.conf.worker_cancel_long_running_tasks_on_connection_loss = True + app.conf.task_acks_late = True + yield app + + def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real Bug: Broker does not fetch messages after restart") + + sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) + sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.worker.assert_log_exists("Task t.smoke.tasks.noop") + + +class test_worker_enable_prefetch_count_reduction_false: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = 1 + app.conf.worker_enable_prefetch_count_reduction = False + app.conf.worker_cancel_long_running_tasks_on_connection_loss = True + app.conf.task_acks_late = True + yield app + + def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real Bug: Broker does not fetch messages after restart") + + sig = 
group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) + r = sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + assert "Task t.smoke.tasks.noop" not in celery_setup.worker.logs() + r.get(timeout=RESULT_TIMEOUT) + assert "Task t.smoke.tasks.noop" in celery_setup.worker.logs() diff --git a/t/smoke/test_control.py b/t/smoke/test_control.py index 97ed8b9fe69..edd108b36e7 100644 --- a/t/smoke/test_control.py +++ b/t/smoke/test_control.py @@ -5,3 +5,9 @@ class test_control: def test_sanity(self, celery_setup: CeleryTestSetup): r = celery_setup.app.control.ping() assert all([all([res["ok"] == "pong" for _, res in response.items()]) for response in r]) + + def test_shutdown_exit_with_zero(self, celery_setup: CeleryTestSetup): + celery_setup.app.control.shutdown() + while celery_setup.worker.container.status != "exited": + celery_setup.worker.container.reload() + assert celery_setup.worker.container.attrs['State']['ExitCode'] == 0 diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index 14afe4435af..13901729240 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -19,7 +19,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "DEBUG" + return "INFO" @classmethod def worker_name(cls) -> str: diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py index 46ced3f34cd..c922e98e6ef 100644 --- a/t/smoke/workers/latest.py +++ b/t/smoke/workers/latest.py @@ -14,7 +14,7 @@ def client(self) -> Any: @classmethod def log_level(cls) -> str: - return "DEBUG" + return "INFO" @classmethod def worker_name(cls) -> str: @@ -48,4 +48,6 @@ def celery_latest_worker( celery_latest_worker_container: CeleryLatestWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: - yield CeleryTestWorker(celery_latest_worker_container, app=celery_setup_app) + worker = CeleryTestWorker(celery_latest_worker_container, 
app=celery_setup_app) + yield worker + worker.teardown() diff --git a/t/smoke/workers/legacy.py b/t/smoke/workers/legacy.py index 9aefc89bcd2..42a3952d575 100644 --- a/t/smoke/workers/legacy.py +++ b/t/smoke/workers/legacy.py @@ -18,7 +18,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "DEBUG" + return "INFO" @classmethod def worker_name(cls) -> str: @@ -52,4 +52,6 @@ def celery_legacy_worker( celery_legacy_worker_container: CeleryLegacyWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: - yield CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) + worker = CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) + yield worker + worker.teardown() From 200520c6e9304764c325a7ae8b6099af0d17084f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 29 Nov 2023 02:16:13 +0200 Subject: [PATCH 32/70] Canvas Stamping smoke tests (#8683) * Added t/smoke/stamping/ * Refactored tests folder structure * Added t/smoke/tests/stamping/test_stamping.py * Added test_stamping::test_sanity() * Added test_stamping::test_sanity_worker_hop() * Implemented stamping/signals.py::task_received_handler() * Added test_stamping.py::test_multiple_stamps_multiple_workers() * Added LEGACY_TASKS_DISABLED to t/integration/tasks.py * Removed celery_latest_worker from stamping smoke tests worker cluster * Added test_stamping.py::test_stamping_on_replace_with_legacy_worker_in_cluster() * Added test_stamping.py::class test_revoke_by_stamped_headers * Added Python 3.12 in smoke tests CI * --reruns-delay 60 -> 10 for smoke tests CI * Fixed incorrect assertion in test_revoke_by_stamped_headers::test_revoke_by_stamped_headers_after_publish() * Refactored test_stamping::test_sanity() * Added test_stamping::test_callback() * Refactored stamping tests worker clusters (better readability) * Disabled unstable test configuration in t/smoke/tests/test_consumer.py --- .github/workflows/python-package.yml | 4 +- t/integration/tasks.py 
| 15 +- t/smoke/tests/stamping/__init__.py | 0 t/smoke/tests/stamping/conftest.py | 17 ++ t/smoke/tests/stamping/signals.py | 12 ++ t/smoke/tests/stamping/tasks.py | 22 ++ t/smoke/tests/stamping/test_stamping.py | 261 ++++++++++++++++++++++++ t/smoke/{ => tests}/test_canvas.py | 0 t/smoke/{ => tests}/test_consumer.py | 6 + t/smoke/{ => tests}/test_control.py | 0 t/smoke/{ => tests}/test_failover.py | 0 t/smoke/{ => tests}/test_signals.py | 0 t/smoke/{ => tests}/test_tasks.py | 0 13 files changed, 329 insertions(+), 8 deletions(-) create mode 100644 t/smoke/tests/stamping/__init__.py create mode 100644 t/smoke/tests/stamping/conftest.py create mode 100644 t/smoke/tests/stamping/signals.py create mode 100644 t/smoke/tests/stamping/tasks.py create mode 100644 t/smoke/tests/stamping/test_stamping.py rename t/smoke/{ => tests}/test_canvas.py (100%) rename t/smoke/{ => tests}/test_consumer.py (94%) rename t/smoke/{ => tests}/test_control.py (100%) rename t/smoke/{ => tests}/test_failover.py (100%) rename t/smoke/{ => tests}/test_signals.py (100%) rename t/smoke/{ => tests}/test_tasks.py (100%) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 88945263ab0..7e555144da6 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -130,7 +130,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Fetch Docker Images @@ -157,4 +157,4 @@ jobs: timeout-minutes: 30 run: > tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 60 --rerun-except AssertionError -n auto + "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 10 --rerun-except AssertionError -n auto diff --git a/t/integration/tasks.py b/t/integration/tasks.py index b863c0739c7..f09492f3fd5 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -7,6 +7,13 @@ from 
celery.exceptions import SoftTimeLimitExceeded from celery.utils.log import get_task_logger +LEGACY_TASKS_DISABLED = True +try: + # Imports that are not available in Celery 4 + from celery.canvas import StampingVisitor +except ImportError: + LEGACY_TASKS_DISABLED = False + def get_redis_connection(): from redis import StrictRedis @@ -468,11 +475,9 @@ def replaced_with_me(): return True -try: - from celery.canvas import StampingVisitor - +if LEGACY_TASKS_DISABLED: class StampOnReplace(StampingVisitor): - stamp = {'StampOnReplace': 'This is the replaced task'} + stamp = {"StampOnReplace": "This is the replaced task"} def on_signature(self, sig, **headers) -> dict: return self.stamp @@ -489,5 +494,3 @@ def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): if replace_with is None: replace_with = replaced_with_me.s() self.replace(signature(replace_with)) -except ImportError: - pass diff --git a/t/smoke/tests/stamping/__init__.py b/t/smoke/tests/stamping/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py new file mode 100644 index 00000000000..0838a7a6ca0 --- /dev/null +++ b/t/smoke/tests/stamping/conftest.py @@ -0,0 +1,17 @@ +import pytest + + +@pytest.fixture +def default_worker_tasks(default_worker_tasks: set) -> set: + from t.smoke.tests.stamping import tasks as stamping_tasks + + default_worker_tasks.add(stamping_tasks) + yield default_worker_tasks + + +@pytest.fixture +def default_worker_signals(default_worker_signals: set) -> set: + from t.smoke.tests.stamping import signals + + default_worker_signals.add(signals) + yield default_worker_signals diff --git a/t/smoke/tests/stamping/signals.py b/t/smoke/tests/stamping/signals.py new file mode 100644 index 00000000000..86b27d7bb91 --- /dev/null +++ b/t/smoke/tests/stamping/signals.py @@ -0,0 +1,12 @@ +import json + +from celery.signals import task_received + + +@task_received.connect +def 
task_received_handler(request, **kwargs): + stamps = request.request_dict.get("stamps") + stamped_headers = request.request_dict.get("stamped_headers") + stamps_dump = json.dumps(stamps, indent=4, sort_keys=True) if stamps else stamps + print(f"stamped_headers = {stamped_headers}") + print(f"stamps = {stamps_dump}") diff --git a/t/smoke/tests/stamping/tasks.py b/t/smoke/tests/stamping/tasks.py new file mode 100644 index 00000000000..1068439358c --- /dev/null +++ b/t/smoke/tests/stamping/tasks.py @@ -0,0 +1,22 @@ +from time import sleep + +from celery import shared_task +from t.integration.tasks import LEGACY_TASKS_DISABLED + + +@shared_task +def waitfor(seconds: int) -> None: + print(f"Waiting for {seconds} seconds...") + for i in range(seconds): + sleep(1) + print(f"{i+1} seconds passed") + print("Done waiting") + + +if LEGACY_TASKS_DISABLED: + from t.integration.tasks import StampedTaskOnReplace, StampOnReplace + + @shared_task(bind=True, base=StampedTaskOnReplace) + def wait_for_revoke(self: StampOnReplace, seconds: int, waitfor_worker_queue) -> None: + print(f"Replacing {self.request.id} with waitfor({seconds})") + self.replace(waitfor.s(seconds).set(queue=waitfor_worker_queue)) diff --git a/t/smoke/tests/stamping/test_stamping.py b/t/smoke/tests/stamping/test_stamping.py new file mode 100644 index 00000000000..8507f371955 --- /dev/null +++ b/t/smoke/tests/stamping/test_stamping.py @@ -0,0 +1,261 @@ +from __future__ import annotations + +import json + +import pytest +from pytest_celery import (RESULT_TIMEOUT, CeleryBackendCluster, CeleryTestSetup, CeleryTestWorker, + CeleryWorkerCluster) + +from celery.canvas import Signature, StampingVisitor, chain +from celery.result import AsyncResult +from t.integration.tasks import StampOnReplace, add, identity, replace_with_stamped_task +from t.smoke.tests.stamping.tasks import wait_for_revoke +from t.smoke.workers.dev import SmokeWorkerContainer +from t.smoke.workers.legacy import CeleryLegacyWorkerContainer + + 
+@pytest.fixture +def dev_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == SmokeWorkerContainer.version(): + return worker + return None + + +@pytest.fixture +def legacy_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == CeleryLegacyWorkerContainer.version(): + return worker + return None + + +class test_stamping: + def test_callback(self, dev_worker: CeleryTestWorker): + on_signature_stamp = {"on_signature_stamp": 4} + no_visitor_stamp = {"no_visitor_stamp": "Stamp without visitor"} + on_callback_stamp = {"on_callback_stamp": 2} + link_stamp = { + **on_signature_stamp, + **no_visitor_stamp, + **on_callback_stamp, + } + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return on_signature_stamp.copy() + + def on_callback(self, callback, **header) -> dict: + return on_callback_stamp.copy() + + stamped_task = identity.si(123).set(queue=dev_worker.worker_queue) + stamped_task.link( + add.s(0) + .stamp(no_visitor_stamp=no_visitor_stamp["no_visitor_stamp"]) + .set(queue=dev_worker.worker_queue) + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.delay().get(timeout=RESULT_TIMEOUT) + assert dev_worker.logs().count( + json.dumps(on_signature_stamp, indent=4, sort_keys=True) + ) + assert dev_worker.logs().count(json.dumps(link_stamp, indent=4, sort_keys=True)) + + +class test_stamping_hybrid_worker_cluster: + @pytest.fixture( + # Each param item is a list of workers to be used in the cluster + # and each cluster will be tested separately (with parallel support) + params=[ + ["celery_setup_worker"], + ["celery_setup_worker", "celery_legacy_worker"], + ] + ) + def celery_worker_cluster( + self, + request: pytest.FixtureRequest, + ) -> CeleryWorkerCluster: + nodes: tuple[CeleryTestWorker] = [ + 
request.getfixturevalue(worker) for worker in request.param + ] + cluster = CeleryWorkerCluster(*nodes) + yield cluster + cluster.teardown() + + def test_sanity(self, celery_setup: CeleryTestSetup): + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + queue = worker.worker_queue + stamped_task = identity.si(123) + stamped_task.stamp(visitor=CustomStampingVisitor()) + assert stamped_task.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) + assert worker.logs().count(json.dumps(stamp, indent=4, sort_keys=True)) + + def test_sanity_worker_hop(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue), + identity.si(2).set(queue=w2.worker_queue), + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + assert worker.logs().count(stamp) + + def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 420} + stamp1 = {**stamp, "stamp1": 4} + stamp2 = {**stamp, "stamp2": 2} + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue).stamp(stamp1=stamp1["stamp1"]), + 
identity.si(2).set(queue=w2.worker_queue).stamp(stamp2=stamp2["stamp2"]), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert w1.logs().count(stamp1) + assert w1.logs().count(stamp2) == 0 + + assert w2.logs().count(stamp1) == 0 + assert w2.logs().count(stamp2) + + def test_stamping_on_replace_with_legacy_worker_in_cluster( + self, + celery_setup: CeleryTestSetup, + dev_worker: CeleryTestWorker, + legacy_worker: CeleryTestWorker, + ): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": "Only for dev worker tasks"} + stamp1 = {**StampOnReplace.stamp, "stamp1": "1) Only for legacy worker tasks"} + stamp2 = {**StampOnReplace.stamp, "stamp2": "2) Only for legacy worker tasks"} + + replaced_sig1 = ( + identity.si(4) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp1=stamp1["stamp1"]) + ) + replaced_sig2 = ( + identity.si(2) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp2=stamp2["stamp2"]) + ) + + stamped_task = chain( + replace_with_stamped_task.si(replace_with=replaced_sig1).set( + queue=dev_worker.worker_queue + ), + replace_with_stamped_task.si(replace_with=replaced_sig2).set( + queue=dev_worker.worker_queue + ), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert dev_worker.logs().count(stamp) + assert dev_worker.logs().count(stamp1) == 0 + assert dev_worker.logs().count(stamp2) == 0 + + assert legacy_worker.logs().count(stamp) == 0 + assert legacy_worker.logs().count(stamp1) + assert legacy_worker.logs().count(stamp2) + + +class test_revoke_by_stamped_headers: + @pytest.fixture + def celery_worker_cluster( + self, + celery_worker: CeleryTestWorker, + 
celery_latest_worker: CeleryTestWorker, + ) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + yield cluster + cluster.teardown() + + @pytest.fixture + def celery_backend_cluster(self) -> CeleryBackendCluster: + # Disable backend + return None + + @pytest.fixture + def wait_for_revoke_timeout(self) -> int: + return 4 + + @pytest.fixture + def canvas( + self, + dev_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, + ) -> Signature: + return chain( + identity.s(wait_for_revoke_timeout), + wait_for_revoke.s(waitfor_worker_queue=dev_worker.worker_queue).set( + queue=dev_worker.worker_queue + ), + ) + + def test_revoke_by_stamped_headers_after_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, + canvas: Signature, + ): + result: AsyncResult = canvas.apply_async( + queue=celery_latest_worker.worker_queue + ) + result.revoke_by_stamped_headers(StampOnReplace.stamp, terminate=True) + dev_worker.assert_log_does_not_exist( + "Done waiting", + timeout=wait_for_revoke_timeout, + ) + + def test_revoke_by_stamped_headers_before_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + canvas: Signature, + ): + result = canvas.freeze() + result.revoke_by_stamped_headers(StampOnReplace.stamp) + result: AsyncResult = canvas.apply_async( + queue=celery_latest_worker.worker_queue + ) + dev_worker.assert_log_exists("Discarding revoked task") + dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") diff --git a/t/smoke/test_canvas.py b/t/smoke/tests/test_canvas.py similarity index 100% rename from t/smoke/test_canvas.py rename to t/smoke/tests/test_canvas.py diff --git a/t/smoke/test_consumer.py b/t/smoke/tests/test_consumer.py similarity index 94% rename from t/smoke/test_consumer.py rename to t/smoke/tests/test_consumer.py index 04da3a1cdc7..5645f2689b8 100644 --- a/t/smoke/test_consumer.py +++ 
b/t/smoke/tests/test_consumer.py @@ -27,6 +27,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Potential Bug: Redis Broker Restart is unstable") + sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -47,6 +50,9 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Potential Bug: Redis Broker Restart is unstable") + expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) diff --git a/t/smoke/test_control.py b/t/smoke/tests/test_control.py similarity index 100% rename from t/smoke/test_control.py rename to t/smoke/tests/test_control.py diff --git a/t/smoke/test_failover.py b/t/smoke/tests/test_failover.py similarity index 100% rename from t/smoke/test_failover.py rename to t/smoke/tests/test_failover.py diff --git a/t/smoke/test_signals.py b/t/smoke/tests/test_signals.py similarity index 100% rename from t/smoke/test_signals.py rename to t/smoke/tests/test_signals.py diff --git a/t/smoke/test_tasks.py b/t/smoke/tests/test_tasks.py similarity index 100% rename from t/smoke/test_tasks.py rename to t/smoke/tests/test_tasks.py From 1794c6e115b80f29a09384826b7d618204480de2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Dec 2023 17:13:22 +0200 Subject: [PATCH 33/70] 
Increased stamping tests coverage + hotfixes (#8685) --- t/smoke/tests/stamping/test_stamping.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/t/smoke/tests/stamping/test_stamping.py b/t/smoke/tests/stamping/test_stamping.py index 8507f371955..fd10da44939 100644 --- a/t/smoke/tests/stamping/test_stamping.py +++ b/t/smoke/tests/stamping/test_stamping.py @@ -70,7 +70,9 @@ class test_stamping_hybrid_worker_cluster: # and each cluster will be tested separately (with parallel support) params=[ ["celery_setup_worker"], + ["celery_legacy_worker"], ["celery_setup_worker", "celery_legacy_worker"], + ["celery_setup_worker", "celery_latest_worker", "celery_legacy_worker"], ] ) def celery_worker_cluster( @@ -120,7 +122,7 @@ def on_signature(self, sig, **headers) -> dict: stamp = json.dumps(stamp, indent=4) worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: + for worker in (w1, w2): assert worker.logs().count(stamp) def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): @@ -252,10 +254,10 @@ def test_revoke_by_stamped_headers_before_publish( celery_latest_worker: CeleryTestWorker, canvas: Signature, ): - result = canvas.freeze() - result.revoke_by_stamped_headers(StampOnReplace.stamp) - result: AsyncResult = canvas.apply_async( - queue=celery_latest_worker.worker_queue + dev_worker.app.control.revoke_by_stamped_headers( + StampOnReplace.stamp, + terminate=True, ) + canvas.apply_async(queue=celery_latest_worker.worker_queue) dev_worker.assert_log_exists("Discarding revoked task") dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") From b88b3d7e86e4e918ac32fbfce7cbc68d29693032 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Dec 2023 18:17:43 +0200 Subject: [PATCH 34/70] Added test_broker_failover::test_reconnect_to_main() (#8686) --- t/smoke/tests/test_failover.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git 
a/t/smoke/tests/test_failover.py b/t/smoke/tests/test_failover.py index 65d24ba5f63..bfcaa86a688 100644 --- a/t/smoke/tests/test_failover.py +++ b/t/smoke/tests/test_failover.py @@ -32,10 +32,21 @@ def celery_broker_cluster( cluster.teardown() -class test_failover: - def test_sanity(self, celery_setup: CeleryTestSetup): +class test_broker_failover: + def test_killing_first_broker(self, celery_setup: CeleryTestSetup): assert len(celery_setup.broker_cluster) > 1 celery_setup.broker.kill() expected = "test_broker_failover" res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) assert res.get(timeout=RESULT_TIMEOUT) == expected + + def test_reconnect_to_main(self, celery_setup: CeleryTestSetup): + assert len(celery_setup.broker_cluster) > 1 + celery_setup.broker_cluster[0].kill() + expected = "test_broker_failover" + res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == expected + celery_setup.broker_cluster[1].kill() + celery_setup.broker_cluster[0].restart() + res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == expected From 94aaade1f8aeab302522d7ad7f33cec1664955f6 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 4 Dec 2023 01:11:03 +0200 Subject: [PATCH 35/70] Initial worker restart smoke tests (#8693) * Added t/smoke/tests/test_worker.py * Added another worker restart method: docker_restart_force --- .github/workflows/python-package.yml | 2 +- t/smoke/tasks.py | 17 +++++++- t/smoke/tests/test_worker.py | 60 ++++++++++++++++++++++++++++ tox.ini | 2 +- 4 files changed, 77 insertions(+), 4 deletions(-) create mode 100644 t/smoke/tests/test_worker.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 7e555144da6..c6d01374b38 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -157,4 +157,4 @@ jobs: timeout-minutes: 
30 run: > tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 10 --rerun-except AssertionError -n auto + "${{ matrix.python-version }}-smoke" -- -n auto diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 99ef9eb4751..e5e8fac92d5 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -13,8 +13,21 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task(seconds: float = 1) -> bool: - sleep(seconds) +def long_running_task(seconds: float = 1, verbose: bool = False) -> bool: + from celery import current_task + from celery.utils.log import get_task_logger + + logger = get_task_logger(current_task.name) + + logger.info('Starting long running task') + + for i in range(0, int(seconds)): + sleep(1) + if verbose: + logger.info(f'Sleeping: {i}') + + logger.info('Finished long running task') + return True diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py new file mode 100644 index 00000000000..f88c6c4119c --- /dev/null +++ b/t/smoke/tests/test_worker.py @@ -0,0 +1,60 @@ +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup + +from celery import Celery +from celery.canvas import chain +from t.smoke.tasks import long_running_task + + +@pytest.mark.parametrize( + "restart_method", + [ + "pool_restart", + "docker_restart_gracefully", + "docker_restart_force", + ], +) +class test_worker_restart: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_pool_restarts = True + app.conf.task_acks_late = True + yield app + + def test_restart_during_task_execution( + self, + celery_setup: CeleryTestSetup, + restart_method: str, + ): + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + if restart_method == "pool_restart": + celery_setup.app.control.pool_restart() + elif restart_method == "docker_restart_gracefully": + 
celery_setup.worker.restart() + elif restart_method == "docker_restart_force": + celery_setup.worker.restart(force=True) + assert res.get(RESULT_TIMEOUT) is True + + def test_restart_between_task_execution( + self, + celery_setup: CeleryTestSetup, + restart_method: str, + ): + queue = celery_setup.worker.worker_queue + first = long_running_task.si(5, verbose=True).set(queue=queue) + first_res = first.freeze() + second = long_running_task.si(5, verbose=True).set(queue=queue) + second_res = second.freeze() + sig = chain(first, second) + sig.delay() + assert first_res.get(RESULT_TIMEOUT) is True + if restart_method == "pool_restart": + celery_setup.app.control.pool_restart() + elif restart_method == "docker_restart_gracefully": + celery_setup.worker.restart() + elif restart_method == "docker_restart_force": + celery_setup.worker.restart(force=True) + assert second_res.get(RESULT_TIMEOUT) is True diff --git a/tox.ini b/tox.ini index cc5087b3e03..e4b27ef70c7 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke {posargs} + smoke: pytest -xsv t/smoke --reruns 5 --reruns-delay 10 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 570beabd7b1506db5d0a2ac236849c7c4d17915e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 4 Dec 2023 01:40:01 +0200 Subject: [PATCH 36/70] Removed backend from setup in t/smoke/tests/test_signals.py (Optimization) (#8694) --- t/smoke/tests/test_signals.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/t/smoke/tests/test_signals.py b/t/smoke/tests/test_signals.py index c3b6210eb2b..17e9eae9406 100644 --- a/t/smoke/tests/test_signals.py +++ b/t/smoke/tests/test_signals.py @@ -1,5 +1,5 @@ import pytest -from pytest_celery 
import CeleryTestSetup +from pytest_celery import CeleryBackendCluster, CeleryTestSetup from celery.signals import after_task_publish, before_task_publish from t.smoke.tasks import noop @@ -13,6 +13,12 @@ def default_worker_signals(default_worker_signals: set) -> set: yield default_worker_signals +@pytest.fixture +def celery_backend_cluster() -> CeleryBackendCluster: + # Disable backend + return None + + class test_signals: @pytest.mark.parametrize( "log, control", From 31c23c53ba1b94dc207bbfeade7279bede3c4e86 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 5 Dec 2023 22:00:34 +0200 Subject: [PATCH 37/70] Added initial worker failover smoke tests (#8695) * Added alternative dev container with shared queue with the smoke tests worker (the default dev worker) * Added t/smoke/tests/failover/test_worker_failover.py * Added test_worker_failover::test_task_retry_on_worker_crash() * Added "memory_limit" termination method to class test_worker_failover * Cleanup * Added comments --- t/smoke/conftest.py | 1 + t/smoke/tasks.py | 19 +++- .../test_broker_failover.py} | 0 .../tests/failover/test_worker_failover.py | 95 +++++++++++++++++++ t/smoke/workers/alt.py | 37 ++++++++ 5 files changed, 148 insertions(+), 4 deletions(-) rename t/smoke/tests/{test_failover.py => failover/test_broker_failover.py} (100%) create mode 100644 t/smoke/tests/failover/test_worker_failover.py create mode 100644 t/smoke/workers/alt.py diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 14954053654..fc461d8c361 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -4,6 +4,7 @@ from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer from pytest_docker_tools import container, fetch, network +from t.smoke.workers.alt import * # noqa from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa from t.smoke.workers.legacy import * # noqa diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 
e5e8fac92d5..301d36652ee 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,3 +1,6 @@ +from __future__ import annotations + +from sys import getsizeof from time import sleep import celery.utils @@ -13,20 +16,28 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task(seconds: float = 1, verbose: bool = False) -> bool: +def long_running_task( + seconds: float = 1, + verbose: bool = False, + allocate: int | None = None, +) -> bool: from celery import current_task from celery.utils.log import get_task_logger logger = get_task_logger(current_task.name) - logger.info('Starting long running task') + logger.info("Starting long running task") + + if allocate: + # Attempt to allocate megabytes in memory + _ = [0] * (allocate * 1024 * 1024 // getsizeof(int())) for i in range(0, int(seconds)): sleep(1) if verbose: - logger.info(f'Sleeping: {i}') + logger.info(f"Sleeping: {i}") - logger.info('Finished long running task') + logger.info("Finished long running task") return True diff --git a/t/smoke/tests/test_failover.py b/t/smoke/tests/failover/test_broker_failover.py similarity index 100% rename from t/smoke/tests/test_failover.py rename to t/smoke/tests/failover/test_broker_failover.py diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py new file mode 100644 index 00000000000..625a1255268 --- /dev/null +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import pytest +from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker + +from celery import Celery +from t.smoke.tasks import long_running_task + + +@pytest.fixture +def celery_worker_cluster( + celery_worker: CeleryTestWorker, + celery_alt_dev_worker: CeleryTestWorker, +) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_alt_dev_worker) + yield cluster + cluster.teardown() + + +@pytest.mark.parametrize( + 
"termination_method", + [ + "SIGKILL", + "control.shutdown", + "memory_limit", + ], +) +class test_worker_failover: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.task_acks_late = True + app.conf.worker_max_memory_per_child = 10 * 1024 # Limit to 10MB + if app.conf.broker_url.startswith("redis"): + app.conf.broker_transport_options = {"visibility_timeout": 1} + yield app + + def terminate(self, worker: CeleryTestWorker, method: str): + if method == "SIGKILL": + # Reduces actual workers count by 1 + worker.kill() + elif method == "control.shutdown": + # Completes the task and then shuts down the worker + worker.app.control.broadcast("shutdown", destination=[worker.hostname()]) + elif method == "memory_limit": + # Child process is killed and a new one is spawned, but the worker is not terminated + allocate = worker.app.conf.worker_max_memory_per_child * 1_000_000_000 + sig = long_running_task.si(allocate=allocate).set(queue=worker.worker_queue) + sig.delay() + + def test_killing_first_worker( + self, + celery_setup: CeleryTestSetup, + termination_method: str, + ): + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=2) is True + self.terminate(celery_setup.worker, termination_method) + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=2) is True + + def test_reconnect_to_restarted_worker( + self, + celery_setup: CeleryTestSetup, + termination_method: str, + ): + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=10) is True + for worker in celery_setup.worker_cluster: + self.terminate(worker, termination_method) + celery_setup.worker.restart() + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=10) is True + + def 
test_task_retry_on_worker_crash( + self, + celery_setup: CeleryTestSetup, + termination_method: str, + ): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") + + sleep_time = 4 + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) + res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) + celery_setup.worker.wait_for_log("Sleeping: 2") # Wait for the task to run a bit + self.terminate(celery_setup.worker, termination_method) + assert res.get(timeout=10) is True diff --git a/t/smoke/workers/alt.py b/t/smoke/workers/alt.py new file mode 100644 index 00000000000..b333f2616e3 --- /dev/null +++ b/t/smoke/workers/alt.py @@ -0,0 +1,37 @@ +import os + +import pytest +from pytest_celery import CeleryTestWorker, defaults +from pytest_docker_tools import container, fxtr + +from celery import Celery +from t.smoke.workers.dev import SmokeWorkerContainer + +# Allows having two different workers with the same queue and settings +# that are based on the current codebase +alt_dev_worker_container = container( + image="{celery_dev_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=SmokeWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_alt_dev_worker( + alt_dev_worker_container: SmokeWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + worker = CeleryTestWorker(alt_dev_worker_container, app=celery_setup_app) + yield worker + worker.teardown() From b7433b8a076ccde903036456eab4a3068b4acdeb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 6 Dec 2023 14:13:10 +0200 Subject: [PATCH 38/70] 
Hotfix to test_worker_failover.terminate() (#8698) * Use type annotation for control command * control.broadcast() -> control.shutdown() --- t/smoke/tests/failover/test_worker_failover.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 625a1255268..1e4b535b63f 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -4,6 +4,7 @@ from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker from celery import Celery +from celery.app.control import Control from t.smoke.tasks import long_running_task @@ -41,7 +42,8 @@ def terminate(self, worker: CeleryTestWorker, method: str): worker.kill() elif method == "control.shutdown": # Completes the task and then shuts down the worker - worker.app.control.broadcast("shutdown", destination=[worker.hostname()]) + control: Control = worker.app.control + control.shutdown(destination=[worker.hostname()]) elif method == "memory_limit": # Child process is killed and a new one is spawned, but the worker is not terminated allocate = worker.app.conf.worker_max_memory_per_child * 1_000_000_000 From 76acdb326ac7250ee409f01fa0287efcd9827592 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 7 Dec 2023 19:13:30 +0200 Subject: [PATCH 39/70] Fixed default_worker_tasks() in t/smoke/conftest.py (#8704) --- t/smoke/conftest.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index fc461d8c361..68383dfd4d6 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -11,14 +11,13 @@ @pytest.fixture -def default_worker_tasks() -> set: +def default_worker_tasks(default_worker_tasks: set) -> set: from t.integration import tasks as integration_tests_tasks from t.smoke import tasks as smoke_tests_tasks - yield { - integration_tests_tasks, - smoke_tests_tasks, - } + 
default_worker_tasks.add(integration_tests_tasks) + default_worker_tasks.add(smoke_tests_tasks) + yield default_worker_tasks redis_image = fetch(repository=REDIS_IMAGE) From 97b7656348485f4e1f296419a8af562e736676bd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 10 Dec 2023 19:59:32 +0200 Subject: [PATCH 40/70] Refactored worker smoke tests (#8708) --- t/smoke/tasks.py | 8 ++- t/smoke/tests/conftest.py | 63 +++++++++++++++++++ .../tests/failover/test_worker_failover.py | 33 +++------- t/smoke/tests/test_worker.py | 27 +++----- 4 files changed, 89 insertions(+), 42 deletions(-) create mode 100644 t/smoke/tests/conftest.py diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 301d36652ee..d7b3f929461 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -20,6 +20,7 @@ def long_running_task( seconds: float = 1, verbose: bool = False, allocate: int | None = None, + exhaust_memory: bool = False, ) -> bool: from celery import current_task from celery.utils.log import get_task_logger @@ -30,7 +31,12 @@ def long_running_task( if allocate: # Attempt to allocate megabytes in memory - _ = [0] * (allocate * 1024 * 1024 // getsizeof(int())) + _ = [0] * (allocate * 10**6 // getsizeof(int())) + + if exhaust_memory: + mem = [] + while True: + mem.append(' ' * 10**6) # 1 MB of spaces for i in range(0, int(seconds)): sleep(1) diff --git a/t/smoke/tests/conftest.py b/t/smoke/tests/conftest.py new file mode 100644 index 00000000000..16f550c9167 --- /dev/null +++ b/t/smoke/tests/conftest.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from enum import Enum, auto + +from billiard.exceptions import WorkerLostError +from pytest_celery import CeleryTestSetup, CeleryTestWorker + +from celery.app.control import Control +from t.smoke.tasks import long_running_task + + +class WorkerOperations: + class TerminationMethod(Enum): + SIGKILL = auto() + CONTROL_SHUTDOWN = auto() + MAX_MEMORY_ALLOCATED = auto() + MEMORY_LIMIT_EXCEEDED = auto() + + class RestartMethod(Enum): + 
POOL_RESTART = auto() + DOCKER_RESTART_GRACEFULLY = auto() + DOCKER_RESTART_FORCE = auto() + + def terminate(self, worker: CeleryTestWorker, method: TerminationMethod): + if method == WorkerOperations.TerminationMethod.SIGKILL: + worker.kill() + return + + if method == WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN: + control: Control = worker.app.control + control.shutdown(destination=[worker.hostname()]) + return + + if method == WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED: + allocate = worker.app.conf.worker_max_memory_per_child * 10**6 + try: + ( + long_running_task.si(allocate=allocate) + .apply_async(queue=worker.worker_queue) + .get() + ) + except MemoryError: + return + + if method == WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED: + try: + ( + long_running_task.si(exhaust_memory=True) + .apply_async(queue=worker.worker_queue) + .get() + ) + except WorkerLostError: + return + + assert False + + def restart(self, celery_setup: CeleryTestSetup, method: RestartMethod): + if method == WorkerOperations.RestartMethod.POOL_RESTART: + celery_setup.app.control.pool_restart() + elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY: + celery_setup.worker.restart() + elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE: + celery_setup.worker.restart(force=True) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 1e4b535b63f..b555054e38f 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -4,8 +4,8 @@ from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker from celery import Celery -from celery.app.control import Control from t.smoke.tasks import long_running_task +from t.smoke.tests.conftest import WorkerOperations @pytest.fixture @@ -21,12 +21,13 @@ def celery_worker_cluster( @pytest.mark.parametrize( "termination_method", [ - "SIGKILL", - 
"control.shutdown", - "memory_limit", + WorkerOperations.TerminationMethod.SIGKILL, + WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN, + WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED, + WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED, ], ) -class test_worker_failover: +class test_worker_failover(WorkerOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app @@ -36,24 +37,10 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.broker_transport_options = {"visibility_timeout": 1} yield app - def terminate(self, worker: CeleryTestWorker, method: str): - if method == "SIGKILL": - # Reduces actual workers count by 1 - worker.kill() - elif method == "control.shutdown": - # Completes the task and then shuts down the worker - control: Control = worker.app.control - control.shutdown(destination=[worker.hostname()]) - elif method == "memory_limit": - # Child process is killed and a new one is spawned, but the worker is not terminated - allocate = worker.app.conf.worker_max_memory_per_child * 1_000_000_000 - sig = long_running_task.si(allocate=allocate).set(queue=worker.worker_queue) - sig.delay() - def test_killing_first_worker( self, celery_setup: CeleryTestSetup, - termination_method: str, + termination_method: WorkerOperations.TerminationMethod, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) @@ -67,7 +54,7 @@ def test_killing_first_worker( def test_reconnect_to_restarted_worker( self, celery_setup: CeleryTestSetup, - termination_method: str, + termination_method: WorkerOperations.TerminationMethod, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) @@ -83,7 +70,7 @@ def test_reconnect_to_restarted_worker( def test_task_retry_on_worker_crash( self, celery_setup: CeleryTestSetup, - termination_method: str, + termination_method: WorkerOperations.TerminationMethod, ): if 
isinstance(celery_setup.broker, RedisTestBroker): pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") @@ -92,6 +79,6 @@ def test_task_retry_on_worker_crash( queue = celery_setup.worker.worker_queue sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) - celery_setup.worker.wait_for_log("Sleeping: 2") # Wait for the task to run a bit + celery_setup.worker.wait_for_log("Sleeping: 2") # Let task run self.terminate(celery_setup.worker, termination_method) assert res.get(timeout=10) is True diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index f88c6c4119c..8a2713c9179 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -4,17 +4,18 @@ from celery import Celery from celery.canvas import chain from t.smoke.tasks import long_running_task +from t.smoke.tests.conftest import WorkerOperations @pytest.mark.parametrize( "restart_method", [ - "pool_restart", - "docker_restart_gracefully", - "docker_restart_force", + WorkerOperations.RestartMethod.POOL_RESTART, + WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY, + WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE, ], ) -class test_worker_restart: +class test_worker_restart(WorkerOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app @@ -25,23 +26,18 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_restart_during_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: str, + restart_method: WorkerOperations.RestartMethod, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - if restart_method == "pool_restart": - celery_setup.app.control.pool_restart() - elif restart_method == "docker_restart_gracefully": - celery_setup.worker.restart() - elif restart_method == "docker_restart_force": - 
celery_setup.worker.restart(force=True) + self.restart(celery_setup, restart_method) assert res.get(RESULT_TIMEOUT) is True def test_restart_between_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: str, + restart_method: WorkerOperations.RestartMethod, ): queue = celery_setup.worker.worker_queue first = long_running_task.si(5, verbose=True).set(queue=queue) @@ -51,10 +47,5 @@ def test_restart_between_task_execution( sig = chain(first, second) sig.delay() assert first_res.get(RESULT_TIMEOUT) is True - if restart_method == "pool_restart": - celery_setup.app.control.pool_restart() - elif restart_method == "docker_restart_gracefully": - celery_setup.worker.restart() - elif restart_method == "docker_restart_force": - celery_setup.worker.restart(force=True) + self.restart(celery_setup, restart_method) assert second_res.get(RESULT_TIMEOUT) is True From 11732bd06e2332df395aeb79f8d764d59ef37a50 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 10 Dec 2023 23:41:09 +0200 Subject: [PATCH 41/70] Hotfix (#8710) * Run all tests in CI together (to be reverted) * Changed celery_alt_dev_worker name from smoke_tests_worker -> alt_smoke_tests_worker * Refactored stamping smoke tests --- .github/workflows/python-package.yml | 8 +- t/smoke/conftest.py | 2 +- t/smoke/tests/stamping/conftest.py | 23 ++ t/smoke/tests/stamping/test_hybrid_cluster.py | 160 +++++++++++ t/smoke/tests/stamping/test_revoke.py | 75 +++++ t/smoke/tests/stamping/test_stamping.py | 263 ------------------ t/smoke/tests/stamping/test_visitor.py | 40 +++ .../{ => tests/stamping}/workers/legacy.py | 12 +- t/smoke/tests/test_tasks.py | 4 +- t/smoke/workers/alt.py | 27 +- t/smoke/workers/other.py | 56 ++++ 11 files changed, 388 insertions(+), 282 deletions(-) create mode 100644 t/smoke/tests/stamping/test_hybrid_cluster.py create mode 100644 t/smoke/tests/stamping/test_revoke.py delete mode 100644 t/smoke/tests/stamping/test_stamping.py create mode 100644 t/smoke/tests/stamping/test_visitor.py 
rename t/smoke/{ => tests/stamping}/workers/legacy.py (80%) create mode 100644 t/smoke/workers/other.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c6d01374b38..d68297ea641 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -70,10 +70,10 @@ jobs: verbose: true # optional (default = false) Integration: - needs: - - Unit - if: needs.Unit.result == 'success' - timeout-minutes: 240 + # needs: + # - Unit + # if: needs.Unit.result == 'success' + # timeout-minutes: 240 runs-on: ubuntu-latest strategy: diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 68383dfd4d6..f7ed5436790 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -7,7 +7,7 @@ from t.smoke.workers.alt import * # noqa from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa -from t.smoke.workers.legacy import * # noqa +from t.smoke.workers.other import * # noqa @pytest.fixture diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py index 0838a7a6ca0..db7e86ae030 100644 --- a/t/smoke/tests/stamping/conftest.py +++ b/t/smoke/tests/stamping/conftest.py @@ -1,4 +1,9 @@ import pytest +from pytest_celery import CeleryTestSetup, CeleryTestWorker + +from t.smoke.tests.stamping.workers.legacy import * # noqa +from t.smoke.tests.stamping.workers.legacy import LegacyWorkerContainer +from t.smoke.workers.dev import SmokeWorkerContainer @pytest.fixture @@ -15,3 +20,21 @@ def default_worker_signals(default_worker_signals: set) -> set: default_worker_signals.add(signals) yield default_worker_signals + + +@pytest.fixture +def dev_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == SmokeWorkerContainer.version(): + return worker + return None + + +@pytest.fixture +def legacy_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: 
CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == LegacyWorkerContainer.version(): + return worker + return None diff --git a/t/smoke/tests/stamping/test_hybrid_cluster.py b/t/smoke/tests/stamping/test_hybrid_cluster.py new file mode 100644 index 00000000000..4e5af7a3e03 --- /dev/null +++ b/t/smoke/tests/stamping/test_hybrid_cluster.py @@ -0,0 +1,160 @@ +from __future__ import annotations + +import json + +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster + +from celery.canvas import StampingVisitor, chain +from t.integration.tasks import StampOnReplace, identity, replace_with_stamped_task + + +def get_hybrid_clusters_matrix() -> list[list[str]]: + """Returns a matrix of hybrid worker clusters + + Each item in the matrix is a list of workers to be used in the cluster + and each cluster will be tested separately (with parallel support) + """ + + return [ + # Dev worker only + ["celery_setup_worker"], + # Legacy (Celery 4) worker only + ["celery_legacy_worker"], + # Both dev and legacy workers + ["celery_setup_worker", "celery_legacy_worker"], + # Dev worker and last official Celery release worker + ["celery_setup_worker", "celery_latest_worker"], + # Dev worker and legacy worker and last official Celery release worker + ["celery_setup_worker", "celery_latest_worker", "celery_legacy_worker"], + ] + + +@pytest.fixture(params=get_hybrid_clusters_matrix()) +def celery_worker_cluster(request: pytest.FixtureRequest) -> CeleryWorkerCluster: + nodes: tuple[CeleryTestWorker] = [ + request.getfixturevalue(worker) for worker in request.param + ] + cluster = CeleryWorkerCluster(*nodes) + yield cluster + cluster.teardown() + + +class test_stamping_hybrid_worker_cluster: + def test_sanity(self, celery_setup: CeleryTestSetup): + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + worker: 
CeleryTestWorker + for worker in celery_setup.worker_cluster: + queue = worker.worker_queue + stamped_task = identity.si(123) + stamped_task.stamp(visitor=CustomStampingVisitor()) + assert stamped_task.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) + assert worker.logs().count(json.dumps(stamp, indent=4, sort_keys=True)) + + def test_sanity_worker_hop(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue), + identity.si(2).set(queue=w2.worker_queue), + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + worker: CeleryTestWorker + for worker in (w1, w2): + assert worker.logs().count(stamp) + + def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 420} + stamp1 = {**stamp, "stamp1": 4} + stamp2 = {**stamp, "stamp2": 2} + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue).stamp(stamp1=stamp1["stamp1"]), + identity.si(2).set(queue=w2.worker_queue).stamp(stamp2=stamp2["stamp2"]), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert w1.logs().count(stamp1) + assert w1.logs().count(stamp2) == 0 + + assert w2.logs().count(stamp1) == 0 + assert w2.logs().count(stamp2) 
+ + def test_stamping_on_replace_with_legacy_worker_in_cluster( + self, + celery_setup: CeleryTestSetup, + dev_worker: CeleryTestWorker, + legacy_worker: CeleryTestWorker, + ): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + if not dev_worker: + pytest.skip("Dev worker not in cluster") + + if not legacy_worker: + pytest.skip("Legacy worker not in cluster") + + stamp = {"stamp": "Only for dev worker tasks"} + stamp1 = {**StampOnReplace.stamp, "stamp1": "1) Only for legacy worker tasks"} + stamp2 = {**StampOnReplace.stamp, "stamp2": "2) Only for legacy worker tasks"} + + replaced_sig1 = ( + identity.si(4) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp1=stamp1["stamp1"]) + ) + replaced_sig2 = ( + identity.si(2) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp2=stamp2["stamp2"]) + ) + + stamped_task = chain( + replace_with_stamped_task.si(replace_with=replaced_sig1).set( + queue=dev_worker.worker_queue + ), + replace_with_stamped_task.si(replace_with=replaced_sig2).set( + queue=dev_worker.worker_queue + ), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert dev_worker.logs().count(stamp) + assert dev_worker.logs().count(stamp1) == 0 + assert dev_worker.logs().count(stamp2) == 0 + + assert legacy_worker.logs().count(stamp) == 0 + assert legacy_worker.logs().count(stamp1) + assert legacy_worker.logs().count(stamp2) diff --git a/t/smoke/tests/stamping/test_revoke.py b/t/smoke/tests/stamping/test_revoke.py new file mode 100644 index 00000000000..3ec1dcbadcd --- /dev/null +++ b/t/smoke/tests/stamping/test_revoke.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +import pytest +from pytest_celery import CeleryBackendCluster, CeleryTestWorker, CeleryWorkerCluster + +from celery.canvas import Signature, chain +from celery.result 
import AsyncResult +from t.integration.tasks import StampOnReplace, identity +from t.smoke.tests.stamping.tasks import wait_for_revoke + + +@pytest.fixture +def celery_worker_cluster( + celery_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, +) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + yield cluster + cluster.teardown() + + +@pytest.fixture +def celery_backend_cluster() -> CeleryBackendCluster: + # Disable backend + return None + + +@pytest.fixture +def wait_for_revoke_timeout() -> int: + return 4 + + +@pytest.fixture +def canvas( + dev_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, +) -> Signature: + return chain( + identity.s(wait_for_revoke_timeout), + wait_for_revoke.s(waitfor_worker_queue=dev_worker.worker_queue).set( + queue=dev_worker.worker_queue + ), + ) + + +class test_revoke_by_stamped_headers: + def test_revoke_by_stamped_headers_after_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, + canvas: Signature, + ): + result: AsyncResult = canvas.apply_async( + queue=celery_latest_worker.worker_queue + ) + result.revoke_by_stamped_headers(StampOnReplace.stamp, terminate=True) + dev_worker.assert_log_does_not_exist( + "Done waiting", + timeout=wait_for_revoke_timeout, + ) + + def test_revoke_by_stamped_headers_before_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + canvas: Signature, + ): + dev_worker.app.control.revoke_by_stamped_headers( + StampOnReplace.stamp, + terminate=True, + ) + canvas.apply_async(queue=celery_latest_worker.worker_queue) + dev_worker.assert_log_exists("Discarding revoked task") + dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") diff --git a/t/smoke/tests/stamping/test_stamping.py b/t/smoke/tests/stamping/test_stamping.py deleted file mode 100644 index fd10da44939..00000000000 --- 
a/t/smoke/tests/stamping/test_stamping.py +++ /dev/null @@ -1,263 +0,0 @@ -from __future__ import annotations - -import json - -import pytest -from pytest_celery import (RESULT_TIMEOUT, CeleryBackendCluster, CeleryTestSetup, CeleryTestWorker, - CeleryWorkerCluster) - -from celery.canvas import Signature, StampingVisitor, chain -from celery.result import AsyncResult -from t.integration.tasks import StampOnReplace, add, identity, replace_with_stamped_task -from t.smoke.tests.stamping.tasks import wait_for_revoke -from t.smoke.workers.dev import SmokeWorkerContainer -from t.smoke.workers.legacy import CeleryLegacyWorkerContainer - - -@pytest.fixture -def dev_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: - worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: - if worker.version == SmokeWorkerContainer.version(): - return worker - return None - - -@pytest.fixture -def legacy_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: - worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: - if worker.version == CeleryLegacyWorkerContainer.version(): - return worker - return None - - -class test_stamping: - def test_callback(self, dev_worker: CeleryTestWorker): - on_signature_stamp = {"on_signature_stamp": 4} - no_visitor_stamp = {"no_visitor_stamp": "Stamp without visitor"} - on_callback_stamp = {"on_callback_stamp": 2} - link_stamp = { - **on_signature_stamp, - **no_visitor_stamp, - **on_callback_stamp, - } - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return on_signature_stamp.copy() - - def on_callback(self, callback, **header) -> dict: - return on_callback_stamp.copy() - - stamped_task = identity.si(123).set(queue=dev_worker.worker_queue) - stamped_task.link( - add.s(0) - .stamp(no_visitor_stamp=no_visitor_stamp["no_visitor_stamp"]) - .set(queue=dev_worker.worker_queue) - ) - stamped_task.stamp(visitor=CustomStampingVisitor()) - 
stamped_task.delay().get(timeout=RESULT_TIMEOUT) - assert dev_worker.logs().count( - json.dumps(on_signature_stamp, indent=4, sort_keys=True) - ) - assert dev_worker.logs().count(json.dumps(link_stamp, indent=4, sort_keys=True)) - - -class test_stamping_hybrid_worker_cluster: - @pytest.fixture( - # Each param item is a list of workers to be used in the cluster - # and each cluster will be tested separately (with parallel support) - params=[ - ["celery_setup_worker"], - ["celery_legacy_worker"], - ["celery_setup_worker", "celery_legacy_worker"], - ["celery_setup_worker", "celery_latest_worker", "celery_legacy_worker"], - ] - ) - def celery_worker_cluster( - self, - request: pytest.FixtureRequest, - ) -> CeleryWorkerCluster: - nodes: tuple[CeleryTestWorker] = [ - request.getfixturevalue(worker) for worker in request.param - ] - cluster = CeleryWorkerCluster(*nodes) - yield cluster - cluster.teardown() - - def test_sanity(self, celery_setup: CeleryTestSetup): - stamp = {"stamp": 42} - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return stamp.copy() - - worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: - queue = worker.worker_queue - stamped_task = identity.si(123) - stamped_task.stamp(visitor=CustomStampingVisitor()) - assert stamped_task.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) - assert worker.logs().count(json.dumps(stamp, indent=4, sort_keys=True)) - - def test_sanity_worker_hop(self, celery_setup: CeleryTestSetup): - if len(celery_setup.worker_cluster) < 2: - pytest.skip("Not enough workers in cluster") - - stamp = {"stamp": 42} - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return stamp.copy() - - w1: CeleryTestWorker = celery_setup.worker_cluster[0] - w2: CeleryTestWorker = celery_setup.worker_cluster[1] - stamped_task = chain( - identity.si(4).set(queue=w1.worker_queue), - identity.si(2).set(queue=w2.worker_queue), - ) - 
stamped_task.stamp(visitor=CustomStampingVisitor()) - stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) - - stamp = json.dumps(stamp, indent=4) - worker: CeleryTestWorker - for worker in (w1, w2): - assert worker.logs().count(stamp) - - def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): - if len(celery_setup.worker_cluster) < 2: - pytest.skip("Not enough workers in cluster") - - stamp = {"stamp": 420} - stamp1 = {**stamp, "stamp1": 4} - stamp2 = {**stamp, "stamp2": 2} - - w1: CeleryTestWorker = celery_setup.worker_cluster[0] - w2: CeleryTestWorker = celery_setup.worker_cluster[1] - stamped_task = chain( - identity.si(4).set(queue=w1.worker_queue).stamp(stamp1=stamp1["stamp1"]), - identity.si(2).set(queue=w2.worker_queue).stamp(stamp2=stamp2["stamp2"]), - ) - stamped_task.stamp(stamp=stamp["stamp"]) - stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) - - stamp1 = json.dumps(stamp1, indent=4) - stamp2 = json.dumps(stamp2, indent=4) - - assert w1.logs().count(stamp1) - assert w1.logs().count(stamp2) == 0 - - assert w2.logs().count(stamp1) == 0 - assert w2.logs().count(stamp2) - - def test_stamping_on_replace_with_legacy_worker_in_cluster( - self, - celery_setup: CeleryTestSetup, - dev_worker: CeleryTestWorker, - legacy_worker: CeleryTestWorker, - ): - if len(celery_setup.worker_cluster) < 2: - pytest.skip("Not enough workers in cluster") - - stamp = {"stamp": "Only for dev worker tasks"} - stamp1 = {**StampOnReplace.stamp, "stamp1": "1) Only for legacy worker tasks"} - stamp2 = {**StampOnReplace.stamp, "stamp2": "2) Only for legacy worker tasks"} - - replaced_sig1 = ( - identity.si(4) - .set(queue=legacy_worker.worker_queue) - .stamp(stamp1=stamp1["stamp1"]) - ) - replaced_sig2 = ( - identity.si(2) - .set(queue=legacy_worker.worker_queue) - .stamp(stamp2=stamp2["stamp2"]) - ) - - stamped_task = chain( - replace_with_stamped_task.si(replace_with=replaced_sig1).set( - queue=dev_worker.worker_queue - ), - 
replace_with_stamped_task.si(replace_with=replaced_sig2).set( - queue=dev_worker.worker_queue - ), - ) - stamped_task.stamp(stamp=stamp["stamp"]) - stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) - - stamp = json.dumps(stamp, indent=4) - stamp1 = json.dumps(stamp1, indent=4) - stamp2 = json.dumps(stamp2, indent=4) - - assert dev_worker.logs().count(stamp) - assert dev_worker.logs().count(stamp1) == 0 - assert dev_worker.logs().count(stamp2) == 0 - - assert legacy_worker.logs().count(stamp) == 0 - assert legacy_worker.logs().count(stamp1) - assert legacy_worker.logs().count(stamp2) - - -class test_revoke_by_stamped_headers: - @pytest.fixture - def celery_worker_cluster( - self, - celery_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, - ) -> CeleryWorkerCluster: - cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) - yield cluster - cluster.teardown() - - @pytest.fixture - def celery_backend_cluster(self) -> CeleryBackendCluster: - # Disable backend - return None - - @pytest.fixture - def wait_for_revoke_timeout(self) -> int: - return 4 - - @pytest.fixture - def canvas( - self, - dev_worker: CeleryTestWorker, - wait_for_revoke_timeout: int, - ) -> Signature: - return chain( - identity.s(wait_for_revoke_timeout), - wait_for_revoke.s(waitfor_worker_queue=dev_worker.worker_queue).set( - queue=dev_worker.worker_queue - ), - ) - - def test_revoke_by_stamped_headers_after_publish( - self, - dev_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, - wait_for_revoke_timeout: int, - canvas: Signature, - ): - result: AsyncResult = canvas.apply_async( - queue=celery_latest_worker.worker_queue - ) - result.revoke_by_stamped_headers(StampOnReplace.stamp, terminate=True) - dev_worker.assert_log_does_not_exist( - "Done waiting", - timeout=wait_for_revoke_timeout, - ) - - def test_revoke_by_stamped_headers_before_publish( - self, - dev_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, - canvas: Signature, - 
): - dev_worker.app.control.revoke_by_stamped_headers( - StampOnReplace.stamp, - terminate=True, - ) - canvas.apply_async(queue=celery_latest_worker.worker_queue) - dev_worker.assert_log_exists("Discarding revoked task") - dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") diff --git a/t/smoke/tests/stamping/test_visitor.py b/t/smoke/tests/stamping/test_visitor.py new file mode 100644 index 00000000000..c64991f35d5 --- /dev/null +++ b/t/smoke/tests/stamping/test_visitor.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +import json + +from pytest_celery import RESULT_TIMEOUT, CeleryTestWorker + +from celery.canvas import StampingVisitor +from t.integration.tasks import add, identity + + +class test_stamping_visitor: + def test_callback(self, dev_worker: CeleryTestWorker): + on_signature_stamp = {"on_signature_stamp": 4} + no_visitor_stamp = {"no_visitor_stamp": "Stamp without visitor"} + on_callback_stamp = {"on_callback_stamp": 2} + link_stamp = { + **on_signature_stamp, + **no_visitor_stamp, + **on_callback_stamp, + } + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return on_signature_stamp.copy() + + def on_callback(self, callback, **header) -> dict: + return on_callback_stamp.copy() + + stamped_task = identity.si(123).set(queue=dev_worker.worker_queue) + stamped_task.link( + add.s(0) + .stamp(no_visitor_stamp=no_visitor_stamp["no_visitor_stamp"]) + .set(queue=dev_worker.worker_queue) + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.delay().get(timeout=RESULT_TIMEOUT) + assert dev_worker.logs().count( + json.dumps(on_signature_stamp, indent=4, sort_keys=True) + ) + assert dev_worker.logs().count(json.dumps(link_stamp, indent=4, sort_keys=True)) diff --git a/t/smoke/workers/legacy.py b/t/smoke/tests/stamping/workers/legacy.py similarity index 80% rename from t/smoke/workers/legacy.py rename to t/smoke/tests/stamping/workers/legacy.py index 
42a3952d575..385c7c5762b 100644 --- a/t/smoke/workers/legacy.py +++ b/t/smoke/tests/stamping/workers/legacy.py @@ -7,7 +7,7 @@ from celery import Celery -class CeleryLegacyWorkerContainer(CeleryWorkerContainer): +class LegacyWorkerContainer(CeleryWorkerContainer): @property def client(self) -> Any: return self @@ -22,18 +22,18 @@ def log_level(cls) -> str: @classmethod def worker_name(cls) -> str: - return "celery4_tests_worker" + return "celery_legacy_tests_worker" @classmethod def worker_queue(cls) -> str: - return "celery4_tests_queue" + return "celery_legacy_tests_queue" celery_legacy_worker_image = build( path=".", dockerfile="t/smoke/workers/docker/pypi", tag="t/smoke/worker:legacy", - buildargs=CeleryLegacyWorkerContainer.buildargs(), + buildargs=LegacyWorkerContainer.buildargs(), ) @@ -42,14 +42,14 @@ def worker_queue(cls) -> str: environment=fxtr("default_worker_env"), network="{default_pytest_celery_network.name}", volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, - wrapper_class=CeleryLegacyWorkerContainer, + wrapper_class=LegacyWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, ) @pytest.fixture def celery_legacy_worker( - celery_legacy_worker_container: CeleryLegacyWorkerContainer, + celery_legacy_worker_container: LegacyWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: worker = CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 289a537da9b..162db9bfc70 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -11,9 +11,9 @@ class test_replace: def celery_worker_cluster( self, celery_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, + celery_other_dev_worker: CeleryTestWorker, ) -> CeleryWorkerCluster: - cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + cluster = CeleryWorkerCluster(celery_worker, celery_other_dev_worker) yield cluster 
cluster.teardown() diff --git a/t/smoke/workers/alt.py b/t/smoke/workers/alt.py index b333f2616e3..63dbd673d67 100644 --- a/t/smoke/workers/alt.py +++ b/t/smoke/workers/alt.py @@ -1,16 +1,31 @@ +from __future__ import annotations + import os import pytest from pytest_celery import CeleryTestWorker, defaults -from pytest_docker_tools import container, fxtr +from pytest_docker_tools import build, container, fxtr from celery import Celery from t.smoke.workers.dev import SmokeWorkerContainer -# Allows having two different workers with the same queue and settings -# that are based on the current codebase + +class AltSmokeWorkerContainer(SmokeWorkerContainer): + @classmethod + def worker_name(cls) -> str: + return "alt_smoke_tests_worker" + + +celery_alt_dev_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/dev", + tag="t/smoke/worker:alt", + buildargs=AltSmokeWorkerContainer.buildargs(), +) + + alt_dev_worker_container = container( - image="{celery_dev_worker_image.id}", + image="{celery_alt_dev_worker_image.id}", environment=fxtr("default_worker_env"), network="{default_pytest_celery_network.name}", volumes={ @@ -22,14 +37,14 @@ "mode": "rw", }, }, - wrapper_class=SmokeWorkerContainer, + wrapper_class=AltSmokeWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, ) @pytest.fixture def celery_alt_dev_worker( - alt_dev_worker_container: SmokeWorkerContainer, + alt_dev_worker_container: AltSmokeWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: worker = CeleryTestWorker(alt_dev_worker_container, app=celery_setup_app) diff --git a/t/smoke/workers/other.py b/t/smoke/workers/other.py new file mode 100644 index 00000000000..28a24cb38c0 --- /dev/null +++ b/t/smoke/workers/other.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +import os + +import pytest +from pytest_celery import CeleryTestWorker, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery +from t.smoke.workers.dev 
import SmokeWorkerContainer + + +class OtherSmokeWorkerContainer(SmokeWorkerContainer): + @classmethod + def worker_name(cls) -> str: + return "other_smoke_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "other_smoke_tests_queue" + + +celery_other_dev_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/dev", + tag="t/smoke/worker:other", + buildargs=OtherSmokeWorkerContainer.buildargs(), +) + + +other_dev_worker_container = container( + image="{celery_other_dev_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=OtherSmokeWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_other_dev_worker( + other_dev_worker_container: OtherSmokeWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + worker = CeleryTestWorker(other_dev_worker_container, app=celery_setup_app) + yield worker + worker.teardown() From f1b367b83c594414d7883ca3255ad64debf302c3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 11 Dec 2023 22:15:38 +0200 Subject: [PATCH 42/70] Refactored worker smoke tests utilities (#8712) * Fixed imports in smoke tests * Refactored WorkerOperations in smoke tests * Use dataclass for worker termination operation options instead of plain dict * Using get(timeout=RESULT_TIMEOUT) * Reload worker container obj after termination/restart * Added cleanup to suicide_exhaust_hdd() * Reverted "Run all tests in CI together (to be reverted)" * Run smoke tests CI only after integration tests (finally) * --reruns-delay 10 -> 60 for smoke tests * BaseException -> Exception * Disabled Redis Broker in Smoke Tests - Redis Broker feature is too unstable * Improved stability of smoke tests * 
Configure back Redis Broker for smoke tests * Cleanup and renaming * Added TODO * t/smoke --reruns 10 --reruns-delay 60 --rerun-except AssertionError * Renamed WorkerOperations -> SuiteOperations * Refactored SuiteOperations code into separated modules --- .github/workflows/python-package.yml | 194 +++++++++--------- t/smoke/conftest.py | 11 + t/smoke/operations/__init__.py | 0 t/smoke/operations/task_termination.py | 78 +++++++ t/smoke/operations/worker_kill.py | 33 +++ t/smoke/operations/worker_restart.py | 34 +++ t/smoke/tasks.py | 136 ++++++++++-- t/smoke/tests/__init__.py | 0 t/smoke/tests/conftest.py | 63 ------ t/smoke/tests/failover/__init__.py | 0 .../tests/failover/test_broker_failover.py | 2 +- .../tests/failover/test_worker_failover.py | 43 ++-- t/smoke/tests/stamping/workers/__init__.py | 0 t/smoke/tests/test_canvas.py | 2 +- t/smoke/tests/test_control.py | 10 +- t/smoke/tests/test_worker.py | 21 +- tox.ini | 2 +- 17 files changed, 414 insertions(+), 215 deletions(-) create mode 100644 t/smoke/operations/__init__.py create mode 100644 t/smoke/operations/task_termination.py create mode 100644 t/smoke/operations/worker_kill.py create mode 100644 t/smoke/operations/worker_restart.py create mode 100644 t/smoke/tests/__init__.py delete mode 100644 t/smoke/tests/conftest.py create mode 100644 t/smoke/tests/failover/__init__.py create mode 100644 t/smoke/tests/stamping/workers/__init__.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index d68297ea641..1dd4d7a2b92 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -23,102 +23,102 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - Unit: - - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] - os: ["ubuntu-latest", "windows-latest"] - exclude: - - python-version: '3.9' - os: "windows-latest" - - python-version: 
'pypy-3.10' - os: "windows-latest" - - python-version: '3.10' - os: "windows-latest" - - python-version: '3.11' - os: "windows-latest" - - steps: - - name: Install apt packages - if: startsWith(matrix.os, 'ubuntu-') - run: | - sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - - name: Install tox - run: python -m pip install --upgrade pip 'tox' tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-unit" - timeout-minutes: 30 - run: | - tox --verbose --verbose - - - uses: codecov/codecov-action@v3 - with: - flags: unittests # optional - fail_ci_if_error: true # optional (default = false) - verbose: true # optional (default = false) - - Integration: - # needs: - # - Unit - # if: needs.Unit.result == 'success' - # timeout-minutes: 240 - - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] - - services: - redis: - image: redis - ports: - - 6379:6379 - env: - REDIS_HOST: localhost - REDIS_PORT: 6379 - rabbitmq: - image: rabbitmq - ports: - - 5672:5672 - env: - RABBITMQ_DEFAULT_USER: guest - RABBITMQ_DEFAULT_PASS: guest - - steps: - - name: Install apt packages - run: | - sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip 'tox' tox-gh-actions - - name: > - Run tox for - "${{ 
matrix.python-version }}-integration-${{ matrix.toxenv }}" - timeout-minutes: 60 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + # Unit: + + # runs-on: ${{ matrix.os }} + # strategy: + # fail-fast: false + # matrix: + # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] + # os: ["ubuntu-latest", "windows-latest"] + # exclude: + # - python-version: '3.9' + # os: "windows-latest" + # - python-version: 'pypy-3.10' + # os: "windows-latest" + # - python-version: '3.10' + # os: "windows-latest" + # - python-version: '3.11' + # os: "windows-latest" + + # steps: + # - name: Install apt packages + # if: startsWith(matrix.os, 'ubuntu-') + # run: | + # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + # - uses: actions/checkout@v4 + # - name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python-version }} + # cache: 'pip' + # cache-dependency-path: '**/setup.py' + + # - name: Install tox + # run: python -m pip install --upgrade pip 'tox' tox-gh-actions + # - name: > + # Run tox for + # "${{ matrix.python-version }}-unit" + # timeout-minutes: 30 + # run: | + # tox --verbose --verbose + + # - uses: codecov/codecov-action@v3 + # with: + # flags: unittests # optional + # fail_ci_if_error: true # optional (default = false) + # verbose: true # optional (default = false) + + # Integration: + # needs: + # - Unit + # if: needs.Unit.result == 'success' + # timeout-minutes: 240 + + # runs-on: ubuntu-latest + # strategy: + # fail-fast: false + # matrix: + # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + # toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] + + # services: + # redis: + # image: redis + # ports: + # - 6379:6379 + # env: + # REDIS_HOST: localhost + # REDIS_PORT: 6379 + # rabbitmq: + # image: rabbitmq + # ports: + # - 5672:5672 + # env: + # 
RABBITMQ_DEFAULT_USER: guest + # RABBITMQ_DEFAULT_PASS: guest + + # steps: + # - name: Install apt packages + # run: | + # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + + # - uses: actions/checkout@v4 + # - name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python-version }} + # cache: 'pip' + # cache-dependency-path: '**/setup.py' + # - name: Install tox + # run: python -m pip install --upgrade pip 'tox' tox-gh-actions + # - name: > + # Run tox for + # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" + # timeout-minutes: 60 + # run: > + # tox --verbose --verbose -e + # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv Smoke: # needs: @@ -154,7 +154,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 + timeout-minutes: 60 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index f7ed5436790..25687325dbd 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -4,12 +4,23 @@ from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer from pytest_docker_tools import container, fetch, network +from t.smoke.operations.task_termination import TaskTermination +from t.smoke.operations.worker_kill import WorkerKill +from t.smoke.operations.worker_restart import WorkerRestart from t.smoke.workers.alt import * # noqa from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa from t.smoke.workers.other import * # noqa +class SuiteOperations( + TaskTermination, + WorkerKill, + WorkerRestart, +): + pass + + @pytest.fixture def default_worker_tasks(default_worker_tasks: set) -> set: from t.integration import tasks as integration_tests_tasks diff --git a/t/smoke/operations/__init__.py 
b/t/smoke/operations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py new file mode 100644 index 00000000000..d51f64da307 --- /dev/null +++ b/t/smoke/operations/task_termination.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum, auto + +from pytest_celery import CeleryTestWorker + +from celery.exceptions import TimeLimitExceeded, WorkerLostError +from t.smoke.tasks import suicide + + +class TaskTermination: + class Method(Enum): + DELAY_TIMEOUT = auto() + CPU_OVERLOAD = auto() + EXCEPTION = auto() + SYSTEM_EXIT = auto() + ALLOCATE_MAX_MEMORY = auto() + EXHAUST_MEMORY = auto() + EXHAUST_HDD = auto() + CONTROL_SHUTDOWN = auto() + SIGKILL = auto() + + @dataclass + class Options: + worker: CeleryTestWorker + method: str + allocate: int + large_file_name: str + hostname: str + try_eager: bool = True + time_limit: int = 4 + cpu_load_factor: int = 420 + + def run_suicide_task( + self, + worker: CeleryTestWorker, + method: TaskTermination.Method, + **options: dict, + ): + # Update kwargs with default values for missing keys + defaults = { + "worker": worker, + "method": method.name, + "allocate": worker.app.conf.worker_max_memory_per_child * 10**9, + "large_file_name": worker.name(), + "hostname": worker.hostname(), + } + options = {**defaults, **options} + options = TaskTermination.Options(**options) + + expected_error = { + TaskTermination.Method.DELAY_TIMEOUT: TimeLimitExceeded, + TaskTermination.Method.CPU_OVERLOAD: RecursionError, + TaskTermination.Method.EXCEPTION: Exception, + TaskTermination.Method.SYSTEM_EXIT: WorkerLostError, + TaskTermination.Method.ALLOCATE_MAX_MEMORY: MemoryError, + TaskTermination.Method.EXHAUST_MEMORY: WorkerLostError, + TaskTermination.Method.EXHAUST_HDD: OSError, + TaskTermination.Method.SIGKILL: WorkerLostError, + }.get(method) + + try: + suicide(**options.__dict__) + 
except Exception as e: + if expected_error is None: + # No specific error expected, this is an unexpected exception + assert ( + False + ), f"Worker termination by '{method.name}' failed due to an unexpected error: {e}" + + if not isinstance(e, expected_error): + # Specific error expected but an unexpected type of error occurred + assert ( + False + ), f"Worker termination by '{method.name}' failed due to a different error: {e}" + finally: + worker.container.reload() diff --git a/t/smoke/operations/worker_kill.py b/t/smoke/operations/worker_kill.py new file mode 100644 index 00000000000..6a4af26b383 --- /dev/null +++ b/t/smoke/operations/worker_kill.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from enum import Enum, auto + +from pytest_celery import CeleryTestWorker + +from celery.app.control import Control + + +class WorkerKill: + class Method(Enum): + DOCKER_KILL = auto() + CONTROL_SHUTDOWN = auto() + + def kill_worker( + self, + worker: CeleryTestWorker, + method: WorkerKill.Method, + assertion: bool = True, + ): + if method == WorkerKill.Method.DOCKER_KILL: + worker.kill() + + if method == WorkerKill.Method.CONTROL_SHUTDOWN: + control: Control = worker.app.control + control.shutdown(destination=[worker.hostname()]) + worker.container.reload() + + if assertion: + assert worker.container.status == "exited", ( + f"Worker container should be in 'exited' state after kill, " + f"but is in '{worker.container.status}' state instead." 
+ ) diff --git a/t/smoke/operations/worker_restart.py b/t/smoke/operations/worker_restart.py new file mode 100644 index 00000000000..58d87c9def0 --- /dev/null +++ b/t/smoke/operations/worker_restart.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +from enum import Enum, auto + +from pytest_celery import CeleryTestWorker + + +class WorkerRestart: + class Method(Enum): + POOL_RESTART = auto() + DOCKER_RESTART_GRACEFULLY = auto() + DOCKER_RESTART_FORCE = auto() + + def restart_worker( + self, + worker: CeleryTestWorker, + method: WorkerRestart.Method, + assertion: bool = True, + ): + if method == WorkerRestart.Method.POOL_RESTART: + worker.app.control.pool_restart() + worker.container.reload() + + if method == WorkerRestart.Method.DOCKER_RESTART_GRACEFULLY: + worker.restart() + + if method == WorkerRestart.Method.DOCKER_RESTART_FORCE: + worker.restart(force=True) + + if assertion: + assert worker.container.status == "running", ( + f"Worker container should be in 'running' state after restart, " + f"but is in '{worker.container.status}' state instead." 
+ ) diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index d7b3f929461..549cfb0406a 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,10 +1,15 @@ from __future__ import annotations +import math +import os +import sys +from signal import SIGKILL from sys import getsizeof from time import sleep import celery.utils from celery import Task, shared_task, signature +from celery.app.control import Control from celery.canvas import Signature from t.integration.tasks import * # noqa from t.integration.tasks import replaced_with_me @@ -16,12 +21,7 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task( - seconds: float = 1, - verbose: bool = False, - allocate: int | None = None, - exhaust_memory: bool = False, -) -> bool: +def long_running_task(seconds: float = 1, verbose: bool = False) -> bool: from celery import current_task from celery.utils.log import get_task_logger @@ -29,15 +29,6 @@ def long_running_task( logger.info("Starting long running task") - if allocate: - # Attempt to allocate megabytes in memory - _ = [0] * (allocate * 10**6 // getsizeof(int())) - - if exhaust_memory: - mem = [] - while True: - mem.append(' ' * 10**6) # 1 MB of spaces - for i in range(0, int(seconds)): sleep(1) if verbose: @@ -53,3 +44,118 @@ def replace_with_task(self: Task, replace_with: Signature = None): if replace_with is None: replace_with = replaced_with_me.s() return self.replace(signature(replace_with)) + + +@shared_task +def suicide(method: str, try_eager: bool = True, **options: dict): + termination_method = { + "DELAY_TIMEOUT": suicide_delay_timeout.si( + time_limit=options["time_limit"], + ), + "CPU_OVERLOAD": suicide_cpu_overload.si( + cpu_load_factor=options["cpu_load_factor"] + ), + "EXCEPTION": suicide_exception.si(), + "SYSTEM_EXIT": suicide_system_exit.si(), + "ALLOCATE_MAX_MEMORY": suicide_allocate_max_memory.si( + allocate=options["allocate"] + ), + "EXHAUST_MEMORY": suicide_exhaust_memory.si(), + "EXHAUST_HDD": suicide_exhaust_hdd.si( + 
large_file_name=options["large_file_name"] + ), + "CONTROL_SHUTDOWN": suicide_control_shutdown.si( + hostname=options["hostname"], + ), + "SIGKILL": suicide_sigkill.si(), + } + + sig = termination_method.get(method) + if sig: + if try_eager and method in { + "CONTROL_SHUTDOWN", + }: + return sig.apply().get() + + worker = options["worker"] + return sig.apply_async(queue=worker.worker_queue).get() + else: + raise ValueError(f"Unsupported termination method: {method}") + + +@shared_task(time_limit=2) +def suicide_delay_timeout(time_limit: int = 4): + """Delays the execution to simulate a task timeout.""" + sleep(time_limit) + + +@shared_task +def suicide_cpu_overload(cpu_load_factor: int = 420): + """Performs CPU-intensive operations to simulate a CPU overload.""" + + def cpu_intensive_calculation(n): + return cpu_intensive_calculation(math.sin(n)) + + cpu_intensive_calculation(cpu_load_factor) + + +@shared_task +def suicide_exception(): + """Raises an exception to simulate an unexpected error during task execution.""" + raise Exception("Simulated task failure due to an exception.") + + +@shared_task +def suicide_system_exit(): + """Triggers a system exit to simulate a critical stop of the Celery worker.""" + sys.exit("Simulated Celery worker stop via system exit.") + + +@shared_task +def suicide_allocate_max_memory(allocate: int): + """Allocates the maximum amount of memory permitted, potentially leading to memory errors.""" + _ = [0] * (allocate // getsizeof(int())) + + +@shared_task +def suicide_exhaust_memory(): + """Continuously allocates memory to simulate memory exhaustion.""" + mem = [] + while True: + mem.append(" " * 10**6) + + +@shared_task +def suicide_exhaust_hdd(large_file_name: str = "large_file"): + """Consumes disk space in /tmp to simulate a scenario where the disk is getting full.""" + # file_path = f"/tmp/{large_file_name}.tmp" + # try: + # with open(file_path, "wb") as f: + # chunk = b"\0" * 42 * 1024**2 # 42 MB + # while True: + # f.write(chunk) 
+ # finally: + # if os.path.exists(file_path): + # os.remove(file_path) + + # This code breaks GitHub CI so we simulate the same error as best effort + ######################################################################### + # [error]Failed to create step summary using 'GITHUB_STEP_SUMMARY': No space left on device + # [error]No space left on device + raise OSError("No space left on device") + + +@shared_task +def suicide_control_shutdown(hostname: str): + """Initiates a controlled shutdown via the Control API.""" + from celery.app.base import get_current_app + + app = get_current_app() + control: Control = app.control + control.shutdown(destination=[hostname]) + + +@shared_task +def suicide_sigkill(): + """Forceful termination.""" + os.kill(os.getpid(), SIGKILL) diff --git a/t/smoke/tests/__init__.py b/t/smoke/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/conftest.py b/t/smoke/tests/conftest.py deleted file mode 100644 index 16f550c9167..00000000000 --- a/t/smoke/tests/conftest.py +++ /dev/null @@ -1,63 +0,0 @@ -from __future__ import annotations - -from enum import Enum, auto - -from billiard.exceptions import WorkerLostError -from pytest_celery import CeleryTestSetup, CeleryTestWorker - -from celery.app.control import Control -from t.smoke.tasks import long_running_task - - -class WorkerOperations: - class TerminationMethod(Enum): - SIGKILL = auto() - CONTROL_SHUTDOWN = auto() - MAX_MEMORY_ALLOCATED = auto() - MEMORY_LIMIT_EXCEEDED = auto() - - class RestartMethod(Enum): - POOL_RESTART = auto() - DOCKER_RESTART_GRACEFULLY = auto() - DOCKER_RESTART_FORCE = auto() - - def terminate(self, worker: CeleryTestWorker, method: TerminationMethod): - if method == WorkerOperations.TerminationMethod.SIGKILL: - worker.kill() - return - - if method == WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN: - control: Control = worker.app.control - control.shutdown(destination=[worker.hostname()]) - return - - if method == 
WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED: - allocate = worker.app.conf.worker_max_memory_per_child * 10**6 - try: - ( - long_running_task.si(allocate=allocate) - .apply_async(queue=worker.worker_queue) - .get() - ) - except MemoryError: - return - - if method == WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED: - try: - ( - long_running_task.si(exhaust_memory=True) - .apply_async(queue=worker.worker_queue) - .get() - ) - except WorkerLostError: - return - - assert False - - def restart(self, celery_setup: CeleryTestSetup, method: RestartMethod): - if method == WorkerOperations.RestartMethod.POOL_RESTART: - celery_setup.app.control.pool_restart() - elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY: - celery_setup.worker.restart() - elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE: - celery_setup.worker.restart(force=True) diff --git a/t/smoke/tests/failover/__init__.py b/t/smoke/tests/failover/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/failover/test_broker_failover.py b/t/smoke/tests/failover/test_broker_failover.py index bfcaa86a688..be41cdcce43 100644 --- a/t/smoke/tests/failover/test_broker_failover.py +++ b/t/smoke/tests/failover/test_broker_failover.py @@ -3,7 +3,7 @@ RabbitMQContainer, RabbitMQTestBroker) from pytest_docker_tools import container, fxtr -from t.smoke.tasks import identity +from t.integration.tasks import identity failover_broker = container( image="{default_rabbitmq_broker_image}", diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index b555054e38f..ae235168266 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -1,11 +1,13 @@ from __future__ import annotations import pytest -from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker +from pytest_celery import RESULT_TIMEOUT, 
CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker from celery import Celery +from t.smoke.conftest import SuiteOperations, WorkerKill from t.smoke.tasks import long_running_task -from t.smoke.tests.conftest import WorkerOperations + +MB = 1024 * 1024 @pytest.fixture @@ -18,50 +20,47 @@ def celery_worker_cluster( cluster.teardown() -@pytest.mark.parametrize( - "termination_method", - [ - WorkerOperations.TerminationMethod.SIGKILL, - WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN, - WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED, - WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED, - ], -) -class test_worker_failover(WorkerOperations): +@pytest.mark.parametrize("method", [WorkerKill.Method.DOCKER_KILL]) +class test_worker_failover(SuiteOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - app.conf.worker_max_memory_per_child = 10 * 1024 # Limit to 10MB + app.conf.worker_max_memory_per_child = 10 * MB if app.conf.broker_url.startswith("redis"): + # Redis Broker optimization to speed up the tests app.conf.broker_transport_options = {"visibility_timeout": 1} yield app def test_killing_first_worker( self, celery_setup: CeleryTestSetup, - termination_method: WorkerOperations.TerminationMethod, + method: WorkerKill.Method, ): + assert len(celery_setup.worker_cluster) > 1 + queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=2) is True - self.terminate(celery_setup.worker, termination_method) + assert res.get(timeout=RESULT_TIMEOUT) is True + self.kill_worker(celery_setup.worker, method) sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=2) is True + assert res.get(timeout=RESULT_TIMEOUT) is True def test_reconnect_to_restarted_worker( self, celery_setup: CeleryTestSetup, - termination_method: 
WorkerOperations.TerminationMethod, + method: WorkerKill.Method, ): + assert len(celery_setup.worker_cluster) > 1 + queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) res = sig.delay() assert res.get(timeout=10) is True for worker in celery_setup.worker_cluster: - self.terminate(worker, termination_method) + self.kill_worker(worker, method) celery_setup.worker.restart() sig = long_running_task.si(1).set(queue=queue) res = sig.delay() @@ -70,8 +69,10 @@ def test_reconnect_to_restarted_worker( def test_task_retry_on_worker_crash( self, celery_setup: CeleryTestSetup, - termination_method: WorkerOperations.TerminationMethod, + method: WorkerKill, ): + assert len(celery_setup.worker_cluster) > 1 + if isinstance(celery_setup.broker, RedisTestBroker): pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") @@ -80,5 +81,5 @@ def test_task_retry_on_worker_crash( sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) celery_setup.worker.wait_for_log("Sleeping: 2") # Let task run - self.terminate(celery_setup.worker, termination_method) + self.kill_worker(celery_setup.worker, method) assert res.get(timeout=10) is True diff --git a/t/smoke/tests/stamping/workers/__init__.py b/t/smoke/tests/stamping/workers/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 965ac5e3179..e25aaaffc28 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -2,7 +2,7 @@ from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature -from t.smoke.tasks import add, identity +from t.integration.tasks import add, identity class test_signature: diff --git a/t/smoke/tests/test_control.py b/t/smoke/tests/test_control.py index edd108b36e7..5a911524186 100644 --- a/t/smoke/tests/test_control.py +++ 
b/t/smoke/tests/test_control.py @@ -4,10 +4,16 @@ class test_control: def test_sanity(self, celery_setup: CeleryTestSetup): r = celery_setup.app.control.ping() - assert all([all([res["ok"] == "pong" for _, res in response.items()]) for response in r]) + assert all( + [ + all([res["ok"] == "pong" for _, res in response.items()]) + for response in r + ] + ) def test_shutdown_exit_with_zero(self, celery_setup: CeleryTestSetup): + # TODO: celery_setup.app.control.shutdown(destination=[celery_setup.worker.hostname()]) celery_setup.app.control.shutdown() while celery_setup.worker.container.status != "exited": celery_setup.worker.container.reload() - assert celery_setup.worker.container.attrs['State']['ExitCode'] == 0 + assert celery_setup.worker.container.attrs["State"]["ExitCode"] == 0 diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 8a2713c9179..182efc700e7 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -3,19 +3,12 @@ from celery import Celery from celery.canvas import chain +from t.smoke.conftest import SuiteOperations, WorkerRestart from t.smoke.tasks import long_running_task -from t.smoke.tests.conftest import WorkerOperations -@pytest.mark.parametrize( - "restart_method", - [ - WorkerOperations.RestartMethod.POOL_RESTART, - WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY, - WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE, - ], -) -class test_worker_restart(WorkerOperations): +@pytest.mark.parametrize("method", list(WorkerRestart.Method)) +class test_worker_restart(SuiteOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app @@ -26,18 +19,18 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_restart_during_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: WorkerOperations.RestartMethod, + method: WorkerRestart, ): queue = celery_setup.worker.worker_queue sig = 
long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - self.restart(celery_setup, restart_method) + self.restart_worker(celery_setup.worker, method) assert res.get(RESULT_TIMEOUT) is True def test_restart_between_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: WorkerOperations.RestartMethod, + method: WorkerRestart, ): queue = celery_setup.worker.worker_queue first = long_running_task.si(5, verbose=True).set(queue=queue) @@ -47,5 +40,5 @@ def test_restart_between_task_execution( sig = chain(first, second) sig.delay() assert first_res.get(RESULT_TIMEOUT) is True - self.restart(celery_setup, restart_method) + self.restart_worker(celery_setup.worker, method) assert second_res.get(RESULT_TIMEOUT) is True diff --git a/tox.ini b/tox.ini index e4b27ef70c7..cb0cca1a719 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke --reruns 5 --reruns-delay 10 --rerun-except AssertionError {posargs} + smoke: pytest -xsv t/smoke --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From d03c810a0e7b3969826573e49caae1d2b7381a21 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Dec 2023 18:29:09 +0200 Subject: [PATCH 43/70] Hotfix (#8717) * Removed useless test: test_task_retry_on_worker_crash() * Completed TODO in test_shutdown_exit_with_zero() * Increased worker memory for test_worker_failover from 10MB to 100MB * Updated pytest-xdist to v3.5+ --- .../tests/failover/test_worker_failover.py | 26 +++---------------- t/smoke/tests/test_control.py | 3 +-- t/smoke/tests/test_worker.py | 1 + tox.ini | 2 +- 4 files changed, 7 insertions(+), 25 deletions(-) diff --git a/t/smoke/tests/failover/test_worker_failover.py 
b/t/smoke/tests/failover/test_worker_failover.py index ae235168266..b3b7b788f73 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -1,7 +1,7 @@ from __future__ import annotations import pytest -from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster from celery import Celery from t.smoke.conftest import SuiteOperations, WorkerKill @@ -26,7 +26,7 @@ class test_worker_failover(SuiteOperations): def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - app.conf.worker_max_memory_per_child = 10 * MB + app.conf.worker_max_memory_per_child = 100 * MB if app.conf.broker_url.startswith("redis"): # Redis Broker optimization to speed up the tests app.conf.broker_transport_options = {"visibility_timeout": 1} @@ -58,28 +58,10 @@ def test_reconnect_to_restarted_worker( queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=10) is True + assert res.get(timeout=RESULT_TIMEOUT) is True for worker in celery_setup.worker_cluster: self.kill_worker(worker, method) celery_setup.worker.restart() sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=10) is True - - def test_task_retry_on_worker_crash( - self, - celery_setup: CeleryTestSetup, - method: WorkerKill, - ): - assert len(celery_setup.worker_cluster) > 1 - - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") - - sleep_time = 4 - queue = celery_setup.worker.worker_queue - sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) - res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) - celery_setup.worker.wait_for_log("Sleeping: 2") # Let task 
run - self.kill_worker(celery_setup.worker, method) - assert res.get(timeout=10) is True + assert res.get(timeout=RESULT_TIMEOUT) is True diff --git a/t/smoke/tests/test_control.py b/t/smoke/tests/test_control.py index 5a911524186..7c6123a7db9 100644 --- a/t/smoke/tests/test_control.py +++ b/t/smoke/tests/test_control.py @@ -12,8 +12,7 @@ def test_sanity(self, celery_setup: CeleryTestSetup): ) def test_shutdown_exit_with_zero(self, celery_setup: CeleryTestSetup): - # TODO: celery_setup.app.control.shutdown(destination=[celery_setup.worker.hostname()]) - celery_setup.app.control.shutdown() + celery_setup.app.control.shutdown(destination=[celery_setup.worker.hostname()]) while celery_setup.worker.container.status != "exited": celery_setup.worker.container.reload() assert celery_setup.worker.container.attrs["State"]["ExitCode"] == 0 diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 182efc700e7..28e7a304d95 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -32,6 +32,7 @@ def test_restart_between_task_execution( celery_setup: CeleryTestSetup, method: WorkerRestart, ): + # We use freeze() to control the order of execution for the restart operation queue = celery_setup.worker.worker_queue first = long_running_task.si(5, verbose=True).set(queue=queue) first_res = first.freeze() diff --git a/tox.ini b/tox.ini index cb0cca1a719..d4a77bc8e47 100644 --- a/tox.ini +++ b/tox.ini @@ -37,7 +37,7 @@ deps= pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt - smoke: pytest-xdist==3.3.1 + smoke: pytest-xdist>=3.5 linkcheck,apicheck,configcheck: -r{toxinidir}/requirements/docs.txt lint: pre-commit From 6dc797b50ce470201f830f17fe228c7c149a9a6d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Dec 2023 18:57:31 +0200 Subject: [PATCH 44/70] [Smoke Tests only] Using pytest-xdist config: --dist=loadscope (#8719) * [Smoke Tests only] Using pytest-xdist config: 
--dist=loadscope * Trigger CI Tests if tox.ini was changed in a PR --- .github/workflows/python-package.yml | 2 ++ tox.ini | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 1dd4d7a2b92..e4d3858c843 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -11,6 +11,7 @@ on: - '**.txt' - '.github/workflows/python-package.yml' - '**.toml' + - "tox.ini" pull_request: branches: [ 'main', 'smoke_tests' ] paths: @@ -18,6 +19,7 @@ on: - '**.txt' - '**.toml' - '.github/workflows/python-package.yml' + - "tox.ini" permissions: contents: read # to fetch code (actions/checkout) diff --git a/tox.ini b/tox.ini index d4a77bc8e47..8ace1223262 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} + smoke: pytest -xsv t/smoke --dist=loadscope --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From b77bb9c3d650d3889d88c2596a0e2df4b5cac0ee Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Dec 2023 22:15:39 +0200 Subject: [PATCH 45/70] Added test_broker_failover::test_broker_failover_ui() (#8720) --- t/smoke/tests/failover/test_broker_failover.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/t/smoke/tests/failover/test_broker_failover.py b/t/smoke/tests/failover/test_broker_failover.py index be41cdcce43..53ccaeee59d 100644 --- a/t/smoke/tests/failover/test_broker_failover.py +++ b/t/smoke/tests/failover/test_broker_failover.py @@ -50,3 +50,11 @@ def test_reconnect_to_main(self, celery_setup: CeleryTestSetup): 
celery_setup.broker_cluster[0].restart() res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) assert res.get(timeout=RESULT_TIMEOUT) == expected + + def test_broker_failover_ui(self, celery_setup: CeleryTestSetup): + assert len(celery_setup.broker_cluster) > 1 + celery_setup.broker_cluster[0].kill() + celery_setup.worker.assert_log_exists("Will retry using next failover.") + celery_setup.worker.assert_log_exists( + f"Connected to amqp://guest:**@{celery_setup.broker_cluster[1].hostname()}:5672//" + ) From 9ba1669648a48dc3a1188f7e629d173455eb0bc3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 13 Dec 2023 21:52:02 +0200 Subject: [PATCH 46/70] Cleanup useless code (#8723) --- t/smoke/tests/failover/test_worker_failover.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index b3b7b788f73..2d5bf48f7d0 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -26,9 +26,7 @@ class test_worker_failover(SuiteOperations): def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - app.conf.worker_max_memory_per_child = 100 * MB if app.conf.broker_url.startswith("redis"): - # Redis Broker optimization to speed up the tests app.conf.broker_transport_options = {"visibility_timeout": 1} yield app @@ -40,9 +38,6 @@ def test_killing_first_worker( assert len(celery_setup.worker_cluster) > 1 queue = celery_setup.worker.worker_queue - sig = long_running_task.si(1).set(queue=queue) - res = sig.delay() - assert res.get(timeout=RESULT_TIMEOUT) is True self.kill_worker(celery_setup.worker, method) sig = long_running_task.si(1).set(queue=queue) res = sig.delay() @@ -56,9 +51,6 @@ def test_reconnect_to_restarted_worker( assert len(celery_setup.worker_cluster) > 1 queue = celery_setup.worker.worker_queue - sig = 
long_running_task.si(1).set(queue=queue) - res = sig.delay() - assert res.get(timeout=RESULT_TIMEOUT) is True for worker in celery_setup.worker_cluster: self.kill_worker(worker, method) celery_setup.worker.restart() From 3ba927e903f43af2ab2f65b093758148ab79b600 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 25 Dec 2023 20:22:47 +0200 Subject: [PATCH 47/70] Added test_thread_safe.py to smoke tests (#8738) --- t/smoke/tests/test_thread_safe.py | 67 +++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 t/smoke/tests/test_thread_safe.py diff --git a/t/smoke/tests/test_thread_safe.py b/t/smoke/tests/test_thread_safe.py new file mode 100644 index 00000000000..375dff2acdd --- /dev/null +++ b/t/smoke/tests/test_thread_safe.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +from concurrent.futures import ThreadPoolExecutor +from unittest.mock import Mock + +import pytest +from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster + +from celery.app.base import set_default_app +from celery.signals import after_task_publish +from t.integration.tasks import identity + + +@pytest.fixture( + params=[ + # Single worker + ["celery_setup_worker"], + # Workers cluster (same queue) + ["celery_setup_worker", "celery_alt_dev_worker"], + ] +) +def celery_worker_cluster(request: pytest.FixtureRequest) -> CeleryWorkerCluster: + nodes: tuple[CeleryTestWorker] = [ + request.getfixturevalue(worker) for worker in request.param + ] + cluster = CeleryWorkerCluster(*nodes) + yield cluster + cluster.teardown() + + +class test_thread_safety: + @pytest.mark.parametrize( + "threads_count", + [ + # Single + 1, + # Multiple + 2, + # Many + 42, + ], + ) + def test_multithread_task_publish( + self, + celery_setup: CeleryTestSetup, + threads_count: int, + ): + signal_was_called = Mock() + + @after_task_publish.connect + def after_task_publish_handler(*args, **kwargs): + nonlocal signal_was_called + signal_was_called(True) + + def 
thread_worker(): + set_default_app(celery_setup.app) + identity.si("Published from thread").apply_async( + queue=celery_setup.worker.worker_queue + ) + + executor = ThreadPoolExecutor(threads_count) + + with executor: + for _ in range(threads_count): + executor.submit(thread_worker) + + assert signal_was_called.call_count == threads_count From 5a58f1c7258365e95f534a68a6ff1d843733391d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 27 Dec 2023 12:31:20 +0200 Subject: [PATCH 48/70] Added task termination tests (#8741) * Fixed wrong type annotations in t/smoke/tests/test_worker.py * Added t/smoke/tests/test_tasks.py::test_task_termination suite * Added 'psutil' to t/smoke/workers/docker/* * Added test_task_termination.test_child_process_respawn() * Added test_task_termination.test_terminated_task_logs() --- t/smoke/operations/task_termination.py | 77 +++++------------- t/smoke/tasks.py | 108 ++----------------------- t/smoke/tests/test_tasks.py | 92 ++++++++++++++++++++- t/smoke/tests/test_worker.py | 4 +- t/smoke/workers/docker/dev | 6 +- t/smoke/workers/docker/pypi | 6 +- 6 files changed, 127 insertions(+), 166 deletions(-) diff --git a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py index d51f64da307..a35dbcf0f2f 100644 --- a/t/smoke/operations/task_termination.py +++ b/t/smoke/operations/task_termination.py @@ -1,78 +1,37 @@ from __future__ import annotations -from dataclasses import dataclass from enum import Enum, auto from pytest_celery import CeleryTestWorker -from celery.exceptions import TimeLimitExceeded, WorkerLostError -from t.smoke.tasks import suicide +from celery.canvas import Signature +from celery.result import AsyncResult +from t.smoke.tasks import suicide_delay_timeout, suicide_exhaust_memory, suicide_sigkill, suicide_system_exit class TaskTermination: class Method(Enum): - DELAY_TIMEOUT = auto() - CPU_OVERLOAD = auto() - EXCEPTION = auto() + SIGKILL = auto() SYSTEM_EXIT = auto() - ALLOCATE_MAX_MEMORY = 
auto() + DELAY_TIMEOUT = auto() EXHAUST_MEMORY = auto() - EXHAUST_HDD = auto() - CONTROL_SHUTDOWN = auto() - SIGKILL = auto() - @dataclass - class Options: - worker: CeleryTestWorker - method: str - allocate: int - large_file_name: str - hostname: str - try_eager: bool = True - time_limit: int = 4 - cpu_load_factor: int = 420 - - def run_suicide_task( + def apply_suicide_task( self, worker: CeleryTestWorker, method: TaskTermination.Method, - **options: dict, - ): - # Update kwargs with default values for missing keys - defaults = { - "worker": worker, - "method": method.name, - "allocate": worker.app.conf.worker_max_memory_per_child * 10**9, - "large_file_name": worker.name(), - "hostname": worker.hostname(), - } - options = {**defaults, **options} - options = TaskTermination.Options(**options) - - expected_error = { - TaskTermination.Method.DELAY_TIMEOUT: TimeLimitExceeded, - TaskTermination.Method.CPU_OVERLOAD: RecursionError, - TaskTermination.Method.EXCEPTION: Exception, - TaskTermination.Method.SYSTEM_EXIT: WorkerLostError, - TaskTermination.Method.ALLOCATE_MAX_MEMORY: MemoryError, - TaskTermination.Method.EXHAUST_MEMORY: WorkerLostError, - TaskTermination.Method.EXHAUST_HDD: OSError, - TaskTermination.Method.SIGKILL: WorkerLostError, - }.get(method) - + ) -> AsyncResult: try: - suicide(**options.__dict__) - except Exception as e: - if expected_error is None: - # No specific error expected, this is an unexpected exception - assert ( - False - ), f"Worker termination by '{method.name}' failed due to an unexpected error: {e}" - - if not isinstance(e, expected_error): - # Specific error expected but an unexpected type of error occurred - assert ( - False - ), f"Worker termination by '{method.name}' failed due to a different error: {e}" + suicide_sig: Signature = { + TaskTermination.Method.SIGKILL: suicide_sigkill.si(), + TaskTermination.Method.SYSTEM_EXIT: suicide_system_exit.si(), + TaskTermination.Method.DELAY_TIMEOUT: suicide_delay_timeout.si(), + 
TaskTermination.Method.EXHAUST_MEMORY: suicide_exhaust_memory.si(), + }[method] + + return suicide_sig.apply_async(queue=worker.worker_queue) finally: + # If there's an unexpected bug and the termination of the task caused the worker + # to crash, this will refresh the container object with the updated container status + # which can be asserted/checked during a test (for dev/debug) worker.container.reload() diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 549cfb0406a..e15514320d0 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,15 +1,12 @@ from __future__ import annotations -import math import os import sys from signal import SIGKILL -from sys import getsizeof from time import sleep import celery.utils from celery import Task, shared_task, signature -from celery.app.control import Control from celery.canvas import Signature from t.integration.tasks import * # noqa from t.integration.tasks import replaced_with_me @@ -47,74 +44,21 @@ def replace_with_task(self: Task, replace_with: Signature = None): @shared_task -def suicide(method: str, try_eager: bool = True, **options: dict): - termination_method = { - "DELAY_TIMEOUT": suicide_delay_timeout.si( - time_limit=options["time_limit"], - ), - "CPU_OVERLOAD": suicide_cpu_overload.si( - cpu_load_factor=options["cpu_load_factor"] - ), - "EXCEPTION": suicide_exception.si(), - "SYSTEM_EXIT": suicide_system_exit.si(), - "ALLOCATE_MAX_MEMORY": suicide_allocate_max_memory.si( - allocate=options["allocate"] - ), - "EXHAUST_MEMORY": suicide_exhaust_memory.si(), - "EXHAUST_HDD": suicide_exhaust_hdd.si( - large_file_name=options["large_file_name"] - ), - "CONTROL_SHUTDOWN": suicide_control_shutdown.si( - hostname=options["hostname"], - ), - "SIGKILL": suicide_sigkill.si(), - } - - sig = termination_method.get(method) - if sig: - if try_eager and method in { - "CONTROL_SHUTDOWN", - }: - return sig.apply().get() - - worker = options["worker"] - return sig.apply_async(queue=worker.worker_queue).get() - else: - raise 
ValueError(f"Unsupported termination method: {method}") - - -@shared_task(time_limit=2) -def suicide_delay_timeout(time_limit: int = 4): - """Delays the execution to simulate a task timeout.""" - sleep(time_limit) - - -@shared_task -def suicide_cpu_overload(cpu_load_factor: int = 420): - """Performs CPU-intensive operations to simulate a CPU overload.""" - - def cpu_intensive_calculation(n): - return cpu_intensive_calculation(math.sin(n)) - - cpu_intensive_calculation(cpu_load_factor) - - -@shared_task -def suicide_exception(): - """Raises an exception to simulate an unexpected error during task execution.""" - raise Exception("Simulated task failure due to an exception.") +def suicide_sigkill(): + """Forceful termination.""" + os.kill(os.getpid(), SIGKILL) @shared_task def suicide_system_exit(): """Triggers a system exit to simulate a critical stop of the Celery worker.""" - sys.exit("Simulated Celery worker stop via system exit.") + sys.exit(1) -@shared_task -def suicide_allocate_max_memory(allocate: int): - """Allocates the maximum amount of memory permitted, potentially leading to memory errors.""" - _ = [0] * (allocate // getsizeof(int())) +@shared_task(time_limit=2) +def suicide_delay_timeout(): + """Delays the execution to simulate a task timeout.""" + sleep(4) @shared_task @@ -123,39 +67,3 @@ def suicide_exhaust_memory(): mem = [] while True: mem.append(" " * 10**6) - - -@shared_task -def suicide_exhaust_hdd(large_file_name: str = "large_file"): - """Consumes disk space in /tmp to simulate a scenario where the disk is getting full.""" - # file_path = f"/tmp/{large_file_name}.tmp" - # try: - # with open(file_path, "wb") as f: - # chunk = b"\0" * 42 * 1024**2 # 42 MB - # while True: - # f.write(chunk) - # finally: - # if os.path.exists(file_path): - # os.remove(file_path) - - # This code breaks GitHub CI so we simulate the same error as best effort - ######################################################################### - # [error]Failed to create step 
summary using 'GITHUB_STEP_SUMMARY': No space left on device - # [error]No space left on device - raise OSError("No space left on device") - - -@shared_task -def suicide_control_shutdown(hostname: str): - """Initiates a controlled shutdown via the Control API.""" - from celery.app.base import get_current_app - - app = get_current_app() - control: Control = app.control - control.shutdown(destination=[hostname]) - - -@shared_task -def suicide_sigkill(): - """Forceful termination.""" - os.kill(os.getpid(), SIGKILL) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 162db9bfc70..6909d40f024 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -1,11 +1,101 @@ import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster +from retry import retry -from celery import signature +from celery import Celery, signature +from celery.exceptions import TimeLimitExceeded, WorkerLostError from t.integration.tasks import add, identity +from t.smoke.conftest import SuiteOperations, TaskTermination from t.smoke.tasks import replace_with_task +class test_task_termination(SuiteOperations): + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = 1 + app.conf.worker_concurrency = 1 + yield app + + @pytest.mark.parametrize( + "method,expected_error", + [ + (TaskTermination.Method.SIGKILL, WorkerLostError), + (TaskTermination.Method.SYSTEM_EXIT, WorkerLostError), + (TaskTermination.Method.DELAY_TIMEOUT, TimeLimitExceeded), + (TaskTermination.Method.EXHAUST_MEMORY, WorkerLostError), + ], + ) + def test_child_process_respawn( + self, + celery_setup: CeleryTestSetup, + method: TaskTermination.Method, + expected_error: Exception, + ): + pinfo_before = celery_setup.worker.get_running_processes_info( + ["pid", "name"], + filters={"name": "celery"}, + ) + + with pytest.raises(expected_error): + 
self.apply_suicide_task(celery_setup.worker, method).get() + + # Allowing the worker to respawn the child process before we continue + @retry(tries=42, delay=0.1) # 4.2 seconds + def wait_for_two_celery_processes(): + pinfo_current = celery_setup.worker.get_running_processes_info( + ["pid", "name"], + filters={"name": "celery"}, + ) + if len(pinfo_current) != 2: + assert ( + False + ), f"Child process did not respawn with method: {method.name}" + + wait_for_two_celery_processes() + + pinfo_after = celery_setup.worker.get_running_processes_info( + ["pid", "name"], + filters={"name": "celery"}, + ) + + pids_before = {item["pid"] for item in pinfo_before} + pids_after = {item["pid"] for item in pinfo_after} + assert len(pids_before | pids_after) == 3 + + @pytest.mark.parametrize( + "method,expected_log", + [ + ( + TaskTermination.Method.SIGKILL, + "Worker exited prematurely: signal 9 (SIGKILL)", + ), + ( + TaskTermination.Method.SYSTEM_EXIT, + "Worker exited prematurely: exitcode 1", + ), + ( + TaskTermination.Method.DELAY_TIMEOUT, + "Hard time limit (2s) exceeded for t.smoke.tasks.suicide_delay_timeout", + ), + ( + TaskTermination.Method.EXHAUST_MEMORY, + "Worker exited prematurely: signal 9 (SIGKILL)", + ), + ], + ) + def test_terminated_task_logs( + self, + celery_setup: CeleryTestSetup, + method: TaskTermination.Method, + expected_log: str, + ): + with pytest.raises(Exception): + self.apply_suicide_task(celery_setup.worker, method).get() + + celery_setup.worker.assert_log_exists(expected_log) + + class test_replace: @pytest.fixture def celery_worker_cluster( diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 28e7a304d95..6aefc731304 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -19,7 +19,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_restart_during_task_execution( self, celery_setup: CeleryTestSetup, - method: WorkerRestart, + method: WorkerRestart.Method, ): queue = 
celery_setup.worker.worker_queue sig = long_running_task.si(5, verbose=True).set(queue=queue) @@ -30,7 +30,7 @@ def test_restart_during_task_execution( def test_restart_between_task_execution( self, celery_setup: CeleryTestSetup, - method: WorkerRestart, + method: WorkerRestart.Method, ): # We use freeze() to control the order of execution for the restart operation queue = celery_setup.worker.worker_queue diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index ee1709835e3..8265e56d7be 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -21,8 +21,10 @@ ENV PYTHONDONTWRITEBYTECODE=1 WORKDIR /celery COPY --chown=test_user:test_user . /celery -RUN pip install --no-cache-dir --upgrade pip && \ - pip install --no-cache-dir -e /celery[redis,memcache,pymemcache] +RUN pip install --no-cache-dir --upgrade \ + pip \ + -e /celery[redis,memcache,pymemcache] \ + psutil # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 85d51dadf9a..4d3300d3e28 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -20,8 +20,10 @@ ENV PYTHONUNBUFFERED=1 ENV PYTHONDONTWRITEBYTECODE=1 # Install Python dependencies -RUN pip install --no-cache-dir --upgrade pip \ - && pip install --no-cache-dir celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} +RUN pip install --no-cache-dir --upgrade \ + pip \ + celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ + psutil # The workdir must be /app WORKDIR /app From dd92814a5322aae3df6cbb132db615825ee28fe2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 3 Jan 2024 03:18:12 +0200 Subject: [PATCH 49/70] Use pytest-celery via PyPI: v1.0.0a11 (#8749) --- requirements/test-tmp_for_dev.txt | 3 --- requirements/test.txt | 4 ++-- tox.ini | 1 - 3 files changed, 2 insertions(+), 6 deletions(-) delete mode 100644 requirements/test-tmp_for_dev.txt diff --git a/requirements/test-tmp_for_dev.txt 
b/requirements/test-tmp_for_dev.txt deleted file mode 100644 index 326c2e82e07..00000000000 --- a/requirements/test-tmp_for_dev.txt +++ /dev/null @@ -1,3 +0,0 @@ -# -e ../pytest-celery -git+https://github.com/celery/pytest-celery.git -# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@BRANCH_NAME#egg=pytest-celery \ No newline at end of file diff --git a/requirements/test.txt b/requirements/test.txt index 2b26eef5e9f..82b33838875 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,6 @@ pytest==7.4.4 -# pytest-celery==1.0.0a1 -pytest-rerunfailures==12.0 +pytest-celery==1.0.0a11 +pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 pytest-click==1.1.0 diff --git a/tox.ini b/tox.ini index 8ace1223262..37a568a00b2 100644 --- a/tox.ini +++ b/tox.ini @@ -29,7 +29,6 @@ passenv = deps= -r{toxinidir}/requirements/test.txt - -r{toxinidir}/requirements/test-tmp_for_dev.txt -r{toxinidir}/requirements/pkgutils.txt 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt From e350e809c1339fa97d26a302b94e2cb1de0b9ccd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 6 Jan 2024 21:57:02 +0200 Subject: [PATCH 50/70] Updated Community standards (#8758) * Added pytest-celery to .github/ISSUE_TEMPLATE/config.yml * Added pytest-celery to CONTRIBUTING.rst * Added Tomer Nosrati to CONTRIBUTING.rst * Added Tomer Nosrati to CONTRIBUTORS.txt --- .github/ISSUE_TEMPLATE/config.yml | 2 ++ CONTRIBUTING.rst | 15 +++++++++++++++ CONTRIBUTORS.txt | 1 + 3 files changed, 18 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 69e8b18cb12..44099454b10 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -9,3 +9,5 @@ contact_links: - name: py-amqp Issue Tracker url: https://github.com/celery/py-amqp/issues/ about: If this issue only involves py-amqp, please open a new issue there. 
+ - name: pytest-celery Issue Tracker + url: https://github.com/celery/pytest-celery/issues/ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 8fdb3df4dc4..82d5c918a05 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -256,6 +256,7 @@ issue tracker. * :pypi:`kombu`: https://github.com/celery/kombu/issues * :pypi:`amqp`: https://github.com/celery/py-amqp/issues * :pypi:`vine`: https://github.com/celery/vine/issues +* :pypi:`pytest-celery`: https://github.com/celery/pytest-celery/issues * :pypi:`librabbitmq`: https://github.com/celery/librabbitmq/issues * :pypi:`django-celery-beat`: https://github.com/celery/django-celery-beat/issues * :pypi:`django-celery-results`: https://github.com/celery/django-celery-results/issues @@ -1245,6 +1246,11 @@ Josue Balandrano Coronel :github: https://github.com/xirdneh :twitter: https://twitter.com/eusoj_xirdneh +Tomer Nosrati +~~~~~~~~~~~~~ +:github: https://github.com/Nusnus +:twitter: https://x.com/tomer_nosrati + Website ------- @@ -1312,6 +1318,15 @@ Promise/deferred implementation. :PyPI: :pypi:`vine` :docs: https://vine.readthedocs.io +``pytest-celery`` +----------------- + +Pytest plugin for Celery. + +:git: https://github.com/celery/pytest-celery +:PyPI: :pypi:`pytest-celery` +:docs: https://pytest-celery.readthedocs.io + ``billiard`` ------------ diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index d63caa5ca65..e0a8394bc6f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -295,3 +295,4 @@ JoonHwan Kim, 2022/08/01 Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 Jeremy Z. 
Othieno 2023/07/27 +Tomer Nosrati, 2022/17/07 \ No newline at end of file From 477561d0f74c42675385c358577b78289e257dd0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 8 Jan 2024 06:29:11 +0200 Subject: [PATCH 51/70] Upgrade from pytest-celery v1.0.0a11 -> v1.0.0a12 (#8762) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 82b33838875..8912fd59174 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==7.4.4 -pytest-celery==1.0.0a11 +pytest-celery==1.0.0a12 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 From 7d2cda1851e2aed265bc5ceecc8d18b6f39547e8 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 10 Jan 2024 02:17:00 +0200 Subject: [PATCH 52/70] Hotfix (#8781) * Added exception msg check to test_terminated_task_logs() * Renamed test_terminated_task_logs -> test_terminated_task_logs_correct_error * Configured app.conf.broker_pool_limit = 42 for test_thread_safety::test_multithread_task_publish * Cleanup * Fixed TaskTermination.Method.DELAY_TIMEOUT case for test_terminated_task_logs_correct_error --- t/smoke/tests/test_tasks.py | 15 ++++++++++++--- t/smoke/tests/test_thread_safe.py | 7 +++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 6909d40f024..7e532594608 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster from retry import retry @@ -64,34 +66,41 @@ def wait_for_two_celery_processes(): assert len(pids_before | pids_after) == 3 @pytest.mark.parametrize( - "method,expected_log", + "method,expected_log,expected_exception_msg", [ ( TaskTermination.Method.SIGKILL, "Worker exited prematurely: signal 9 (SIGKILL)", + None, ), ( 
TaskTermination.Method.SYSTEM_EXIT, "Worker exited prematurely: exitcode 1", + None, ), ( TaskTermination.Method.DELAY_TIMEOUT, "Hard time limit (2s) exceeded for t.smoke.tasks.suicide_delay_timeout", + 'TimeLimitExceeded(2,)', ), ( TaskTermination.Method.EXHAUST_MEMORY, "Worker exited prematurely: signal 9 (SIGKILL)", + None, ), ], ) - def test_terminated_task_logs( + def test_terminated_task_logs_correct_error( self, celery_setup: CeleryTestSetup, method: TaskTermination.Method, expected_log: str, + expected_exception_msg: str | None, ): - with pytest.raises(Exception): + try: self.apply_suicide_task(celery_setup.worker, method).get() + except Exception as err: + assert expected_exception_msg or expected_log in str(err) celery_setup.worker.assert_log_exists(expected_log) diff --git a/t/smoke/tests/test_thread_safe.py b/t/smoke/tests/test_thread_safe.py index 375dff2acdd..0cb4325357f 100644 --- a/t/smoke/tests/test_thread_safe.py +++ b/t/smoke/tests/test_thread_safe.py @@ -6,6 +6,7 @@ import pytest from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster +from celery import Celery from celery.app.base import set_default_app from celery.signals import after_task_publish from t.integration.tasks import identity @@ -29,6 +30,12 @@ def celery_worker_cluster(request: pytest.FixtureRequest) -> CeleryWorkerCluster class test_thread_safety: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.broker_pool_limit = 42 + yield app + @pytest.mark.parametrize( "threads_count", [ From 3122d12cd715c6a574ddd57b6146d5017f32e586 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 10 Jan 2024 13:00:06 +0200 Subject: [PATCH 53/70] Testing tasks renaming (#8784) --- t/smoke/operations/task_termination.py | 17 +++++++++-------- t/smoke/tasks.py | 8 ++++---- t/smoke/tests/test_tasks.py | 6 +++--- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git 
a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py index a35dbcf0f2f..98d2c5fc2e6 100644 --- a/t/smoke/operations/task_termination.py +++ b/t/smoke/operations/task_termination.py @@ -6,7 +6,8 @@ from celery.canvas import Signature from celery.result import AsyncResult -from t.smoke.tasks import suicide_delay_timeout, suicide_exhaust_memory, suicide_sigkill, suicide_system_exit +from t.smoke.tasks import (self_termination_delay_timeout, self_termination_exhaust_memory, self_termination_sigkill, + self_termination_system_exit) class TaskTermination: @@ -16,20 +17,20 @@ class Method(Enum): DELAY_TIMEOUT = auto() EXHAUST_MEMORY = auto() - def apply_suicide_task( + def apply_self_termination_task( self, worker: CeleryTestWorker, method: TaskTermination.Method, ) -> AsyncResult: try: - suicide_sig: Signature = { - TaskTermination.Method.SIGKILL: suicide_sigkill.si(), - TaskTermination.Method.SYSTEM_EXIT: suicide_system_exit.si(), - TaskTermination.Method.DELAY_TIMEOUT: suicide_delay_timeout.si(), - TaskTermination.Method.EXHAUST_MEMORY: suicide_exhaust_memory.si(), + self_termination_sig: Signature = { + TaskTermination.Method.SIGKILL: self_termination_sigkill.si(), + TaskTermination.Method.SYSTEM_EXIT: self_termination_system_exit.si(), + TaskTermination.Method.DELAY_TIMEOUT: self_termination_delay_timeout.si(), + TaskTermination.Method.EXHAUST_MEMORY: self_termination_exhaust_memory.si(), }[method] - return suicide_sig.apply_async(queue=worker.worker_queue) + return self_termination_sig.apply_async(queue=worker.worker_queue) finally: # If there's an unexpected bug and the termination of the task caused the worker # to crash, this will refresh the container object with the updated container status diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index e15514320d0..fcaffb2779a 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -44,25 +44,25 @@ def replace_with_task(self: Task, replace_with: Signature = None): @shared_task -def 
suicide_sigkill(): +def self_termination_sigkill(): """Forceful termination.""" os.kill(os.getpid(), SIGKILL) @shared_task -def suicide_system_exit(): +def self_termination_system_exit(): """Triggers a system exit to simulate a critical stop of the Celery worker.""" sys.exit(1) @shared_task(time_limit=2) -def suicide_delay_timeout(): +def self_termination_delay_timeout(): """Delays the execution to simulate a task timeout.""" sleep(4) @shared_task -def suicide_exhaust_memory(): +def self_termination_exhaust_memory(): """Continuously allocates memory to simulate memory exhaustion.""" mem = [] while True: diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 7e532594608..cd71bf88478 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -40,7 +40,7 @@ def test_child_process_respawn( ) with pytest.raises(expected_error): - self.apply_suicide_task(celery_setup.worker, method).get() + self.apply_self_termination_task(celery_setup.worker, method).get() # Allowing the worker to respawn the child process before we continue @retry(tries=42, delay=0.1) # 4.2 seconds @@ -80,7 +80,7 @@ def wait_for_two_celery_processes(): ), ( TaskTermination.Method.DELAY_TIMEOUT, - "Hard time limit (2s) exceeded for t.smoke.tasks.suicide_delay_timeout", + "Hard time limit (2s) exceeded for t.smoke.tasks.self_termination_delay_timeout", 'TimeLimitExceeded(2,)', ), ( @@ -98,7 +98,7 @@ def test_terminated_task_logs_correct_error( expected_exception_msg: str | None, ): try: - self.apply_suicide_task(celery_setup.worker, method).get() + self.apply_self_termination_task(celery_setup.worker, method).get() except Exception as err: assert expected_exception_msg or expected_log in str(err) From 701da1ef4040ed0731e9026d54278cc69bbb5f59 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 12 Jan 2024 12:25:27 +0200 Subject: [PATCH 54/70] Cleanup (#8788) --- t/smoke/tests/test_canvas.py | 4 ---- 1 file changed, 4 deletions(-) diff --git 
a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index e25aaaffc28..2a235da5665 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,4 +1,3 @@ -import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature @@ -35,9 +34,6 @@ def test_sanity(self, celery_setup: CeleryTestSetup): class test_chord: def test_sanity(self, celery_setup: CeleryTestSetup): - if not celery_setup.chords_allowed(): - pytest.skip("Chords are not supported") - upgraded_chord = signature( group( identity.si("header_task1"), From 3252b69109cd6826dc94bce6447823a449dca0a7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 14 Jan 2024 21:20:33 +0200 Subject: [PATCH 55/70] Upgrade to pytest-celery v1.0.0b1 (First Beta Release) (#8792) * Refactored yield -> return in all fixtures that makes sense * Upgrade from pytest-celery v1.0.0a12 -> v1.0.0b1 * Added back unit & integration CI --- .github/workflows/python-package.yml | 200 +++++++++--------- requirements/test.txt | 2 +- t/smoke/conftest.py | 2 +- .../tests/failover/test_worker_failover.py | 2 +- t/smoke/tests/stamping/conftest.py | 4 +- t/smoke/tests/test_consumer.py | 8 +- t/smoke/tests/test_tasks.py | 2 +- t/smoke/tests/test_thread_safe.py | 2 +- t/smoke/tests/test_worker.py | 2 +- 9 files changed, 112 insertions(+), 112 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e4d3858c843..5164695efdb 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -25,108 +25,108 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - # Unit: - - # runs-on: ${{ matrix.os }} - # strategy: - # fail-fast: false - # matrix: - # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] - # os: ["ubuntu-latest", "windows-latest"] - # exclude: - # - python-version: '3.9' - # os: "windows-latest" - # - python-version: 'pypy-3.10' - 
# os: "windows-latest" - # - python-version: '3.10' - # os: "windows-latest" - # - python-version: '3.11' - # os: "windows-latest" - - # steps: - # - name: Install apt packages - # if: startsWith(matrix.os, 'ubuntu-') - # run: | - # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - # - uses: actions/checkout@v4 - # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v5 - # with: - # python-version: ${{ matrix.python-version }} - # cache: 'pip' - # cache-dependency-path: '**/setup.py' - - # - name: Install tox - # run: python -m pip install --upgrade pip 'tox' tox-gh-actions - # - name: > - # Run tox for - # "${{ matrix.python-version }}-unit" - # timeout-minutes: 30 - # run: | - # tox --verbose --verbose - - # - uses: codecov/codecov-action@v3 - # with: - # flags: unittests # optional - # fail_ci_if_error: true # optional (default = false) - # verbose: true # optional (default = false) - - # Integration: - # needs: - # - Unit - # if: needs.Unit.result == 'success' - # timeout-minutes: 240 - - # runs-on: ubuntu-latest - # strategy: - # fail-fast: false - # matrix: - # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - # toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] - - # services: - # redis: - # image: redis - # ports: - # - 6379:6379 - # env: - # REDIS_HOST: localhost - # REDIS_PORT: 6379 - # rabbitmq: - # image: rabbitmq - # ports: - # - 5672:5672 - # env: - # RABBITMQ_DEFAULT_USER: guest - # RABBITMQ_DEFAULT_PASS: guest - - # steps: - # - name: Install apt packages - # run: | - # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - # - uses: actions/checkout@v4 - # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v5 - # with: - # python-version: ${{ matrix.python-version }} - # cache: 'pip' - # cache-dependency-path: '**/setup.py' - # - 
name: Install tox - # run: python -m pip install --upgrade pip 'tox' tox-gh-actions - # - name: > - # Run tox for - # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" - # timeout-minutes: 60 - # run: > - # tox --verbose --verbose -e - # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + Unit: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] + os: ["ubuntu-latest", "windows-latest"] + exclude: + - python-version: '3.9' + os: "windows-latest" + - python-version: 'pypy-3.10' + os: "windows-latest" + - python-version: '3.10' + os: "windows-latest" + - python-version: '3.11' + os: "windows-latest" + + steps: + - name: Install apt packages + if: startsWith(matrix.os, 'ubuntu-') + run: | + sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + + - name: Install tox + run: python -m pip install --upgrade pip 'tox' tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-unit" + timeout-minutes: 30 + run: | + tox --verbose --verbose + + - uses: codecov/codecov-action@v3 + with: + flags: unittests # optional + fail_ci_if_error: true # optional (default = false) + verbose: true # optional (default = false) + + Integration: + needs: + - Unit + if: needs.Unit.result == 'success' + timeout-minutes: 240 + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] + + services: + redis: + image: redis + ports: + - 6379:6379 + env: + REDIS_HOST: localhost + REDIS_PORT: 6379 + rabbitmq: + image: rabbitmq + ports: + - 5672:5672 + env: + 
RABBITMQ_DEFAULT_USER: guest + RABBITMQ_DEFAULT_PASS: guest + + steps: + - name: Install apt packages + run: | + sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip 'tox' tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv Smoke: - # needs: - # - Integration - # if: needs.Integration.result == 'success' - # timeout-minutes: 240 + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 runs-on: ubuntu-latest strategy: diff --git a/requirements/test.txt b/requirements/test.txt index 8912fd59174..3ada61cca64 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==7.4.4 -pytest-celery==1.0.0a12 +pytest-celery==1.0.0b1 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 25687325dbd..4a00ff63fb4 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -28,7 +28,7 @@ def default_worker_tasks(default_worker_tasks: set) -> set: default_worker_tasks.add(integration_tests_tasks) default_worker_tasks.add(smoke_tests_tasks) - yield default_worker_tasks + return default_worker_tasks redis_image = fetch(repository=REDIS_IMAGE) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 2d5bf48f7d0..301d7be1047 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -28,7 
+28,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.task_acks_late = True if app.conf.broker_url.startswith("redis"): app.conf.broker_transport_options = {"visibility_timeout": 1} - yield app + return app def test_killing_first_worker( self, diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py index db7e86ae030..fa1e3f49874 100644 --- a/t/smoke/tests/stamping/conftest.py +++ b/t/smoke/tests/stamping/conftest.py @@ -11,7 +11,7 @@ def default_worker_tasks(default_worker_tasks: set) -> set: from t.smoke.tests.stamping import tasks as stamping_tasks default_worker_tasks.add(stamping_tasks) - yield default_worker_tasks + return default_worker_tasks @pytest.fixture @@ -19,7 +19,7 @@ def default_worker_signals(default_worker_signals: set) -> set: from t.smoke.tests.stamping import signals default_worker_signals.add(signals) - yield default_worker_signals + return default_worker_signals @pytest.fixture diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 5645f2689b8..2586bbf9f1b 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -15,7 +15,7 @@ def default_worker_app(default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER app.conf.worker_concurrency = WORKER_CONCURRENCY - yield app + return app class test_worker_enable_prefetch_count_reduction_true: @@ -23,7 +23,7 @@ class test_worker_enable_prefetch_count_reduction_true: def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_enable_prefetch_count_reduction = True - yield app + return app @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): @@ -70,7 +70,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: 
app.conf.worker_prefetch_multiplier = 2 app.conf.worker_cancel_long_running_tasks_on_connection_loss = True app.conf.task_acks_late = True - yield app + return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): if isinstance(celery_setup.broker, RedisTestBroker): @@ -91,7 +91,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.worker_enable_prefetch_count_reduction = False app.conf.worker_cancel_long_running_tasks_on_connection_loss = True app.conf.task_acks_late = True - yield app + return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): if isinstance(celery_setup.broker, RedisTestBroker): diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index cd71bf88478..f4748296b8b 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -17,7 +17,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = 1 app.conf.worker_concurrency = 1 - yield app + return app @pytest.mark.parametrize( "method,expected_error", diff --git a/t/smoke/tests/test_thread_safe.py b/t/smoke/tests/test_thread_safe.py index 0cb4325357f..ceab993e24d 100644 --- a/t/smoke/tests/test_thread_safe.py +++ b/t/smoke/tests/test_thread_safe.py @@ -34,7 +34,7 @@ class test_thread_safety: def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.broker_pool_limit = 42 - yield app + return app @pytest.mark.parametrize( "threads_count", diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 6aefc731304..15fbbf3cda8 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -14,7 +14,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_pool_restarts = True app.conf.task_acks_late = True - yield app + return app def 
test_restart_during_task_execution( self, From ec636fad813320bfb2a860cf69712702dcc530cb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 14 Jan 2024 22:09:47 +0200 Subject: [PATCH 56/70] Hotfix (#8794) * Removed smoke_tests branch from .github/workflows/python-package.yml * actions/checkout@v3 -> v4, actions/setup-python@v4 -> v5 * Updated requirements/extras/pytest.txt from pytest-celery==0.0.0 -> pytest-celery==1.0.0b1 * Removed duplicated memcache install in the smoke tests workers --- .github/workflows/python-package.yml | 6 +++--- requirements/extras/pytest.txt | 5 +---- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 5164695efdb..ad9e22112bf 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -13,7 +13,7 @@ on: - '**.toml' - "tox.ini" pull_request: - branches: [ 'main', 'smoke_tests' ] + branches: [ 'main' ] paths: - '**.py' - '**.txt' @@ -144,9 +144,9 @@ jobs: run: | sudo apt update - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 0d178f4a462..ed4fe4a199f 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1,4 +1 @@ -pytest-celery==0.0.0 -# pytest-celery==1.0.0a1 -# git+https://github.com/celery/pytest-celery.git -# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@celery_integration#egg=pytest-celery \ No newline at end of file +pytest-celery==1.0.0b1 diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 8265e56d7be..a0619761cc8 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -23,7 
+23,7 @@ WORKDIR /celery COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ - -e /celery[redis,memcache,pymemcache] \ + -e /celery[redis,pymemcache] \ psutil # The workdir must be /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 4d3300d3e28..be8c5871a45 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -22,7 +22,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 # Install Python dependencies RUN pip install --no-cache-dir --upgrade \ pip \ - celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ + celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ psutil # The workdir must be /app From f2407dcbe07f17974bbc164e0ed06967341ddf8d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 03:45:41 +0200 Subject: [PATCH 57/70] Moved smoke tests to their own workflow (#8797) --- .github/workflows/python-package.yml | 371 ++++++++++++++++++++++++--- 1 file changed, 337 insertions(+), 34 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index ad9e22112bf..a9c6d89ab2e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -122,41 +122,344 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv - Smoke: - needs: - - Integration - if: needs.Integration.result == 'success' - timeout-minutes: 240 + failover: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - 
docker pull rabbitmq:latest + - name: Install apt packages + run: | + sudo apt update - - name: Install apt packages - run: | - sudo apt update + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k failover - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto + stamping: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n 
auto -k stamping + + canvas: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py + + consumer: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py + + control: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + 
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py + + signals: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py + + tasks: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | 
+ sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py + + thread_safe: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py + + worker: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + 
cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_worker.py From 78c06af57ec0bc4afe84bf21289d2c0b50dcb313 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 13:38:58 +0200 Subject: [PATCH 58/70] Bugfix: Worker not consuming tasks after Redis broker restart (#8796) * Revert "Add annotations to minimise differences with celery-aio-pool's tracer.py. (#7925)" This reverts commit 0233c3b674dcfc6fff79f4161ca9a818dabf28e7. * Added smoke test: test_worker_consume_tasks_after_redis_broker_restart * Removed Redis xfail from tests now that the bug is fixed * Renamed smoke tests CI jobs --- .github/workflows/python-package.yml | 18 +++++------ celery/app/trace.py | 36 ++++----------------- t/smoke/tests/test_consumer.py | 47 +++++++++++++++++++++------- 3 files changed, 51 insertions(+), 50 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index a9c6d89ab2e..3efa187bc3e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -122,7 +122,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv - failover: + Smoke-failover: needs: - Integration if: needs.Integration.result == 'success' @@ -160,7 +160,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k failover - stamping: + Smoke-stamping: needs: - Integration if: needs.Integration.result == 'success' @@ -198,7 +198,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k stamping - canvas: + Smoke-canvas: needs: - Integration if: needs.Integration.result == 'success' @@ -236,7 +236,7 @@ jobs: tox --verbose --verbose -e "${{ 
matrix.python-version }}-smoke" -- -n auto -k test_canvas.py - consumer: + Smoke-consumer: needs: - Integration if: needs.Integration.result == 'success' @@ -274,7 +274,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py - control: + Smoke-control: needs: - Integration if: needs.Integration.result == 'success' @@ -312,7 +312,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py - signals: + Smoke-signals: needs: - Integration if: needs.Integration.result == 'success' @@ -350,7 +350,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py - tasks: + Smoke-tasks: needs: - Integration if: needs.Integration.result == 'success' @@ -388,7 +388,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py - thread_safe: + Smoke-thread_safe: needs: - Integration if: needs.Integration.result == 'success' @@ -426,7 +426,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py - worker: + Smoke-worker: needs: - Integration if: needs.Integration.result == 'success' diff --git a/celery/app/trace.py b/celery/app/trace.py index 3933d01a481..2e8cf8a3181 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -8,7 +8,6 @@ import sys import time from collections import namedtuple -from typing import Any, Callable, Dict, FrozenSet, Optional, Sequence, Tuple, Type, Union from warnings import warn from billiard.einfo import ExceptionInfo, ExceptionWithTraceback @@ -17,8 +16,6 @@ from kombu.serialization import prepare_accept_content from kombu.utils.encoding import safe_repr, safe_str -import celery -import celery.loaders.app from celery import current_app, group, signals, states from celery._state import _task_stack from celery.app.task import Context @@ -294,20 +291,10 @@ def traceback_clear(exc=None): tb = tb.tb_next -def build_tracer( 
- name: str, - task: Union[celery.Task, celery.local.PromiseProxy], - loader: Optional[celery.loaders.app.AppLoader] = None, - hostname: Optional[str] = None, - store_errors: bool = True, - Info: Type[TraceInfo] = TraceInfo, - eager: bool = False, - propagate: bool = False, - app: Optional[celery.Celery] = None, - monotonic: Callable[[], int] = time.monotonic, - trace_ok_t: Type[trace_ok_t] = trace_ok_t, - IGNORE_STATES: FrozenSet[str] = IGNORE_STATES) -> \ - Callable[[str, Tuple[Any, ...], Dict[str, Any], Any], trace_ok_t]: +def build_tracer(name, task, loader=None, hostname=None, store_errors=True, + Info=TraceInfo, eager=False, propagate=False, app=None, + monotonic=time.monotonic, trace_ok_t=trace_ok_t, + IGNORE_STATES=IGNORE_STATES): """Return a function that traces task execution. Catches all exceptions and updates result backend with the @@ -387,12 +374,7 @@ def build_tracer( from celery import canvas signature = canvas.maybe_signature # maybe_ does not clone if already - def on_error( - request: celery.app.task.Context, - exc: Union[Exception, Type[Exception]], - state: str = FAILURE, - call_errbacks: bool = True) -> Tuple[Info, Any, Any, Any]: - """Handle any errors raised by a `Task`'s execution.""" + def on_error(request, exc, state=FAILURE, call_errbacks=True): if propagate: raise I = Info(state, exc) @@ -401,13 +383,7 @@ def on_error( ) return I, R, I.state, I.retval - def trace_task( - uuid: str, - args: Sequence[Any], - kwargs: Dict[str, Any], - request: Optional[Dict[str, Any]] = None) -> trace_ok_t: - """Execute and trace a `Task`.""" - + def trace_task(uuid, args, kwargs, request=None): # R - is the possibly prepared return value. # I - is the Info object. 
# T - runtime diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 2586bbf9f1b..6448946e6fa 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -2,7 +2,7 @@ from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, RedisTestBroker from celery import Celery -from celery.canvas import group +from celery.canvas import chain, group from t.smoke.tasks import long_running_task, noop WORKER_PREFETCH_MULTIPLIER = 2 @@ -15,6 +15,10 @@ def default_worker_app(default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER app.conf.worker_concurrency = WORKER_CONCURRENCY + if app.conf.broker_url.startswith("redis"): + app.conf.broker_transport_options = {"visibility_timeout": 1} + if app.conf.result_backend.startswith("redis"): + app.conf.result_backend_transport_options = {"visibility_timeout": 1} return app @@ -27,9 +31,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug: Redis Broker Restart is unstable") - sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -51,7 +52,7 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug: Redis Broker Restart is unstable") + pytest.xfail("Potential Bug with Redis Broker") expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in 
range(expected_running_tasks_count)) @@ -73,9 +74,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real Bug: Broker does not fetch messages after restart") - sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -94,9 +92,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real Bug: Broker does not fetch messages after restart") - sig = group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) r = sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -104,3 +99,33 @@ def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTes assert "Task t.smoke.tasks.noop" not in celery_setup.worker.logs() r.get(timeout=RESULT_TIMEOUT) assert "Task t.smoke.tasks.noop" in celery_setup.worker.logs() + + +class test_consumer: + def test_worker_consume_tasks_after_redis_broker_restart( + self, + celery_setup: CeleryTestSetup, + ): + queue = celery_setup.worker.worker_queue + assert noop.s().apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) is None + celery_setup.broker.kill() + celery_setup.worker.wait_for_log("Trying again in 8.00 seconds... 
(4/100)") + celery_setup.broker.restart() + + count = 5 + assert ( + group(noop.s() for _ in range(count)) + .apply_async(queue=queue) + .get(timeout=RESULT_TIMEOUT) + == [None] * count + ) + + assert ( + chain( + group(noop.si() for _ in range(count)), + group(noop.si() for _ in range(count)), + ) + .apply_async(queue=queue) + .get(timeout=RESULT_TIMEOUT) + == [None] * count + ) From ad4906599e701cc27307716e81998ea80a0b5eef Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 17:03:20 +0200 Subject: [PATCH 59/70] Bugfix: Missing id on chain (#8798) * Inherit the lask task id of a chain into the chain itself * Added unit tests * Added integration tests * Added smoke tests * Added documentation in the userguide --- celery/canvas.py | 1 + docs/userguide/canvas.rst | 7 +++++++ t/integration/test_canvas.py | 7 +++++++ t/smoke/tests/test_canvas.py | 23 ++++++++++++++++++++++- t/unit/tasks/test_canvas.py | 7 +++++++ 5 files changed, 44 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index a32d3eea7e7..469d3ee99fb 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1261,6 +1261,7 @@ def prepare_steps(self, args, kwargs, tasks, while node.parent: node = node.parent prev_res = node + self.id = last_task_id return tasks, results def apply(self, args=None, kwargs=None, **options): diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index b87dabca17c..58e8dbd8c12 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -614,6 +614,13 @@ Chains can also be made using the ``|`` (pipe) operator: >>> (add.s(2, 2) | mul.s(8) | mul.s(10)).apply_async() +Task ID +~~~~~~~ + +.. versionadded:: 5.4 + +A chain will inherit the task id of the last task in the chain. 
+ Graphs ~~~~~~ diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index b5f88016f82..7c78a98148b 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1030,6 +1030,13 @@ def test_chaining_upgraded_chords_mixed_canvas(self, manager, subtests): # Cleanup redis_connection.delete(redis_key, 'Done') + def test_freezing_chain_sets_id_of_last_task(self, manager): + last_task = add.s(2).set(task_id='42') + c = add.s(4) | last_task + assert c.id is None + c.freeze(last_task.id) + assert c.id == last_task.id + class test_result_set: diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 2a235da5665..7ecf838af90 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,7 +1,8 @@ +import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature -from t.integration.tasks import add, identity +from t.integration.tasks import ExpectedException, add, fail, identity class test_signature: @@ -31,6 +32,26 @@ def test_sanity(self, celery_setup: CeleryTestSetup): res = sig.apply_async() assert res.get(timeout=RESULT_TIMEOUT) == "test_chain" + def test_chain_gets_last_task_id_with_failing_tasks_in_chain(self, celery_setup: CeleryTestSetup): + """https://github.com/celery/celery/issues/8786""" + queue = celery_setup.worker.worker_queue + sig = chain( + identity.si("start").set(queue=queue), + group( + identity.si("a").set(queue=queue), + fail.si().set(queue=queue), + ), + identity.si("break").set(queue=queue), + identity.si("end").set(queue=queue), + ) + res = sig.apply_async() + celery_setup.worker.assert_log_does_not_exist( + "ValueError: task_id must not be empty. Got None instead." 
+ ) + + with pytest.raises(ExpectedException): + res.get(timeout=RESULT_TIMEOUT) + class test_chord: def test_sanity(self, celery_setup: CeleryTestSetup): diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 53dc52e5cbb..a90d203e234 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -476,6 +476,13 @@ def test_groups_in_chain_to_chord(self): c = g1 | g2 assert isinstance(c, chord) + def test_prepare_steps_set_last_task_id_to_chain(self): + last_task = self.add.s(2).set(task_id='42') + c = self.add.s(4) | last_task + assert c.id is None + tasks, _ = c.prepare_steps((), {}, c.tasks, last_task_id=last_task.id) + assert c.id == last_task.id + def test_group_to_chord(self): c = ( self.add.s(5) | From b02874bbeb5d5aa701f554febe33d543a9534ee7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 19:41:07 +0200 Subject: [PATCH 60/70] Prepare for (pre) release: v5.4.0rc1 (#8800) * Moved whatsnew-5.3.rst to history folder * Fixed formatting in Changelog for v5.3.4, v5.3.5 * Fixed "WARNING: toctree contains reference to nonexisting document whatsnew-5.2" * Added changelog for v5.4.0rc1 --- Changelog.rst | 210 +++++++++++++++------------- docs/history/index.rst | 1 + docs/{ => history}/whatsnew-5.3.rst | 0 docs/index.rst | 1 - 4 files changed, 114 insertions(+), 98 deletions(-) rename docs/{ => history}/whatsnew-5.3.rst (100%) diff --git a/Changelog.rst b/Changelog.rst index 6904989625a..35a0fff71b4 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,54 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. +.. _version-5.4.0rc1: + +5.4.0rc1 +======== + +:release-date: 2024-01-17 7:00 P.M GMT+2 +:release-by: Tomer Nosrati + +Celery v5.4 continues our effort to provide improved stability in production +environments. The release candidate version is available for testing. 
+The official release is planned for March-April 2024. + +- New Config: worker_enable_prefetch_count_reduction (#8581) +- Added "Serverless" section to Redis doc (redis.rst) (#8640) +- Upstash's Celery example repo link fix (#8665) +- Update mypy version (#8679) +- Update cryptography dependency to 41.0.7 (#8690) +- Add type annotations to celery/utils/nodenames.py (#8667) +- Issue 3426. Adding myself to the contributors. (#8696) +- Bump actions/setup-python from 4 to 5 (#8701) +- Fixed bug where chord.link_error() throws an exception on a dict type errback object (#8702) +- Bump github/codeql-action from 2 to 3 (#8725) +- Fixed multiprocessing integration tests not running on Mac (#8727) +- Added make docker-docs (#8729) +- Fix DeprecationWarning: datetime.datetime.utcnow() (#8726) +- Remove `new` adjective in docs (#8743) +- add type annotation to celery/utils/sysinfo.py (#8747) +- add type annotation to celery/utils/iso8601.py (#8750) +- Change type annotation to celery/utils/iso8601.py (#8752) +- Update test deps (#8754) +- Mark flaky: test_asyncresult_get_cancels_subscription() (#8757) +- change _read_as_base64 (b64encode returns bytes) on celery/utils/term.py (#8759) +- Replace string concatenation with fstring on celery/utils/term.py (#8760) +- Add type annotation to celery/utils/term.py (#8755) +- Skipping test_tasks::test_task_accepted (#8761) +- Updated concurrency docs page. 
(#8753) +- Changed pyup -> dependabot for updating dependencies (#8764) +- Bump isort from 5.12.0 to 5.13.2 (#8772) +- Update elasticsearch requirement from <=8.11.0 to <=8.11.1 (#8775) +- Bump sphinx-click from 4.4.0 to 5.1.0 (#8774) +- Bump python-memcached from 1.59 to 1.61 (#8776) +- Update elastic-transport requirement from <=8.10.0 to <=8.11.0 (#8780) +- python-memcached==1.61 -> python-memcached>=1.61 (#8787) +- Remove usage of utcnow (#8791) +- Smoke Tests (#8793) +- Moved smoke tests to their own workflow (#8797) +- Bugfix: Worker not consuming tasks after Redis broker restart (#8796) +- Bugfix: Missing id on chain (#8798) .. _version-5.3.6: @@ -17,26 +65,17 @@ an overview of what's new in Celery 5.3. :release-date: 2023-11-22 9:15 P.M GMT+6 :release-by: Asif Saif Uddin - This release is focused mainly to fix AWS SQS new feature comatibility issue and old regressions. The code changes are mostly fix for regressions. More details can be found below. -What's Changed -============== -- Increased docker-build CI job timeout from 30m -> 60m by @Nusnus in https://github.com/celery/celery/pull/8635 -- Incredibly minor spelling fix. 
by @Asday in https://github.com/celery/celery/pull/8649 -- Fix non-zero exit code when receiving remote shutdown by @lyzlisa in https://github.com/celery/celery/pull/8650 -- Update task.py get_custom_headers missing 'compression' key by @auvipy in https://github.com/celery/celery/pull/8633 -- Update kombu>=5.3.4 to fix SQS request compatibility with boto JSON serializer by @auvipy in https://github.com/celery/celery/pull/8646 -- test requirements version update by @auvipy in https://github.com/celery/celery/pull/8655 -- Update elasticsearch version by @auvipy in https://github.com/celery/celery/pull/8656 -- Propagates more ImportErrors during autodiscovery by @johnjameswhitman in https://github.com/celery/celery/pull/8632 - -New Contributors -================ -- @Asday made their first contribution in https://github.com/celery/celery/pull/8649 -- @lyzlisa made their first contribution in https://github.com/celery/celery/pull/8650 -- @johnjameswhitman made their first contribution in https://github.com/celery/celery/pull/8632 +- Increased docker-build CI job timeout from 30m -> 60m (#8635) +- Incredibly minor spelling fix. (#8649) +- Fix non-zero exit code when receiving remote shutdown (#8650) +- Update task.py get_custom_headers missing 'compression' key (#8633) +- Update kombu>=5.3.4 to fix SQS request compatibility with boto JSON serializer (#8646) +- test requirements version update (#8655) +- Update elasticsearch version (#8656) +- Propagates more ImportErrors during autodiscovery (#8632) .. 
_version-5.3.5: @@ -47,86 +86,63 @@ New Contributors :release-date: 2023-11-10 7:15 P.M GMT+6 :release-by: Asif Saif Uddin - -What's Changed -============== -- Update test.txt versions by @auvipy in https://github.com/celery/celery/pull/8481 -- fix os.getcwd() FileNotFoundError by @mortimer2015 in https://github.com/celery/celery/pull/8448 -- Fix typo in CONTRIBUTING.rst by @monteiro-renato in https://github.com/celery/celery/pull/8494 -- typo(doc): configuration.rst by @shifenhutu in https://github.com/celery/celery/pull/8484 -- assert before raise by @monteiro-renato in https://github.com/celery/celery/pull/8495 -- Update GHA checkout version by @auvipy in https://github.com/celery/celery/pull/8496 -- Fixed replaced_task_nesting by @Nusnus in https://github.com/celery/celery/pull/8500 -- Fix code indentation for route_task() example by @stefmolin in https://github.com/celery/celery/pull/8502 -- support redis 5.x by @dulmandakh in https://github.com/celery/celery/pull/8504 -- Fix typos in test_canvas.py by @monteiro-renato in https://github.com/celery/celery/pull/8498 -- Marked flaky tests by @Nusnus in https://github.com/celery/celery/pull/8508 -- Fix typos in calling.rst by @visitorckw in https://github.com/celery/celery/pull/8506 -- Added support for replaced_task_nesting in chains by @Nusnus in https://github.com/celery/celery/pull/8501 -- Fix typos in canvas.rst by @visitorckw in https://github.com/celery/celery/pull/8509 -- Patch Version Release Checklist by @Nusnus in https://github.com/celery/celery/pull/8488 -- Added Python 3.11 support to Dockerfile by @Nusnus in https://github.com/celery/celery/pull/8511 -- Dependabot (Celery) by @Nusnus in https://github.com/celery/celery/pull/8510 -- Bump actions/checkout from 3 to 4 by @dependabot in https://github.com/celery/celery/pull/8512 -- Update ETA example to include timezone by @amantri in https://github.com/celery/celery/pull/8516 -- Replaces datetime.fromisoformat with the more lenient dateutil parser by 
@stumpylog in https://github.com/celery/celery/pull/8507 -- Fixed indentation in Dockerfile for Python 3.11 by @Nusnus in https://github.com/celery/celery/pull/8527 -- Fix git bug in Dockerfile by @Nusnus in https://github.com/celery/celery/pull/8528 -- Tox lint upgrade from Python 3.9 to Python 3.11 by @Nusnus in https://github.com/celery/celery/pull/8526 -- Document gevent concurrency by @cunla in https://github.com/celery/celery/pull/8520 -- Update test.txt by @auvipy in https://github.com/celery/celery/pull/8530 -- Celery Docker Upgrades by @Nusnus in https://github.com/celery/celery/pull/8531 -- pyupgrade upgrade v3.11.0 -> v3.13.0 by @Nusnus in https://github.com/celery/celery/pull/8535 -- Update msgpack.txt by @auvipy in https://github.com/celery/celery/pull/8548 -- Update auth.txt by @auvipy in https://github.com/celery/celery/pull/8547 -- Update msgpack.txt to fix build issues by @auvipy in https://github.com/celery/celery/pull/8552 -- Basic ElasticSearch / ElasticClient 8.x Support by @q2justin in https://github.com/celery/celery/pull/8519 -- Fix eager tasks does not populate name field by @KOliver94 in https://github.com/celery/celery/pull/8486 -- Fix typo in celery.app.control by @Spaceface16518 in https://github.com/celery/celery/pull/8563 -- Update solar.txt ephem by @auvipy in https://github.com/celery/celery/pull/8566 -- Update test.txt pytest-timeout by @auvipy in https://github.com/celery/celery/pull/8565 -- Correct some mypy errors by @rbtcollins in https://github.com/celery/celery/pull/8570 -- Update elasticsearch.txt by @auvipy in https://github.com/celery/celery/pull/8573 -- Update test.txt deps by @auvipy in https://github.com/celery/celery/pull/8574 -- Update test.txt by @auvipy in https://github.com/celery/celery/pull/8590 -- Improved the "Next steps" documentation (#8561). 
by @frolenkov-nikita in https://github.com/celery/celery/pull/8600 -- Disabled couchbase tests due to broken package breaking main by @Nusnus in https://github.com/celery/celery/pull/8602 -- Update elasticsearch deps by @auvipy in https://github.com/celery/celery/pull/8605 -- Update cryptography==41.0.5 by @auvipy in https://github.com/celery/celery/pull/8604 -- Update pytest==7.4.3 by @auvipy in https://github.com/celery/celery/pull/8606 -- test initial support of python 3.12.x by @auvipy in https://github.com/celery/celery/pull/8549 -- updated new versions to fix CI by @auvipy in https://github.com/celery/celery/pull/8607 -- Update zstd.txt by @auvipy in https://github.com/celery/celery/pull/8609 -- Fixed CI Support with Python 3.12 by @Nusnus in https://github.com/celery/celery/pull/8611 -- updated CI, docs and classifier for next release by @auvipy in https://github.com/celery/celery/pull/8613 -- updated dockerfile to add python 3.12 by @auvipy in https://github.com/celery/celery/pull/8614 -- lint,mypy,docker-unit-tests -> Python 3.12 by @Nusnus in https://github.com/celery/celery/pull/8617 -- Correct type of `request` in `task_revoked` documentation by @RJPercival in https://github.com/celery/celery/pull/8616 -- update docs docker image by @auvipy in https://github.com/celery/celery/pull/8618 -- Fixed RecursionError caused by giving `config_from_object` nested mod… by @frolenkov-nikita in https://github.com/celery/celery/pull/8619 -- Fix: serialization error when gossip working by @kitsuyui in https://github.com/celery/celery/pull/6566 -* [documentation] broker_connection_max_retries of 0 does not mean "retry forever" by @jakila in https://github.com/celery/celery/pull/8626 -- added 2 debian package for better stability in Docker by @auvipy in https://github.com/celery/celery/pull/8629 - - -New Contributors -================ -- @mortimer2015 made their first contribution in https://github.com/celery/celery/pull/8448 -- @monteiro-renato made their first 
contribution in https://github.com/celery/celery/pull/8494 -- @shifenhutu made their first contribution in https://github.com/celery/celery/pull/8484 -- @stefmolin made their first contribution in https://github.com/celery/celery/pull/8502 -- @visitorckw made their first contribution in https://github.com/celery/celery/pull/8506 -- @dependabot made their first contribution in https://github.com/celery/celery/pull/8512 -- @amantri made their first contribution in https://github.com/celery/celery/pull/8516 -- @cunla made their first contribution in https://github.com/celery/celery/pull/8520 -- @q2justin made their first contribution in https://github.com/celery/celery/pull/8519 -- @Spaceface16518 made their first contribution in https://github.com/celery/celery/pull/8563 -- @rbtcollins made their first contribution in https://github.com/celery/celery/pull/8570 -- @frolenkov-nikita made their first contribution in https://github.com/celery/celery/pull/8600 -- @RJPercival made their first contribution in https://github.com/celery/celery/pull/8616 -- @kitsuyui made their first contribution in https://github.com/celery/celery/pull/6566 -- @jakila made their first contribution in https://github.com/celery/celery/pull/8626 - +- Update test.txt versions (#8481) +- fix os.getcwd() FileNotFoundError (#8448) +- Fix typo in CONTRIBUTING.rst (#8494) +- typo(doc): configuration.rst (#8484) +- assert before raise (#8495) +- Update GHA checkout version (#8496) +- Fixed replaced_task_nesting (#8500) +- Fix code indentation for route_task() example (#8502) +- support redis 5.x (#8504) +- Fix typos in test_canvas.py (#8498) +- Marked flaky tests (#8508) +- Fix typos in calling.rst (#8506) +- Added support for replaced_task_nesting in chains (#8501) +- Fix typos in canvas.rst (#8509) +- Patch Version Release Checklist (#8488) +- Added Python 3.11 support to Dockerfile (#8511) +- Dependabot (Celery) (#8510) +- Bump actions/checkout from 3 to 4 (#8512) +- Update ETA example to include 
timezone (#8516) +- Replaces datetime.fromisoformat with the more lenient dateutil parser (#8507) +- Fixed indentation in Dockerfile for Python 3.11 (#8527) +- Fix git bug in Dockerfile (#8528) +- Tox lint upgrade from Python 3.9 to Python 3.11 (#8526) +- Document gevent concurrency (#8520) +- Update test.txt (#8530) +- Celery Docker Upgrades (#8531) +- pyupgrade upgrade v3.11.0 -> v3.13.0 (#8535) +- Update msgpack.txt (#8548) +- Update auth.txt (#8547) +- Update msgpack.txt to fix build issues (#8552) +- Basic ElasticSearch / ElasticClient 8.x Support (#8519) +- Fix eager tasks does not populate name field (#8486) +- Fix typo in celery.app.control (#8563) +- Update solar.txt ephem (#8566) +- Update test.txt pytest-timeout (#8565) +- Correct some mypy errors (#8570) +- Update elasticsearch.txt (#8573) +- Update test.txt deps (#8574) +- Update test.txt (#8590) +- Improved the "Next steps" documentation (#8561). (#8600) +- Disabled couchbase tests due to broken package breaking main (#8602) +- Update elasticsearch deps (#8605) +- Update cryptography==41.0.5 (#8604) +- Update pytest==7.4.3 (#8606) +- test initial support of python 3.12.x (#8549) +- updated new versions to fix CI (#8607) +- Update zstd.txt (#8609) +- Fixed CI Support with Python 3.12 (#8611) +- updated CI, docs and classifier for next release (#8613) +- updated dockerfile to add python 3.12 (#8614) +- lint,mypy,docker-unit-tests -> Python 3.12 (#8617) +- Correct type of `request` in `task_revoked` documentation (#8616) +- update docs docker image (#8618) +- Fixed RecursionError caused by giving `config_from_object` nested mod… (#8619) +- Fix: serialization error when gossip working (#6566) +- [documentation] broker_connection_max_retries of 0 does not mean "retry forever" (#8626) +- added 2 debian package for better stability in Docker (#8629) .. 
_version-5.3.4: diff --git a/docs/history/index.rst b/docs/history/index.rst index 35423550084..b0c39767826 100644 --- a/docs/history/index.rst +++ b/docs/history/index.rst @@ -13,6 +13,7 @@ version please visit :ref:`changelog`. .. toctree:: :maxdepth: 2 + whatsnew-5.3 whatsnew-5.1 changelog-5.1 whatsnew-5.0 diff --git a/docs/whatsnew-5.3.rst b/docs/history/whatsnew-5.3.rst similarity index 100% rename from docs/whatsnew-5.3.rst rename to docs/history/whatsnew-5.3.rst diff --git a/docs/index.rst b/docs/index.rst index 915b7c088aa..299fb5749f2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -58,7 +58,6 @@ Contents tutorials/index faq changelog - whatsnew-5.2 reference/index internals/index history/index From 5d97edc0ed34c5cf1c122f9d57552f8dac419766 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 19:47:27 +0200 Subject: [PATCH 61/70] =?UTF-8?q?Bump=20version:=205.3.6=20=E2=86=92=205.4?= =?UTF-8?q?.0rc1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 4 ++-- docs/includes/introduction.txt | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 412d6ea69b4..f82cfbd7d53 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.6 +current_version = 5.4.0rc1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? 
diff --git a/README.rst b/README.rst index 7a2b2411f37..e206ec30140 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.6 (emerald-rush) +:Version: 5.4.0rc1 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index c60dbd4fe58..7212e277efc 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -15,9 +15,9 @@ # Lazy loading from . import local -SERIES = 'emerald-rush' +SERIES = 'opalescent' -__version__ = '5.3.6' +__version__ = '5.4.0rc1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 79eb36eeb34..e3df2ded029 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.6 (emerald-rush) +:Version: 5.4.0rc1 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 2576e83dcba0edb06e8a4b5027b1fcb586972050 Mon Sep 17 00:00:00 2001 From: Axel H Date: Thu, 18 Jan 2024 00:27:27 +0100 Subject: [PATCH 62/70] feat(daemon): allows daemonization options to be fetched from app settings (#8553) * feat(daemon): allows daemonization options to be fetched from app settings * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst 
Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Update docs/userguide/configuration.rst Co-authored-by: Omer Katz * Apply suggestions from code review Co-authored-by: Omer Katz * doc(configuration): add version added markers to the new daemonization settings --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Omer Katz Co-authored-by: Tomer Nosrati --- celery/bin/base.py | 34 ++++-- docs/userguide/configuration.rst | 199 +++++++++++++++++++++++++++++++ t/unit/bin/proj/daemon.py | 4 + t/unit/bin/proj/daemon_config.py | 22 ++++ t/unit/bin/test_daemonization.py | 22 ++++ 5 files changed, 273 insertions(+), 8 deletions(-) create mode 100644 t/unit/bin/proj/daemon.py create mode 100644 t/unit/bin/proj/daemon_config.py create mode 100644 t/unit/bin/test_daemonization.py diff --git a/celery/bin/base.py b/celery/bin/base.py index 63a2895758b..57158a27e06 100644 --- a/celery/bin/base.py +++ b/celery/bin/base.py @@ -4,9 +4,10 @@ from collections import OrderedDict from functools import update_wrapper from pprint import pformat +from typing import Any import click -from click import ParamType +from click import Context, ParamType from kombu.utils.objects import cached_property from celery._state import get_current_app @@ -170,19 +171,36 @@ def format_options(self, ctx, formatter): formatter.write_dl(opts_group) +class DaemonOption(CeleryOption): + """Common daemonization option.""" + def __init__(self, *args, **kwargs): + super().__init__(args, + help_group=kwargs.pop("help_group", "Daemonization Options"), + callback=kwargs.pop("callback", self.daemon_setting), + **kwargs) + + def daemon_setting(self, ctx: Context, opt: CeleryOption, value: Any) -> Any: + """ + Try to fetch the daemonization option from application settings. + Use the daemon command name as prefix (e.g. 
`worker` -> `worker_pidfile`) + """ + return value or getattr(ctx.obj.app.conf, f"{ctx.command.name}_{self.name}", None) + + class CeleryDaemonCommand(CeleryCommand): """Daemon commands.""" def __init__(self, *args, **kwargs): """Initialize a Celery command with common daemon options.""" super().__init__(*args, **kwargs) - self.params.append(CeleryOption(('-f', '--logfile'), help_group="Daemonization Options", - help="Log destination; defaults to stderr")) - self.params.append(CeleryOption(('--pidfile',), help_group="Daemonization Options")) - self.params.append(CeleryOption(('--uid',), help_group="Daemonization Options")) - self.params.append(CeleryOption(('--gid',), help_group="Daemonization Options")) - self.params.append(CeleryOption(('--umask',), help_group="Daemonization Options")) - self.params.append(CeleryOption(('--executable',), help_group="Daemonization Options")) + self.params.extend(( + DaemonOption("--logfile", "-f", help="Log destination; defaults to stderr"), + DaemonOption("--pidfile", help="PID file path; defaults to no PID file"), + DaemonOption("--uid", help="Drops privileges to this user ID"), + DaemonOption("--gid", help="Drops privileges to this group ID"), + DaemonOption("--umask", help="Create files and directories with this umask"), + DaemonOption("--executable", help="Override path to the Python executable"), + )) class CommaSeparatedList(ParamType): diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 66a4ee71606..8b0c01bcf86 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3219,6 +3219,71 @@ Message serialization format used when sending event messages. :ref:`calling-serializers`. +.. setting:: events_logfile + +``events_logfile`` +~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery events` to log into (defaults to `stdout`). + +.. versionadded:: 5.4 + +.. 
setting:: events_pidfile + +``events_pidfile`` +~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery events` to create/store its PID file (default to no PID file created). + +.. versionadded:: 5.4 + +.. setting:: events_uid + +``events_uid`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional user ID to use when events :program:`celery events` drops its privileges (defaults to no UID change). + +.. versionadded:: 5.4 + +.. setting:: events_gid + +``events_gid`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional group ID to use when :program:`celery events` daemon drops its privileges (defaults to no GID change). + +.. versionadded:: 5.4 + +.. setting:: events_umask + +``events_umask`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `umask` to use when :program:`celery events` creates files (log, pid...) when daemonizing. + +.. versionadded:: 5.4 + +.. setting:: events_executable + +``events_executable`` +~~~~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `python` executable path for :program:`celery events` to use when deaemonizing (defaults to :data:`sys.executable`). + + .. _conf-control: Remote Control Commands @@ -3487,6 +3552,74 @@ Default: ``"kombu.asynchronous.hub.timer:Timer"``. Name of the ETA scheduler class used by the worker. Default is or set by the pool implementation. +.. setting:: worker_logfile + +``worker_logfile`` +~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery worker` to log into (defaults to `stdout`). + +.. versionadded:: 5.4 + +.. setting:: worker_pidfile + +``worker_pidfile`` +~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery worker` to create/store its PID file (defaults to no PID file created). + +.. versionadded:: 5.4 + +.. 
setting:: worker_uid + +``worker_uid`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional user ID to use when :program:`celery worker` daemon drops its privileges (defaults to no UID change). + +.. versionadded:: 5.4 + +.. setting:: worker_gid + +``worker_gid`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional group ID to use when :program:`celery worker` daemon drops its privileges (defaults to no GID change). + +.. versionadded:: 5.4 + +.. setting:: worker_umask + +``worker_umask`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `umask` to use when :program:`celery worker` creates files (log, pid...) when daemonizing. + +.. versionadded:: 5.4 + +.. setting:: worker_executable + +``worker_executable`` +~~~~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `python` executable path for :program:`celery worker` to use when deaemonizing (defaults to :data:`sys.executable`). + +.. versionadded:: 5.4 + + + .. _conf-celerybeat: Beat Settings (:program:`celery beat`) @@ -3573,3 +3706,69 @@ Default: None. When using cron, the number of seconds :mod:`~celery.bin.beat` can look back when deciding whether a cron schedule is due. When set to `None`, cronjobs that are past due will always run immediately. + +.. setting:: beat_logfile + +``beat_logfile`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery beat` to log into (defaults to `stdout`). + +.. versionadded:: 5.4 + +.. setting:: beat_pidfile + +``beat_pidfile`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery beat` to create/store it PID file (defaults to no PID file created). + +.. versionadded:: 5.4 + +.. setting:: beat_uid + +``beat_uid`` +~~~~~~~~~~~~ + +Default: :const:`None` + +An optional user ID to use when beat :program:`celery beat` drops its privileges (defaults to no UID change). + +.. versionadded:: 5.4 + +.. 
setting:: beat_gid + +``beat_gid`` +~~~~~~~~~~~~ + +Default: :const:`None` + +An optional group ID to use when :program:`celery beat` daemon drops its privileges (defaults to no GID change). + +.. versionadded:: 5.4 + +.. setting:: beat_umask + +``beat_umask`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `umask` to use when :program:`celery beat` creates files (log, pid...) when daemonizing. + +.. versionadded:: 5.4 + +.. setting:: beat_executable + +``beat_executable`` +~~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `python` executable path for :program:`celery beat` to use when deaemonizing (defaults to :data:`sys.executable`). + +.. versionadded:: 5.4 diff --git a/t/unit/bin/proj/daemon.py b/t/unit/bin/proj/daemon.py new file mode 100644 index 00000000000..82c642a5f95 --- /dev/null +++ b/t/unit/bin/proj/daemon.py @@ -0,0 +1,4 @@ +from celery import Celery + +app = Celery(set_as_current=False) +app.config_from_object("t.unit.bin.proj.daemon_config") diff --git a/t/unit/bin/proj/daemon_config.py b/t/unit/bin/proj/daemon_config.py new file mode 100644 index 00000000000..e0b6d151ce7 --- /dev/null +++ b/t/unit/bin/proj/daemon_config.py @@ -0,0 +1,22 @@ +# Test config for t/unit/bin/test_daemonization.py + +beat_pidfile = "/tmp/beat.test.pid" +beat_logfile = "/tmp/beat.test.log" +beat_uid = 42 +beat_gid = 4242 +beat_umask = 0o777 +beat_executable = "/beat/bin/python" + +events_pidfile = "/tmp/events.test.pid" +events_logfile = "/tmp/events.test.log" +events_uid = 42 +events_gid = 4242 +events_umask = 0o777 +events_executable = "/events/bin/python" + +worker_pidfile = "/tmp/worker.test.pid" +worker_logfile = "/tmp/worker.test.log" +worker_uid = 42 +worker_gid = 4242 +worker_umask = 0o777 +worker_executable = "/worker/bin/python" diff --git a/t/unit/bin/test_daemonization.py b/t/unit/bin/test_daemonization.py new file mode 100644 index 00000000000..9bd2be79beb --- /dev/null +++ b/t/unit/bin/test_daemonization.py @@ -0,0 +1,22 @@ +from 
__future__ import annotations + +from unittest.mock import patch + +import pytest +from click.testing import CliRunner + +from celery.bin.celery import celery + +from .proj import daemon_config as config + + +@pytest.mark.usefixtures('depends_on_current_app') +@pytest.mark.parametrize("daemon", ["worker", "beat", "events"]) +def test_daemon_options_from_config(daemon: str, cli_runner: CliRunner): + + with patch(f"celery.bin.{daemon}.{daemon}.callback") as mock: + cli_runner.invoke(celery, f"-A t.unit.bin.proj.daemon {daemon}") + + mock.assert_called_once() + for param in "logfile", "pidfile", "uid", "gid", "umask", "executable": + assert mock.call_args.kwargs[param] == getattr(config, f"{daemon}_{param}") From 5b9c7d18d205b4fe02b609c308b3c906f0eb0796 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 18 Jan 2024 01:36:14 +0200 Subject: [PATCH 63/70] Fixed version documentation tag from #8553 in configuration.rst (#8802) --- docs/userguide/configuration.rst | 72 ++++++++++++++++---------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 8b0c01bcf86..2825c58434a 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3224,61 +3224,63 @@ Message serialization format used when sending event messages. ``events_logfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery events` to log into (defaults to `stdout`). -.. versionadded:: 5.4 - .. setting:: events_pidfile ``events_pidfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery events` to create/store its PID file (default to no PID file created). -.. versionadded:: 5.4 - .. setting:: events_uid ``events_uid`` ~~~~~~~~~~~~~~ +.. 
versionadded:: 5.4 + Default: :const:`None` An optional user ID to use when events :program:`celery events` drops its privileges (defaults to no UID change). -.. versionadded:: 5.4 - .. setting:: events_gid ``events_gid`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional group ID to use when :program:`celery events` daemon drops its privileges (defaults to no GID change). -.. versionadded:: 5.4 - .. setting:: events_umask ``events_umask`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `umask` to use when :program:`celery events` creates files (log, pid...) when daemonizing. -.. versionadded:: 5.4 - .. setting:: events_executable ``events_executable`` ~~~~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `python` executable path for :program:`celery events` to use when deaemonizing (defaults to :data:`sys.executable`). @@ -3557,68 +3559,66 @@ Default is or set by the pool implementation. ``worker_logfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery worker` to log into (defaults to `stdout`). -.. versionadded:: 5.4 - .. setting:: worker_pidfile ``worker_pidfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery worker` to create/store its PID file (defaults to no PID file created). -.. versionadded:: 5.4 - .. setting:: worker_uid ``worker_uid`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional user ID to use when :program:`celery worker` daemon drops its privileges (defaults to no UID change). -.. versionadded:: 5.4 - .. setting:: worker_gid ``worker_gid`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional group ID to use when :program:`celery worker` daemon drops its privileges (defaults to no GID change). -.. versionadded:: 5.4 - .. setting:: worker_umask ``worker_umask`` ~~~~~~~~~~~~~~~~ +.. 
versionadded:: 5.4 + Default: :const:`None` An optional `umask` to use when :program:`celery worker` creates files (log, pid...) when daemonizing. -.. versionadded:: 5.4 - .. setting:: worker_executable ``worker_executable`` ~~~~~~~~~~~~~~~~~~~~~ -Default: :const:`None` - -An optional `python` executable path for :program:`celery worker` to use when deaemonizing (defaults to :data:`sys.executable`). - .. versionadded:: 5.4 +Default: :const:`None` +An optional `python` executable path for :program:`celery worker` to use when deaemonizing (defaults to :data:`sys.executable`). .. _conf-celerybeat: @@ -3712,63 +3712,63 @@ are past due will always run immediately. ``beat_logfile`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery beat` to log into (defaults to `stdout`). -.. versionadded:: 5.4 - .. setting:: beat_pidfile ``beat_pidfile`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery beat` to create/store it PID file (defaults to no PID file created). -.. versionadded:: 5.4 - .. setting:: beat_uid ``beat_uid`` ~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional user ID to use when beat :program:`celery beat` drops its privileges (defaults to no UID change). -.. versionadded:: 5.4 - .. setting:: beat_gid ``beat_gid`` ~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional group ID to use when :program:`celery beat` daemon drops its privileges (defaults to no GID change). -.. versionadded:: 5.4 - .. setting:: beat_umask ``beat_umask`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `umask` to use when :program:`celery beat` creates files (log, pid...) when daemonizing. -.. versionadded:: 5.4 - .. setting:: beat_executable ``beat_executable`` ~~~~~~~~~~~~~~~~~~~ +.. 
versionadded:: 5.4 + Default: :const:`None` An optional `python` executable path for :program:`celery beat` to use when deaemonizing (defaults to :data:`sys.executable`). - -.. versionadded:: 5.4 From d7700e259d89efbfb432e429ef89404b8328b261 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 18 Jan 2024 03:40:15 +0200 Subject: [PATCH 64/70] Upgraded Sphinx from v5.x.x to v7.x.x (#8803) --- requirements/docs.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 2596004d021..d4d43fb27c2 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,5 +1,5 @@ -sphinx_celery>=2.0.0 -Sphinx==5.3.0 +sphinx_celery>=2.1.1 +Sphinx>=7.0.0 sphinx-testing~=1.0.1 sphinx-click==5.1.0 -r extras/sqlalchemy.txt From 4a3930249aea8f72e62ce8fc97ae00d54f8ed2c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 01:39:30 +0200 Subject: [PATCH 65/70] Update elasticsearch requirement from <=8.11.1 to <=8.12.0 (#8810) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.12.0) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 696c6ce76cc..7c08aef8179 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.11.1 +elasticsearch<=8.12.0 elastic-transport<=8.11.0 From 8f389997887232500d4aa1a2b0ae0c7320c4c84a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 01:42:13 +0200 Subject: [PATCH 66/70] Update elastic-transport requirement from <=8.11.0 to <=8.12.0 (#8811) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.12.0) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 7c08aef8179..39417c6d221 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.12.0 -elastic-transport<=8.11.0 +elastic-transport<=8.12.0 From 939f7b9cf4c6280382735a8422e7d2f2f3258c1f Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 23 Jan 2024 04:21:17 +0200 Subject: [PATCH 67/70] Update cryptography from 41.0.7 to 42.0.0 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index ab817dd3527..c432c23341b 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==41.0.7 +cryptography==42.0.0 From 3e98049be9e0603c0f2065970848d14d47dcbb82 Mon Sep 17 00:00:00 2001 From: Andy Zickler Date: Tue, 23 Jan 2024 16:08:08 -0500 Subject: [PATCH 68/70] Catch UnicodeDecodeError or TypeError when opening beat schedule db (#8806) There is existing code to detect if celerybeat-schedule.db is corrupted and recreate it, however sometimes a UnicodeDecodeError or TypeError is thrown in the process of throwing the KeyError. This catches that error and allows Beat to use the existing code to recreate the database. (Fixes #2907) --- CONTRIBUTORS.txt | 3 ++- celery/beat.py | 4 ++-- t/unit/app/test_beat.py | 34 +++++++++++++++++++++++++++++++++- 3 files changed, 37 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e0a8394bc6f..6159effcc3a 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -295,4 +295,5 @@ JoonHwan Kim, 2022/08/01 Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 Jeremy Z. 
Othieno 2023/07/27 -Tomer Nosrati, 2022/17/07 \ No newline at end of file +Tomer Nosrati, 2022/17/07 +Andy Zickler, 2024/01/18 \ No newline at end of file diff --git a/celery/beat.py b/celery/beat.py index 76e44721e14..9656493ecbe 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -568,11 +568,11 @@ def _create_schedule(self): for _ in (1, 2): try: self._store['entries'] - except KeyError: + except (KeyError, UnicodeDecodeError, TypeError): # new schedule db try: self._store['entries'] = {} - except KeyError as exc: + except (KeyError, UnicodeDecodeError, TypeError) as exc: self._store = self._destroy_open_corrupted_schedule(exc) continue else: diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index fa163bb931e..a95e8e41409 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -2,7 +2,7 @@ import sys from datetime import datetime, timedelta, timezone from pickle import dumps, loads -from unittest.mock import Mock, call, patch +from unittest.mock import MagicMock, Mock, call, patch import pytest @@ -669,6 +669,38 @@ def test_remove_db(self, remove): with pytest.raises(OSError): s._remove_db() + def test_create_schedule_corrupted(self): + """ + Test that any decoding errors that might happen when opening beat-schedule.db are caught + """ + s = create_persistent_scheduler()[0](app=self.app, + schedule_filename='schedule') + s._store = MagicMock() + s._destroy_open_corrupted_schedule = Mock() + s._destroy_open_corrupted_schedule.return_value = MagicMock() + + # self._store['entries'] will throw a KeyError + s._store.__getitem__.side_effect = KeyError() + # then, when _create_schedule tries to reset _store['entries'], throw another error + expected_error = UnicodeDecodeError("ascii", b"ordinal not in range(128)", 0, 0, "") + s._store.__setitem__.side_effect = expected_error + + s._create_schedule() + s._destroy_open_corrupted_schedule.assert_called_with(expected_error) + + def test_create_schedule_missing_entries(self): + """ + Test that if 
_create_schedule can't find the key "entries" in _store it will recreate it + """ + s = create_persistent_scheduler()[0](app=self.app, schedule_filename="schedule") + s._store = MagicMock() + + # self._store['entries'] will throw a KeyError + s._store.__getitem__.side_effect = TypeError() + + s._create_schedule() + s._store.__setitem__.assert_called_with("entries", {}) + def test_setup_schedule(self): s = create_persistent_scheduler()[0](app=self.app, schedule_filename='schedule') From b1c8b28fc273a2bea71c812f74c2ffa3bf9c907e Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 25 Jan 2024 06:20:00 +0200 Subject: [PATCH 69/70] Update cryptography from 42.0.0 to 42.0.1 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index c432c23341b..d0384ae0df0 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.0 +cryptography==42.0.1 From 2b3fde49576771975ec462243f9adf296938f616 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 28 Jan 2024 23:53:57 +0200 Subject: [PATCH 70/70] Limit moto to <5.0.0 until the breaking issues are fixed (#8820) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 3ada61cca64..579a73977fd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ pytest-timeout==2.2.0 pytest-click==1.1.0 pytest-order==1.2.0 boto3>=1.26.143 -moto>=4.1.11 +moto>=4.1.11,<5.0.0 # typing extensions mypy==1.8.0; platform_python_implementation=="CPython" pre-commit==3.5.0