forked from pool/python-celery
Accepting request 989213 from home:mcalabkova:branches:devel:languages:python
- Update to version 5.2.7
  * Depend on the maintained python-consul2 library. (#6544).
  * Use result_chord_join_timeout instead of hardcoded default value.
  * Upgrade AzureBlockBlob storage backend to use Azure blob storage
    library v12 (#6580).
  * Exit celery with non zero exit value if failing (#6602).
  * Raise BackendStoreError when set value is too large for Redis.
  * Trace task optimizations are now set via Celery app instance.
  * Add store_eager_result setting so eager tasks can store result on
    the result backend (#6614).
  * Allow heartbeats to be sent in tests (#6632).
  * Simulate more exhaustive delivery info in apply().
  * Start chord header tasks as soon as possible (#6576).
  * --quiet flag now actually makes celery avoid producing logs (#6599).
  * Update platforms.py "superuser privileges" check (#6600).
  * fnmatch.translate() already translates globs for us. (#6668).
  * Upgrade some syntax to Python 3.6+.
  * Fix checking expiration of X.509 certificates (#6678).
  * Fix JSON decoding errors when using MongoDB as backend (#6675).
  * Allow configuration of RedisBackend's health_check_interval (#6666).
  * Tasks can now have required kwargs at any order (#6699).
  * Initial support of python 3.9 added.
  * Add Python 3.10 support (#6807).
  * Fix docstring for Signal.send to match code (#6835).
  * Chords get body_type independently to handle cases where body.type
    does not exist (#6847).
  * Fix multithreaded backend usage (#6851).
  * Fix Open Collective donate button (#6848).

OBS-URL: https://build.opensuse.org/request/show/989213
OBS-URL: https://build.opensuse.org/package/show/devel:languages:python/python-celery?expand=0&rev=147
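One item above, the new store_eager_result setting (#6614), is easiest to see with a short example. This is a hedged sketch, not part of this package change; the setting names task_always_eager and task_store_eager_result follow the upstream documentation, and the broker/backend URLs are placeholders:

    from celery import Celery

    # Placeholder broker and result backend; any configured backend works.
    app = Celery('demo', broker='memory://', backend='redis://localhost:6379/0')
    app.conf.task_always_eager = True        # run tasks in-process, no worker needed
    app.conf.task_store_eager_result = True  # new: eager results also go to the backend

    @app.task
    def add(x, y):
        return x + y

    result = add.delay(2, 3)  # executes immediately in the calling process
    print(result.get())       # 5, and the result is now stored in the backend too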
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f4efebe6f8629b0da2b8e529424de376494f5b7a743c321c8a2ddc2b1414921c
size 1426433
celery-5.2.7.tar.gz (new file)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d
size 1474243
@@ -1,15 +1,15 @@
Index: celery-5.0.2/celery/contrib/pytest.py
Index: celery-5.2.6/celery/contrib/pytest.py
===================================================================
--- celery-5.0.2.orig/celery/contrib/pytest.py
+++ celery-5.0.2/celery/contrib/pytest.py
@@ -13,16 +13,6 @@ NO_WORKER = os.environ.get('NO_WORKER')
--- celery-5.2.6.orig/celery/contrib/pytest.py
+++ celery-5.2.6/celery/contrib/pytest.py
@@ -19,16 +19,6 @@ NO_WORKER = os.environ.get('NO_WORKER')
# Well, they're called fixtures....

-def pytest_configure(config):
- """Register additional pytest configuration."""
- # add the pytest.mark.celery() marker registration to the pytest.ini [markers] section
- # this prevents pytest 4.5 and newer from issueing a warning about an unknown marker
- # this prevents pytest 4.5 and newer from issuing a warning about an unknown marker
- # and shows helpful marker documentation when running pytest --markers.
- config.addinivalue_line(
- "markers", "celery(**overrides): override celery configuration for a test case"
@@ -19,12 +19,12 @@ Index: celery-5.0.2/celery/contrib/pytest.py
@contextmanager
def _create_app(enable_logging=False,
use_trap=False,
Index: celery-5.0.2/t/unit/conftest.py
Index: celery-5.2.6/t/unit/conftest.py
===================================================================
--- celery-5.0.2.orig/t/unit/conftest.py
+++ celery-5.0.2/t/unit/conftest.py
@@ -40,6 +40,16 @@ CASE_LOG_LEVEL_EFFECT = 'Test {0} modifi
CASE_LOG_HANDLER_EFFECT = 'Test {0} modified handlers for the root logger'
--- celery-5.2.6.orig/t/unit/conftest.py
+++ celery-5.2.6/t/unit/conftest.py
@@ -64,6 +64,16 @@ class WhateverIO(io.StringIO):
_SIO_write(self, data.decode() if isinstance(data, bytes) else data)

+def pytest_configure(config):
@@ -1,3 +1,43 @@
-------------------------------------------------------------------
Mon May 2 10:19:23 UTC 2022 - Markéta Machová <mmachova@suse.com>

- Update to version 5.2.7
  * Depend on the maintained python-consul2 library. (#6544).
  * Use result_chord_join_timeout instead of hardcoded default value.
  * Upgrade AzureBlockBlob storage backend to use Azure blob storage
    library v12 (#6580).
  * Exit celery with non zero exit value if failing (#6602).
  * Raise BackendStoreError when set value is too large for Redis.
  * Trace task optimizations are now set via Celery app instance.
  * Add store_eager_result setting so eager tasks can store result on
    the result backend (#6614).
  * Allow heartbeats to be sent in tests (#6632).
  * Simulate more exhaustive delivery info in apply().
  * Start chord header tasks as soon as possible (#6576).
  * --quiet flag now actually makes celery avoid producing logs
    (#6599).
  * Update platforms.py "superuser privileges" check (#6600).
  * fnmatch.translate() already translates globs for us. (#6668).
  * Upgrade some syntax to Python 3.6+.
  * Fix checking expiration of X.509 certificates (#6678).
  * Fix JSON decoding errors when using MongoDB as backend (#6675).
  * Allow configuration of RedisBackend's health_check_interval
    (#6666).
  * Tasks can now have required kwargs at any order (#6699).
  * Initial support of python 3.9 added.
  * Add Python 3.10 support (#6807).
  * Fix docstring for Signal.send to match code (#6835).
  * Chords get body_type independently to handle cases where body.type
    does not exist (#6847).
  * Fix multithreaded backend usage (#6851).
  * Fix Open Collective donate button (#6848).
  * Make ResultSet.on_ready promise hold a weakref to self (#6784).
  * Amend IRC network link to Libera (#6837).
  * The Consul backend must correctly associate requests and responses
    (#6823).
- Drop upstreamed relax-click.patch
- Add upstream tests.patch
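Of the entries above, "Start chord header tasks as soon as possible (#6576)" is the one exercised by the test_generator change in tests.patch below. A hedged usage sketch, not taken from this package, with hypothetical add and tsum tasks standing in for the ones defined in t/integration/tasks.py:

    from celery import chord

    # Hypothetical tasks: add(x, y) returns x + y, tsum(nums) sums a list.
    # With #6576, header tasks yielded by the generator are dispatched while
    # the generator is being consumed, not only after it is exhausted.
    header = (add.s(i, i) for i in range(10))
    result = chord(header)(tsum.s())
    print(result.get(timeout=10))  # 90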

-------------------------------------------------------------------
Fri Aug 20 13:00:14 UTC 2021 - Markéta Machová <mmachova@suse.com>

@@ -1,7 +1,7 @@
#
# spec file
#
# Copyright (c) 2021 SUSE LLC
# Copyright (c) 2022 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -28,25 +28,25 @@
%endif
%bcond_with ringdisabled
Name: python-celery%{psuffix}
Version: 5.0.5
Version: 5.2.7
Release: 0
Summary: Distributed Task Queue module for Python
License: BSD-3-Clause
URL: http://celeryproject.org
Source: https://files.pythonhosted.org/packages/source/c/celery/celery-%{version}.tar.gz
Patch0: move-pytest-configuration-to-conftest.patch
Patch1: relax-click.patch
Patch1: tests.patch
BuildRequires: %{python_module setuptools}
BuildRequires: fdupes
BuildRequires: netcfg
BuildRequires: python-rpm-macros
Requires: python-billiard >= 3.6.3.0
Requires: python-click >= 8.0
Requires: python-billiard >= 3.6.4
Requires: python-click >= 8.0.3
Requires: python-click-didyoumean >= 0.0.3
Requires: python-click-plugins >= 1.1.1
Requires: python-click-repl >= 0.2.0
Requires: python-kombu >= 5.0.0
Requires: python-pytz >= 2016.7
Requires: python-kombu >= 5.2.3
Requires: python-pytz >= 2021.3
Requires: python-vine >= 5.0.0
Requires(post): update-alternatives
Requires(postun):update-alternatives
@@ -59,17 +59,17 @@ Suggests: python-python-daemon
Suggests: python-pytyrant
BuildArch: noarch
%if %{with test}
BuildRequires: %{python_module PyYAML}
BuildRequires: %{python_module PyYAML >= 3.10}
BuildRequires: %{python_module SQLAlchemy}
BuildRequires: %{python_module boto3 >= 1.9.178}
BuildRequires: %{python_module case >= 1.3.1}
BuildRequires: %{python_module celery = %{version}}
BuildRequires: %{python_module cryptography}
BuildRequires: %{python_module eventlet >= 0.26.1}
BuildRequires: %{python_module cryptography >= 36.0.2}
BuildRequires: %{python_module eventlet >= 0.32.0}
BuildRequires: %{python_module gevent}
BuildRequires: %{python_module moto >= 1.3.7}
BuildRequires: %{python_module moto >= 2.2.6}
BuildRequires: %{python_module msgpack}
BuildRequires: %{python_module pymongo >= 3.3.0}
BuildRequires: %{python_module pymongo >= 4.0.2}
BuildRequires: %{python_module pytest >= 4.5.0}
BuildRequires: %{python_module pytest-subtests}
%if %{with ringdisabled}
@@ -86,8 +86,6 @@ scheduling as well.
%prep
%setup -q -n celery-%{version}
%autopatch -p1
# do not hardcode versions
sed -i -e 's:==:>=:g' requirements/*.txt

%build
%if !%{with test}
@@ -103,8 +101,8 @@ sed -i -e 's:==:>=:g' requirements/*.txt

%check
%if %{with test}
# test_init_mongodb_dns_seedlist - does not work with new pymongo, will be fixed in 5.1
%pytest -k 'not test_init_mongodb_dns_seedlist'
# test_check_privileges_no_fchown - first it deletes fchown from the system, so it needs root privileges, and then it runs the worker and complains about root privileges
%pytest -k "not test_check_privileges_no_fchown"
%endif

%if !%{with test}
@@ -1,75 +0,0 @@
From ef026ea44f59e5d234c195c3ce73927f8323f9ee Mon Sep 17 00:00:00 2001
From: Thomas Grainger <tagrain@gmail.com>
Date: Tue, 20 Jul 2021 17:19:02 +0100
Subject: [PATCH] relaxed click version (#6861)

* relaxed click version

* fix get_default

* pre-check WorkersPool click.Choice type before calling super

https://github.com/pallets/click/issues/1898#issuecomment-841546735

* apply pre-commit run --all-files

Co-authored-by: Asif Saif Uddin <auvipy@gmail.com>
---
celery/bin/base.py | 4 ++--
celery/bin/worker.py | 4 ++++
requirements/default.txt | 4 ++--
3 files changed, 8 insertions(+), 4 deletions(-)

Index: celery-5.0.5/celery/bin/base.py
===================================================================
--- celery-5.0.5.orig/celery/bin/base.py
+++ celery-5.0.5/celery/bin/base.py
@@ -137,10 +137,10 @@ def handle_preload_options(f):
class CeleryOption(click.Option):
"""Customized option for Celery."""

- def get_default(self, ctx):
+ def get_default(self, ctx, *args, **kwargs):
if self.default_value_from_context:
self.default = ctx.obj[self.default_value_from_context]
- return super().get_default(ctx)
+ return super().get_default(ctx, *args, **kwargs)

def __init__(self, *args, **kwargs):
"""Initialize a Celery option."""
Index: celery-5.0.5/celery/bin/worker.py
===================================================================
--- celery-5.0.5.orig/celery/bin/worker.py
+++ celery-5.0.5/celery/bin/worker.py
@@ -11,6 +11,7 @@ from celery import concurrency
from celery.bin.base import (COMMA_SEPARATED_LIST, LOG_LEVEL,
CeleryDaemonCommand, CeleryOption,
handle_preload_options)
+from celery.concurrency.base import BasePool
from celery.platforms import (EX_FAILURE, EX_OK, detached,
maybe_drop_privileges)
from celery.utils.log import get_logger
@@ -44,6 +45,9 @@ class WorkersPool(click.Choice):
def convert(self, value, param, ctx):
# Pools like eventlet/gevent needs to patch libs as early
# as possible.
+ if isinstance(value, type) and issubclass(value, BasePool):
+ return value
+
return concurrency.get_implementation(
value) or ctx.obj.app.conf.worker_pool

Index: celery-5.0.5/requirements/default.txt
===================================================================
--- celery-5.0.5.orig/requirements/default.txt
+++ celery-5.0.5/requirements/default.txt
@@ -2,7 +2,7 @@ pytz>dev
billiard>=3.6.3.0,<4.0
kombu>=5.0.0,<6.0
vine>=5.0.0,<6.0
-click>=7.0,<8.0
+click>=8.0,<9.0
click-didyoumean>=0.0.3
-click-repl>=0.1.6
+click-repl>=0.2.0
click-plugins>=1.1.1
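The dropped relax-click.patch shown above (now merged upstream) mostly papers over a click 8 API change: Option.get_default() gained an extra keyword argument, so an override has to forward whatever it receives. A minimal standalone sketch of that pattern, assuming only the attribute name used in the patch; it is not the packaged code itself:

    import click

    class CeleryOption(click.Option):
        """Option whose default can be looked up from the CLI context object."""

        def __init__(self, *args, **kwargs):
            # Extra keyword consumed before click sees it (as in the patch above).
            self.default_value_from_context = kwargs.pop('default_value_from_context', None)
            super().__init__(*args, **kwargs)

        def get_default(self, ctx, *args, **kwargs):
            # click 7 calls get_default(ctx); click 8 passes an extra argument.
            # Forwarding *args/**kwargs keeps the override working on both.
            if self.default_value_from_context:
                self.default = ctx.obj[self.default_value_from_context]
            return super().get_default(ctx, *args, **kwargs)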
tests.patch (new file)
@@ -0,0 +1,285 @@
From 9e324caaa6b175d8e51d3582378b78757e66a12d Mon Sep 17 00:00:00 2001
From: dobosevych <dobosevych@users.noreply.github.com>
Date: Thu, 14 Apr 2022 18:22:33 +0300
Subject: [PATCH] Integration test fix (#7460)

* Integration debugging

* Integration debugging

* Integration debugging

* Commented tasks that aren't working

* Fixed test_inspect.py

* Fixed serialization test_canvas.py

* Request fixes

* Setup full pipeline

* Setup full pipeline

* Setup full pipeline

* Setup python-package.yml

* Setup python-package.yml

* Added 3.10 to integration tests

* test_task.py fixed

* test_generator fixed

* Added parametrization to test_generation

* fixed test_generator

* Reverted encoding in test_canvas.py

* Rollback codecov

* Retries now respect additional options.

Previously, expires and other options were not merged with
the current task's options. This commit fixes the issue.

Co-authored-by: Omer Katz <omer.katz@kcg.tech>
---
celery/app/task.py | 2 +-
celery/canvas.py | 13 +++++---
celery/contrib/pytest.py | 2 +-
celery/worker/request.py | 2 +-
requirements/test-integration.txt | 1 +
t/integration/tasks.py | 7 +++--
t/integration/test_canvas.py | 19 ++++++------
t/integration/test_tasks.py | 11 +++++--
9 files changed, 79 insertions(+), 24 deletions(-)

Index: celery-5.2.6/celery/app/task.py
===================================================================
--- celery-5.2.6.orig/celery/app/task.py
+++ celery-5.2.6/celery/app/task.py
@@ -605,7 +605,7 @@ class Task:
request = self.request if request is None else request
args = request.args if args is None else args
kwargs = request.kwargs if kwargs is None else kwargs
- options = request.as_execution_options()
+ options = {**request.as_execution_options(), **extra_options}
delivery_info = request.delivery_info or {}
priority = delivery_info.get('priority')
if priority is not None:
Index: celery-5.2.6/celery/canvas.py
===================================================================
--- celery-5.2.6.orig/celery/canvas.py
+++ celery-5.2.6/celery/canvas.py
@@ -26,7 +26,7 @@ from celery.utils import abstract
from celery.utils.collections import ChainMap
from celery.utils.functional import _regen
from celery.utils.functional import chunks as _chunks
-from celery.utils.functional import (is_list, lookahead, maybe_list, regen,
+from celery.utils.functional import (is_list, maybe_list, regen,
seq_concat_item, seq_concat_seq)
from celery.utils.objects import getitem_property
from celery.utils.text import remove_repeating_from_task, truncate
@@ -1184,9 +1184,11 @@ class group(Signature):
# next_task is None. This enables us to set the chord size
# without burning through the entire generator. See #3021.
chord_size = 0
- for task_index, (current_task, next_task) in enumerate(
- lookahead(tasks)
- ):
+ tasks_shifted, tasks = itertools.tee(tasks)
+ next(tasks_shifted, None)
+ next_task = next(tasks_shifted, None)
+
+ for task_index, current_task in enumerate(tasks):
# We expect that each task must be part of the same group which
# seems sensible enough. If that's somehow not the case we'll
# end up messing up chord counts and there are all sorts of
@@ -1212,6 +1214,7 @@ class group(Signature):
if p and not p.cancelled and not p.ready:
p.size += 1
res.then(p, weak=True)
+ next_task = next(tasks_shifted, None)
yield res # <-- r.parent, etc set in the frozen result.

def _freeze_gid(self, options):
@@ -1249,7 +1252,7 @@ class group(Signature):
# we freeze all tasks in the clone tasks1, and then zip the results
# with the IDs of tasks in the second clone, tasks2. and then, we build
# a generator that takes only the task IDs from tasks2.
- self.tasks = regen(x[0] for x in zip(tasks2, results))
+ self.tasks = regen(tasks2)
else:
new_tasks = []
# Need to unroll subgroups early so that chord gets the
Index: celery-5.2.6/celery/contrib/pytest.py
===================================================================
--- celery-5.2.6.orig/celery/contrib/pytest.py
+++ celery-5.2.6/celery/contrib/pytest.py
@@ -88,7 +88,7 @@ def celery_session_worker(
for module in celery_includes:
celery_session_app.loader.import_task_module(module)
for class_task in celery_class_tasks:
- celery_session_app.tasks.register(class_task)
+ celery_session_app.register_task(class_task)
with worker.start_worker(celery_session_app,
pool=celery_worker_pool,
**celery_worker_parameters) as w:
Index: celery-5.2.6/celery/worker/request.py
===================================================================
--- celery-5.2.6.orig/celery/worker/request.py
+++ celery-5.2.6/celery/worker/request.py
@@ -155,7 +155,7 @@ class Request:
'exchange': delivery_info.get('exchange'),
'routing_key': delivery_info.get('routing_key'),
'priority': properties.get('priority'),
- 'redelivered': delivery_info.get('redelivered'),
+ 'redelivered': delivery_info.get('redelivered', False),
}
self._request_dict.update({
'properties': properties,
Index: celery-5.2.6/requirements/test-integration.txt
===================================================================
--- celery-5.2.6.orig/requirements/test-integration.txt
+++ celery-5.2.6/requirements/test-integration.txt
@@ -3,3 +3,4 @@
-r extras/auth.txt
-r extras/memcache.txt
pytest-rerunfailures>=6.0
+git+https://github.com/celery/kombu.git
Index: celery-5.2.6/t/integration/tasks.py
===================================================================
--- celery-5.2.6.orig/t/integration/tasks.py
+++ celery-5.2.6/t/integration/tasks.py
@@ -197,16 +197,17 @@ def retry(self, return_value=None):
raise self.retry(exc=ExpectedException(), countdown=5)

-@shared_task(bind=True, expires=60.0, max_retries=1)
-def retry_once(self, *args, expires=60.0, max_retries=1, countdown=0.1):
+@shared_task(bind=True, expires=120.0, max_retries=1)
+def retry_once(self, *args, expires=None, max_retries=1, countdown=0.1):
"""Task that fails and is retried. Returns the number of retries."""
if self.request.retries:
return self.request.retries
raise self.retry(countdown=countdown,
+ expires=expires,
max_retries=max_retries)

-@shared_task(bind=True, expires=60.0, max_retries=1)
+@shared_task(bind=True, max_retries=1)
def retry_once_priority(self, *args, expires=60.0, max_retries=1,
countdown=0.1):
"""Task that fails and is retried. Returns the priority."""
Index: celery-5.2.6/t/integration/test_canvas.py
===================================================================
--- celery-5.2.6.orig/t/integration/test_canvas.py
+++ celery-5.2.6/t/integration/test_canvas.py
@@ -124,7 +124,7 @@ class test_link_error:
)
assert result.get(timeout=TIMEOUT, propagate=False) == exception

- @flaky
+ @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception")
def test_link_error_callback_retries(self):
exception = ExpectedException("Task expected to fail", "test")
result = fail.apply_async(
@@ -144,7 +144,7 @@ class test_link_error:
assert (fail.apply().get(timeout=TIMEOUT, propagate=False), True) == (
exception, True)

- @flaky
+ @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception")
def test_link_error_using_signature(self):
fail = signature('t.integration.tasks.fail', args=("test",))
retrun_exception = signature('t.integration.tasks.return_exception')
@@ -179,7 +179,7 @@ class test_chain:
res = c()
assert res.get(timeout=TIMEOUT) == [64, 65, 66, 67]

- @flaky
+ @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout")
def test_group_results_in_chain(self, manager):
# This adds in an explicit test for the special case added in commit
# 1e3fcaa969de6ad32b52a3ed8e74281e5e5360e6
@@ -477,7 +477,7 @@ class test_chain:
res = c()
assert res.get(timeout=TIMEOUT) == [8, 8]

- @flaky
+ @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout")
def test_nested_chain_group_lone(self, manager):
"""
Test that a lone group in a chain completes.
@@ -1233,7 +1233,7 @@ class test_chord:
result = c()
assert result.get(timeout=TIMEOUT) == 4

- @flaky
+ @pytest.mark.xfail(reason="async_results aren't performed in async way")
def test_redis_subscribed_channels_leak(self, manager):
if not manager.app.conf.result_backend.startswith('redis'):
raise pytest.skip('Requires redis result backend.')
@@ -1566,11 +1566,12 @@ class test_chord:
) == 1

@flaky
- def test_generator(self, manager):
+ @pytest.mark.parametrize('size', [3, 4, 5, 6, 7, 8, 9])
+ def test_generator(self, manager, size):
def assert_generator(file_name):
- for i in range(3):
+ for i in range(size):
sleep(1)
- if i == 2:
+ if i == size - 1:
with open(file_name) as file_handle:
# ensures chord header generators tasks are processed incrementally #3021
assert file_handle.readline() == '0\n', "Chord header was unrolled too early"
@@ -1579,7 +1580,7 @@ class test_chord:
with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_file:
file_name = tmp_file.name
c = chord(assert_generator(file_name), tsum.s())
- assert c().get(timeout=TIMEOUT) == 3
+ assert c().get(timeout=TIMEOUT) == size * (size - 1) // 2

@flaky
def test_parallel_chords(self, manager):
Index: celery-5.2.6/t/integration/test_tasks.py
===================================================================
--- celery-5.2.6.orig/t/integration/test_tasks.py
+++ celery-5.2.6/t/integration/test_tasks.py
@@ -29,7 +29,7 @@ class test_class_based_tasks:
def test_class_based_task_retried(self, celery_session_app,
celery_session_worker):
task = ClassBasedAutoRetryTask()
- celery_session_app.tasks.register(task)
+ celery_session_app.register_task(task)
res = task.delay()
assert res.get(timeout=TIMEOUT) == 1

@@ -255,12 +255,17 @@ class test_tasks:
manager.assert_accepted([r1.id])

@flaky
- def test_task_retried(self):
+ def test_task_retried_once(self, manager):
res = retry_once.delay()
assert res.get(timeout=TIMEOUT) == 1 # retried once

@flaky
- def test_task_retried_priority(self):
+ def test_task_retried_once_with_expires(self, manager):
+ res = retry_once.delay(expires=60)
+ assert res.get(timeout=TIMEOUT) == 1 # retried once
+
+ @flaky
+ def test_task_retried_priority(self, manager):
res = retry_once_priority.apply_async(priority=7)
assert res.get(timeout=TIMEOUT) == 7 # retried once with priority 7
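The canvas.py hunk in tests.patch above replaces celery's lookahead() helper with an itertools.tee based look-ahead, so the group can walk a task generator while still peeking at the element that follows. A minimal standalone sketch of that idiom, independent of celery and of this patch:

    import itertools

    def with_lookahead(items):
        """Yield (current, next) pairs; next is None for the last element."""
        shifted, items = itertools.tee(items)
        next(shifted, None)            # advance the copy by one element
        for current in items:
            nxt = next(shifted, None)  # peek without consuming the source twice
            yield current, nxt

    print(list(with_lookahead(iter(range(4)))))
    # [(0, 1), (1, 2), (2, 3), (3, None)]

This is how the patched group code in the hunk above can tell whether it is on the last header task, and therefore set the chord size, without unrolling the whole generator first (#3021).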