Matej Cepl 2022-03-27 18:57:25 +00:00 committed by Git OBS Bridge
parent 739e3b195f
commit e65a57ca6a
6 changed files with 470 additions and 25 deletions

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d015e781efad256fa32ae36b4278252dfbe20b1cfd7cb51bf0f44349cfcb816f
size 1606067

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:615df296e593bc636ed584c6b13ce2f05f29af8aac74d398993da2e81fd164b7
size 1615328

View File

@ -0,0 +1,13 @@
Index: distributed-2022.03.0/distributed/utils_test.py
===================================================================
--- distributed-2022.03.0.orig/distributed/utils_test.py
+++ distributed-2022.03.0/distributed/utils_test.py
@@ -1612,7 +1612,7 @@ def check_thread_leak():
yield
start = time()
- while True:
+ while False:
bad_threads = [
thread
for thread in threading.enumerate()

View File

@ -0,0 +1,375 @@
From 9c6a4c905c75c5e64ca460ea17bb2bdf0f2782fa Mon Sep 17 00:00:00 2001
From: James Bourbeau <jrbourbeau@gmail.com>
Date: Thu, 3 Feb 2022 12:58:32 -0600
Subject: [PATCH 01/12] Add Python 3.10 build to CI
---
.github/workflows/tests.yaml | 2 +-
continuous_integration/environment-3.10.yaml | 56 ++++++++++++++++++++
2 files changed, 57 insertions(+), 1 deletion(-)
create mode 100644 continuous_integration/environment-3.10.yaml
Index: distributed-2022.03.0/.github/workflows/tests.yaml
===================================================================
--- distributed-2022.03.0.orig/.github/workflows/tests.yaml
+++ distributed-2022.03.0/.github/workflows/tests.yaml
@@ -23,7 +23,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
- python-version: ["3.8", "3.9"]
+ python-version: ["3.8", "3.9", "3.10"]
# Cherry-pick test modules to split the overall runtime roughly in half
partition: [ci1, not ci1]
include:
@@ -65,12 +65,6 @@ jobs:
shell: bash -l {0}
run: conda config --show
- - name: Install stacktrace
- shell: bash -l {0}
- # stacktrace for Python 3.8 has not been released at the moment of writing
- if: ${{ matrix.os == 'ubuntu-latest' && matrix.python-version < '3.8' }}
- run: mamba install -c conda-forge -c defaults -c numba libunwind stacktrace
-
- name: Hack around https://github.com/ipython/ipython/issues/12197
# This upstream issue causes an interpreter crash when running
# distributed/protocol/tests/test_serialize.py::test_profile_nested_sizeof
Index: distributed-2022.03.0/continuous_integration/environment-3.10.yaml
===================================================================
--- /dev/null
+++ distributed-2022.03.0/continuous_integration/environment-3.10.yaml
@@ -0,0 +1,56 @@
+name: dask-distributed
+channels:
+ - conda-forge
+ - defaults
+dependencies:
+ - python=3.10
+ - packaging
+ - pip
+ - asyncssh
+ - bokeh
+ - click
+ - cloudpickle
+ - coverage<6.3 # https://github.com/nedbat/coveragepy/issues/1310
+ - dask # overridden by git tip below
+ - filesystem-spec # overridden by git tip below
+ - h5py
+ - ipykernel
+ - ipywidgets
+ - jinja2
+ - joblib # overridden by git tip below
+ - jupyter_client
+ - lz4 # Only tested here
+ - msgpack-python
+ - netcdf4
+ - paramiko
+ - pre-commit
+ - prometheus_client
+ - psutil
+ - pynvml # Only tested here
+ - pytest
+ - pytest-cov
+ - pytest-faulthandler
+ - pytest-repeat
+ - pytest-rerunfailures
+ - pytest-timeout
+ - python-blosc # Only tested here
+ - python-snappy # Only tested here
+ - requests
+ - s3fs # overridden by git tip below
+ - scikit-learn
+ - scipy
+ - sortedcollections
+ - tblib
+ - toolz
+ - tornado=6
+ - zict # overridden by git tip below
+ - zstandard
+ - pip:
+ - git+https://github.com/dask/dask
+ - git+https://github.com/dask/s3fs
+ - git+https://github.com/dask/zict
+ # FIXME https://github.com/dask/distributed/issues/5345
+ # - git+https://github.com/intake/filesystem_spec
+ - git+https://github.com/joblib/joblib
+ - keras
+ - pytest-asyncio<0.14.0 # `pytest-asyncio<0.14.0` isn't available on conda-forge for Python 3.10
Index: distributed-2022.03.0/distributed/tests/test_client.py
===================================================================
--- distributed-2022.03.0.orig/distributed/tests/test_client.py
+++ distributed-2022.03.0/distributed/tests/test_client.py
@@ -6461,6 +6461,10 @@ async def test_performance_report(c, s,
assert "cdn.bokeh.org" in data
+@pytest.mark.skipif(
+ sys.version_info >= (3, 10),
+ reason="On Py3.10+ semaphore._loop is not bound until .acquire() blocks",
+)
@gen_cluster(nthreads=[])
async def test_client_gather_semaphore_loop(s):
async with Client(s.address, asynchronous=True) as c:
@@ -6471,9 +6475,16 @@ async def test_client_gather_semaphore_l
async def test_as_completed_condition_loop(c, s, a, b):
seq = c.map(inc, range(5))
ac = as_completed(seq)
+ # consume the ac so that the ac.condition is bound to the loop on py3.10+
+ async for _ in ac:
+ pass
assert ac.condition._loop == c.loop.asyncio_loop
+@pytest.mark.skipif(
+ sys.version_info >= (3, 10),
+ reason="On Py3.10+ semaphore._loop is not bound until .acquire() blocks",
+)
def test_client_connectionpool_semaphore_loop(s, a, b):
with Client(s["address"]) as c:
assert c.rpc.semaphore._loop is c.loop.asyncio_loop
Index: distributed-2022.03.0/distributed/node.py
===================================================================
--- distributed-2022.03.0.orig/distributed/node.py
+++ distributed-2022.03.0/distributed/node.py
@@ -131,12 +131,9 @@ class ServerNode(Server):
import ssl
ssl_options = ssl.create_default_context(
- cafile=tls_ca_file, purpose=ssl.Purpose.SERVER_AUTH
+ cafile=tls_ca_file, purpose=ssl.Purpose.CLIENT_AUTH
)
ssl_options.load_cert_chain(tls_cert, keyfile=tls_key)
- # We don't care about auth here, just encryption
- ssl_options.check_hostname = False
- ssl_options.verify_mode = ssl.CERT_NONE
self.http_server = HTTPServer(self.http_application, ssl_options=ssl_options)
Index: distributed-2022.03.0/distributed/profile.py
===================================================================
--- distributed-2022.03.0.orig/distributed/profile.py
+++ distributed-2022.03.0/distributed/profile.py
@@ -27,6 +27,7 @@ We represent this tree as a nested dicti
from __future__ import annotations
import bisect
+import dis
import linecache
import sys
import threading
@@ -59,21 +60,41 @@ def identifier(frame):
)
+# work around some frames lacking an f_lineno, e.g.: https://bugs.python.org/issue47085
+def _f_lineno(frame):
+ f_lineno = frame.f_lineno
+ if f_lineno is not None:
+ return f_lineno
+
+ f_lasti = frame.f_lasti
+ code = frame.f_code
+ prev_line = code.co_firstlineno
+
+ for start, next_line in dis.findlinestarts(code):
+ if f_lasti < start:
+ return prev_line
+ prev_line = next_line
+
+ return prev_line
+
+
def repr_frame(frame):
"""Render a frame as a line for inclusion into a text traceback"""
co = frame.f_code
- text = f' File "{co.co_filename}", line {frame.f_lineno}, in {co.co_name}'
- line = linecache.getline(co.co_filename, frame.f_lineno, frame.f_globals).lstrip()
+ f_lineno = _f_lineno(frame)
+ text = f' File "{co.co_filename}", line {f_lineno}, in {co.co_name}'
+ line = linecache.getline(co.co_filename, f_lineno, frame.f_globals).lstrip()
return text + "\n\t" + line
def info_frame(frame):
co = frame.f_code
- line = linecache.getline(co.co_filename, frame.f_lineno, frame.f_globals).lstrip()
+ f_lineno = _f_lineno(frame)
+ line = linecache.getline(co.co_filename, f_lineno, frame.f_globals).lstrip()
return {
"filename": co.co_filename,
"name": co.co_name,
- "line_number": frame.f_lineno,
+ "line_number": f_lineno,
"line": line,
}
Index: distributed-2022.03.0/distributed/tests/test_profile.py
===================================================================
--- distributed-2022.03.0.orig/distributed/tests/test_profile.py
+++ distributed-2022.03.0/distributed/tests/test_profile.py
@@ -1,5 +1,9 @@
+from __future__ import annotations
+
+import dataclasses
import sys
import threading
+from collections.abc import Iterator, Sequence
from time import sleep
import pytest
@@ -11,6 +15,7 @@ from distributed.profile import (
call_stack,
create,
identifier,
+ info_frame,
ll_get_stack,
llprocess,
merge,
@@ -200,3 +205,102 @@ def test_watch():
while threading.active_count() > start_threads:
assert time() < start + 2
sleep(0.01)
+
+
+@dataclasses.dataclass(frozen=True)
+class FakeCode:
+ co_filename: str
+ co_name: str
+ co_firstlineno: int
+ co_lnotab: bytes
+ co_lines_seq: Sequence[tuple[int, int, int | None]]
+ co_code: bytes
+
+ def co_lines(self) -> Iterator[tuple[int, int, int | None]]:
+ yield from self.co_lines_seq
+
+
+FAKE_CODE = FakeCode(
+ co_filename="<stdin>",
+ co_name="example",
+ co_firstlineno=1,
+ # https://github.com/python/cpython/blob/b68431fadb3150134ac6ccbf501cdfeaf4c75678/Objects/lnotab_notes.txt#L84
+ # generated from:
+ # def example():
+ # for i in range(1):
+ # if i >= 0:
+ # pass
+ # example.__code__.co_lnotab
+ co_lnotab=b"\x00\x01\x0c\x01\x08\x01\x04\xfe",
+ # generated with list(example.__code__.co_lines())
+ co_lines_seq=[
+ (0, 12, 2),
+ (12, 20, 3),
+ (20, 22, 4),
+ (22, 24, None),
+ (24, 28, 2),
+ ],
+ # used in dis.findlinestarts as bytecode_len = len(code.co_code)
+ # https://github.com/python/cpython/blob/6f345d363308e3e6ecf0ad518ea0fcc30afde2a8/Lib/dis.py#L457
+ co_code=bytes(28),
+)
+
+
+@dataclasses.dataclass(frozen=True)
+class FakeFrame:
+ f_lasti: int
+ f_code: FakeCode
+ f_lineno: int | None = None
+ f_back: FakeFrame | None = None
+ f_globals: dict[str, object] = dataclasses.field(default_factory=dict)
+
+
+@pytest.mark.parametrize(
+ "f_lasti,f_lineno",
+ [
+ (-1, 1),
+ (0, 2),
+ (1, 2),
+ (11, 2),
+ (12, 3),
+ (21, 4),
+ (22, 4),
+ (23, 4),
+ (24, 2),
+ (25, 2),
+ (26, 2),
+ (27, 2),
+ (100, 2),
+ ],
+)
+def test_info_frame_f_lineno(f_lasti: int, f_lineno: int) -> None:
+ assert info_frame(FakeFrame(f_lasti=f_lasti, f_code=FAKE_CODE)) == {
+ "filename": "<stdin>",
+ "name": "example",
+ "line_number": f_lineno,
+ "line": "",
+ }
+
+
+@pytest.mark.parametrize(
+ "f_lasti,f_lineno",
+ [
+ (-1, 1),
+ (0, 2),
+ (1, 2),
+ (11, 2),
+ (12, 3),
+ (21, 4),
+ (22, 4),
+ (23, 4),
+ (24, 2),
+ (25, 2),
+ (26, 2),
+ (27, 2),
+ (100, 2),
+ ],
+)
+def test_call_stack_f_lineno(f_lasti: int, f_lineno: int) -> None:
+ assert call_stack(FakeFrame(f_lasti=f_lasti, f_code=FAKE_CODE)) == [
+ f' File "<stdin>", line {f_lineno}, in example\n\t'
+ ]
Index: distributed-2022.03.0/distributed/utils_test.py
===================================================================
--- distributed-2022.03.0.orig/distributed/utils_test.py
+++ distributed-2022.03.0/distributed/utils_test.py
@@ -706,13 +706,16 @@ def cluster(
except KeyError:
rpc_kwargs = {}
- with rpc(saddr, **rpc_kwargs) as s:
- while True:
- nthreads = loop.run_sync(s.ncores)
- if len(nthreads) == nworkers:
- break
- if time() - start > 5:
- raise Exception("Timeout on cluster creation")
+ async def wait_for_workers():
+ async with rpc(saddr, **rpc_kwargs) as s:
+ while True:
+ nthreads = await s.ncores()
+ if len(nthreads) == nworkers:
+ break
+ if time() - start > 5:
+ raise Exception("Timeout on cluster creation")
+
+ loop.run_sync(wait_for_workers)
# avoid sending processes down to function
yield {"address": saddr}, [
Index: distributed-2022.03.0/setup.py
===================================================================
--- distributed-2022.03.0.orig/setup.py
+++ distributed-2022.03.0/setup.py
@@ -98,8 +98,11 @@ setup(
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
"Topic :: Scientific/Engineering",
"Topic :: System :: Distributed Computing",
],

View File

@ -1,3 +1,27 @@
-------------------------------------------------------------------
Fri Mar 25 19:18:11 UTC 2022 - Ben Greiner <code@bnavigator.de>
- Update to 2022.03.0
* Support dumping cluster state to URL (GH#5863) Gabe Joseph
* Prevent data duplication on unspill (GH#5936) crusaderky
* Encapsulate spill buffer and memory_monitor (GH#5904)
crusaderky
* Drop pkg_resources in favour of importlib.metadata (GH#5923)
Thomas Grainger
* Worker State Machine refactor: redesign TaskState and scheduler
messages (GH#5922) crusaderky
* Tidying of OpenSSL 1.0.2/Python 3.9 (and earlier) handling
(GH#5854) jakirkham
* zict type annotations (GH#5905) crusaderky
* Add key to compute failed message (GH#5928) Florian Jetter
* Change default log format to include timestamp (GH#5897)
Florian Jetter
* Improve type annotations in worker.py (GH#5814) crusaderky
- Add distributed-pr5952-py310.patch -- gh#dask/distributed#5952
- Add distributed-ignore-thread-leaks.patch
- Make the distributed/dask update sync requirement even more
obvious.
-------------------------------------------------------------------
Tue Mar 8 07:46:52 UTC 2022 - Matej Cepl <mcepl@suse.com>

View File

@ -31,15 +31,12 @@
%bcond_without test
%endif
%if "%{flavor}" == "test-py310"
# add to _multibuild when enabling python310 (see below)
%define psuffix -test-py310
%define skip_python38 1
%define skip_python39 1
%bcond_without test
%endif
%if "%{flavor}" == ""
# https://github.com/dask/distributed/issues/5350 -- NOT fixed by https://github.com/dask/distributed/pull/5353
# %%define skip_python310 1
%bcond_with test
%endif
@ -53,18 +50,27 @@
%define cythonize --with-cython
%endif
# use this to run tests with xdist in parallel, unfortunately fails server side
%bcond_with paralleltests
%{?!python_module:%define python_module() python3-%{**}}
%define skip_python2 1
# ===> Note: python-dask MUST be updated in sync with python-distributed! <===
%define ghversiontag 2022.03.0
Name: python-distributed%{psuffix}
# Note: please always update together with python-dask
Version: 2022.02.1
# ===> Note: python-dask MUST be updated in sync with python-distributed! <===
Version: 2022.3.0
Release: 0
Summary: Library for distributed computing with Python
License: BSD-3-Clause
URL: https://distributed.readthedocs.io/en/latest/
Source: https://github.com/dask/distributed/archive/refs/tags//%{version}.tar.gz#/distributed-%{version}-gh.tar.gz
URL: https://distributed.dask.org
Source: https://github.com/dask/distributed/archive/refs/tags/%{ghversiontag}.tar.gz#/distributed-%{ghversiontag}-gh.tar.gz
Source99: python-distributed-rpmlintrc
BuildRequires: %{python_module base >= 3.7}
# PATCH-FIX-UPSTREAM distributed-pr5952-py310.patch -- gh#dask/distributed#5952
Patch1: distributed-pr5952-py310.patch
# PATCH-FIX-OPENSUSE distributed-ignore-thread-leaks.patch -- ignore leaking threads on obs, code@bnavigator.de
Patch2: distributed-ignore-thread-leaks.patch
BuildRequires: %{python_module base >= 3.8}
BuildRequires: %{python_module setuptools}
BuildRequires: fdupes
BuildRequires: python-rpm-macros
@ -112,6 +118,9 @@ BuildRequires: %{python_module tblib}
BuildRequires: %{python_module toolz >= 0.8.2}
BuildRequires: %{python_module tornado >= 6.0.3}
BuildRequires: %{python_module zict >= 0.1.3}
%if %{with paralleltests}
BuildRequires: %{python_module pytest-xdist}
%endif
%endif
%python_subpackages
@ -121,9 +130,10 @@ extends both the concurrent.futures and dask APIs to moderate sized
clusters.
%prep
%autosetup -p1 -n distributed-%{version}
%autosetup -p1 -n distributed-%{ghversiontag}
sed -i '/addopts/ {s/--durations=20//; s/--color=yes//}' setup.cfg
sed -i -e '/addopts/ {s/--durations=20//; s/--color=yes//}' \
-e 's/timeout_method = thread/timeout_method = signal/' setup.cfg
%build
%if ! %{with test}
@ -141,21 +151,44 @@ sed -i '/addopts/ {s/--durations=20//; s/--color=yes//}' setup.cfg
%if %{with test}
%check
# randomly fail server-side -- too slow for obs (?)
# we obviously don't test a git repo
donttest="test_git_revision"
# logger error
donttest+=" or test_version_warning_in_cluster"
# Some tests randomly fail server-side -- too slow for obs (?)
# see also https://github.com/dask/distributed/issues/5818
donttest+=" or (test_asyncprocess and test_exit_callback)"
donttest+=" or (test_nanny and test_throttle_outgoing_connections)"
donttest+=" or (test_scheduler and test_rebalance)"
donttest+=" or (test_tls_functional and test_rebalance)"
donttest+=" or (test_worker and test_fail_write_to_disk)"
donttest+=" or (test_worker and test_multiple_transfers)"
donttest+=" or (test_worker and test_remove_replicas_while_computing)"
donttest+=" or (test_client and test_repr)"
donttest+=" or (test_priorities and test_compute)"
donttest+=" or (test_resources and test_prefer_constrained)"
donttest+=" or (test_steal and test_steal_twice)"
donttest+=" or (test_worker and test_gather_dep_one_worker_always_busy)"
donttest+=" or (test_worker and test_worker_reconnects_mid_compute)"
# Exception messages not caught -- https://github.com/dask/distributed/issues/5460#issuecomment-1079432890
python310_donttest+=" or test_exception_text"
python310_donttest+=" or test_worker_bad_args"
if [[ $(getconf LONG_BIT) -eq 32 ]]; then
# OverflowError
donttest+=" or (test_ensure_spilled_immediately)"
donttest+=" or (test_value_raises_during_spilling)"
# OverflowError -- https://github.com/dask/distributed/issues/5252
donttest+=" or test_ensure_spilled_immediately"
donttest+=" or test_value_raises_during_spilling"
donttest+=" or test_fail_to_pickle_target_1"
fi
%pytest_arch distributed/tests -r sfER -m "not avoid_ci" -k "not (${donttest:4})" --reruns 3 --reruns-delay 3
%if %{with paralleltests}
# not fully supported parallel test suite: https://github.com/dask/distributed/issues/5186
# works locally, but fails with too many tests server-side
notparallel="rebalance or memory or upload"
notparallel+=" or test_open_close_many_workers"
notparallel+=" or test_recreate_error_array"
notparallel+=" or (test_preload and test_web_preload_worker)"
%pytest_arch distributed/tests -m "not avoid_ci" -n auto -k "not ($notparallel or $donttest ${$python_donttest})"
%pytest_arch distributed/tests -m "not avoid_ci" -k "($notparallel) and (not ($donttest ${$python_donttest}))"
%else
%pytest_arch distributed/tests -m "not avoid_ci" -k "not ($donttest ${$python_donttest})" --reruns 3 --reruns-delay 3
%endif
%endif
%if ! %{with test}
@ -172,7 +205,7 @@ fi
%python_alternative %{_bindir}/dask-scheduler
%python_alternative %{_bindir}/dask-worker
%{python_sitearch}/distributed
%{python_sitearch}/distributed-%(echo %{version}|sed -e 's/\.0/./')*-info
%{python_sitearch}/distributed-%{version}*-info
%endif