forked from pool/python-xarray

Accepting request 872458 from home:bnavigator:branches:devel:languages:python:numeric

- Disable python36 build: python36-numpy is no longer in Tumbleweed
  xarray will follow and drop Python 3.6 support in its next release.
  (NEP 29)
- Add xarray-pr4884-dask2021.patch gh#pydata/xarray#4884
- Enable parallel testing with pytest-xdist
- Recommend/Suggest the extras

OBS-URL: https://build.opensuse.org/request/show/872458
OBS-URL: https://build.opensuse.org/package/show/devel:languages:python:numeric/python-xarray?expand=0&rev=43
Markéta Machová 2021-02-15 13:28:30 +00:00 committed by Git OBS Bridge
parent a574211654
commit ffcf88ca69
3 changed files with 167 additions and 10 deletions

python-xarray.changes

@@ -1,3 +1,13 @@
-------------------------------------------------------------------
Mon Feb 15 12:30:53 UTC 2021 - Ben Greiner <code@bnavigator.de>
- Disable python36 build: python36-numpy is no longer in Tumbleweed
xarray will follow and drop Python 3.6 support in its next release.
(NEP 29)
- Add xarray-pr4884-dask2021.patch gh#pydata/xarray#4884
- Enable parallel testing with pytest-xdist
- Recommend/Suggest the extras
-------------------------------------------------------------------
Sun Dec 20 16:09:14 UTC 2020 - Sebastian Wagner <sebix+novell.com@sebix.at>

python-xarray.spec

@@ -1,7 +1,7 @@
#
# spec file for package python-xarray
#
# Copyright (c) 2020 SUSE LLC
# Copyright (c) 2021 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -18,6 +18,8 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
%define skip_python2 1
# NEP 29: NumPy 1.20 dropped support for Python 3.6 and python36-numpy was removed from Tumbleweed. xarray will follow with its next release
%define skip_python36 1
Name: python-xarray
Version: 0.16.2
Release: 0
@@ -25,6 +27,8 @@ Summary: N-D labeled arrays and datasets in Python
License: Apache-2.0
URL: https://github.com/pydata/xarray
Source: https://files.pythonhosted.org/packages/source/x/xarray/xarray-%{version}.tar.gz
# PATCH-FIX-UPSTREAM xarray-pr4884-dask2021.patch gh#pydata/xarray#4884
Patch0: xarray-pr4884-dask2021.patch
BuildRequires: %{python_module numpy >= 1.15}
BuildRequires: %{python_module numpy-devel >= 1.14}
BuildRequires: %{python_module pandas >= 0.25}
@@ -34,15 +38,35 @@ BuildRequires: fdupes
BuildRequires: python-rpm-macros
Requires: python-numpy >= 1.15
Requires: python-pandas >= 0.25
Recommends: python-scipy >= 1.3
Suggests: python-dask >= 2.2
Provides: python-xray = %{version}
Obsoletes: python-xray < %{version}
BuildArch: noarch
Suggests: python-dask-all
# SECTION extras accel
Recommends: python-scipy >= 1.3
Recommends: python-bottleneck
Recommends: python-numbagg
# /SECTION
# SECTION extras viz
Suggests: python-matplotlib
Suggests: python-seaborn
Suggests: python-nc-time-axis
#/SECTION
# SECTION extras io
Suggests: python-netCDF4
Suggests: python-h5netcdf
Suggests: python-scipy
Suggests: python-pydap
Suggests: python-zarr
Suggests: python-fsspec
Suggests: python-cftime
Suggests: python-rasterio
Suggests: python-cfgrib
#/SECTION
# SECTION tests
# dask tests currently failing
# BuildRequires: %%{python_module dask-dataframe}
BuildRequires: %{python_module pytest >= 2.7.1}
BuildRequires: %{python_module dask-dataframe}
BuildRequires: %{python_module pytest-xdist}
BuildRequires: %{python_module pytest}
BuildRequires: %{python_module scipy}
# /SECTION
%python_subpackages
@@ -57,7 +81,7 @@ The Common Data Model for self-describing scientific data is used.
The dataset is an in-memory representation of a netCDF file.
%prep
%setup -q -n xarray-%{version}
%autosetup -p1 -n xarray-%{version}
chmod -x xarray/util/print_versions.py
%build
@@ -68,12 +92,16 @@ chmod -x xarray/util/print_versions.py
%python_expand %fdupes %{buildroot}%{$python_sitelib}
%check
# test_no_warning_from_dask_effective_get fails due to upstream scipy warning
%pytest -k "not test_download_from_github and not test_no_warning_from_dask_effective_get" xarray
if [ $(getconf LONG_BIT) -eq 32 ]; then
# precision errors on 32-bit
donttest="(test_interpolate_chunk_advanced and linear)"
fi
%pytest -n auto ${donttest:+ -k "not ($donttest)"} xarray
%files %{python_files}
%doc README.rst
%license LICENSE licenses/
%{python_sitelib}/xarray*
%{python_sitelib}/xarray
%{python_sitelib}/xarray-%{version}*-info
%changelog
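
For context on the %check change above: `getconf LONG_BIT` enables the extra deselection only on 32-bit builds, and `${donttest:+ -k "not ($donttest)"}` expands to a pytest `-k` filter only when `donttest` was set. A minimal Python sketch of an equivalent invocation outside the spec, assuming pytest and pytest-xdist are installed (the script name and argument values are illustrative only, not part of the package):

# run_xarray_tests.py -- illustrative sketch, not part of the package
import struct
import sys

import pytest

# Equivalent of the spec's `getconf LONG_BIT` test: pointer size in bits.
is_32bit = struct.calcsize("P") * 8 == 32

args = ["-n", "auto", "xarray"]  # pytest-xdist: one worker per CPU
if is_32bit:
    # Same deselection the spec applies, because this combination hits
    # floating-point precision errors on 32-bit targets.
    args += ["-k", "not (test_interpolate_chunk_advanced and linear)"]

sys.exit(pytest.main(args))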

xarray-pr4884-dask2021.patch (new file)

@@ -0,0 +1,119 @@
From a825faaf60dc75e0365f18a0f24acb0fe288b263 Mon Sep 17 00:00:00 2001
From: crusaderky <crusaderky@gmail.com>
Date: Tue, 9 Feb 2021 16:03:14 +0000
Subject: [PATCH 1/2] Compatibility with dask 2021.02.0
---
ci/requirements/environment-windows.yml | 2 +-
ci/requirements/environment.yml | 2 +-
xarray/core/dataset.py | 32 +++++++++++++++++++------
3 files changed, 27 insertions(+), 9 deletions(-)
Index: xarray-0.16.2/xarray/core/dataset.py
===================================================================
--- xarray-0.16.2.orig/xarray/core/dataset.py
+++ xarray-0.16.2/xarray/core/dataset.py
@@ -809,13 +809,12 @@ class Dataset(Mapping, ImplementsDataset
import dask
info = [
- (True, k, v.__dask_postcompute__())
+ (k, None) + v.__dask_postcompute__()
if dask.is_dask_collection(v)
- else (False, k, v)
+ else (k, v, None, None)
for k, v in self._variables.items()
]
- args = (
- info,
+ construct_direct_args = (
self._coord_names,
self._dims,
self._attrs,
@@ -823,19 +822,18 @@ class Dataset(Mapping, ImplementsDataset
self._encoding,
self._file_obj,
)
- return self._dask_postcompute, args
+ return self._dask_postcompute, (info, construct_direct_args)
def __dask_postpersist__(self):
import dask
info = [
- (True, k, v.__dask_postpersist__())
+ (k, None, v.__dask_keys__()) + v.__dask_postpersist__()
if dask.is_dask_collection(v)
- else (False, k, v)
+ else (k, v, None, None, None)
for k, v in self._variables.items()
]
- args = (
- info,
+ construct_direct_args = (
self._coord_names,
self._dims,
self._attrs,
@@ -843,45 +841,37 @@ class Dataset(Mapping, ImplementsDataset
self._encoding,
self._file_obj,
)
- return self._dask_postpersist, args
+ return self._dask_postpersist, (info, construct_direct_args)
@staticmethod
- def _dask_postcompute(results, info, *args):
+ def _dask_postcompute(results, info, construct_direct_args):
variables = {}
- results2 = list(results[::-1])
- for is_dask, k, v in info:
- if is_dask:
- func, args2 = v
- r = results2.pop()
- result = func(r, *args2)
+ results_iter = iter(results)
+ for k, v, rebuild, rebuild_args in info:
+ if v is None:
+ variables[k] = rebuild(next(results_iter), *rebuild_args)
else:
- result = v
- variables[k] = result
+ variables[k] = v
- final = Dataset._construct_direct(variables, *args)
+ final = Dataset._construct_direct(variables, *construct_direct_args)
return final
@staticmethod
- def _dask_postpersist(dsk, info, *args):
+ def _dask_postpersist(dsk, info, construct_direct_args):
+ from dask.optimization import cull
+
variables = {}
# postpersist is called in both dask.optimize and dask.persist
# When persisting, we want to filter out unrelated keys for
# each Variable's task graph.
- is_persist = len(dsk) == len(info)
- for is_dask, k, v in info:
- if is_dask:
- func, args2 = v
- if is_persist:
- name = args2[1][0]
- dsk2 = {k: v for k, v in dsk.items() if k[0] == name}
- else:
- dsk2 = dsk
- result = func(dsk2, *args2)
+ for k, v, dask_keys, rebuild, rebuild_args in info:
+ if v is None:
+ dsk2, _ = cull(dsk, dask_keys)
+ variables[k] = rebuild(dsk2, *rebuild_args)
else:
- result = v
- variables[k] = result
+ variables[k] = v
- return Dataset._construct_direct(variables, *args)
+ return Dataset._construct_direct(variables, *construct_direct_args)
def compute(self, **kwargs) -> "Dataset":
"""Manually trigger loading and/or computation of this dataset's data