
Commit 39bda44

Authored by dcherian, with co-authors max-sixty, TomNicholas, and andersy005
Bump min deps (#6559)
Co-authored-by: Maximilian Roos <[email protected]>
Co-authored-by: Tom Nicholas <[email protected]>
Co-authored-by: Anderson Banihirwe <[email protected]>
1 parent 126051f commit 39bda44

File tree

5 files changed (+43, -147 lines)

ci/requirements/min-all-deps.yml

Lines changed: 17 additions & 18 deletions
@@ -10,46 +10,45 @@ dependencies:
   - python=3.8
   - boto3=1.13
   - bottleneck=1.3
-  # cartopy 0.18 conflicts with pynio
-  - cartopy=0.17
+  - cartopy=0.19
   - cdms2=3.1
   - cfgrib=0.9
-  - cftime=1.2
+  - cftime=1.4
   - coveralls
-  - dask-core=2.30
-  - distributed=2.30
-  - h5netcdf=0.8
-  - h5py=2.10
-  # hdf5 1.12 conflicts with h5py=2.10
+  - dask-core=2021.04
+  - distributed=2021.04
+  - h5netcdf=0.11
+  - h5py=3.1
+  # hdf5 1.12 conflicts with h5py=3.1
   - hdf5=1.10
   - hypothesis
   - iris=2.4
   - lxml=4.6  # Optional dep of pydap
-  - matplotlib-base=3.3
+  - matplotlib-base=3.4
   - nc-time-axis=1.2
   # netcdf follows a 1.major.minor[.patch] convention
   # (see https://github.com/Unidata/netcdf4-python/issues/1090)
   # bumping the netCDF4 version is currently blocked by #4491
   - netcdf4=1.5.3
-  - numba=0.51
-  - numpy=1.18
+  - numba=0.53
+  - numpy=1.19
   - packaging=20.0
-  - pandas=1.1
-  - pint=0.16
+  - pandas=1.2
+  - pint=0.17
   - pip
   - pseudonetcdf=3.1
   - pydap=3.2
-  - pynio=1.5
+  # - pynio=1.5.5
   - pytest
   - pytest-cov
   - pytest-env
   - pytest-xdist
-  - rasterio=1.1
-  - scipy=1.5
+  - rasterio=1.2
+  - scipy=1.6
   - seaborn=0.11
-  - sparse=0.11
+  - sparse=0.12
   - toolz=0.11
   - typing_extensions=3.7
-  - zarr=2.5
+  - zarr=2.8
   - pip:
     - numbagg==0.1
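
Note that the dask-core and distributed pins switch from dask's old SemVer-style numbering (2.30) to its calendar-based scheme (2021.04). A minimal sketch (not part of the commit) showing that packaging, itself a required dependency, orders the two schemes correctly, which is why plain minimum-version comparisons keep working across the switch:

from packaging.version import Version

# Calendar-versioned dask releases compare greater than the older
# SemVer-style releases, so a single Version comparison still expresses
# "dask is at least 2021.04".
assert Version("2021.04.0") > Version("2.30.0")
assert Version("2021.06.0") >= Version("2021.04")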

doc/getting-started-guide/installing.rst

Lines changed: 2 additions & 2 deletions
@@ -7,9 +7,9 @@ Required dependencies
 ---------------------
 
 - Python (3.8 or later)
-- `numpy <https://www.numpy.org/>`__ (1.18 or later)
+- `numpy <https://www.numpy.org/>`__ (1.19 or later)
 - `packaging <https://packaging.pypa.io/en/latest/#>`__ (20.0 or later)
-- `pandas <https://pandas.pydata.org/>`__ (1.1 or later)
+- `pandas <https://pandas.pydata.org/>`__ (1.2 or later)
 
 .. _optional-dependencies:
 
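A quick, illustrative way (not part of the commit; assumes Python >= 3.8, where importlib.metadata is in the standard library) to check whether an existing environment already meets the bumped required-dependency minimums:

from importlib.metadata import version
from packaging.version import Version

# Required-dependency minimums after this change.
minimums = {"numpy": "1.19", "pandas": "1.2", "packaging": "20.0"}
for name, minimum in minimums.items():
    installed = Version(version(name))
    assert installed >= Version(minimum), f"{name} {installed} is older than {minimum}"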

doc/whats-new.rst

Lines changed: 20 additions & 0 deletions
@@ -45,6 +45,26 @@ New Features
 Breaking changes
 ~~~~~~~~~~~~~~~~
 
+- PyNIO support is now untested. The minimum versions of some dependencies were changed:
+
+  =============== ===== ====
+  Package         Old   New
+  =============== ===== ====
+  cftime          1.2   1.4
+  dask            2.30  2021.4
+  distributed     2.30  2021.4
+  h5netcdf        0.8   0.11
+  matplotlib-base 3.3   3.4
+  numba           0.51  0.53
+  numpy           1.18  1.19
+  pandas          1.1   1.2
+  pint            0.16  0.17
+  rasterio        1.1   1.2
+  scipy           1.5   1.6
+  sparse          0.11  0.12
+  zarr            2.5   2.8
+  =============== ===== ====
+
 - The Dataset and DataArray ``rename*`` methods do not implicitly add or drop
   indexes. (:pull:`5692`).
   By `Benoît Bovy <https://github.com/benbovy>`_.

setup.cfg

Lines changed: 2 additions & 2 deletions
@@ -75,8 +75,8 @@ zip_safe = False  # https://mypy.readthedocs.io/en/latest/installed_packages.htm
 include_package_data = True
 python_requires = >=3.8
 install_requires =
-    numpy >= 1.18
-    pandas >= 1.1
+    numpy >= 1.19
+    pandas >= 1.2
     packaging >= 20.0
 
 [options.extras_require]

xarray/core/dask_array_compat.py

Lines changed: 2 additions & 125 deletions
@@ -1,9 +1,6 @@
 import warnings
 
 import numpy as np
-from packaging.version import Version
-
-from .pycompat import dask_version
 
 try:
     import dask.array as da
@@ -57,127 +54,7 @@ def pad(array, pad_width, mode="constant", **kwargs):
     return padded
 
 
-if dask_version > Version("2.30.0"):
-    ensure_minimum_chunksize = da.overlap.ensure_minimum_chunksize
-else:
-
-    # copied from dask
-    def ensure_minimum_chunksize(size, chunks):
-        """Determine new chunks to ensure that every chunk >= size
-
-        Parameters
-        ----------
-        size : int
-            The maximum size of any chunk.
-        chunks : tuple
-            Chunks along one axis, e.g. ``(3, 3, 2)``
-
-        Examples
-        --------
-        >>> ensure_minimum_chunksize(10, (20, 20, 1))
-        (20, 11, 10)
-        >>> ensure_minimum_chunksize(3, (1, 1, 3))
-        (5,)
-
-        See Also
-        --------
-        overlap
-        """
-        if size <= min(chunks):
-            return chunks
-
-        # add too-small chunks to chunks before them
-        output = []
-        new = 0
-        for c in chunks:
-            if c < size:
-                if new > size + (size - c):
-                    output.append(new - (size - c))
-                    new = size
-                else:
-                    new += c
-            if new >= size:
-                output.append(new)
-                new = 0
-            if c >= size:
-                new += c
-        if new >= size:
-            output.append(new)
-        elif len(output) >= 1:
-            output[-1] += new
-        else:
-            raise ValueError(
-                f"The overlapping depth {size} is larger than your "
-                f"array {sum(chunks)}."
-            )
-
-        return tuple(output)
-
-
-if dask_version > Version("2021.03.0"):
+if da is not None:
     sliding_window_view = da.lib.stride_tricks.sliding_window_view
 else:
-
-    def sliding_window_view(x, window_shape, axis=None):
-        from dask.array.overlap import map_overlap
-        from numpy.core.numeric import normalize_axis_tuple
-
-        from .npcompat import sliding_window_view as _np_sliding_window_view
-
-        window_shape = (
-            tuple(window_shape) if np.iterable(window_shape) else (window_shape,)
-        )
-
-        window_shape_array = np.array(window_shape)
-        if np.any(window_shape_array <= 0):
-            raise ValueError("`window_shape` must contain positive values")
-
-        if axis is None:
-            axis = tuple(range(x.ndim))
-            if len(window_shape) != len(axis):
-                raise ValueError(
-                    f"Since axis is `None`, must provide "
-                    f"window_shape for all dimensions of `x`; "
-                    f"got {len(window_shape)} window_shape elements "
-                    f"and `x.ndim` is {x.ndim}."
-                )
-        else:
-            axis = normalize_axis_tuple(axis, x.ndim, allow_duplicate=True)
-            if len(window_shape) != len(axis):
-                raise ValueError(
-                    f"Must provide matching length window_shape and "
-                    f"axis; got {len(window_shape)} window_shape "
-                    f"elements and {len(axis)} axes elements."
-                )
-
-        depths = [0] * x.ndim
-        for ax, window in zip(axis, window_shape):
-            depths[ax] += window - 1
-
-        # Ensure that each chunk is big enough to leave at least a size-1 chunk
-        # after windowing (this is only really necessary for the last chunk).
-        safe_chunks = tuple(
-            ensure_minimum_chunksize(d + 1, c) for d, c in zip(depths, x.chunks)
-        )
-        x = x.rechunk(safe_chunks)
-
-        # result.shape = x_shape_trimmed + window_shape,
-        # where x_shape_trimmed is x.shape with every entry
-        # reduced by one less than the corresponding window size.
-        # trim chunks to match x_shape_trimmed
-        newchunks = tuple(
-            c[:-1] + (c[-1] - d,) for d, c in zip(depths, x.chunks)
-        ) + tuple((window,) for window in window_shape)
-
-        kwargs = dict(
-            depth=tuple((0, d) for d in depths),  # Overlap on +ve side only
-            boundary="none",
-            meta=x._meta,
-            new_axis=range(x.ndim, x.ndim + len(axis)),
-            chunks=newchunks,
-            trim=False,
-            window_shape=window_shape,
-            axis=axis,
-        )
-
-        return map_overlap(_np_sliding_window_view, x, align_arrays=False, **kwargs)
+    sliding_window_view = None
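
With the minimum dask now at 2021.04, the vendored backports removed above are unnecessary and xarray defers to dask's own implementations. A small illustrative sketch (not part of the commit) exercising the two dask functions the deleted code wrapped:

import dask.array as da
from dask.array.lib.stride_tricks import sliding_window_view
from dask.array.overlap import ensure_minimum_chunksize

# Merge chunks smaller than the requested size into their neighbours;
# the expected output comes from the docstring of the removed backport.
print(ensure_minimum_chunksize(10, (20, 20, 1)))  # (20, 11, 10)

# Windowed view of a chunked array: length 10 with window 3 gives 8 windows.
x = da.arange(10, chunks=4)
windows = sliding_window_view(x, window_shape=3, axis=0)
print(windows.shape)         # (8, 3)
print(windows.compute()[0])  # [0 1 2]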
