
Commit 528a645

mathause, dcherian, and pre-commit-ci[bot] authored
tests: move xfail out of functions (#265)
* tests: move xfail out of functions
* remove stray print
* Update tests/test_core.py
* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci
* add engine in test_dtype

---------

Co-authored-by: Deepak Cherian <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 11d5083 commit 528a645
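
The pattern applied throughout this commit: instead of checking for an optional dependency inside the test body and calling pytest.skip(), the dependency-backed parametrize values are wrapped in pytest.param(..., marks=requires_dask), so those cases are skipped at collection time. A minimal stand-alone sketch of the idea; the requires_dask marker below is a hypothetical stand-in built with pytest.mark.skipif, while the real one comes from _importorskip in tests/__init__.py:

# Sketch only: "requires_dask" here is a stand-in for the marker defined in
# tests/__init__.py via _importorskip("dask").
import importlib.util

import pytest

has_dask = importlib.util.find_spec("dask") is not None
requires_dask = pytest.mark.skipif(not has_dask, reason="requires dask")


# Before: the dependency check lives inside the test body.
@pytest.mark.parametrize("chunk", [False, True])
def test_before(chunk):
    if chunk and not has_dask:
        pytest.skip()


# After: only the dask-backed case carries the skip mark, applied at collection.
@pytest.mark.parametrize("chunk", [False, pytest.param(True, marks=requires_dask)])
def test_after(chunk):
    ...

Marking the parameter keeps the skip visible per case in the test report and leaves the test body free of dependency checks.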

File tree

4 files changed: +54 -62 lines changed


tests/__init__.py

Lines changed: 3 additions & 7 deletions
@@ -1,5 +1,5 @@
 import importlib
-from contextlib import contextmanager
+from contextlib import nullcontext

 import numpy as np
 import packaging.version
@@ -46,6 +46,7 @@ def LooseVersion(vstring):


 has_dask, requires_dask = _importorskip("dask")
+has_numba, requires_numba = _importorskip("numba")
 has_xarray, requires_xarray = _importorskip("xarray")


@@ -67,15 +68,10 @@ def __call__(self, dsk, keys, **kwargs):
         return dask.get(dsk, keys, **kwargs)


-@contextmanager
-def dummy_context():
-    yield None
-
-
 def raise_if_dask_computes(max_computes=0):
     # return a dummy context manager so that this can be used for non-dask objects
     if not has_dask:
-        return dummy_context()
+        return nullcontext()
     scheduler = CountingScheduler(max_computes)
     return dask.config.set(scheduler=scheduler)
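
For context: contextlib.nullcontext, added to the standard library in Python 3.7, is a no-op context manager, which is why it can replace the hand-written dummy_context helper removed above. A tiny sketch, independent of the flox test suite:

# contextlib.nullcontext does what the removed dummy_context() did:
# entering it yields None (or an optional enter_result) and exiting does nothing.
from contextlib import nullcontext

with nullcontext() as value:
    assert value is None

# enter_result is passed through unchanged, something the old helper
# could not do without extra code.
with nullcontext(enter_result=42) as value:
    assert value == 42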

tests/conftest.py

Lines changed: 5 additions & 6 deletions
@@ -1,11 +1,10 @@
 import pytest

+from . import requires_numba

-@pytest.fixture(scope="module", params=["flox", "numpy", "numba"])
+
+@pytest.fixture(
+    scope="module", params=["flox", "numpy", pytest.param("numba", marks=requires_numba)]
+)
 def engine(request):
-    if request.param == "numba":
-        try:
-            import numba  # noqa
-        except ImportError:
-            pytest.xfail()
     return request.param
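
A rough sketch of how a (has_numba, requires_numba) pair like the one imported above is typically produced. The actual _importorskip in tests/__init__.py may differ in details such as version handling, so treat this as an assumption rather than the project's exact helper:

# Hypothetical _importorskip-style helper: it returns a boolean flag plus a
# skipif marker, so a missing optional dependency skips the marked parameter
# at collection time instead of xfailing inside the fixture body.
import importlib

import pytest


def _importorskip(modname):
    try:
        importlib.import_module(modname)
        has = True
    except ImportError:
        has = False
    marker = pytest.mark.skipif(not has, reason=f"requires {modname}")
    return has, marker


has_numba, requires_numba = _importorskip("numba")


@pytest.fixture(
    scope="module", params=["flox", "numpy", pytest.param("numba", marks=requires_numba)]
)
def engine(request):
    return request.param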

tests/test_core.py

Lines changed: 37 additions & 26 deletions
@@ -105,7 +105,7 @@ def test_alignment_error():


 @pytest.mark.parametrize("dtype", (float, int))
-@pytest.mark.parametrize("chunk", [False, True])
+@pytest.mark.parametrize("chunk", [False, pytest.param(True, marks=requires_dask)])
 # TODO: make this intp when python 3.8 is dropped
 @pytest.mark.parametrize("expected_groups", [None, [0, 1, 2], np.array([0, 1, 2], dtype=np.int64)])
 @pytest.mark.parametrize(
@@ -145,7 +145,7 @@ def test_groupby_reduce(
 ) -> None:
     array = array.astype(dtype)
     if chunk:
-        if not has_dask or expected_groups is None:
+        if expected_groups is None:
             pytest.skip()
         array = da.from_array(array, chunks=(3,) if array.ndim == 1 else (1, 3))
         by = da.from_array(by, chunks=(3,) if by.ndim == 1 else (1, 3))
@@ -166,7 +166,7 @@ def test_groupby_reduce(
         engine=engine,
     )
     # we use pd.Index(expected_groups).to_numpy() which is always int64
-    # for the values in this tests
+    # for the values in this test
     if expected_groups is None:
         g_dtype = by.dtype
     elif isinstance(expected_groups, np.ndarray):
@@ -191,14 +191,20 @@ def gen_array_by(size, func):
     return array, by


-@pytest.mark.parametrize("chunks", [None, -1, 3, 4])
+@pytest.mark.parametrize(
+    "chunks",
+    [
+        None,
+        pytest.param(-1, marks=requires_dask),
+        pytest.param(3, marks=requires_dask),
+        pytest.param(4, marks=requires_dask),
+    ],
+)
 @pytest.mark.parametrize("nby", [1, 2, 3])
 @pytest.mark.parametrize("size", ((12,), (12, 9)))
 @pytest.mark.parametrize("add_nan_by", [True, False])
 @pytest.mark.parametrize("func", ALL_FUNCS)
 def test_groupby_reduce_all(nby, size, chunks, func, add_nan_by, engine):
-    if chunks is not None and not has_dask:
-        pytest.skip()
     if "arg" in func and engine == "flox":
         pytest.skip()

@@ -390,16 +396,16 @@ def test_numpy_reduce_nd_md():
 def test_groupby_agg_dask(func, shape, array_chunks, group_chunks, add_nan, dtype, engine, reindex):
     """Tests groupby_reduce with dask arrays against groupby_reduce with numpy arrays"""

-    rng = np.random.default_rng(12345)
-    array = dask.array.from_array(rng.random(shape), chunks=array_chunks).astype(dtype)
-    array = dask.array.ones(shape, chunks=array_chunks)
-
     if func in ["first", "last"]:
         pytest.skip()

     if "arg" in func and (engine == "flox" or reindex):
         pytest.skip()

+    rng = np.random.default_rng(12345)
+    array = dask.array.from_array(rng.random(shape), chunks=array_chunks).astype(dtype)
+    array = dask.array.ones(shape, chunks=array_chunks)
+
     labels = np.array([0, 0, 2, 2, 2, 1, 1, 2, 2, 1, 1, 0])
     if add_nan:
         labels = labels.astype(float)
@@ -612,7 +618,14 @@ def test_groupby_reduce_axis_subset_against_numpy(func, axis, engine):
     assert_equal(actual, expected, tolerance)


-@pytest.mark.parametrize("reindex,chunks", [(None, None), (False, (2, 2, 3)), (True, (2, 2, 3))])
+@pytest.mark.parametrize(
+    "reindex, chunks",
+    [
+        (None, None),
+        pytest.param(False, (2, 2, 3), marks=requires_dask),
+        pytest.param(True, (2, 2, 3), marks=requires_dask),
+    ],
+)
 @pytest.mark.parametrize(
     "axis, groups, expected_shape",
     [
@@ -624,8 +637,6 @@ def test_groupby_reduce_axis_subset_against_numpy(func, axis, engine):
 def test_groupby_reduce_nans(reindex, chunks, axis, groups, expected_shape, engine):
     def _maybe_chunk(arr):
         if chunks:
-            if not has_dask:
-                pytest.skip()
             return da.from_array(arr, chunks=chunks)
         else:
             return arr
@@ -739,7 +750,14 @@ def test_npg_nanarg_bug(func):
 )
 @pytest.mark.parametrize("method", ["cohorts", "map-reduce"])
 @pytest.mark.parametrize("chunk_labels", [False, True])
-@pytest.mark.parametrize("chunks", ((), (1,), (2,)))
+@pytest.mark.parametrize(
+    "chunks",
+    (
+        (),
+        pytest.param((1,), marks=requires_dask),
+        pytest.param((2,), marks=requires_dask),
+    ),
+)
 def test_groupby_bins(chunk_labels, kwargs, chunks, engine, method) -> None:
     array = [1, 1, 1, 1, 1, 1]
     labels = [0.2, 1.5, 1.9, 2, 3, 20]
@@ -748,8 +766,6 @@ def test_groupby_bins(chunk_labels, kwargs, chunks, engine, method) -> None:
         pytest.xfail()

     if chunks:
-        if not has_dask:
-            pytest.skip()
         array = dask.array.from_array(array, chunks=chunks)
         if chunk_labels:
             labels = dask.array.from_array(labels, chunks=chunks)
@@ -825,16 +841,14 @@ def test_rechunk_for_cohorts(chunk_at, expected):
     assert rechunked.chunks == expected


-@pytest.mark.parametrize("chunks", [None, 3])
+@pytest.mark.parametrize("chunks", [None, pytest.param(3, marks=requires_dask)])
 @pytest.mark.parametrize("fill_value", [123, np.nan])
 @pytest.mark.parametrize("func", ALL_FUNCS)
 def test_fill_value_behaviour(func, chunks, fill_value, engine):
     # fill_value = np.nan tests promotion of int counts to float
     # This is used by xarray
     if func in ["all", "any"] or "arg" in func:
         pytest.skip()
-    if chunks is not None and not has_dask:
-        pytest.skip()

     npfunc = _get_array_func(func)
     by = np.array([1, 2, 3, 1, 2, 3])
@@ -1050,11 +1064,8 @@ def test_factorize_values_outside_bins():
     assert_equal(expected, actual)


-@pytest.mark.parametrize("chunk", [True, False])
+@pytest.mark.parametrize("chunk", [pytest.param(True, marks=requires_dask), False])
 def test_multiple_groupers_bins(chunk) -> None:
-    if chunk and not has_dask:
-        pytest.skip()
-
     xp = dask.array if chunk else np
     array_kwargs = {"chunks": 2} if chunk else {}
     array = xp.ones((5, 2), **array_kwargs, dtype=np.int64)
@@ -1087,9 +1098,9 @@ def test_multiple_groupers_bins(chunk) -> None:
         np.arange(2, 4).reshape(1, 2),
     ],
 )
-@pytest.mark.parametrize("chunk", [True, False])
+@pytest.mark.parametrize("chunk", [pytest.param(True, marks=requires_dask), False])
 def test_multiple_groupers(chunk, by1, by2, expected_groups) -> None:
-    if chunk and (not has_dask or expected_groups is None):
+    if chunk and expected_groups is None:
         pytest.skip()

     xp = dask.array if chunk else np
@@ -1233,7 +1244,7 @@ def test_dtype(func, dtype, engine):
         pytest.skip()
     arr = np.ones((4, 12), dtype=dtype)
     labels = np.array(["a", "a", "c", "c", "c", "b", "b", "c", "c", "b", "b", "f"])
-    actual, _ = groupby_reduce(arr, labels, func=func, dtype=np.float64)
+    actual, _ = groupby_reduce(arr, labels, func=func, dtype=np.float64, engine=engine)
     assert actual.dtype == np.dtype("float64")

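
The last hunk threads the engine fixture through to groupby_reduce. For context, a minimal call mirroring the updated test_dtype assertion; this assumes flox is installed and that groupby_reduce is imported from flox.core as in tests/test_core.py:

# Usage sketch mirroring the test above: groupby_reduce returns the reduced
# array and the group labels; dtype sets the output dtype and engine selects
# the reduction backend ("flox", "numpy", or "numba").
import numpy as np

from flox.core import groupby_reduce

arr = np.ones((4, 12))
labels = np.array(["a", "a", "c", "c", "c", "b", "b", "c", "c", "b", "b", "f"])

actual, groups = groupby_reduce(arr, labels, func="sum", dtype=np.float64, engine="numpy")
assert actual.dtype == np.dtype("float64")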

tests/test_xarray.py

Lines changed: 9 additions & 23 deletions
@@ -16,7 +16,7 @@
     dask.config.set(scheduler="sync")

 try:
-    # Should test against legacy xarray implementation
+    # test against legacy xarray implementation
     xr.set_options(use_flox=False)
 except ValueError:
     pass
@@ -31,15 +31,15 @@
 @pytest.mark.parametrize("add_nan", [True, False])
 @pytest.mark.parametrize("skipna", [True, False])
 def test_xarray_reduce(skipna, add_nan, min_count, engine, reindex):
+    if skipna is False and min_count is not None:
+        pytest.skip()
+
     arr = np.ones((4, 12))

     if add_nan:
         arr[1, ...] = np.nan
         arr[[0, 2], [3, 4]] = np.nan

-    if skipna is False and min_count is not None:
-        pytest.skip()
-
     labels = np.array(["a", "a", "c", "c", "c", "b", "b", "c", "c", "b", "b", "f"])
     labels = np.array(labels)
     labels2 = np.array([1, 2, 2, 1])
@@ -77,11 +77,8 @@ def test_xarray_reduce(skipna, add_nan, min_count, engine, reindex):

 # TODO: sort
 @pytest.mark.parametrize("pass_expected_groups", [True, False])
-@pytest.mark.parametrize("chunk", (True, False))
+@pytest.mark.parametrize("chunk", (pytest.param(True, marks=requires_dask), False))
 def test_xarray_reduce_multiple_groupers(pass_expected_groups, chunk, engine):
-    if not has_dask and chunk:
-        pytest.skip()
-
     if chunk and pass_expected_groups is False:
         pytest.skip()

@@ -126,11 +123,8 @@ def test_xarray_reduce_multiple_groupers(pass_expected_groups, chunk, engine):


 @pytest.mark.parametrize("pass_expected_groups", [True, False])
-@pytest.mark.parametrize("chunk", (True, False))
+@pytest.mark.parametrize("chunk", (pytest.param(True, marks=requires_dask), False))
 def test_xarray_reduce_multiple_groupers_2(pass_expected_groups, chunk, engine):
-    if not has_dask and chunk:
-        pytest.skip()
-
     if chunk and pass_expected_groups is False:
         pytest.skip()

@@ -317,14 +311,12 @@ def test_multi_index_groupby_sum(engine):
     assert_equal(expected, actual)


-@pytest.mark.parametrize("chunks", (None, 2))
+@pytest.mark.parametrize("chunks", (None, pytest.param(2, marks=requires_dask)))
 def test_xarray_groupby_bins(chunks, engine):
     array = xr.DataArray([1, 1, 1, 1, 1], dims="x")
     labels = xr.DataArray([1, 1.5, 1.9, 2, 3], dims="x", name="labels")

     if chunks:
-        if not has_dask:
-            pytest.skip()
         array = array.chunk({"x": chunks})
         labels = labels.chunk({"x": chunks})

@@ -472,11 +464,8 @@ def test_alignment_error():
 @pytest.mark.parametrize("add_nan", [True, False])
 @pytest.mark.parametrize("dtype_out", [np.float64, "float64", np.dtype("float64")])
 @pytest.mark.parametrize("dtype", [np.float32, np.float64])
-@pytest.mark.parametrize("chunk", (True, False))
+@pytest.mark.parametrize("chunk", (pytest.param(True, marks=requires_dask), False))
 def test_dtype(add_nan, chunk, dtype, dtype_out, engine):
-    if chunk and not has_dask:
-        pytest.skip()
-
     xp = dask.array if chunk else np
     data = xp.linspace(0, 1, 48, dtype=dtype).reshape((4, 12))

@@ -508,12 +497,9 @@ def test_dtype(add_nan, chunk, dtype, dtype_out, engine):
     xr.testing.assert_allclose(expected, actual.transpose("labels", ...), **tolerance64)


-@pytest.mark.parametrize("chunk", [True, False])
+@pytest.mark.parametrize("chunk", [pytest.param(True, marks=requires_dask), False])
 @pytest.mark.parametrize("use_flox", [True, False])
 def test_dtype_accumulation(use_flox, chunk):
-    if chunk and not has_dask:
-        pytest.skip()
-
     datetimes = pd.date_range("2010-01", "2015-01", freq="6H", inclusive="left")
     samples = 10 + np.cos(2 * np.pi * 0.001 * np.arange(len(datetimes))) * 1
     samples += np.random.randn(len(datetimes))
