From e98bf367aa423714fa647c1ebafb7248fea4db0b Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 6 Sep 2017 21:42:45 -0700 Subject: [PATCH 1/9] move requires_pynio to fix GH1531, refactor tests/__init__.py --- xarray/tests/__init__.py | 133 +++++++++------------------------- xarray/tests/test_backends.py | 4 +- 2 files changed, 37 insertions(+), 100 deletions(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 05c4cd340cb..2fd2aa95518 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -5,6 +5,7 @@ from contextlib import contextmanager from distutils.version import LooseVersion import re +import importlib import numpy as np from numpy.testing import assert_array_equal @@ -15,121 +16,57 @@ from xarray.core.pycompat import PY3 from xarray.testing import assert_equal, assert_identical, assert_allclose -try: - import unittest2 as unittest -except ImportError: - import unittest -try: - from unittest import mock -except ImportError: - import mock - -try: - import scipy - has_scipy = True -except ImportError: - has_scipy = False - -try: - import pydap.client - has_pydap = True -except ImportError: - has_pydap = False +def _importorskip(modname, minversion=None): + try: + mod = importlib.import_module(modname) + has = True + if minversion is not None: + if LooseVersion(mod.__version__) < LooseVersion(minversion): + raise ImportError('Minimum version not satisfied') + except (ImportError, ModuleNotFoundError): + has = False -try: - import netCDF4 - has_netCDF4 = True -except ImportError: - has_netCDF4 = False + return (has, pytest.mark.skipif((not has), + reason='requires {}'.format(modname))) try: - import h5netcdf - has_h5netcdf = True + import unittest2 as unittest except ImportError: - has_h5netcdf = False - + import unittest try: - import Nio - has_pynio = True + from unittest import mock except ImportError: - has_pynio = False + import mock +has_matplotlib, requires_matplotlib = _importorskip('matplotlib') +has_scipy, 
requires_scipy = _importorskip('scipy') +has_pydap, requires_pydap = _importorskip('pydap.client') +has_netCDF4, requires_netCDF4 = _importorskip('netCDF4') +has_h5netcdf, requires_h5netcdf = _importorskip('h5netcdf') +has_pynio, requires_pynio = _importorskip('pynio') +has_dask, requires_dask = _importorskip('dask') +has_bottleneck, requires_bottleneck = _importorskip('bottleneck', '1.0') +has_rasterio, requires_rasterio = _importorskip('rasterio') +has_pathlib, requires_pathlib = _importorskip('pathlib') + +# some special cases +has_scipy_or_netCDF4 = has_scipy or has_netCDF4 +requires_scipy_or_netCDF4 = pytest.mark.skipif( + not has_scipy_or_netCDF4, reason='requires scipy or netCDF4') +if not has_pathlib: + has_pathlib, requires_pathlib = _importorskip('pathlib2') -try: - import dask.array +if has_dask: import dask dask.set_options(get=dask.get) - has_dask = True -except ImportError: - has_dask = False - - -try: - import matplotlib - has_matplotlib = True -except ImportError: - has_matplotlib = False - - -try: - import bottleneck - if LooseVersion(bottleneck.__version__) < LooseVersion('1.0'): - raise ImportError('Fall back to numpy') - has_bottleneck = True -except ImportError: - has_bottleneck = False - -try: - import rasterio - has_rasterio = True -except ImportError: - has_rasterio = False - -try: - import pathlib - has_pathlib = True -except ImportError: - try: - import pathlib2 - has_pathlib = True - except ImportError: - has_pathlib = False - - -# slighly simpler construction that the full functions. 
-# Generally `pytest.importorskip('package')` inline is even easier -requires_matplotlib = pytest.mark.skipif( - not has_matplotlib, reason='requires matplotlib') -requires_scipy = pytest.mark.skipif( - not has_scipy, reason='requires scipy') -requires_pydap = pytest.mark.skipif( - not has_pydap, reason='requires pydap') -requires_netCDF4 = pytest.mark.skipif( - not has_netCDF4, reason='requires netCDF4') -requires_h5netcdf = pytest.mark.skipif( - not has_h5netcdf, reason='requires h5netcdf') -requires_pynio = pytest.mark.skipif( - not has_pynio, reason='requires pynio') -requires_scipy_or_netCDF4 = pytest.mark.skipif( - not has_scipy and not has_netCDF4, reason='requires scipy or netCDF4') -requires_dask = pytest.mark.skipif( - not has_dask, reason='requires dask') -requires_bottleneck = pytest.mark.skipif( - not has_bottleneck, reason='requires bottleneck') -requires_rasterio = pytest.mark.skipif( - not has_rasterio, reason='requires rasterio') -requires_pathlib = pytest.mark.skipif( - not has_pathlib, reason='requires pathlib / pathlib2' -) - try: _SKIP_FLAKY = not pytest.config.getoption("--run-flaky") _SKIP_NETWORK_TESTS = not pytest.config.getoption("--run-network-tests") -except ValueError: +except (ValueError, AttributeError): # Can't get config from pytest, e.g., because xarray is installed instead # of being run from a development version (and hence conftests.py is not # available). Don't run flaky tests. 
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 9f949ee241f..51e0ee5dc78 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -1508,8 +1508,6 @@ def test_dask(self): self.assertDatasetEqual(actual, expected) -@requires_scipy -@requires_pynio class TestPyNio(CFEncodedDataTest, Only32BitTypes, TestCase): def test_write_store(self): # pynio is read-only for now @@ -1529,6 +1527,8 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds + @requires_scipy + @requires_pynio def test_weakrefs(self): example = Dataset({'foo': ('x', np.arange(5.0))}) expected = example.rename({'foo': 'bar', 'x': 'y'}) From 36ad7c339df832ea459ddd7268e9083264c7eb5d Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 7 Sep 2017 00:40:29 -0700 Subject: [PATCH 2/9] remove ModuleNotFoundError --- xarray/tests/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 2fd2aa95518..4ac7f0adb12 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -24,9 +24,8 @@ def _importorskip(modname, minversion=None): if minversion is not None: if LooseVersion(mod.__version__) < LooseVersion(minversion): raise ImportError('Minimum version not satisfied') - except (ImportError, ModuleNotFoundError): + except ImportError: has = False - return (has, pytest.mark.skipif((not has), reason='requires {}'.format(modname))) From 8768d6581a9cf21ca883104b7c51393e7a608090 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 7 Sep 2017 10:54:44 -0700 Subject: [PATCH 3/9] no pytest decorators on classes with multiple inheritance in test_backends.py --- xarray/tests/__init__.py | 2 +- xarray/tests/test_backends.py | 113 +++++++++++++++++++++++++++------- 2 files changed, 93 insertions(+), 22 deletions(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 4ac7f0adb12..e6e3ba0488d 100644 --- 
a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -47,7 +47,7 @@ def _importorskip(modname, minversion=None): has_h5netcdf, requires_h5netcdf = _importorskip('h5netcdf') has_pynio, requires_pynio = _importorskip('pynio') has_dask, requires_dask = _importorskip('dask') -has_bottleneck, requires_bottleneck = _importorskip('bottleneck', '1.0') +has_bottleneck, requires_bottleneck = _importorskip('bottleneck') has_rasterio, requires_rasterio = _importorskip('rasterio') has_pathlib, requires_pathlib = _importorskip('pathlib') diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 51e0ee5dc78..5f0633a4e2f 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -28,7 +28,7 @@ requires_scipy_or_netCDF4, requires_dask, requires_h5netcdf, requires_pynio, requires_pathlib, has_netCDF4, has_scipy, assert_allclose, flaky, network, requires_rasterio, - assert_identical) + assert_identical, has_scipy_or_netCDF4) from .test_dataset import create_test_data try: @@ -800,7 +800,6 @@ def test_variable_len_strings(self): self.assertDatasetIdentical(expected, actual) -@requires_netCDF4 class NetCDF4DataTest(BaseNetCDF4Test, TestCase): autoclose = False @@ -813,6 +812,7 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, **save_kwargs) @@ -853,12 +853,12 @@ class NetCDF4DataStoreAutocloseTrue(NetCDF4DataTest): autoclose = True -@requires_netCDF4 -@requires_dask class NetCDF4ViaDaskDataTest(NetCDF4DataTest): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('netCDF4') + pytest.importorskip('dask') with NetCDF4DataTest.roundtrip( self, data, save_kwargs, open_kwargs, allow_cleanup_failure) as ds: @@ -878,7 +878,6 @@ 
class NetCDF4ViaDaskDataTestAutocloseTrue(NetCDF4ViaDaskDataTest): autoclose = True -@requires_scipy class ScipyInMemoryDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -888,15 +887,18 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('scipy') serialized = data.to_netcdf(**save_kwargs) with open_dataset(serialized, engine='scipy', autoclose=self.autoclose, **open_kwargs) as ds: yield ds + @requires_scipy def test_to_netcdf_explicit_engine(self): # regression test for GH1321 Dataset({'foo': 42}).to_netcdf(engine='scipy') + @requires_scipy @pytest.mark.skipif(PY2, reason='cannot pickle BytesIO on Python 2') def test_bytesio_pickle(self): data = Dataset({'foo': ('x', [1, 2, 3])}) @@ -910,7 +912,6 @@ class ScipyInMemoryDataTestAutocloseTrue(ScipyInMemoryDataTest): autoclose = True -@requires_scipy class ScipyFileObjectTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -920,6 +921,7 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('scipy') with create_tmp_file() as tmp_file: with open(tmp_file, 'wb') as f: data.to_netcdf(f, **save_kwargs) @@ -936,7 +938,6 @@ def test_pickle_dataarray(self): pass -@requires_scipy class ScipyFilePathTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -947,6 +948,7 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('scipy') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, engine='scipy', **save_kwargs) @@ -954,12 +956,14 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={}, 
autoclose=self.autoclose, **open_kwargs) as ds: yield ds + @requires_scipy def test_array_attrs(self): ds = Dataset(attrs={'foo': [[1, 2], [3, 4]]}) with self.assertRaisesRegexp(ValueError, 'must be 1-dimensional'): with self.roundtrip(ds) as roundtripped: pass + @requires_scipy def test_roundtrip_example_1_netcdf_gz(self): if sys.version_info[:2] < (2, 7): with self.assertRaisesRegexp(ValueError, @@ -970,6 +974,7 @@ def test_roundtrip_example_1_netcdf_gz(self): with open_example_dataset('example_1.nc') as actual: self.assertDatasetIdentical(expected, actual) + @requires_scipy def test_netcdf3_endianness(self): # regression test for GH416 expected = open_example_dataset('bears.nc', engine='scipy') @@ -977,6 +982,7 @@ def test_netcdf3_endianness(self): self.assertTrue(var.dtype.isnative) @requires_netCDF4 + @requires_scipy def test_nc4_scipy(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, 'w', format='NETCDF4') as rootgrp: @@ -990,7 +996,6 @@ class ScipyFilePathTestAutocloseTrue(ScipyFilePathTest): autoclose = True -@requires_netCDF4 class NetCDF3ViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -1002,6 +1007,7 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, format='NETCDF3_CLASSIC', @@ -1015,7 +1021,6 @@ class NetCDF3ViaNetCDF4DataTestAutocloseTrue(NetCDF3ViaNetCDF4DataTest): autoclose = True -@requires_netCDF4 class NetCDF4ClassicViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager @@ -1028,6 +1033,7 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('netCDF4') with create_tmp_file( 
allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, format='NETCDF4_CLASSIC', @@ -1042,7 +1048,6 @@ class NetCDF4ClassicViaNetCDF4DataTestAutocloseTrue( autoclose = True -@requires_scipy_or_netCDF4 class GenericNetCDFDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): # verify that we can read and write netCDF3 files as long as we have scipy # or netCDF4-python installed @@ -1054,6 +1059,7 @@ def test_write_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.mark.skipif(not has_scipy_or_netCDF4, 'requires_scipy_or_netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, format='netcdf3_64bit', **save_kwargs) @@ -1061,6 +1067,7 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds + @requires_scipy_or_netCDF4 def test_engine(self): data = create_test_data() with self.assertRaisesRegexp(ValueError, 'unrecognized engine'): @@ -1077,6 +1084,7 @@ def test_engine(self): with self.assertRaisesRegexp(ValueError, 'can only read'): open_dataset(BytesIO(netcdf_bytes), engine='foobar') + @requires_scipy_or_netCDF4 def test_cross_engine_read_write_netcdf3(self): data = create_test_data() valid_engines = set() @@ -1100,6 +1108,7 @@ def test_cross_engine_read_write_netcdf3(self): [assert_allclose(data[k].variable, actual[k].variable) for k in data] + @requires_scipy_or_netCDF4 def test_encoding_unlimited_dims(self): ds = Dataset({'x': ('y', np.arange(10.0))}) with self.roundtrip(ds, @@ -1116,8 +1125,6 @@ class GenericNetCDFDataTestAutocloseTrue(GenericNetCDFDataTest): autoclose = True -@requires_h5netcdf -@requires_netCDF4 class H5NetCDFDataTest(BaseNetCDF4Test, TestCase): @contextlib.contextmanager def create_store(self): @@ -1127,6 +1134,8 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, 
open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('netCDF4') + pytest.importorskip('h5netcdf') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, engine='h5netcdf', **save_kwargs) @@ -1138,11 +1147,15 @@ def test_orthogonal_indexing(self): # doesn't work for h5py (without using dask as an intermediate layer) pass + @requires_h5netcdf + @requires_netCDF4 def test_complex(self): expected = Dataset({'x': ('y', np.ones(5) + 1j * np.ones(5))}) with self.roundtrip(expected) as actual: self.assertDatasetEqual(expected, actual) + @requires_h5netcdf + @requires_netCDF4 @pytest.mark.xfail(reason='https://github.com/pydata/xarray/issues/535') def test_cross_engine_read_write_netcdf4(self): # Drop dim3, because its labels include strings. These appear to be @@ -1158,6 +1171,8 @@ def test_cross_engine_read_write_netcdf4(self): with open_dataset(tmp_file, engine=read_engine) as actual: self.assertDatasetIdentical(data, actual) + @requires_h5netcdf + @requires_netCDF4 def test_read_byte_attrs_as_unicode(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, 'w') as nc: @@ -1166,6 +1181,8 @@ def test_read_byte_attrs_as_unicode(self): expected = Dataset(attrs={'foo': 'bar'}) self.assertDatasetIdentical(expected, actual) + @requires_h5netcdf + @requires_netCDF4 def test_encoding_unlimited_dims(self): ds = Dataset({'x': ('y', np.arange(10.0))}) ds.encoding = {'unlimited_dims': ['y']} @@ -1265,9 +1282,6 @@ def test_4_open_large_num_files_h5netcdf(self): self.validate_open_mfdataset_large_num_files(engine=['h5netcdf']) -@requires_dask -@requires_scipy -@requires_netCDF4 class DaskTest(TestCase, DatasetIOTestCases): @contextlib.contextmanager def create_store(self): @@ -1276,6 +1290,9 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('dask') + 
pytest.importorskip('scipy') + pytest.importorskip('netCDF4') yield data.chunk() def test_roundtrip_datetime_data(self): @@ -1296,6 +1313,9 @@ def test_dataset_caching(self): actual.foo.values # no caching assert not actual.foo.variable._in_memory + @requires_dask + @requires_scipy + @requires_netCDF4 def test_open_mfdataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: @@ -1317,6 +1337,9 @@ def test_open_mfdataset(self): open_mfdataset('foo-bar-baz-*.nc', autoclose=self.autoclose) @requires_pathlib + @requires_dask + @requires_scipy + @requires_netCDF4 def test_open_mfdataset_pathlib(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: @@ -1329,6 +1352,9 @@ def test_open_mfdataset_pathlib(self): autoclose=self.autoclose) as actual: self.assertDatasetAllClose(original, actual) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_attrs_mfdataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: @@ -1348,6 +1374,9 @@ def test_attrs_mfdataset(self): 'no attribute'): actual.test2 + @requires_dask + @requires_scipy + @requires_netCDF4 def test_preprocess_mfdataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp: @@ -1361,6 +1390,9 @@ def preprocess(ds): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(expected, actual) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_save_mfdataset_roundtrip(self): original = Dataset({'foo': ('x', np.random.randn(10))}) datasets = [original.isel(x=slice(5)), @@ -1372,6 +1404,9 @@ def test_save_mfdataset_roundtrip(self): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(actual, original) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_save_mfdataset_invalid(self): ds = Dataset() with self.assertRaisesRegexp(ValueError, 'cannot use mode'): @@ -1379,6 +1414,9 @@ def 
test_save_mfdataset_invalid(self): with self.assertRaisesRegexp(ValueError, 'same length'): save_mfdataset([ds, ds], ['only one path']) + @requires_dask + @requires_scipy + @requires_netCDF4 @requires_pathlib def test_save_mfdataset_pathlib_roundtrip(self): original = Dataset({'foo': ('x', np.random.randn(10))}) @@ -1393,6 +1431,9 @@ def test_save_mfdataset_pathlib_roundtrip(self): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(actual, original) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_open_and_do_math(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp: @@ -1401,6 +1442,9 @@ def test_open_and_do_math(self): actual = 1.0 * ds self.assertDatasetAllClose(original, actual) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_open_mfdataset_concat_dim_none(self): with create_tmp_file() as tmp1: with create_tmp_file() as tmp2: @@ -1411,6 +1455,9 @@ def test_open_mfdataset_concat_dim_none(self): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(data, actual) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_open_dataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp: @@ -1425,6 +1472,9 @@ def test_open_dataset(self): self.assertIsInstance(actual.foo.variable.data, np.ndarray) self.assertDatasetIdentical(original, actual) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_dask_roundtrip(self): with create_tmp_file() as tmp: data = create_test_data() @@ -1437,6 +1487,9 @@ def test_dask_roundtrip(self): with open_dataset(tmp2) as on_disk: self.assertDatasetIdentical(data, on_disk) + @requires_dask + @requires_scipy + @requires_netCDF4 def test_deterministic_names(self): with create_tmp_file() as tmp: data = create_test_data() @@ -1452,6 +1505,9 @@ def test_deterministic_names(self): self.assertEqual(dask_name[:13], 'open_dataset-') self.assertEqual(original_names, repeat_names) + @requires_dask + 
@requires_scipy + @requires_netCDF4 def test_dataarray_compute(self): # Test DataArray.compute() on dask backend. # The test for Dataset.compute() is already in DatasetIOTestCases; @@ -1467,9 +1523,6 @@ class DaskTestAutocloseTrue(DaskTest): autoclose = True -@network -@requires_scipy_or_netCDF4 -@requires_pydap class PydapTest(TestCase): @contextlib.contextmanager def create_datasets(self, **kwargs): @@ -1483,6 +1536,9 @@ def create_datasets(self, **kwargs): expected = expected.drop('bears') yield actual, expected + @network + @requires_scipy_or_netCDF4 + @requires_pydap def test_cmp_local_file(self): with self.create_datasets() as (actual, expected): self.assertDatasetEqual(actual, expected) @@ -1502,6 +1558,9 @@ def test_cmp_local_file(self): self.assertDatasetEqual(actual.isel(j=slice(1, 2)), expected.isel(j=slice(1, 2))) + @network + @requires_scipy_or_netCDF4 + @requires_pydap @requires_dask def test_dask(self): with self.create_datasets(chunks={'j': 2}) as (actual, expected): @@ -1520,6 +1579,8 @@ def test_orthogonal_indexing(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): + pytest.importorskip('Nio') + pytest.importorskip('scipy') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, engine='scipy', **save_kwargs) @@ -1545,9 +1606,9 @@ class TestPyNioAutocloseTrue(TestPyNio): autoclose = True -@requires_rasterio class TestRasterio(TestCase): + @requires_rasterio def test_serialization_utm(self): import rasterio from rasterio.transform import from_origin @@ -1593,6 +1654,7 @@ def test_serialization_utm(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) + @requires_rasterio def test_serialization_platecarree(self): import rasterio @@ -1637,6 +1699,7 @@ def test_serialization_platecarree(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) + @requires_rasterio def 
test_indexing(self): import rasterio @@ -1730,6 +1793,7 @@ def test_indexing(self): ac = actual.isel(band=[0], x=slice(2, 5), y=[2]) assert_allclose(ac, ex) + @requires_rasterio def test_caching(self): import rasterio @@ -1774,6 +1838,7 @@ def test_caching(self): assert_allclose(ac, ex) @requires_dask + @requires_rasterio def test_chunks(self): import rasterio @@ -1841,8 +1906,9 @@ class MiscObject: pass -@requires_netCDF4 class TestValidateAttrs(TestCase): + + @requires_netCDF4 def test_validating_attrs(self): def new_dataset(): return Dataset({'data': ('y', np.arange(10.0))}, @@ -1941,9 +2007,9 @@ def new_dataset_and_coord_attrs(): ds.to_netcdf(tmp_file) -@requires_scipy_or_netCDF4 class TestDataArrayToNetCDF(TestCase): + @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_no_name(self): original_da = DataArray(np.arange(12).reshape((3, 4))) @@ -1953,6 +2019,7 @@ def test_dataarray_to_netcdf_no_name(self): with open_dataarray(tmp) as loaded_da: self.assertDataArrayIdentical(original_da, loaded_da) + @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_with_name(self): original_da = DataArray(np.arange(12).reshape((3, 4)), name='test') @@ -1963,6 +2030,7 @@ def test_dataarray_to_netcdf_with_name(self): with open_dataarray(tmp) as loaded_da: self.assertDataArrayIdentical(original_da, loaded_da) + @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_coord_name_clash(self): original_da = DataArray(np.arange(12).reshape((3, 4)), dims=['x', 'y'], @@ -1974,6 +2042,7 @@ def test_dataarray_to_netcdf_coord_name_clash(self): with open_dataarray(tmp) as loaded_da: self.assertDataArrayIdentical(original_da, loaded_da) + @requires_scipy_or_netCDF4 def test_open_dataarray_options(self): data = DataArray( np.arange(5), coords={'y': ('x', range(5))}, dims=['x']) @@ -1985,6 +2054,7 @@ def test_open_dataarray_options(self): with open_dataarray(tmp, drop_variables=['y']) as loaded: self.assertDataArrayIdentical(expected, loaded) + @requires_scipy_or_netCDF4 def 
test_dataarray_to_netcdf_return_bytes(self): # regression test for GH1410 data = xr.DataArray([1, 2, 3]) @@ -1992,6 +2062,7 @@ def test_dataarray_to_netcdf_return_bytes(self): assert isinstance(output, bytes) @requires_pathlib + @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_no_name_pathlib(self): original_da = DataArray(np.arange(12).reshape((3, 4))) From 18e01c9732d7405390401ef8d58cf5fa47dbd283 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 7 Sep 2017 16:33:34 -0700 Subject: [PATCH 4/9] wrap class decorators for tests --- xarray/tests/__init__.py | 28 +++++++- xarray/tests/test_backends.py | 128 +++++++++------------------------- 2 files changed, 59 insertions(+), 97 deletions(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index e6e3ba0488d..8e9a5b418d2 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -6,6 +6,7 @@ from distutils.version import LooseVersion import re import importlib +import types import numpy as np from numpy.testing import assert_array_equal @@ -17,6 +18,29 @@ from xarray.testing import assert_equal, assert_identical, assert_allclose +def mark_class(marker): + '''Workaround for https://github.com/pytest-dev/pytest/issues/568''' + + def copy_func(f): + try: + return types.FunctionType(f.__code__, f.__globals__, + name=f.__name__, argdefs=f.__defaults__, + closure=f.__closure__) + except AttributeError: + return types.FunctionType(f.func_code, f.func_globals, + name=f.func_name, + argdefs=f.func_defaults, + closure=f.func_closure) + + def mark(cls): + for method in dir(cls): + if method.startswith('test_'): + f = copy_func(getattr(cls, method)) + setattr(cls, method, marker(f)) + return cls + return mark + + def _importorskip(modname, minversion=None): try: mod = importlib.import_module(modname) @@ -26,8 +50,8 @@ def _importorskip(modname, minversion=None): raise ImportError('Minimum version not satisfied') except ImportError: has = False - return (has, 
pytest.mark.skipif((not has), - reason='requires {}'.format(modname))) + func = pytest.mark.skipif((not has), reason='requires {}'.format(modname)) + return has, func try: diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 5f0633a4e2f..0b4a5986b53 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -28,7 +28,7 @@ requires_scipy_or_netCDF4, requires_dask, requires_h5netcdf, requires_pynio, requires_pathlib, has_netCDF4, has_scipy, assert_allclose, flaky, network, requires_rasterio, - assert_identical, has_scipy_or_netCDF4) + assert_identical, mark_class) from .test_dataset import create_test_data try: @@ -588,7 +588,7 @@ def create_tmp_files(nfiles, suffix='.nc', allow_cleanup_failure=False): yield files -@requires_netCDF4 +@mark_class(requires_netCDF4) class BaseNetCDF4Test(CFEncodedDataTest): def test_open_group(self): # Create a netCDF file with a dataset stored within a group @@ -800,6 +800,7 @@ def test_variable_len_strings(self): self.assertDatasetIdentical(expected, actual) +@mark_class(requires_netCDF4) class NetCDF4DataTest(BaseNetCDF4Test, TestCase): autoclose = False @@ -812,7 +813,6 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, **save_kwargs) @@ -853,12 +853,12 @@ class NetCDF4DataStoreAutocloseTrue(NetCDF4DataTest): autoclose = True +@mark_class(requires_netCDF4) +@mark_class(requires_dask) class NetCDF4ViaDaskDataTest(NetCDF4DataTest): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('netCDF4') - pytest.importorskip('dask') with NetCDF4DataTest.roundtrip( self, data, save_kwargs, open_kwargs, allow_cleanup_failure) as ds: @@ -878,6 +878,7 @@ class 
NetCDF4ViaDaskDataTestAutocloseTrue(NetCDF4ViaDaskDataTest): autoclose = True +@mark_class(requires_scipy) class ScipyInMemoryDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -887,18 +888,15 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('scipy') serialized = data.to_netcdf(**save_kwargs) with open_dataset(serialized, engine='scipy', autoclose=self.autoclose, **open_kwargs) as ds: yield ds - @requires_scipy def test_to_netcdf_explicit_engine(self): # regression test for GH1321 Dataset({'foo': 42}).to_netcdf(engine='scipy') - @requires_scipy @pytest.mark.skipif(PY2, reason='cannot pickle BytesIO on Python 2') def test_bytesio_pickle(self): data = Dataset({'foo': ('x', [1, 2, 3])}) @@ -912,6 +910,7 @@ class ScipyInMemoryDataTestAutocloseTrue(ScipyInMemoryDataTest): autoclose = True +@mark_class(requires_scipy) class ScipyFileObjectTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -921,7 +920,6 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('scipy') with create_tmp_file() as tmp_file: with open(tmp_file, 'wb') as f: data.to_netcdf(f, **save_kwargs) @@ -938,6 +936,7 @@ def test_pickle_dataarray(self): pass +@mark_class(requires_scipy) class ScipyFilePathTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -948,7 +947,6 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('scipy') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, engine='scipy', **save_kwargs) @@ -956,14 +954,12 @@ def roundtrip(self, data, 
save_kwargs={}, open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds - @requires_scipy def test_array_attrs(self): ds = Dataset(attrs={'foo': [[1, 2], [3, 4]]}) with self.assertRaisesRegexp(ValueError, 'must be 1-dimensional'): with self.roundtrip(ds) as roundtripped: pass - @requires_scipy def test_roundtrip_example_1_netcdf_gz(self): if sys.version_info[:2] < (2, 7): with self.assertRaisesRegexp(ValueError, @@ -974,7 +970,6 @@ def test_roundtrip_example_1_netcdf_gz(self): with open_example_dataset('example_1.nc') as actual: self.assertDatasetIdentical(expected, actual) - @requires_scipy def test_netcdf3_endianness(self): # regression test for GH416 expected = open_example_dataset('bears.nc', engine='scipy') @@ -982,7 +977,6 @@ def test_netcdf3_endianness(self): self.assertTrue(var.dtype.isnative) @requires_netCDF4 - @requires_scipy def test_nc4_scipy(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, 'w', format='NETCDF4') as rootgrp: @@ -996,6 +990,7 @@ class ScipyFilePathTestAutocloseTrue(ScipyFilePathTest): autoclose = True +@mark_class(requires_netCDF4) class NetCDF3ViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -1007,7 +1002,6 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, format='NETCDF3_CLASSIC', @@ -1021,6 +1015,7 @@ class NetCDF3ViaNetCDF4DataTestAutocloseTrue(NetCDF3ViaNetCDF4DataTest): autoclose = True +@mark_class(requires_netCDF4) class NetCDF4ClassicViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager @@ -1033,7 +1028,6 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - 
pytest.importorskip('netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, format='NETCDF4_CLASSIC', @@ -1048,6 +1042,7 @@ class NetCDF4ClassicViaNetCDF4DataTestAutocloseTrue( autoclose = True +@mark_class(requires_scipy_or_netCDF4) class GenericNetCDFDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): # verify that we can read and write netCDF3 files as long as we have scipy # or netCDF4-python installed @@ -1059,7 +1054,6 @@ def test_write_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.mark.skipif(not has_scipy_or_netCDF4, 'requires_scipy_or_netCDF4') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, format='netcdf3_64bit', **save_kwargs) @@ -1067,7 +1061,6 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds - @requires_scipy_or_netCDF4 def test_engine(self): data = create_test_data() with self.assertRaisesRegexp(ValueError, 'unrecognized engine'): @@ -1084,7 +1077,6 @@ def test_engine(self): with self.assertRaisesRegexp(ValueError, 'can only read'): open_dataset(BytesIO(netcdf_bytes), engine='foobar') - @requires_scipy_or_netCDF4 def test_cross_engine_read_write_netcdf3(self): data = create_test_data() valid_engines = set() @@ -1108,7 +1100,6 @@ def test_cross_engine_read_write_netcdf3(self): [assert_allclose(data[k].variable, actual[k].variable) for k in data] - @requires_scipy_or_netCDF4 def test_encoding_unlimited_dims(self): ds = Dataset({'x': ('y', np.arange(10.0))}) with self.roundtrip(ds, @@ -1125,6 +1116,8 @@ class GenericNetCDFDataTestAutocloseTrue(GenericNetCDFDataTest): autoclose = True +@mark_class(requires_h5netcdf) +@mark_class(requires_netCDF4) class H5NetCDFDataTest(BaseNetCDF4Test, TestCase): @contextlib.contextmanager def create_store(self): @@ -1134,8 +1127,6 @@ def 
create_store(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('netCDF4') - pytest.importorskip('h5netcdf') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, engine='h5netcdf', **save_kwargs) @@ -1147,15 +1138,11 @@ def test_orthogonal_indexing(self): # doesn't work for h5py (without using dask as an intermediate layer) pass - @requires_h5netcdf - @requires_netCDF4 def test_complex(self): expected = Dataset({'x': ('y', np.ones(5) + 1j * np.ones(5))}) with self.roundtrip(expected) as actual: self.assertDatasetEqual(expected, actual) - @requires_h5netcdf - @requires_netCDF4 @pytest.mark.xfail(reason='https://github.com/pydata/xarray/issues/535') def test_cross_engine_read_write_netcdf4(self): # Drop dim3, because its labels include strings. These appear to be @@ -1171,8 +1158,6 @@ def test_cross_engine_read_write_netcdf4(self): with open_dataset(tmp_file, engine=read_engine) as actual: self.assertDatasetIdentical(data, actual) - @requires_h5netcdf - @requires_netCDF4 def test_read_byte_attrs_as_unicode(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, 'w') as nc: @@ -1181,8 +1166,6 @@ def test_read_byte_attrs_as_unicode(self): expected = Dataset(attrs={'foo': 'bar'}) self.assertDatasetIdentical(expected, actual) - @requires_h5netcdf - @requires_netCDF4 def test_encoding_unlimited_dims(self): ds = Dataset({'x': ('y', np.arange(10.0))}) ds.encoding = {'unlimited_dims': ['y']} @@ -1282,6 +1265,9 @@ def test_4_open_large_num_files_h5netcdf(self): self.validate_open_mfdataset_large_num_files(engine=['h5netcdf']) +@mark_class(requires_dask) +@mark_class(requires_scipy) +@mark_class(requires_netCDF4) class DaskTest(TestCase, DatasetIOTestCases): @contextlib.contextmanager def create_store(self): @@ -1290,9 +1276,6 @@ def create_store(self): @contextlib.contextmanager def roundtrip(self, 
data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('dask') - pytest.importorskip('scipy') - pytest.importorskip('netCDF4') yield data.chunk() def test_roundtrip_datetime_data(self): @@ -1313,9 +1296,6 @@ def test_dataset_caching(self): actual.foo.values # no caching assert not actual.foo.variable._in_memory - @requires_dask - @requires_scipy - @requires_netCDF4 def test_open_mfdataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: @@ -1337,9 +1317,6 @@ def test_open_mfdataset(self): open_mfdataset('foo-bar-baz-*.nc', autoclose=self.autoclose) @requires_pathlib - @requires_dask - @requires_scipy - @requires_netCDF4 def test_open_mfdataset_pathlib(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: @@ -1352,9 +1329,6 @@ def test_open_mfdataset_pathlib(self): autoclose=self.autoclose) as actual: self.assertDatasetAllClose(original, actual) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_attrs_mfdataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: @@ -1374,9 +1348,6 @@ def test_attrs_mfdataset(self): 'no attribute'): actual.test2 - @requires_dask - @requires_scipy - @requires_netCDF4 def test_preprocess_mfdataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp: @@ -1390,9 +1361,6 @@ def preprocess(ds): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(expected, actual) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_save_mfdataset_roundtrip(self): original = Dataset({'foo': ('x', np.random.randn(10))}) datasets = [original.isel(x=slice(5)), @@ -1404,9 +1372,6 @@ def test_save_mfdataset_roundtrip(self): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(actual, original) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_save_mfdataset_invalid(self): ds = Dataset() with 
self.assertRaisesRegexp(ValueError, 'cannot use mode'): @@ -1414,9 +1379,13 @@ def test_save_mfdataset_invalid(self): with self.assertRaisesRegexp(ValueError, 'same length'): save_mfdataset([ds, ds], ['only one path']) - @requires_dask - @requires_scipy - @requires_netCDF4 + def test_save_mfdataset_invalid_dataarray(self): + # regression test for GH1555 + da = DataArray([1, 2]) + with self.assertRaisesRegexp(TypeError, 'supports writing Dataset'): + save_mfdataset([da], ['dataarray']) + + @requires_pathlib def test_save_mfdataset_pathlib_roundtrip(self): original = Dataset({'foo': ('x', np.random.randn(10))}) @@ -1431,9 +1400,6 @@ def test_save_mfdataset_pathlib_roundtrip(self): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(actual, original) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_open_and_do_math(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp: @@ -1442,9 +1408,6 @@ def test_open_and_do_math(self): actual = 1.0 * ds self.assertDatasetAllClose(original, actual) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_open_mfdataset_concat_dim_none(self): with create_tmp_file() as tmp1: with create_tmp_file() as tmp2: @@ -1455,9 +1418,6 @@ def test_open_mfdataset_concat_dim_none(self): autoclose=self.autoclose) as actual: self.assertDatasetIdentical(data, actual) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_open_dataset(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp: @@ -1472,9 +1432,6 @@ def test_open_dataset(self): self.assertIsInstance(actual.foo.variable.data, np.ndarray) self.assertDatasetIdentical(original, actual) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_dask_roundtrip(self): with create_tmp_file() as tmp: data = create_test_data() @@ -1487,9 +1444,6 @@ def test_dask_roundtrip(self): with open_dataset(tmp2) as on_disk: self.assertDatasetIdentical(data, on_disk) - @requires_dask - 
@requires_scipy - @requires_netCDF4 def test_deterministic_names(self): with create_tmp_file() as tmp: data = create_test_data() @@ -1505,9 +1459,6 @@ def test_deterministic_names(self): self.assertEqual(dask_name[:13], 'open_dataset-') self.assertEqual(original_names, repeat_names) - @requires_dask - @requires_scipy - @requires_netCDF4 def test_dataarray_compute(self): # Test DataArray.compute() on dask backend. # The test for Dataset.compute() is already in DatasetIOTestCases; @@ -1523,6 +1474,9 @@ class DaskTestAutocloseTrue(DaskTest): autoclose = True +@mark_class(network) +@mark_class(requires_scipy_or_netCDF4) +@mark_class(requires_pydap) class PydapTest(TestCase): @contextlib.contextmanager def create_datasets(self, **kwargs): @@ -1536,9 +1490,6 @@ def create_datasets(self, **kwargs): expected = expected.drop('bears') yield actual, expected - @network - @requires_scipy_or_netCDF4 - @requires_pydap def test_cmp_local_file(self): with self.create_datasets() as (actual, expected): self.assertDatasetEqual(actual, expected) @@ -1558,15 +1509,14 @@ def test_cmp_local_file(self): self.assertDatasetEqual(actual.isel(j=slice(1, 2)), expected.isel(j=slice(1, 2))) - @network - @requires_scipy_or_netCDF4 - @requires_pydap @requires_dask def test_dask(self): with self.create_datasets(chunks={'j': 2}) as (actual, expected): self.assertDatasetEqual(actual, expected) +@mark_class(requires_scipy) +@mark_class(requires_pynio) class TestPyNio(CFEncodedDataTest, Only32BitTypes, TestCase): def test_write_store(self): # pynio is read-only for now @@ -1579,8 +1529,6 @@ def test_orthogonal_indexing(self): @contextlib.contextmanager def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): - pytest.importorskip('Nio') - pytest.importorskip('scipy') with create_tmp_file( allow_cleanup_failure=allow_cleanup_failure) as tmp_file: data.to_netcdf(tmp_file, engine='scipy', **save_kwargs) @@ -1588,8 +1536,6 @@ def roundtrip(self, data, save_kwargs={}, 
open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds - @requires_scipy - @requires_pynio def test_weakrefs(self): example = Dataset({'foo': ('x', np.arange(5.0))}) expected = example.rename({'foo': 'bar', 'x': 'y'}) @@ -1602,13 +1548,15 @@ def test_weakrefs(self): self.assertDatasetIdentical(actual, expected) +@mark_class(requires_scipy) +@mark_class(requires_pynio) class TestPyNioAutocloseTrue(TestPyNio): autoclose = True +@mark_class(requires_rasterio) class TestRasterio(TestCase): - @requires_rasterio def test_serialization_utm(self): import rasterio from rasterio.transform import from_origin @@ -1654,7 +1602,6 @@ def test_serialization_utm(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) - @requires_rasterio def test_serialization_platecarree(self): import rasterio @@ -1699,7 +1646,6 @@ def test_serialization_platecarree(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) - @requires_rasterio def test_indexing(self): import rasterio @@ -1793,7 +1739,6 @@ def test_indexing(self): ac = actual.isel(band=[0], x=slice(2, 5), y=[2]) assert_allclose(ac, ex) - @requires_rasterio def test_caching(self): import rasterio @@ -1838,7 +1783,6 @@ def test_caching(self): assert_allclose(ac, ex) @requires_dask - @requires_rasterio def test_chunks(self): import rasterio @@ -1906,9 +1850,8 @@ class MiscObject: pass +@mark_class(requires_netCDF4) class TestValidateAttrs(TestCase): - - @requires_netCDF4 def test_validating_attrs(self): def new_dataset(): return Dataset({'data': ('y', np.arange(10.0))}, @@ -2007,9 +1950,9 @@ def new_dataset_and_coord_attrs(): ds.to_netcdf(tmp_file) +@mark_class(requires_scipy_or_netCDF4) class TestDataArrayToNetCDF(TestCase): - @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_no_name(self): original_da = DataArray(np.arange(12).reshape((3, 4))) @@ -2019,7 +1962,6 @@ def test_dataarray_to_netcdf_no_name(self): with open_dataarray(tmp) as loaded_da: 
self.assertDataArrayIdentical(original_da, loaded_da) - @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_with_name(self): original_da = DataArray(np.arange(12).reshape((3, 4)), name='test') @@ -2030,7 +1972,6 @@ def test_dataarray_to_netcdf_with_name(self): with open_dataarray(tmp) as loaded_da: self.assertDataArrayIdentical(original_da, loaded_da) - @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_coord_name_clash(self): original_da = DataArray(np.arange(12).reshape((3, 4)), dims=['x', 'y'], @@ -2042,7 +1983,6 @@ def test_dataarray_to_netcdf_coord_name_clash(self): with open_dataarray(tmp) as loaded_da: self.assertDataArrayIdentical(original_da, loaded_da) - @requires_scipy_or_netCDF4 def test_open_dataarray_options(self): data = DataArray( np.arange(5), coords={'y': ('x', range(5))}, dims=['x']) @@ -2054,7 +1994,6 @@ def test_open_dataarray_options(self): with open_dataarray(tmp, drop_variables=['y']) as loaded: self.assertDataArrayIdentical(expected, loaded) - @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_return_bytes(self): # regression test for GH1410 data = xr.DataArray([1, 2, 3]) @@ -2062,7 +2001,6 @@ def test_dataarray_to_netcdf_return_bytes(self): assert isinstance(output, bytes) @requires_pathlib - @requires_scipy_or_netCDF4 def test_dataarray_to_netcdf_no_name_pathlib(self): original_da = DataArray(np.arange(12).reshape((3, 4))) From 27b635b15094c7956e70baf1e310cbd2fd1b46e0 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Mon, 2 Oct 2017 19:29:03 -0700 Subject: [PATCH 5/9] no more class decorators in test_backends --- xarray/tests/__init__.py | 23 ----------- xarray/tests/test_backends.py | 76 +++++++++++++++++++++-------------- 2 files changed, 46 insertions(+), 53 deletions(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 8e9a5b418d2..bd180d5c7cd 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -18,29 +18,6 @@ from xarray.testing import assert_equal, assert_identical, 
assert_allclose -def mark_class(marker): - '''Workaround for https://github.com/pytest-dev/pytest/issues/568''' - - def copy_func(f): - try: - return types.FunctionType(f.__code__, f.__globals__, - name=f.__name__, argdefs=f.__defaults__, - closure=f.__closure__) - except AttributeError: - return types.FunctionType(f.func_code, f.func_globals, - name=f.func_name, - argdefs=f.func_defaults, - closure=f.func_closure) - - def mark(cls): - for method in dir(cls): - if method.startswith('test_'): - f = copy_func(getattr(cls, method)) - setattr(cls, method, marker(f)) - return cls - return mark - - def _importorskip(modname, minversion=None): try: mod = importlib.import_module(modname) diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index f1d5216fcfe..aeaf404c57b 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -28,7 +28,7 @@ requires_scipy_or_netCDF4, requires_dask, requires_h5netcdf, requires_pynio, requires_pathlib, has_netCDF4, has_scipy, assert_allclose, flaky, network, requires_rasterio, - assert_identical, mark_class) + assert_identical) from .test_dataset import create_test_data from xarray.tests import mock @@ -590,8 +590,8 @@ def create_tmp_files(nfiles, suffix='.nc', allow_cleanup_failure=False): yield files -@mark_class(requires_netCDF4) class BaseNetCDF4Test(CFEncodedDataTest): + @requires_netCDF4 def test_open_group(self): # Create a netCDF file with a dataset stored within a group with create_tmp_file() as tmp_file: @@ -617,6 +617,7 @@ def test_open_group(self): with self.assertRaisesRegexp(ValueError, 'must be a string'): open_dataset(tmp_file, group=(1, 2, 3)) + @requires_netCDF4 def test_open_subgroup(self): # Create a netCDF file with a dataset stored within a group within a group with create_tmp_file() as tmp_file: @@ -638,6 +639,7 @@ def test_open_subgroup(self): with open_dataset(tmp_file, group=group) as actual: self.assertVariableEqual(actual['x'], expected['x']) + 
@requires_netCDF4 def test_write_groups(self): data1 = create_test_data() data2 = data1 * 2 @@ -649,6 +651,7 @@ def test_write_groups(self): with open_dataset(tmp_file, group='data/2') as actual2: self.assertDatasetIdentical(data2, actual2) + @requires_netCDF4 def test_roundtrip_character_array(self): with create_tmp_file() as tmp_file: values = np.array([['a', 'b', 'c'], ['d', 'e', 'f']], dtype='S') @@ -667,12 +670,14 @@ def test_roundtrip_character_array(self): with self.roundtrip(actual) as roundtripped: self.assertDatasetIdentical(expected, roundtripped) + @requires_netCDF4 def test_default_to_char_arrays(self): data = Dataset({'x': np.array(['foo', 'zzzz'], dtype='S')}) with self.roundtrip(data) as actual: self.assertDatasetIdentical(data, actual) self.assertEqual(actual['x'].dtype, np.dtype('S4')) + @requires_netCDF4 def test_open_encodings(self): # Create a netCDF file with explicit time units # and make sure it makes it into the encodings @@ -698,6 +703,7 @@ def test_open_encodings(self): if k in expected['time'].encoding) self.assertDictEqual(actual_encoding, expected['time'].encoding) + @requires_netCDF4 def test_dump_encodings(self): # regression test for #709 ds = Dataset({'x': ('y', np.arange(10.0))}) @@ -705,6 +711,7 @@ def test_dump_encodings(self): with self.roundtrip(ds, save_kwargs=kwargs) as actual: self.assertTrue(actual.x.encoding['zlib']) + @requires_netCDF4 def test_dump_and_open_encodings(self): # Create a netCDF file with explicit time units # and make sure it makes it into the encodings @@ -724,6 +731,7 @@ def test_dump_and_open_encodings(self): self.assertEqual(ds.variables['time'].getncattr('units'), units) self.assertArrayEqual(ds.variables['time'], np.arange(10) + 4) + @requires_netCDF4 def test_compression_encoding(self): data = create_test_data() data['var2'].encoding.update({'zlib': True, @@ -739,6 +747,7 @@ def test_compression_encoding(self): with self.roundtrip(expected) as actual: self.assertDatasetEqual(expected, actual) + 
@requires_netCDF4 def test_mask_and_scale(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, mode='w') as nc: @@ -763,6 +772,7 @@ def test_mask_and_scale(self): expected = create_masked_and_scaled_data() self.assertDatasetIdentical(expected, ds) + @requires_netCDF4 def test_0dimensional_variable(self): # This fix verifies our work-around to this netCDF4-python bug: # https://github.com/Unidata/netcdf4-python/pull/220 @@ -775,6 +785,7 @@ def test_0dimensional_variable(self): expected = Dataset({'x': ((), 123)}) self.assertDatasetIdentical(expected, ds) + @requires_netCDF4 def test_already_open_dataset(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, mode='w') as nc: @@ -787,6 +798,7 @@ def test_already_open_dataset(self): expected = Dataset({'x': ((), 42)}) self.assertDatasetIdentical(expected, ds) + @requires_netCDF4 def test_variable_len_strings(self): with create_tmp_file() as tmp_file: values = np.array(['foo', 'bar', 'baz'], dtype=object) @@ -802,7 +814,6 @@ def test_variable_len_strings(self): self.assertDatasetIdentical(expected, actual) -@mark_class(requires_netCDF4) class NetCDF4DataTest(BaseNetCDF4Test, TestCase): autoclose = False @@ -813,6 +824,7 @@ def create_store(self): yield store @contextlib.contextmanager + @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -855,10 +867,10 @@ class NetCDF4DataStoreAutocloseTrue(NetCDF4DataTest): autoclose = True -@mark_class(requires_netCDF4) -@mark_class(requires_dask) class NetCDF4ViaDaskDataTest(NetCDF4DataTest): @contextlib.contextmanager + @requires_netCDF4 + @requires_dask def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with NetCDF4DataTest.roundtrip( @@ -880,7 +892,6 @@ class NetCDF4ViaDaskDataTestAutocloseTrue(NetCDF4ViaDaskDataTest): autoclose = True -@mark_class(requires_scipy) class ScipyInMemoryDataTest(CFEncodedDataTest, 
Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -888,6 +899,7 @@ def create_store(self): yield backends.ScipyDataStore(fobj, 'w') @contextlib.contextmanager + @requires_scipy def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): serialized = data.to_netcdf(**save_kwargs) @@ -912,7 +924,6 @@ class ScipyInMemoryDataTestAutocloseTrue(ScipyInMemoryDataTest): autoclose = True -@mark_class(requires_scipy) class ScipyFileObjectTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -920,6 +931,7 @@ def create_store(self): yield backends.ScipyDataStore(fobj, 'w') @contextlib.contextmanager + @requires_scipy def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file() as tmp_file: @@ -938,7 +950,6 @@ def test_pickle_dataarray(self): pass -@mark_class(requires_scipy) class ScipyFilePathTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -947,6 +958,7 @@ def create_store(self): yield store @contextlib.contextmanager + @requires_scipy def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -992,7 +1004,6 @@ class ScipyFilePathTestAutocloseTrue(ScipyFilePathTest): autoclose = True -@mark_class(requires_netCDF4) class NetCDF3ViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -1002,6 +1013,7 @@ def create_store(self): yield store @contextlib.contextmanager + @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1017,7 +1029,6 @@ class NetCDF3ViaNetCDF4DataTestAutocloseTrue(NetCDF3ViaNetCDF4DataTest): autoclose = True -@mark_class(requires_netCDF4) class NetCDF4ClassicViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager @@ 
-1028,6 +1039,7 @@ def create_store(self): yield store @contextlib.contextmanager + @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1044,7 +1056,6 @@ class NetCDF4ClassicViaNetCDF4DataTestAutocloseTrue( autoclose = True -@mark_class(requires_scipy_or_netCDF4) class GenericNetCDFDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): # verify that we can read and write netCDF3 files as long as we have scipy # or netCDF4-python installed @@ -1054,6 +1065,7 @@ def test_write_store(self): pass @contextlib.contextmanager + @requires_scipy_or_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1118,8 +1130,6 @@ class GenericNetCDFDataTestAutocloseTrue(GenericNetCDFDataTest): autoclose = True -@mark_class(requires_h5netcdf) -@mark_class(requires_netCDF4) class H5NetCDFDataTest(BaseNetCDF4Test, TestCase): @contextlib.contextmanager def create_store(self): @@ -1127,6 +1137,8 @@ def create_store(self): yield backends.H5NetCDFStore(tmp_file, 'w') @contextlib.contextmanager + @requires_h5netcdf + @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1179,9 +1191,9 @@ def test_encoding_unlimited_dims(self): # tests pending h5netcdf fix -@pytest.mark.xfail -class H5NetCDFDataTestAutocloseTrue(H5NetCDFDataTest): - autoclose = True +# @pytest.mark.xfail @shoyer - is this still an issue? 
+# class H5NetCDFDataTestAutocloseTrue(H5NetCDFDataTest): +# autoclose = True class OpenMFDatasetManyFilesTest(TestCase): @@ -1267,15 +1279,15 @@ def test_4_open_large_num_files_h5netcdf(self): self.validate_open_mfdataset_large_num_files(engine=['h5netcdf']) -@mark_class(requires_dask) -@mark_class(requires_scipy) -@mark_class(requires_netCDF4) class DaskTest(TestCase, DatasetIOTestCases): @contextlib.contextmanager def create_store(self): yield Dataset() @contextlib.contextmanager + @requires_dask + @requires_scipy + @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): yield data.chunk() @@ -1387,7 +1399,6 @@ def test_save_mfdataset_invalid_dataarray(self): with self.assertRaisesRegexp(TypeError, 'supports writing Dataset'): save_mfdataset([da], ['dataarray']) - @requires_pathlib def test_save_mfdataset_pathlib_roundtrip(self): original = Dataset({'foo': ('x', np.random.randn(10))}) @@ -1476,11 +1487,11 @@ class DaskTestAutocloseTrue(DaskTest): autoclose = True -@mark_class(network) -@mark_class(requires_scipy_or_netCDF4) -@mark_class(requires_pydap) class PydapTest(TestCase): @contextlib.contextmanager + @network + @requires_scipy_or_netCDF4 + @requires_pydap def create_datasets(self, **kwargs): url = 'http://test.opendap.org/opendap/hyrax/data/nc/bears.nc' actual = open_dataset(url, engine='pydap', **kwargs) @@ -1511,6 +1522,7 @@ def test_cmp_local_file(self): self.assertDatasetEqual(actual.isel(j=slice(1, 2)), expected.isel(j=slice(1, 2))) + @requires_pydap def test_session(self): from pydap.cas.urs import setup_session @@ -1525,8 +1537,6 @@ def test_dask(self): self.assertDatasetEqual(actual, expected) -@mark_class(requires_scipy) -@mark_class(requires_pynio) class TestPyNio(CFEncodedDataTest, Only32BitTypes, TestCase): def test_write_store(self): # pynio is read-only for now @@ -1537,6 +1547,8 @@ def test_orthogonal_indexing(self): pass @contextlib.contextmanager + @requires_scipy + @requires_pynio def 
roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1546,6 +1558,8 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds + @requires_pynio + @requires_scipy def test_weakrefs(self): example = Dataset({'foo': ('x', np.arange(5.0))}) expected = example.rename({'foo': 'bar', 'x': 'y'}) @@ -1558,15 +1572,13 @@ def test_weakrefs(self): self.assertDatasetIdentical(actual, expected) -@mark_class(requires_scipy) -@mark_class(requires_pynio) class TestPyNioAutocloseTrue(TestPyNio): autoclose = True -@mark_class(requires_rasterio) class TestRasterio(TestCase): + @requires_rasterio def test_serialization_utm(self): import rasterio from rasterio.transform import from_origin @@ -1612,6 +1624,7 @@ def test_serialization_utm(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) + @requires_rasterio def test_serialization_platecarree(self): import rasterio @@ -1656,6 +1669,7 @@ def test_serialization_platecarree(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) + @requires_rasterio def test_indexing(self): import rasterio @@ -1749,6 +1763,7 @@ def test_indexing(self): ac = actual.isel(band=[0], x=slice(2, 5), y=[2]) assert_allclose(ac, ex) + @requires_rasterio def test_caching(self): import rasterio @@ -1793,6 +1808,7 @@ def test_caching(self): assert_allclose(ac, ex) @requires_dask + @requires_rasterio def test_chunks(self): import rasterio @@ -1860,7 +1876,7 @@ class MiscObject: pass -@mark_class(requires_netCDF4) +@requires_netCDF4 class TestValidateAttrs(TestCase): def test_validating_attrs(self): def new_dataset(): @@ -1960,7 +1976,7 @@ def new_dataset_and_coord_attrs(): ds.to_netcdf(tmp_file) -@mark_class(requires_scipy_or_netCDF4) +@requires_scipy_or_netCDF4 class TestDataArrayToNetCDF(TestCase): def test_dataarray_to_netcdf_no_name(self): From 5b4c966c95ca5a53036be6e5f47dce669e87873a Mon Sep 17 
00:00:00 2001 From: Joe Hamman Date: Mon, 2 Oct 2017 22:41:17 -0700 Subject: [PATCH 6/9] whats new and some more cleanup --- doc/whats-new.rst | 9 +++++++-- xarray/tests/__init__.py | 1 - xarray/tests/test_backends.py | 4 ++++ xarray/tests/test_conventions.py | 13 ++++++++++--- xarray/tests/test_dask.py | 4 ++-- xarray/tests/test_groupby.py | 4 ++-- xarray/tests/test_plot.py | 12 +++++++++--- xarray/tests/test_tutorial.py | 3 +-- 8 files changed, 35 insertions(+), 15 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 7025e2dd7d9..f161aa10af7 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -54,9 +54,9 @@ Breaking changes [...] Note that both versions are currently supported, but using the old syntax will - produce a warning encouraging users to adopt the new syntax. + produce a warning encouraging users to adopt the new syntax. By `Daniel Rothenberg `_. - + - ``repr`` and the Jupyter Notebook won't automatically compute dask variables. Datasets loaded with ``open_dataset`` won't automatically read coords from disk when calling ``repr`` (:issue:`1522`). @@ -212,6 +212,11 @@ Bug fixes the first argument was a numpy variable (:issue:`1588`). By `Guido Imperiale `_. + - Fix bug when using ``pytest`` class decorators to skip certain unit tests. + The previous behavior unintentionally caused additional tests to be skipped. + A temporary work-around has been applied in :issue:`1531`. + By `Joe Hamman `_. + .. 
_whats-new.0.9.6: v0.9.6 (8 June 2017) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index bd180d5c7cd..7eb0df96be2 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -6,7 +6,6 @@ from distutils.version import LooseVersion import re import importlib -import types import numpy as np from numpy.testing import assert_array_equal diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index aeaf404c57b..6376a73fe71 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -1575,6 +1575,10 @@ def test_weakrefs(self): class TestPyNioAutocloseTrue(TestPyNio): autoclose = True + @requires_pynio + def setUp(self): + pass + class TestRasterio(TestCase): diff --git a/xarray/tests/test_conventions.py b/xarray/tests/test_conventions.py index 6aaa2cbfa89..6ae42ad7847 100644 --- a/xarray/tests/test_conventions.py +++ b/xarray/tests/test_conventions.py @@ -527,8 +527,8 @@ def test(self): self.assertArrayEqual(a, expected) -@requires_netCDF4 class TestEncodeCFVariable(TestCase): + @requires_netCDF4 def test_incompatible_attributes(self): invalid_vars = [ Variable(['t'], pd.date_range('2000-01-01', periods=3), @@ -541,6 +541,7 @@ def test_incompatible_attributes(self): with self.assertRaises(ValueError): conventions.encode_cf_variable(var) + @requires_netCDF4 def test_missing_fillvalue(self): v = Variable(['x'], np.array([np.nan, 1, 2, 3])) v.encoding = {'dtype': 'int16'} @@ -548,8 +549,8 @@ def test_missing_fillvalue(self): conventions.encode_cf_variable(v) -@requires_netCDF4 class TestDecodeCF(TestCase): + @requires_netCDF4 def test_dataset(self): original = Dataset({ 't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}), @@ -562,25 +563,30 @@ def test_dataset(self): actual = conventions.decode_cf(original) self.assertDatasetIdentical(expected, actual) + @requires_netCDF4 def test_invalid_coordinates(self): # regression test for GH308 original = Dataset({'foo': ('t', [1, 2], {'coordinates': 
'invalid'})}) actual = conventions.decode_cf(original) self.assertDatasetIdentical(original, actual) + @requires_netCDF4 def test_decode_coordinates(self): + # regression test for GH610 original = Dataset({'foo': ('t', [1, 2], {'coordinates': 'x'}), 'x': ('t', [4, 5])}) actual = conventions.decode_cf(original) self.assertEqual(actual.foo.encoding['coordinates'], 'x') + @requires_netCDF4 def test_0d_int32_encoding(self): original = Variable((), np.int32(0), encoding={'dtype': 'int64'}) expected = Variable((), np.int64(0)) actual = conventions.maybe_encode_dtype(original) self.assertDatasetIdentical(expected, actual) + @requires_netCDF4 def test_decode_cf_with_multiple_missing_values(self): original = Variable(['t'], [0, 1, 2], {'missing_value': np.array([0, 1])}) @@ -590,6 +596,7 @@ def test_decode_cf_with_multiple_missing_values(self): self.assertDatasetIdentical(expected, actual) self.assertIn('variable has multiple fill', str(w[0].message)) + @requires_netCDF4 def test_decode_cf_with_drop_variables(self): original = Dataset({ 't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}), @@ -634,13 +641,13 @@ def null_wrap(ds): return InMemoryDataStore(variables=variables, attributes=ds.attrs) -@requires_netCDF4 class TestCFEncodedDataStore(CFEncodedDataTest, TestCase): @contextlib.contextmanager def create_store(self): yield CFEncodedInMemoryStore() @contextlib.contextmanager + @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): store = CFEncodedInMemoryStore() diff --git a/xarray/tests/test_dask.py b/xarray/tests/test_dask.py index fdb2ff715dd..7221a96a042 100644 --- a/xarray/tests/test_dask.py +++ b/xarray/tests/test_dask.py @@ -44,7 +44,6 @@ def assertLazyAnd(self, expected, actual, test): assert False -@requires_dask class TestVariable(DaskTestCase): def assertLazyAndIdentical(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertVariableIdentical) @@ -52,6 +51,7 @@ def 
assertLazyAndIdentical(self, expected, actual): def assertLazyAndAllClose(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertVariableAllClose) + @requires_dask def setUp(self): self.values = np.random.RandomState(0).randn(4, 6) self.data = da.from_array(self.values, chunks=(2, 2)) @@ -206,7 +206,6 @@ def test_bivariate_ufunc(self): self.assertLazyAndAllClose(np.maximum(u, 0), xu.maximum(0, v)) -@requires_dask class TestDataArrayAndDataset(DaskTestCase): def assertLazyAndIdentical(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertDataArrayIdentical) @@ -217,6 +216,7 @@ def assertLazyAndAllClose(self, expected, actual): def assertLazyAndEqual(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertDataArrayEqual) + @requires_dask def setUp(self): self.values = np.random.randn(4, 6) self.data = da.from_array(self.values, chunks=(2, 2)) diff --git a/xarray/tests/test_groupby.py b/xarray/tests/test_groupby.py index 9a153e45da0..9d5682c0d1d 100644 --- a/xarray/tests/test_groupby.py +++ b/xarray/tests/test_groupby.py @@ -14,7 +14,7 @@ def test_consolidate_slices(): assert _consolidate_slices([slice(3), slice(3, 5)]) == [slice(5)] assert _consolidate_slices([slice(2, 3), slice(3, 6)]) == [slice(2, 6)] assert (_consolidate_slices([slice(2, 3, 1), slice(3, 6, 1)]) - == [slice(2, 6, 1)]) + == [slice(2, 6, 1)]) slices = [slice(2, 3), slice(5, 6)] assert _consolidate_slices(slices) == slices @@ -70,5 +70,5 @@ def test_groupby_duplicate_coordinate_labels(): actual = array.groupby('x').sum() assert expected.equals(actual) - + # TODO: move other groupby tests from test_dataset and test_dataarray over here diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py index bdd0fec777f..ce890573bc4 100644 --- a/xarray/tests/test_plot.py +++ b/xarray/tests/test_plot.py @@ -65,7 +65,6 @@ def easy_array(shape, start=0, stop=1): return a.reshape(shape) -@requires_matplotlib class PlotTestCase(TestCase): def 
tearDown(self): @@ -91,6 +90,7 @@ def contourf_called(self, plotmethod): class TestPlot(PlotTestCase): + @requires_matplotlib def setUp(self): self.darray = DataArray(easy_array((2, 3, 4))) @@ -219,6 +219,7 @@ def test_convenient_facetgrid_4d(self): class TestPlot1D(PlotTestCase): + @requires_matplotlib def setUp(self): d = [0, 1.1, 0, 2] self.darray = DataArray(d, coords={'period': range(len(d))}, @@ -279,6 +280,7 @@ def test_slice_in_title(self): class TestPlotHistogram(PlotTestCase): + @requires_matplotlib def setUp(self): self.darray = DataArray(easy_array((2, 3, 4))) @@ -316,9 +318,9 @@ def test_plot_nans(self): self.darray.plot.hist() -@requires_matplotlib class TestDetermineCmapParams(TestCase): + @requires_matplotlib def setUp(self): self.data = np.linspace(0, 1, num=100) @@ -467,9 +469,9 @@ def test_divergentcontrol(self): self.assertEqual(cmap_params['cmap'].name, "viridis") -@requires_matplotlib class TestDiscreteColorMap(TestCase): + @requires_matplotlib def setUp(self): x = np.arange(start=0, stop=10, step=2) y = np.arange(start=9, stop=-7, step=-3) @@ -559,6 +561,7 @@ class Common2dMixin: Should have the same name as the method. """ + @requires_matplotlib def setUp(self): da = DataArray(easy_array( (10, 15), start=-1), dims=['y', 'x']) @@ -1019,6 +1022,7 @@ def test_2d_coord_names(self): class TestFacetGrid(PlotTestCase): + @requires_matplotlib def setUp(self): d = easy_array((10, 15, 3)) self.darray = DataArray(d, dims=['y', 'x', 'z'], @@ -1242,6 +1246,7 @@ def test_facetgrid_polar(self): class TestFacetGrid4d(PlotTestCase): + @requires_matplotlib def setUp(self): a = easy_array((10, 15, 3, 2)) darray = DataArray(a, dims=['y', 'x', 'col', 'row']) @@ -1270,6 +1275,7 @@ def test_default_labels(self): class TestDatetimePlot(PlotTestCase): + @requires_matplotlib def setUp(self): ''' Create a DataArray with a time-axis that contains datetime objects. 
diff --git a/xarray/tests/test_tutorial.py b/xarray/tests/test_tutorial.py index 56bdccedcfe..2f2af53ce8e 100644 --- a/xarray/tests/test_tutorial.py +++ b/xarray/tests/test_tutorial.py @@ -3,7 +3,6 @@ from __future__ import print_function import os -import pytest from xarray import tutorial, DataArray from xarray.core.pycompat import suppress @@ -11,7 +10,6 @@ from . import TestCase, network -@network class TestLoadDataset(TestCase): def setUp(self): @@ -23,6 +21,7 @@ def setUp(self): with suppress(OSError): os.remove('{}.md5'.format(self.testfilepath)) + @network def test_download_from_github(self): ds = tutorial.load_dataset(self.testfile) tiny = DataArray(range(5), name='tiny').to_dataset() From 5f733513c635981b5bcb0ffaaa9bfe61f7bd6d5a Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Tue, 3 Oct 2017 15:49:30 -0700 Subject: [PATCH 7/9] whatsnew update --- doc/whats-new.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index f161aa10af7..f8c38c47ca1 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -212,10 +212,9 @@ Bug fixes the first argument was a numpy variable (:issue:`1588`). By `Guido Imperiale `_. - - Fix bug when using ``pytest`` class decorators to skiping certain unittests. - The previous behavior unintentionally causing additional tests to be skipped. - A temporary work-around has been applied in :issue:`1531`. - By `Joe Hamman `_. +- Fix bug when using ``pytest`` class decorators to skiping certain unittests. + The previous behavior unintentionally causing additional tests to be skipped + (:issue:`1531`). By `Joe Hamman `_. .. 
_whats-new.0.9.6: From 66515163a8622a717e0d3717a6505ab523660fa0 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 4 Oct 2017 14:38:36 -0700 Subject: [PATCH 8/9] use unittest skip decorators, way easier --- xarray/tests/__init__.py | 27 ++++++------ xarray/tests/test_backends.py | 72 +++++++++++--------------------- xarray/tests/test_conventions.py | 13 ++---- xarray/tests/test_dask.py | 4 +- xarray/tests/test_groupby.py | 4 +- xarray/tests/test_plot.py | 12 ++---- xarray/tests/test_tutorial.py | 3 +- 7 files changed, 51 insertions(+), 84 deletions(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 7eb0df96be2..fbca4e08017 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -16,6 +16,16 @@ from xarray.core.pycompat import PY3 from xarray.testing import assert_equal, assert_identical, assert_allclose +try: + import unittest2 as unittest +except ImportError: + import unittest + +try: + from unittest import mock +except ImportError: + import mock + def _importorskip(modname, minversion=None): try: @@ -26,20 +36,11 @@ def _importorskip(modname, minversion=None): raise ImportError('Minimum version not satisfied') except ImportError: has = False - func = pytest.mark.skipif((not has), reason='requires {}'.format(modname)) + # TODO: use pytest skip + func = unittest.skipUnless(has, reason='requires {}'.format(modname)) return has, func -try: - import unittest2 as unittest -except ImportError: - import unittest - -try: - from unittest import mock -except ImportError: - import mock - has_matplotlib, requires_matplotlib = _importorskip('matplotlib') has_scipy, requires_scipy = _importorskip('scipy') has_pydap, requires_pydap = _importorskip('pydap.client') @@ -53,8 +54,8 @@ def _importorskip(modname, minversion=None): # some special cases has_scipy_or_netCDF4 = has_scipy or has_netCDF4 -requires_scipy_or_netCDF4 = pytest.mark.skipif( - not has_scipy_or_netCDF4, reason='requires scipy or netCDF4') +requires_scipy_or_netCDF4 = 
unittest.skipUnless( + has_scipy_or_netCDF4, reason='requires scipy or netCDF4') if not has_pathlib: has_pathlib, requires_pathlib = _importorskip('pathlib2') diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 6376a73fe71..828151d55a0 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -590,8 +590,8 @@ def create_tmp_files(nfiles, suffix='.nc', allow_cleanup_failure=False): yield files +@requires_netCDF4 class BaseNetCDF4Test(CFEncodedDataTest): - @requires_netCDF4 def test_open_group(self): # Create a netCDF file with a dataset stored within a group with create_tmp_file() as tmp_file: @@ -617,7 +617,6 @@ def test_open_group(self): with self.assertRaisesRegexp(ValueError, 'must be a string'): open_dataset(tmp_file, group=(1, 2, 3)) - @requires_netCDF4 def test_open_subgroup(self): # Create a netCDF file with a dataset stored within a group within a group with create_tmp_file() as tmp_file: @@ -639,7 +638,6 @@ def test_open_subgroup(self): with open_dataset(tmp_file, group=group) as actual: self.assertVariableEqual(actual['x'], expected['x']) - @requires_netCDF4 def test_write_groups(self): data1 = create_test_data() data2 = data1 * 2 @@ -651,7 +649,6 @@ def test_write_groups(self): with open_dataset(tmp_file, group='data/2') as actual2: self.assertDatasetIdentical(data2, actual2) - @requires_netCDF4 def test_roundtrip_character_array(self): with create_tmp_file() as tmp_file: values = np.array([['a', 'b', 'c'], ['d', 'e', 'f']], dtype='S') @@ -670,14 +667,12 @@ def test_roundtrip_character_array(self): with self.roundtrip(actual) as roundtripped: self.assertDatasetIdentical(expected, roundtripped) - @requires_netCDF4 def test_default_to_char_arrays(self): data = Dataset({'x': np.array(['foo', 'zzzz'], dtype='S')}) with self.roundtrip(data) as actual: self.assertDatasetIdentical(data, actual) self.assertEqual(actual['x'].dtype, np.dtype('S4')) - @requires_netCDF4 def test_open_encodings(self): # Create a 
netCDF file with explicit time units # and make sure it makes it into the encodings @@ -703,7 +698,6 @@ def test_open_encodings(self): if k in expected['time'].encoding) self.assertDictEqual(actual_encoding, expected['time'].encoding) - @requires_netCDF4 def test_dump_encodings(self): # regression test for #709 ds = Dataset({'x': ('y', np.arange(10.0))}) @@ -711,7 +705,6 @@ def test_dump_encodings(self): with self.roundtrip(ds, save_kwargs=kwargs) as actual: self.assertTrue(actual.x.encoding['zlib']) - @requires_netCDF4 def test_dump_and_open_encodings(self): # Create a netCDF file with explicit time units # and make sure it makes it into the encodings @@ -731,7 +724,6 @@ def test_dump_and_open_encodings(self): self.assertEqual(ds.variables['time'].getncattr('units'), units) self.assertArrayEqual(ds.variables['time'], np.arange(10) + 4) - @requires_netCDF4 def test_compression_encoding(self): data = create_test_data() data['var2'].encoding.update({'zlib': True, @@ -747,7 +739,6 @@ def test_compression_encoding(self): with self.roundtrip(expected) as actual: self.assertDatasetEqual(expected, actual) - @requires_netCDF4 def test_mask_and_scale(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, mode='w') as nc: @@ -772,7 +763,6 @@ def test_mask_and_scale(self): expected = create_masked_and_scaled_data() self.assertDatasetIdentical(expected, ds) - @requires_netCDF4 def test_0dimensional_variable(self): # This fix verifies our work-around to this netCDF4-python bug: # https://github.com/Unidata/netcdf4-python/pull/220 @@ -785,7 +775,6 @@ def test_0dimensional_variable(self): expected = Dataset({'x': ((), 123)}) self.assertDatasetIdentical(expected, ds) - @requires_netCDF4 def test_already_open_dataset(self): with create_tmp_file() as tmp_file: with nc4.Dataset(tmp_file, mode='w') as nc: @@ -798,7 +787,6 @@ def test_already_open_dataset(self): expected = Dataset({'x': ((), 42)}) self.assertDatasetIdentical(expected, ds) - 
@requires_netCDF4 def test_variable_len_strings(self): with create_tmp_file() as tmp_file: values = np.array(['foo', 'bar', 'baz'], dtype=object) @@ -814,6 +802,7 @@ def test_variable_len_strings(self): self.assertDatasetIdentical(expected, actual) +@requires_netCDF4 class NetCDF4DataTest(BaseNetCDF4Test, TestCase): autoclose = False @@ -824,7 +813,6 @@ def create_store(self): yield store @contextlib.contextmanager - @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -867,10 +855,10 @@ class NetCDF4DataStoreAutocloseTrue(NetCDF4DataTest): autoclose = True +@requires_netCDF4 +@requires_dask class NetCDF4ViaDaskDataTest(NetCDF4DataTest): @contextlib.contextmanager - @requires_netCDF4 - @requires_dask def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with NetCDF4DataTest.roundtrip( @@ -892,6 +880,7 @@ class NetCDF4ViaDaskDataTestAutocloseTrue(NetCDF4ViaDaskDataTest): autoclose = True +@requires_scipy class ScipyInMemoryDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -899,7 +888,6 @@ def create_store(self): yield backends.ScipyDataStore(fobj, 'w') @contextlib.contextmanager - @requires_scipy def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): serialized = data.to_netcdf(**save_kwargs) @@ -924,6 +912,7 @@ class ScipyInMemoryDataTestAutocloseTrue(ScipyInMemoryDataTest): autoclose = True +@requires_scipy class ScipyFileObjectTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -931,7 +920,6 @@ def create_store(self): yield backends.ScipyDataStore(fobj, 'w') @contextlib.contextmanager - @requires_scipy def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file() as tmp_file: @@ -950,6 +938,7 @@ def test_pickle_dataarray(self): pass +@requires_scipy class 
ScipyFilePathTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -958,7 +947,6 @@ def create_store(self): yield store @contextlib.contextmanager - @requires_scipy def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1004,6 +992,7 @@ class ScipyFilePathTestAutocloseTrue(ScipyFilePathTest): autoclose = True +@requires_netCDF4 class NetCDF3ViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager def create_store(self): @@ -1013,7 +1002,6 @@ def create_store(self): yield store @contextlib.contextmanager - @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1029,6 +1017,7 @@ class NetCDF3ViaNetCDF4DataTestAutocloseTrue(NetCDF3ViaNetCDF4DataTest): autoclose = True +@requires_netCDF4 class NetCDF4ClassicViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase): @contextlib.contextmanager @@ -1039,7 +1028,6 @@ def create_store(self): yield store @contextlib.contextmanager - @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1056,6 +1044,7 @@ class NetCDF4ClassicViaNetCDF4DataTestAutocloseTrue( autoclose = True +@requires_scipy_or_netCDF4 class GenericNetCDFDataTest(CFEncodedDataTest, Only32BitTypes, TestCase): # verify that we can read and write netCDF3 files as long as we have scipy # or netCDF4-python installed @@ -1065,7 +1054,6 @@ def test_write_store(self): pass @contextlib.contextmanager - @requires_scipy_or_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1130,6 +1118,8 @@ class GenericNetCDFDataTestAutocloseTrue(GenericNetCDFDataTest): autoclose = True +@requires_h5netcdf +@requires_netCDF4 class H5NetCDFDataTest(BaseNetCDF4Test, TestCase): @contextlib.contextmanager def 
create_store(self): @@ -1137,8 +1127,6 @@ def create_store(self): yield backends.H5NetCDFStore(tmp_file, 'w') @contextlib.contextmanager - @requires_h5netcdf - @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1191,9 +1179,9 @@ def test_encoding_unlimited_dims(self): # tests pending h5netcdf fix -# @pytest.mark.xfail @shoyer - is this still an issue? -# class H5NetCDFDataTestAutocloseTrue(H5NetCDFDataTest): -# autoclose = True +@unittest.skip +class H5NetCDFDataTestAutocloseTrue(H5NetCDFDataTest): + autoclose = True class OpenMFDatasetManyFilesTest(TestCase): @@ -1279,15 +1267,15 @@ def test_4_open_large_num_files_h5netcdf(self): self.validate_open_mfdataset_large_num_files(engine=['h5netcdf']) +@requires_dask +@requires_scipy +@requires_netCDF4 class DaskTest(TestCase, DatasetIOTestCases): @contextlib.contextmanager def create_store(self): yield Dataset() @contextlib.contextmanager - @requires_dask - @requires_scipy - @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): yield data.chunk() @@ -1399,6 +1387,7 @@ def test_save_mfdataset_invalid_dataarray(self): with self.assertRaisesRegexp(TypeError, 'supports writing Dataset'): save_mfdataset([da], ['dataarray']) + @requires_pathlib def test_save_mfdataset_pathlib_roundtrip(self): original = Dataset({'foo': ('x', np.random.randn(10))}) @@ -1487,11 +1476,11 @@ class DaskTestAutocloseTrue(DaskTest): autoclose = True +@network +@requires_scipy_or_netCDF4 +@requires_pydap class PydapTest(TestCase): @contextlib.contextmanager - @network - @requires_scipy_or_netCDF4 - @requires_pydap def create_datasets(self, **kwargs): url = 'http://test.opendap.org/opendap/hyrax/data/nc/bears.nc' actual = open_dataset(url, engine='pydap', **kwargs) @@ -1522,7 +1511,6 @@ def test_cmp_local_file(self): self.assertDatasetEqual(actual.isel(j=slice(1, 2)), expected.isel(j=slice(1, 2))) - @requires_pydap def 
test_session(self): from pydap.cas.urs import setup_session @@ -1537,6 +1525,8 @@ def test_dask(self): self.assertDatasetEqual(actual, expected) +@requires_scipy +@requires_pynio class TestPyNio(CFEncodedDataTest, Only32BitTypes, TestCase): def test_write_store(self): # pynio is read-only for now @@ -1547,8 +1537,6 @@ def test_orthogonal_indexing(self): pass @contextlib.contextmanager - @requires_scipy - @requires_pynio def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): with create_tmp_file( @@ -1558,8 +1546,6 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={}, autoclose=self.autoclose, **open_kwargs) as ds: yield ds - @requires_pynio - @requires_scipy def test_weakrefs(self): example = Dataset({'foo': ('x', np.arange(5.0))}) expected = example.rename({'foo': 'bar', 'x': 'y'}) @@ -1575,14 +1561,10 @@ def test_weakrefs(self): class TestPyNioAutocloseTrue(TestPyNio): autoclose = True - @requires_pynio - def setUp(self): - pass - +@requires_rasterio class TestRasterio(TestCase): - @requires_rasterio def test_serialization_utm(self): import rasterio from rasterio.transform import from_origin @@ -1628,7 +1610,6 @@ def test_serialization_utm(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) - @requires_rasterio def test_serialization_platecarree(self): import rasterio @@ -1673,7 +1654,6 @@ def test_serialization_platecarree(self): with xr.open_dataarray(tmp_nc_file) as ncds: assert_identical(rioda, ncds) - @requires_rasterio def test_indexing(self): import rasterio @@ -1767,7 +1747,6 @@ def test_indexing(self): ac = actual.isel(band=[0], x=slice(2, 5), y=[2]) assert_allclose(ac, ex) - @requires_rasterio def test_caching(self): import rasterio @@ -1812,7 +1791,6 @@ def test_caching(self): assert_allclose(ac, ex) @requires_dask - @requires_rasterio def test_chunks(self): import rasterio diff --git a/xarray/tests/test_conventions.py b/xarray/tests/test_conventions.py index 6ae42ad7847..6aaa2cbfa89 
100644 --- a/xarray/tests/test_conventions.py +++ b/xarray/tests/test_conventions.py @@ -527,8 +527,8 @@ def test(self): self.assertArrayEqual(a, expected) +@requires_netCDF4 class TestEncodeCFVariable(TestCase): - @requires_netCDF4 def test_incompatible_attributes(self): invalid_vars = [ Variable(['t'], pd.date_range('2000-01-01', periods=3), @@ -541,7 +541,6 @@ def test_incompatible_attributes(self): with self.assertRaises(ValueError): conventions.encode_cf_variable(var) - @requires_netCDF4 def test_missing_fillvalue(self): v = Variable(['x'], np.array([np.nan, 1, 2, 3])) v.encoding = {'dtype': 'int16'} @@ -549,8 +548,8 @@ def test_missing_fillvalue(self): conventions.encode_cf_variable(v) +@requires_netCDF4 class TestDecodeCF(TestCase): - @requires_netCDF4 def test_dataset(self): original = Dataset({ 't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}), @@ -563,30 +562,25 @@ def test_dataset(self): actual = conventions.decode_cf(original) self.assertDatasetIdentical(expected, actual) - @requires_netCDF4 def test_invalid_coordinates(self): # regression test for GH308 original = Dataset({'foo': ('t', [1, 2], {'coordinates': 'invalid'})}) actual = conventions.decode_cf(original) self.assertDatasetIdentical(original, actual) - @requires_netCDF4 def test_decode_coordinates(self): - # regression test for GH610 original = Dataset({'foo': ('t', [1, 2], {'coordinates': 'x'}), 'x': ('t', [4, 5])}) actual = conventions.decode_cf(original) self.assertEqual(actual.foo.encoding['coordinates'], 'x') - @requires_netCDF4 def test_0d_int32_encoding(self): original = Variable((), np.int32(0), encoding={'dtype': 'int64'}) expected = Variable((), np.int64(0)) actual = conventions.maybe_encode_dtype(original) self.assertDatasetIdentical(expected, actual) - @requires_netCDF4 def test_decode_cf_with_multiple_missing_values(self): original = Variable(['t'], [0, 1, 2], {'missing_value': np.array([0, 1])}) @@ -596,7 +590,6 @@ def test_decode_cf_with_multiple_missing_values(self): 
self.assertDatasetIdentical(expected, actual) self.assertIn('variable has multiple fill', str(w[0].message)) - @requires_netCDF4 def test_decode_cf_with_drop_variables(self): original = Dataset({ 't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}), @@ -641,13 +634,13 @@ def null_wrap(ds): return InMemoryDataStore(variables=variables, attributes=ds.attrs) +@requires_netCDF4 class TestCFEncodedDataStore(CFEncodedDataTest, TestCase): @contextlib.contextmanager def create_store(self): yield CFEncodedInMemoryStore() @contextlib.contextmanager - @requires_netCDF4 def roundtrip(self, data, save_kwargs={}, open_kwargs={}, allow_cleanup_failure=False): store = CFEncodedInMemoryStore() diff --git a/xarray/tests/test_dask.py b/xarray/tests/test_dask.py index 7221a96a042..fdb2ff715dd 100644 --- a/xarray/tests/test_dask.py +++ b/xarray/tests/test_dask.py @@ -44,6 +44,7 @@ def assertLazyAnd(self, expected, actual, test): assert False +@requires_dask class TestVariable(DaskTestCase): def assertLazyAndIdentical(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertVariableIdentical) @@ -51,7 +52,6 @@ def assertLazyAndIdentical(self, expected, actual): def assertLazyAndAllClose(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertVariableAllClose) - @requires_dask def setUp(self): self.values = np.random.RandomState(0).randn(4, 6) self.data = da.from_array(self.values, chunks=(2, 2)) @@ -206,6 +206,7 @@ def test_bivariate_ufunc(self): self.assertLazyAndAllClose(np.maximum(u, 0), xu.maximum(0, v)) +@requires_dask class TestDataArrayAndDataset(DaskTestCase): def assertLazyAndIdentical(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertDataArrayIdentical) @@ -216,7 +217,6 @@ def assertLazyAndAllClose(self, expected, actual): def assertLazyAndEqual(self, expected, actual): self.assertLazyAnd(expected, actual, self.assertDataArrayEqual) - @requires_dask def setUp(self): self.values = np.random.randn(4, 6) self.data = 
da.from_array(self.values, chunks=(2, 2)) diff --git a/xarray/tests/test_groupby.py b/xarray/tests/test_groupby.py index 9d5682c0d1d..9a153e45da0 100644 --- a/xarray/tests/test_groupby.py +++ b/xarray/tests/test_groupby.py @@ -14,7 +14,7 @@ def test_consolidate_slices(): assert _consolidate_slices([slice(3), slice(3, 5)]) == [slice(5)] assert _consolidate_slices([slice(2, 3), slice(3, 6)]) == [slice(2, 6)] assert (_consolidate_slices([slice(2, 3, 1), slice(3, 6, 1)]) - == [slice(2, 6, 1)]) + == [slice(2, 6, 1)]) slices = [slice(2, 3), slice(5, 6)] assert _consolidate_slices(slices) == slices @@ -70,5 +70,5 @@ def test_groupby_duplicate_coordinate_labels(): actual = array.groupby('x').sum() assert expected.equals(actual) - + # TODO: move other groupby tests from test_dataset and test_dataarray over here diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py index ce890573bc4..bdd0fec777f 100644 --- a/xarray/tests/test_plot.py +++ b/xarray/tests/test_plot.py @@ -65,6 +65,7 @@ def easy_array(shape, start=0, stop=1): return a.reshape(shape) +@requires_matplotlib class PlotTestCase(TestCase): def tearDown(self): @@ -90,7 +91,6 @@ def contourf_called(self, plotmethod): class TestPlot(PlotTestCase): - @requires_matplotlib def setUp(self): self.darray = DataArray(easy_array((2, 3, 4))) @@ -219,7 +219,6 @@ def test_convenient_facetgrid_4d(self): class TestPlot1D(PlotTestCase): - @requires_matplotlib def setUp(self): d = [0, 1.1, 0, 2] self.darray = DataArray(d, coords={'period': range(len(d))}, @@ -280,7 +279,6 @@ def test_slice_in_title(self): class TestPlotHistogram(PlotTestCase): - @requires_matplotlib def setUp(self): self.darray = DataArray(easy_array((2, 3, 4))) @@ -318,9 +316,9 @@ def test_plot_nans(self): self.darray.plot.hist() +@requires_matplotlib class TestDetermineCmapParams(TestCase): - @requires_matplotlib def setUp(self): self.data = np.linspace(0, 1, num=100) @@ -469,9 +467,9 @@ def test_divergentcontrol(self): 
self.assertEqual(cmap_params['cmap'].name, "viridis") +@requires_matplotlib class TestDiscreteColorMap(TestCase): - @requires_matplotlib def setUp(self): x = np.arange(start=0, stop=10, step=2) y = np.arange(start=9, stop=-7, step=-3) @@ -561,7 +559,6 @@ class Common2dMixin: Should have the same name as the method. """ - @requires_matplotlib def setUp(self): da = DataArray(easy_array( (10, 15), start=-1), dims=['y', 'x']) @@ -1022,7 +1019,6 @@ def test_2d_coord_names(self): class TestFacetGrid(PlotTestCase): - @requires_matplotlib def setUp(self): d = easy_array((10, 15, 3)) self.darray = DataArray(d, dims=['y', 'x', 'z'], @@ -1246,7 +1242,6 @@ def test_facetgrid_polar(self): class TestFacetGrid4d(PlotTestCase): - @requires_matplotlib def setUp(self): a = easy_array((10, 15, 3, 2)) darray = DataArray(a, dims=['y', 'x', 'col', 'row']) @@ -1275,7 +1270,6 @@ def test_default_labels(self): class TestDatetimePlot(PlotTestCase): - @requires_matplotlib def setUp(self): ''' Create a DataArray with a time-axis that contains datetime objects. diff --git a/xarray/tests/test_tutorial.py b/xarray/tests/test_tutorial.py index 2f2af53ce8e..56bdccedcfe 100644 --- a/xarray/tests/test_tutorial.py +++ b/xarray/tests/test_tutorial.py @@ -3,6 +3,7 @@ from __future__ import print_function import os +import pytest from xarray import tutorial, DataArray from xarray.core.pycompat import suppress @@ -10,6 +11,7 @@ from . 
import TestCase, network +@network class TestLoadDataset(TestCase): def setUp(self): @@ -21,7 +23,6 @@ def setUp(self): with suppress(OSError): os.remove('{}.md5'.format(self.testfilepath)) - @network def test_download_from_github(self): ds = tutorial.load_dataset(self.testfile) tiny = DataArray(range(5), name='tiny').to_dataset() From 993992def9afc075f52bedc2423bca03ac744672 Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Wed, 4 Oct 2017 15:57:38 -0700 Subject: [PATCH 9/9] update workaround comment --- xarray/tests/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index fbca4e08017..33bad7e5ebd 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -36,7 +36,10 @@ def _importorskip(modname, minversion=None): raise ImportError('Minimum version not satisfied') except ImportError: has = False - # TODO: use pytest skip + # TODO: use pytest.skipif instead of unittest.skipUnless + # Using `unittest.skipUnless` is a temporary workaround for pytest#568, + # wherein class decorators stain inherited classes. + # xref: xarray#1531, implemented in xarray #1557. func = unittest.skipUnless(has, reason='requires {}'.format(modname)) return has, func