diff --git a/.travis.yml b/.travis.yml
index bd53edb0029..6df70e92954 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -14,8 +14,6 @@ matrix:
env: CONDA_ENV=py27-min
- python: 2.7
env: CONDA_ENV=py27-cdat+iris+pynio
- - python: 3.4
- env: CONDA_ENV=py34
- python: 3.5
env: CONDA_ENV=py35
- python: 3.6
@@ -102,7 +100,7 @@ install:
script:
# TODO: restore this check once the upstream pandas issue is fixed:
- # https://github.com/pandas-dev/pandas/issues/21071
+ # https://github.com/pandas-dev/pandas/issues/21071
# - python -OO -c "import xarray"
- if [[ "$CONDA_ENV" == "docs" ]]; then
conda install -c conda-forge sphinx sphinx_rtd_theme sphinx-gallery numpydoc;
diff --git a/ci/requirements-py27-min.yml b/ci/requirements-py27-min.yml
index 50f6724ec51..118b629271e 100644
--- a/ci/requirements-py27-min.yml
+++ b/ci/requirements-py27-min.yml
@@ -4,8 +4,8 @@ dependencies:
- pytest
- flake8
- mock
- - numpy==1.11
- - pandas==0.18.0
+ - numpy=1.12
+ - pandas=0.19
- pip:
- coveralls
- pytest-cov
diff --git a/ci/requirements-py34.yml b/ci/requirements-py34.yml
deleted file mode 100644
index ba79e00bb12..00000000000
--- a/ci/requirements-py34.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-name: test_env
-dependencies:
- - python=3.4
- - bottleneck
- - flake8
- - pandas
- - pip:
- - coveralls
- - pytest-cov
- - pytest
diff --git a/ci/requirements-py35.yml b/ci/requirements-py35.yml
index d3500bc5d10..9615aeba9aa 100644
--- a/ci/requirements-py35.yml
+++ b/ci/requirements-py35.yml
@@ -4,11 +4,10 @@ channels:
dependencies:
- python=3.5
- cftime
- - dask
- - distributed
+ - dask=0.16
- h5py
- h5netcdf
- - matplotlib
+ - matplotlib=1.5
- netcdf4
- pytest
- flake8
diff --git a/doc/installing.rst b/doc/installing.rst
index 31fc109ee2e..b3154c3d8bb 100644
--- a/doc/installing.rst
+++ b/doc/installing.rst
@@ -6,9 +6,9 @@ Installation
Required dependencies
---------------------
-- Python 2.7 [1]_, 3.4, 3.5, or 3.6
-- `numpy `__ (1.11 or later)
-- `pandas `__ (0.18.0 or later)
+- Python 2.7 [1]_, 3.5, or 3.6
+- `numpy `__ (1.12 or later)
+- `pandas `__ (0.19.2 or later)
Optional dependencies
---------------------
@@ -45,13 +45,14 @@ For accelerating xarray
For parallel computing
~~~~~~~~~~~~~~~~~~~~~~
-- `dask.array `__ (0.9.0 or later): required for
+- `dask.array `__ (0.16 or later): required for
:ref:`dask`.
For plotting
~~~~~~~~~~~~
- `matplotlib `__: required for :ref:`plotting`
+ (1.5 or later)
- `cartopy `__: recommended for
:ref:`plot-maps`
- `seaborn `__: for better
diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index bd23386a460..af90ea7f9d3 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -66,6 +66,18 @@ Bug fixes
weren't monotonic (:issue:`2250`).
By `Fabien Maussion `_.
+Breaking changes
+~~~~~~~~~~~~~~~~
+
+- Xarray no longer supports Python 3.4. Additionally, the minimum supported
+ versions of the following dependencies have been updated and/or clarified:
+
+ - Pandas: 0.18 -> 0.19
+ - NumPy: 1.11 -> 1.12
+ - Dask: 0.9 -> 0.16
+ - Matplotlib: unspecified -> 1.5
+
+ (:issue:`2204`). By `Joe Hamman `_.
.. _whats-new.0.10.7:
diff --git a/setup.py b/setup.py
index 77c6083f52c..e35611e01b1 100644
--- a/setup.py
+++ b/setup.py
@@ -71,4 +71,4 @@
tests_require=TESTS_REQUIRE,
url=URL,
packages=find_packages(),
- package_data={'xarray': ['tests/data/*', 'plot/default_colormap.csv']})
+ package_data={'xarray': ['tests/data/*']})
diff --git a/xarray/core/duck_array_ops.py b/xarray/core/duck_array_ops.py
index 065ac165a0d..3bd105064da 100644
--- a/xarray/core/duck_array_ops.py
+++ b/xarray/core/duck_array_ops.py
@@ -363,9 +363,9 @@ def f(values, axis=None, skipna=None, **kwargs):
median = _create_nan_agg_method('median', numeric_only=True)
prod = _create_nan_agg_method('prod', numeric_only=True, no_bottleneck=True)
cumprod_1d = _create_nan_agg_method(
- 'cumprod', numeric_only=True, np_compat=True, no_bottleneck=True)
+ 'cumprod', numeric_only=True, no_bottleneck=True)
cumsum_1d = _create_nan_agg_method(
- 'cumsum', numeric_only=True, np_compat=True, no_bottleneck=True)
+ 'cumsum', numeric_only=True, no_bottleneck=True)
def _nd_cum_func(cum_func, array, axis, **kwargs):
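With the minimum NumPy raised to 1.12, np.nancumsum and np.nancumprod are always available, which is why the np_compat flag is dropped above. A minimal sketch of the NaN-aware behaviour these aggregations rely on, in plain NumPy outside xarray (the sample array is illustrative):

    import numpy as np

    values = np.array([1.0, np.nan, 2.0, np.nan, 3.0])

    # NaNs count as 0 for the cumulative sum and 1 for the cumulative
    # product, so the running totals carry through missing entries.
    np.nancumsum(values)   # array([1., 1., 3., 3., 6.])
    np.nancumprod(values)  # array([1., 1., 2., 2., 6.])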
diff --git a/xarray/core/missing.py b/xarray/core/missing.py
index 743627bb381..bec9e2e1931 100644
--- a/xarray/core/missing.py
+++ b/xarray/core/missing.py
@@ -8,7 +8,6 @@
from . import rolling
from .computation import apply_ufunc
-from .npcompat import flip
from .pycompat import iteritems
from .utils import is_scalar, OrderedSet
from .variable import Variable, broadcast_variables
@@ -245,13 +244,13 @@ def _bfill(arr, n=None, axis=-1):
'''inverse of ffill'''
import bottleneck as bn
- arr = flip(arr, axis=axis)
+ arr = np.flip(arr, axis=axis)
# fill
arr = bn.push(arr, axis=axis, n=n)
# reverse back to original
- return flip(arr, axis=axis)
+ return np.flip(arr, axis=axis)
def ffill(arr, dim=None, limit=None):
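_bfill now calls np.flip (added in NumPy 1.12) instead of the removed npcompat.flip. A minimal sketch of the reverse / forward-fill / reverse idea it uses, assuming bottleneck is installed; the input array is illustrative:

    import numpy as np
    import bottleneck as bn

    arr = np.array([np.nan, 1.0, np.nan, 3.0])
    rev = np.flip(arr, axis=-1)    # [ 3., nan,  1., nan]
    rev = bn.push(rev, axis=-1)    # forward fill: [3., 3., 1., 1.]
    np.flip(rev, axis=-1)          # back fill:    [1., 1., 3., 3.]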
diff --git a/xarray/core/npcompat.py b/xarray/core/npcompat.py
index ec8adfffbf8..6d4db063b98 100644
--- a/xarray/core/npcompat.py
+++ b/xarray/core/npcompat.py
@@ -1,261 +1,7 @@
from __future__ import absolute_import, division, print_function
-from distutils.version import LooseVersion
-
import numpy as np
-if LooseVersion(np.__version__) >= LooseVersion('1.12'):
- as_strided = np.lib.stride_tricks.as_strided
-else:
- def as_strided(x, shape=None, strides=None, subok=False, writeable=True):
- array = np.lib.stride_tricks.as_strided(x, shape, strides, subok)
- array.setflags(write=writeable)
- return array
-
-
-try:
- from numpy import nancumsum, nancumprod, flip
-except ImportError: # pragma: no cover
- # Code copied from newer versions of NumPy (v1.12).
- # Used under the terms of NumPy's license, see licenses/NUMPY_LICENSE.
-
- def _replace_nan(a, val):
- """
- If `a` is of inexact type, make a copy of `a`, replace NaNs with
- the `val` value, and return the copy together with a boolean mask
- marking the locations where NaNs were present. If `a` is not of
- inexact type, do nothing and return `a` together with a mask of None.
-
- Note that scalars will end up as array scalars, which is important
- for using the result as the value of the out argument in some
- operations.
-
- Parameters
- ----------
- a : array-like
- Input array.
- val : float
- NaN values are set to val before doing the operation.
-
- Returns
- -------
- y : ndarray
- If `a` is of inexact type, return a copy of `a` with the NaNs
- replaced by the fill value, otherwise return `a`.
- mask: {bool, None}
- If `a` is of inexact type, return a boolean mask marking locations
- of NaNs, otherwise return None.
-
- """
- is_new = not isinstance(a, np.ndarray)
- if is_new:
- a = np.array(a)
- if not issubclass(a.dtype.type, np.inexact):
- return a, None
- if not is_new:
- # need copy
- a = np.array(a, subok=True)
-
- mask = np.isnan(a)
- np.copyto(a, val, where=mask)
- return a, mask
-
- def nancumsum(a, axis=None, dtype=None, out=None):
- """
- Return the cumulative sum of array elements over a given axis treating
- Not a Numbers (NaNs) as zero. The cumulative sum does not change when
- NaNs are encountered and leading NaNs are replaced by zeros.
-
- Zeros are returned for slices that are all-NaN or empty.
-
- .. versionadded:: 1.12.0
-
- Parameters
- ----------
- a : array_like
- Input array.
- axis : int, optional
- Axis along which the cumulative sum is computed. The default
- (None) is to compute the cumsum over the flattened array.
- dtype : dtype, optional
- Type of the returned array and of the accumulator in which the
- elements are summed. If `dtype` is not specified, it defaults
- to the dtype of `a`, unless `a` has an integer dtype with a
- precision less than that of the default platform integer. In
- that case, the default platform integer is used.
- out : ndarray, optional
- Alternative output array in which to place the result. It must
- have the same shape and buffer length as the expected output
- but the type will be cast if necessary. See `doc.ufuncs`
- (Section "Output arguments") for more details.
-
- Returns
- -------
- nancumsum : ndarray.
- A new array holding the result is returned unless `out` is
- specified, in which it is returned. The result has the same
- size as `a`, and the same shape as `a` if `axis` is not None
- or `a` is a 1-d array.
-
- See Also
- --------
- numpy.cumsum : Cumulative sum across array propagating NaNs.
- isnan : Show which elements are NaN.
-
- Examples
- --------
- >>> np.nancumsum(1)
- array([1])
- >>> np.nancumsum([1])
- array([1])
- >>> np.nancumsum([1, np.nan])
- array([ 1., 1.])
- >>> a = np.array([[1, 2], [3, np.nan]])
- >>> np.nancumsum(a)
- array([ 1., 3., 6., 6.])
- >>> np.nancumsum(a, axis=0)
- array([[ 1., 2.],
- [ 4., 2.]])
- >>> np.nancumsum(a, axis=1)
- array([[ 1., 3.],
- [ 3., 3.]])
-
- """
- a, mask = _replace_nan(a, 0)
- return np.cumsum(a, axis=axis, dtype=dtype, out=out)
-
- def nancumprod(a, axis=None, dtype=None, out=None):
- """
- Return the cumulative product of array elements over a given axis
- treating Not a Numbers (NaNs) as one. The cumulative product does not
- change when NaNs are encountered and leading NaNs are replaced by ones.
-
- Ones are returned for slices that are all-NaN or empty.
-
- .. versionadded:: 1.12.0
-
- Parameters
- ----------
- a : array_like
- Input array.
- axis : int, optional
- Axis along which the cumulative product is computed. By default
- the input is flattened.
- dtype : dtype, optional
- Type of the returned array, as well as of the accumulator in which
- the elements are multiplied. If *dtype* is not specified, it
- defaults to the dtype of `a`, unless `a` has an integer dtype with
- a precision less than that of the default platform integer. In
- that case, the default platform integer is used instead.
- out : ndarray, optional
- Alternative output array in which to place the result. It must
- have the same shape and buffer length as the expected output
- but the type of the resulting values will be cast if necessary.
-
- Returns
- -------
- nancumprod : ndarray
- A new array holding the result is returned unless `out` is
- specified, in which case it is returned.
-
- See Also
- --------
- numpy.cumprod : Cumulative product across array propagating NaNs.
- isnan : Show which elements are NaN.
-
- Examples
- --------
- >>> np.nancumprod(1)
- array([1])
- >>> np.nancumprod([1])
- array([1])
- >>> np.nancumprod([1, np.nan])
- array([ 1., 1.])
- >>> a = np.array([[1, 2], [3, np.nan]])
- >>> np.nancumprod(a)
- array([ 1., 2., 6., 6.])
- >>> np.nancumprod(a, axis=0)
- array([[ 1., 2.],
- [ 3., 2.]])
- >>> np.nancumprod(a, axis=1)
- array([[ 1., 2.],
- [ 3., 3.]])
-
- """
- a, mask = _replace_nan(a, 1)
- return np.cumprod(a, axis=axis, dtype=dtype, out=out)
-
- def flip(m, axis):
- """
- Reverse the order of elements in an array along the given axis.
-
- The shape of the array is preserved, but the elements are reordered.
-
- .. versionadded:: 1.12.0
-
- Parameters
- ----------
- m : array_like
- Input array.
- axis : integer
- Axis in array, which entries are reversed.
-
-
- Returns
- -------
- out : array_like
- A view of `m` with the entries of axis reversed. Since a view is
- returned, this operation is done in constant time.
-
- See Also
- --------
- flipud : Flip an array vertically (axis=0).
- fliplr : Flip an array horizontally (axis=1).
-
- Notes
- -----
- flip(m, 0) is equivalent to flipud(m).
- flip(m, 1) is equivalent to fliplr(m).
- flip(m, n) corresponds to ``m[...,::-1,...]`` with ``::-1`` at index n.
-
- Examples
- --------
- >>> A = np.arange(8).reshape((2,2,2))
- >>> A
- array([[[0, 1],
- [2, 3]],
-
- [[4, 5],
- [6, 7]]])
-
- >>> flip(A, 0)
- array([[[4, 5],
- [6, 7]],
-
- [[0, 1],
- [2, 3]]])
-
- >>> flip(A, 1)
- array([[[2, 3],
- [0, 1]],
-
- [[6, 7],
- [4, 5]]])
-
- >>> A = np.random.randn(3,4,5)
- >>> np.all(flip(A,2) == A[:,:,::-1,...])
- True
- """
- if not hasattr(m, 'ndim'):
- m = np.asarray(m)
- indexer = [slice(None)] * m.ndim
- try:
- indexer[axis] = slice(None, None, -1)
- except IndexError:
- raise ValueError("axis=%i is invalid for the %i-dimensional "
- "input array" % (axis, m.ndim))
- return m[tuple(indexer)]
-
try:
from numpy import isin
except ImportError:
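The backported as_strided, flip, nancumsum and nancumprod shims can go because every supported NumPy (>= 1.12) provides them directly; only the isin fallback stays, since np.isin first appeared in NumPy 1.13. A quick check of that assumption:

    import numpy as np

    # All of these exist from NumPy 1.12 onwards.
    assert hasattr(np, 'flip')
    assert hasattr(np, 'nancumsum')
    assert hasattr(np, 'nancumprod')
    assert hasattr(np.lib.stride_tricks, 'as_strided')
    # np.isin only arrived in 1.13, hence the remaining try/except above.
    print(hasattr(np, 'isin'))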
diff --git a/xarray/core/nputils.py b/xarray/core/nputils.py
index 4ca1f9390eb..6df2d34bfe3 100644
--- a/xarray/core/nputils.py
+++ b/xarray/core/nputils.py
@@ -5,8 +5,6 @@
import numpy as np
import pandas as pd
-from . import npcompat
-
def _validate_axis(data, axis):
ndim = data.ndim
@@ -194,6 +192,6 @@ def _rolling_window(a, window, axis=-1):
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
- rolling = npcompat.as_strided(a, shape=shape, strides=strides,
- writeable=False)
+ rolling = np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides,
+ writeable=False)
return np.swapaxes(rolling, -2, axis)
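_rolling_window can call np.lib.stride_tricks.as_strided directly because the writeable keyword it needs exists from NumPy 1.12 onward. A small standalone sketch of the same stride trick on a 1-D array (the input is illustrative):

    import numpy as np

    a = np.arange(6)                                   # [0 1 2 3 4 5]
    window = 3
    shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
    strides = a.strides + (a.strides[-1],)
    # writeable=False keeps the overlapping view read-only.
    rolling = np.lib.stride_tricks.as_strided(
        a, shape=shape, strides=strides, writeable=False)
    # rolling[i] is a[i:i + window]:
    # [[0 1 2]
    #  [1 2 3]
    #  [2 3 4]
    #  [3 4 5]]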
diff --git a/xarray/plot/default_colormap.csv b/xarray/plot/default_colormap.csv
deleted file mode 100644
index de9632e3f26..00000000000
--- a/xarray/plot/default_colormap.csv
+++ /dev/null
@@ -1,256 +0,0 @@
-0.26700401,0.00487433,0.32941519
-0.26851048,0.00960483,0.33542652
-0.26994384,0.01462494,0.34137895
-0.27130489,0.01994186,0.34726862
-0.27259384,0.02556309,0.35309303
-0.27380934,0.03149748,0.35885256
-0.27495242,0.03775181,0.36454323
-0.27602238,0.04416723,0.37016418
-0.2770184,0.05034437,0.37571452
-0.27794143,0.05632444,0.38119074
-0.27879067,0.06214536,0.38659204
-0.2795655,0.06783587,0.39191723
-0.28026658,0.07341724,0.39716349
-0.28089358,0.07890703,0.40232944
-0.28144581,0.0843197,0.40741404
-0.28192358,0.08966622,0.41241521
-0.28232739,0.09495545,0.41733086
-0.28265633,0.10019576,0.42216032
-0.28291049,0.10539345,0.42690202
-0.28309095,0.11055307,0.43155375
-0.28319704,0.11567966,0.43611482
-0.28322882,0.12077701,0.44058404
-0.28318684,0.12584799,0.44496
-0.283072,0.13089477,0.44924127
-0.28288389,0.13592005,0.45342734
-0.28262297,0.14092556,0.45751726
-0.28229037,0.14591233,0.46150995
-0.28188676,0.15088147,0.46540474
-0.28141228,0.15583425,0.46920128
-0.28086773,0.16077132,0.47289909
-0.28025468,0.16569272,0.47649762
-0.27957399,0.17059884,0.47999675
-0.27882618,0.1754902,0.48339654
-0.27801236,0.18036684,0.48669702
-0.27713437,0.18522836,0.48989831
-0.27619376,0.19007447,0.49300074
-0.27519116,0.1949054,0.49600488
-0.27412802,0.19972086,0.49891131
-0.27300596,0.20452049,0.50172076
-0.27182812,0.20930306,0.50443413
-0.27059473,0.21406899,0.50705243
-0.26930756,0.21881782,0.50957678
-0.26796846,0.22354911,0.5120084
-0.26657984,0.2282621,0.5143487
-0.2651445,0.23295593,0.5165993
-0.2636632,0.23763078,0.51876163
-0.26213801,0.24228619,0.52083736
-0.26057103,0.2469217,0.52282822
-0.25896451,0.25153685,0.52473609
-0.25732244,0.2561304,0.52656332
-0.25564519,0.26070284,0.52831152
-0.25393498,0.26525384,0.52998273
-0.25219404,0.26978306,0.53157905
-0.25042462,0.27429024,0.53310261
-0.24862899,0.27877509,0.53455561
-0.2468114,0.28323662,0.53594093
-0.24497208,0.28767547,0.53726018
-0.24311324,0.29209154,0.53851561
-0.24123708,0.29648471,0.53970946
-0.23934575,0.30085494,0.54084398
-0.23744138,0.30520222,0.5419214
-0.23552606,0.30952657,0.54294396
-0.23360277,0.31382773,0.54391424
-0.2316735,0.3181058,0.54483444
-0.22973926,0.32236127,0.54570633
-0.22780192,0.32659432,0.546532
-0.2258633,0.33080515,0.54731353
-0.22392515,0.334994,0.54805291
-0.22198915,0.33916114,0.54875211
-0.22005691,0.34330688,0.54941304
-0.21812995,0.34743154,0.55003755
-0.21620971,0.35153548,0.55062743
-0.21429757,0.35561907,0.5511844
-0.21239477,0.35968273,0.55171011
-0.2105031,0.36372671,0.55220646
-0.20862342,0.36775151,0.55267486
-0.20675628,0.37175775,0.55311653
-0.20490257,0.37574589,0.55353282
-0.20306309,0.37971644,0.55392505
-0.20123854,0.38366989,0.55429441
-0.1994295,0.38760678,0.55464205
-0.1976365,0.39152762,0.55496905
-0.19585993,0.39543297,0.55527637
-0.19410009,0.39932336,0.55556494
-0.19235719,0.40319934,0.55583559
-0.19063135,0.40706148,0.55608907
-0.18892259,0.41091033,0.55632606
-0.18723083,0.41474645,0.55654717
-0.18555593,0.4185704,0.55675292
-0.18389763,0.42238275,0.55694377
-0.18225561,0.42618405,0.5571201
-0.18062949,0.42997486,0.55728221
-0.17901879,0.43375572,0.55743035
-0.17742298,0.4375272,0.55756466
-0.17584148,0.44128981,0.55768526
-0.17427363,0.4450441,0.55779216
-0.17271876,0.4487906,0.55788532
-0.17117615,0.4525298,0.55796464
-0.16964573,0.45626209,0.55803034
-0.16812641,0.45998802,0.55808199
-0.1666171,0.46370813,0.55811913
-0.16511703,0.4674229,0.55814141
-0.16362543,0.47113278,0.55814842
-0.16214155,0.47483821,0.55813967
-0.16066467,0.47853961,0.55811466
-0.15919413,0.4822374,0.5580728
-0.15772933,0.48593197,0.55801347
-0.15626973,0.4896237,0.557936
-0.15481488,0.49331293,0.55783967
-0.15336445,0.49700003,0.55772371
-0.1519182,0.50068529,0.55758733
-0.15047605,0.50436904,0.55742968
-0.14903918,0.50805136,0.5572505
-0.14760731,0.51173263,0.55704861
-0.14618026,0.51541316,0.55682271
-0.14475863,0.51909319,0.55657181
-0.14334327,0.52277292,0.55629491
-0.14193527,0.52645254,0.55599097
-0.14053599,0.53013219,0.55565893
-0.13914708,0.53381201,0.55529773
-0.13777048,0.53749213,0.55490625
-0.1364085,0.54117264,0.55448339
-0.13506561,0.54485335,0.55402906
-0.13374299,0.54853458,0.55354108
-0.13244401,0.55221637,0.55301828
-0.13117249,0.55589872,0.55245948
-0.1299327,0.55958162,0.55186354
-0.12872938,0.56326503,0.55122927
-0.12756771,0.56694891,0.55055551
-0.12645338,0.57063316,0.5498411
-0.12539383,0.57431754,0.54908564
-0.12439474,0.57800205,0.5482874
-0.12346281,0.58168661,0.54744498
-0.12260562,0.58537105,0.54655722
-0.12183122,0.58905521,0.54562298
-0.12114807,0.59273889,0.54464114
-0.12056501,0.59642187,0.54361058
-0.12009154,0.60010387,0.54253043
-0.11973756,0.60378459,0.54139999
-0.11951163,0.60746388,0.54021751
-0.11942341,0.61114146,0.53898192
-0.11948255,0.61481702,0.53769219
-0.11969858,0.61849025,0.53634733
-0.12008079,0.62216081,0.53494633
-0.12063824,0.62582833,0.53348834
-0.12137972,0.62949242,0.53197275
-0.12231244,0.63315277,0.53039808
-0.12344358,0.63680899,0.52876343
-0.12477953,0.64046069,0.52706792
-0.12632581,0.64410744,0.52531069
-0.12808703,0.64774881,0.52349092
-0.13006688,0.65138436,0.52160791
-0.13226797,0.65501363,0.51966086
-0.13469183,0.65863619,0.5176488
-0.13733921,0.66225157,0.51557101
-0.14020991,0.66585927,0.5134268
-0.14330291,0.66945881,0.51121549
-0.1466164,0.67304968,0.50893644
-0.15014782,0.67663139,0.5065889
-0.15389405,0.68020343,0.50417217
-0.15785146,0.68376525,0.50168574
-0.16201598,0.68731632,0.49912906
-0.1663832,0.69085611,0.49650163
-0.1709484,0.69438405,0.49380294
-0.17570671,0.6978996,0.49103252
-0.18065314,0.70140222,0.48818938
-0.18578266,0.70489133,0.48527326
-0.19109018,0.70836635,0.48228395
-0.19657063,0.71182668,0.47922108
-0.20221902,0.71527175,0.47608431
-0.20803045,0.71870095,0.4728733
-0.21400015,0.72211371,0.46958774
-0.22012381,0.72550945,0.46622638
-0.2263969,0.72888753,0.46278934
-0.23281498,0.73224735,0.45927675
-0.2393739,0.73558828,0.45568838
-0.24606968,0.73890972,0.45202405
-0.25289851,0.74221104,0.44828355
-0.25985676,0.74549162,0.44446673
-0.26694127,0.74875084,0.44057284
-0.27414922,0.75198807,0.4366009
-0.28147681,0.75520266,0.43255207
-0.28892102,0.75839399,0.42842626
-0.29647899,0.76156142,0.42422341
-0.30414796,0.76470433,0.41994346
-0.31192534,0.76782207,0.41558638
-0.3198086,0.77091403,0.41115215
-0.3277958,0.77397953,0.40664011
-0.33588539,0.7770179,0.40204917
-0.34407411,0.78002855,0.39738103
-0.35235985,0.78301086,0.39263579
-0.36074053,0.78596419,0.38781353
-0.3692142,0.78888793,0.38291438
-0.37777892,0.79178146,0.3779385
-0.38643282,0.79464415,0.37288606
-0.39517408,0.79747541,0.36775726
-0.40400101,0.80027461,0.36255223
-0.4129135,0.80304099,0.35726893
-0.42190813,0.80577412,0.35191009
-0.43098317,0.80847343,0.34647607
-0.44013691,0.81113836,0.3409673
-0.44936763,0.81376835,0.33538426
-0.45867362,0.81636288,0.32972749
-0.46805314,0.81892143,0.32399761
-0.47750446,0.82144351,0.31819529
-0.4870258,0.82392862,0.31232133
-0.49661536,0.82637633,0.30637661
-0.5062713,0.82878621,0.30036211
-0.51599182,0.83115784,0.29427888
-0.52577622,0.83349064,0.2881265
-0.5356211,0.83578452,0.28190832
-0.5455244,0.83803918,0.27562602
-0.55548397,0.84025437,0.26928147
-0.5654976,0.8424299,0.26287683
-0.57556297,0.84456561,0.25641457
-0.58567772,0.84666139,0.24989748
-0.59583934,0.84871722,0.24332878
-0.60604528,0.8507331,0.23671214
-0.61629283,0.85270912,0.23005179
-0.62657923,0.85464543,0.22335258
-0.63690157,0.85654226,0.21662012
-0.64725685,0.85839991,0.20986086
-0.65764197,0.86021878,0.20308229
-0.66805369,0.86199932,0.19629307
-0.67848868,0.86374211,0.18950326
-0.68894351,0.86544779,0.18272455
-0.69941463,0.86711711,0.17597055
-0.70989842,0.86875092,0.16925712
-0.72039115,0.87035015,0.16260273
-0.73088902,0.87191584,0.15602894
-0.74138803,0.87344918,0.14956101
-0.75188414,0.87495143,0.14322828
-0.76237342,0.87642392,0.13706449
-0.77285183,0.87786808,0.13110864
-0.78331535,0.87928545,0.12540538
-0.79375994,0.88067763,0.12000532
-0.80418159,0.88204632,0.11496505
-0.81457634,0.88339329,0.11034678
-0.82494028,0.88472036,0.10621724
-0.83526959,0.88602943,0.1026459
-0.84556056,0.88732243,0.09970219
-0.8558096,0.88860134,0.09745186
-0.86601325,0.88986815,0.09595277
-0.87616824,0.89112487,0.09525046
-0.88627146,0.89237353,0.09537439
-0.89632002,0.89361614,0.09633538
-0.90631121,0.89485467,0.09812496
-0.91624212,0.89609127,0.1007168
-0.92610579,0.89732977,0.10407067
-0.93590444,0.8985704,0.10813094
-0.94563626,0.899815,0.11283773
-0.95529972,0.90106534,0.11812832
-0.96489353,0.90232311,0.12394051
-0.97441665,0.90358991,0.13021494
-0.98386829,0.90486726,0.13689671
-0.99324789,0.90615657,0.1439362
diff --git a/xarray/plot/utils.py b/xarray/plot/utils.py
index 6846c553b8b..4b9645e02d5 100644
--- a/xarray/plot/utils.py
+++ b/xarray/plot/utils.py
@@ -13,20 +13,6 @@
ROBUST_PERCENTILE = 2.0
-def _load_default_cmap(fname='default_colormap.csv'):
- """
- Returns viridis color map
- """
- from matplotlib.colors import LinearSegmentedColormap
-
- # Not sure what the first arg here should be
- f = pkg_resources.resource_stream(__name__, fname)
- cm_data = pd.read_csv(f, header=None).values
- f.close()
-
- return LinearSegmentedColormap.from_list('viridis', cm_data)
-
-
def import_seaborn():
'''import seaborn and handle deprecation of apionly module'''
with warnings.catch_warnings(record=True) as w:
@@ -226,10 +212,6 @@ def _determine_cmap_params(plot_data, vmin=None, vmax=None, cmap=None,
else:
cmap = "viridis"
- # Allow viridis before matplotlib 1.5
- if cmap == "viridis":
- cmap = _load_default_cmap()
-
# Handle discrete levels
if levels is not None:
if is_scalar(levels):
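Dropping the bundled colormap CSV works because matplotlib >= 1.5 registers 'viridis' itself, so cmap can stay a plain string that matplotlib resolves. A brief illustration of that assumption:

    import matplotlib.cm as cm

    cmap = cm.get_cmap('viridis')  # registered since matplotlib 1.5
    print(cmap.name)               # 'viridis'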
diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py
index e93d9a80145..3b4d69a35f7 100644
--- a/xarray/tests/__init__.py
+++ b/xarray/tests/__init__.py
@@ -54,15 +54,13 @@ def _importorskip(modname, minversion=None):
raise ImportError('Minimum version not satisfied')
except ImportError:
has = False
- # TODO: use pytest.skipif instead of unittest.skipUnless
- # Using `unittest.skipUnless` is a temporary workaround for pytest#568,
- # wherein class decorators stain inherited classes.
- # xref: xarray#1531, implemented in xarray #1557.
- func = unittest.skipUnless(has, reason='requires {}'.format(modname))
+ func = pytest.mark.skipif(not has, reason='requires {}'.format(modname))
return has, func
has_matplotlib, requires_matplotlib = _importorskip('matplotlib')
+has_matplotlib2, requires_matplotlib2 = _importorskip('matplotlib',
+ minversion='2')
has_scipy, requires_scipy = _importorskip('scipy')
has_pydap, requires_pydap = _importorskip('pydap.client')
has_netCDF4, requires_netCDF4 = _importorskip('netCDF4')
@@ -75,15 +73,15 @@ def _importorskip(modname, minversion=None):
has_rasterio, requires_rasterio = _importorskip('rasterio')
has_pathlib, requires_pathlib = _importorskip('pathlib')
has_zarr, requires_zarr = _importorskip('zarr', minversion='2.2')
-has_np112, requires_np112 = _importorskip('numpy', minversion='1.12.0')
+has_np113, requires_np113 = _importorskip('numpy', minversion='1.13.0')
# some special cases
has_scipy_or_netCDF4 = has_scipy or has_netCDF4
-requires_scipy_or_netCDF4 = unittest.skipUnless(
- has_scipy_or_netCDF4, reason='requires scipy or netCDF4')
+requires_scipy_or_netCDF4 = pytest.mark.skipif(
+ not has_scipy_or_netCDF4, reason='requires scipy or netCDF4')
has_cftime_or_netCDF4 = has_cftime or has_netCDF4
-requires_cftime_or_netCDF4 = unittest.skipUnless(
- has_cftime_or_netCDF4, reason='requires cftime or netCDF4')
+requires_cftime_or_netCDF4 = pytest.mark.skipif(
+ not has_cftime_or_netCDF4, reason='requires cftime or netCDF4')
if not has_pathlib:
has_pathlib, requires_pathlib = _importorskip('pathlib2')
if has_dask:
@@ -97,7 +95,8 @@ def _importorskip(modname, minversion=None):
has_seaborn = True
except ImportError:
has_seaborn = False
-requires_seaborn = unittest.skipUnless(has_seaborn, reason='requires seaborn')
+requires_seaborn = pytest.mark.skipif(not has_seaborn,
+ reason='requires seaborn')
try:
_SKIP_FLAKY = not pytest.config.getoption("--run-flaky")
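The requires_* helpers now return pytest.mark.skipif markers rather than unittest.skipUnless decorators, so they behave like ordinary pytest marks on functions and classes. A minimal sketch of how such a marker is built and applied; the flag and test name here are hypothetical:

    import pytest

    has_matplotlib = True  # normally computed by _importorskip('matplotlib')
    requires_matplotlib = pytest.mark.skipif(
        not has_matplotlib, reason='requires matplotlib')

    @requires_matplotlib
    def test_needs_matplotlib():
        assert True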
diff --git a/xarray/tests/test_computation.py b/xarray/tests/test_computation.py
index a802b91a3db..e30e7e31390 100644
--- a/xarray/tests/test_computation.py
+++ b/xarray/tests/test_computation.py
@@ -726,9 +726,6 @@ def pandas_median(x):
def test_vectorize():
- if LooseVersion(np.__version__) < LooseVersion('1.12.0'):
- pytest.skip('numpy 1.12 or later to support vectorize=True.')
-
data_array = xr.DataArray([[0, 1, 2], [1, 2, 3]], dims=('x', 'y'))
expected = xr.DataArray([1, 2], dims=['x'])
actual = apply_ufunc(pandas_median, data_array,
@@ -739,9 +736,6 @@ def test_vectorize():
@requires_dask
def test_vectorize_dask():
- if LooseVersion(np.__version__) < LooseVersion('1.12.0'):
- pytest.skip('numpy 1.12 or later to support vectorize=True.')
-
data_array = xr.DataArray([[0, 1, 2], [1, 2, 3]], dims=('x', 'y'))
expected = xr.DataArray([1, 2], dims=['x'])
actual = apply_ufunc(pandas_median, data_array.chunk({'x': 1}),
diff --git a/xarray/tests/test_dask.py b/xarray/tests/test_dask.py
index ee5b3514348..f6c47cce8d8 100644
--- a/xarray/tests/test_dask.py
+++ b/xarray/tests/test_dask.py
@@ -1,7 +1,6 @@
from __future__ import absolute_import, division, print_function
import pickle
-from distutils.version import LooseVersion
from textwrap import dedent
import numpy as np
@@ -208,8 +207,6 @@ def test_bivariate_ufunc(self):
self.assertLazyAndAllClose(np.maximum(u, 0), xu.maximum(v, 0))
self.assertLazyAndAllClose(np.maximum(u, 0), xu.maximum(0, v))
- @pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- reason='Need dask 0.16 for new interface')
def test_compute(self):
u = self.eager_var
v = self.lazy_var
@@ -220,8 +217,6 @@ def test_compute(self):
assert ((u + 1).data == v2.data).all()
- @pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- reason='Need dask 0.16 for new interface')
def test_persist(self):
u = self.eager_var
v = self.lazy_var + 1
@@ -281,8 +276,6 @@ def test_lazy_array(self):
actual = xr.concat([v[:2], v[2:]], 'x')
self.assertLazyAndAllClose(u, actual)
- @pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- reason='Need dask 0.16 for new interface')
def test_compute(self):
u = self.eager_array
v = self.lazy_array
@@ -293,8 +286,6 @@ def test_compute(self):
assert ((u + 1).data == v2.data).all()
- @pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- reason='Need dask 0.16 for new interface')
def test_persist(self):
u = self.eager_array
v = self.lazy_array + 1
@@ -384,10 +375,6 @@ def test_concat_loads_variables(self):
assert ds3['c'].data is c3
def test_groupby(self):
- if LooseVersion(dask.__version__) == LooseVersion('0.15.3'):
- pytest.xfail('upstream bug in dask: '
- 'https://github.com/dask/dask/issues/2718')
-
u = self.eager_array
v = self.lazy_array
@@ -779,12 +766,8 @@ def build_dask_array(name):
# test both the persist method and the dask.persist function
# the dask.persist function requires a new version of dask
-@pytest.mark.parametrize('persist', [
- lambda x: x.persist(),
- pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- lambda x: dask.persist(x)[0],
- reason='Need Dask 0.16')
-])
+@pytest.mark.parametrize('persist', [lambda x: x.persist(),
+ lambda x: dask.persist(x)[0]])
def test_persist_Dataset(persist):
ds = Dataset({'foo': ('x', range(5)),
'bar': ('x', range(5))}).chunk()
@@ -797,12 +780,8 @@ def test_persist_Dataset(persist):
assert len(ds.foo.data.dask) == n # doesn't mutate in place
-@pytest.mark.parametrize('persist', [
- lambda x: x.persist(),
- pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- lambda x: dask.persist(x)[0],
- reason='Need Dask 0.16')
-])
+@pytest.mark.parametrize('persist', [lambda x: x.persist(),
+ lambda x: dask.persist(x)[0]])
def test_persist_DataArray(persist):
x = da.arange(10, chunks=(5,))
y = DataArray(x)
@@ -815,8 +794,6 @@ def test_persist_DataArray(persist):
assert len(zz.data.dask) == zz.data.npartitions
-@pytest.mark.skipif(LooseVersion(dask.__version__) <= '0.15.4',
- reason='Need dask 0.16 for new interface')
def test_dataarray_with_dask_coords():
import toolz
x = xr.Variable('x', da.arange(8, chunks=(4,)))
diff --git a/xarray/tests/test_dataarray.py b/xarray/tests/test_dataarray.py
index df09a1e58df..e0b1496c7bf 100644
--- a/xarray/tests/test_dataarray.py
+++ b/xarray/tests/test_dataarray.py
@@ -2,7 +2,6 @@
import pickle
from copy import deepcopy
-from distutils.version import LooseVersion
from textwrap import dedent
import warnings
@@ -20,7 +19,7 @@
from xarray.tests import (
ReturnItem, TestCase, assert_allclose, assert_array_equal, assert_equal,
assert_identical, raises_regex, requires_bottleneck, requires_cftime,
- requires_dask, requires_scipy, source_ndarray, unittest)
+ requires_dask, requires_np113, requires_scipy, source_ndarray, unittest)
class TestDataArray(TestCase):
@@ -3379,9 +3378,6 @@ def test_sortby(self):
actual = da.sortby([day, dax])
assert_equal(actual, expected)
- if LooseVersion(np.__version__) < LooseVersion('1.11.0'):
- pytest.skip('numpy 1.11.0 or later to support object data-type.')
-
expected = sorted1d
actual = da.sortby('x')
assert_equal(actual, expected)
@@ -3647,9 +3643,7 @@ def test_rolling_reduce(da, center, min_periods, window, name):
assert actual.dims == expected.dims
-@pytest.mark.skipif(LooseVersion(np.__version__) < LooseVersion('1.13'),
- reason='Old numpy does not support nansum / nanmax for '
- 'object typed arrays.')
+@requires_np113
@pytest.mark.parametrize('center', (True, False))
@pytest.mark.parametrize('min_periods', (None, 1, 2, 3))
@pytest.mark.parametrize('window', (1, 2, 3, 4))
diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py
index a0d316d74dc..4aa99b8ee5a 100644
--- a/xarray/tests/test_dataset.py
+++ b/xarray/tests/test_dataset.py
@@ -2,7 +2,6 @@
from __future__ import absolute_import, division, print_function
from copy import copy, deepcopy
-from distutils.version import LooseVersion
from io import StringIO
from textwrap import dedent
import warnings
@@ -4036,9 +4035,6 @@ def test_sortby(self):
actual = ds.sortby(ds['A'])
assert "DataArray is not 1-D" in str(excinfo.value)
- if LooseVersion(np.__version__) < LooseVersion('1.11.0'):
- pytest.skip('numpy 1.11.0 or later to support object data-type.')
-
expected = sorted1d
actual = ds.sortby('x')
assert_equal(actual, expected)
diff --git a/xarray/tests/test_distributed.py b/xarray/tests/test_distributed.py
index 8679e892be4..32035afdc57 100644
--- a/xarray/tests/test_distributed.py
+++ b/xarray/tests/test_distributed.py
@@ -7,8 +7,8 @@
import pytest
-dask = pytest.importorskip('dask') # isort:skip
-distributed = pytest.importorskip('distributed') # isort:skip
+dask = pytest.importorskip('dask', minversion='0.18') # isort:skip
+distributed = pytest.importorskip('distributed', minversion='1.21') # isort:skip
from dask import array
from dask.distributed import Client, Lock
diff --git a/xarray/tests/test_duck_array_ops.py b/xarray/tests/test_duck_array_ops.py
index 3f4adee6713..f3f93491822 100644
--- a/xarray/tests/test_duck_array_ops.py
+++ b/xarray/tests/test_duck_array_ops.py
@@ -1,7 +1,5 @@
from __future__ import absolute_import, division, print_function
-from distutils.version import LooseVersion
-
import numpy as np
import pytest
from numpy import array, nan
@@ -16,7 +14,8 @@
from xarray.testing import assert_allclose, assert_equal
from . import (
- TestCase, assert_array_equal, has_dask, raises_regex, requires_dask)
+ TestCase, assert_array_equal, has_dask, has_np113, raises_regex,
+ requires_dask)
class TestOps(TestCase):
@@ -263,8 +262,7 @@ def test_reduce(dim_num, dtype, dask, func, skipna, aggdim):
warnings.filterwarnings('ignore', 'All-NaN slice')
warnings.filterwarnings('ignore', 'invalid value encountered in')
- if (LooseVersion(np.__version__) >= LooseVersion('1.13.0') and
- da.dtype.kind == 'O' and skipna):
+ if has_np113 and da.dtype.kind == 'O' and skipna:
# Numpy < 1.13 does not handle object-type array.
try:
if skipna:
diff --git a/xarray/tests/test_missing.py b/xarray/tests/test_missing.py
index 1dde95adf42..5c7e384c789 100644
--- a/xarray/tests/test_missing.py
+++ b/xarray/tests/test_missing.py
@@ -12,7 +12,7 @@
from xarray.core.pycompat import dask_array_type
from xarray.tests import (
assert_array_equal, assert_equal, raises_regex, requires_bottleneck,
- requires_dask, requires_np112, requires_scipy)
+ requires_dask, requires_scipy)
@pytest.fixture
@@ -67,7 +67,6 @@ def make_interpolate_example_data(shape, frac_nan, seed=12345,
return da, df
-@requires_np112
@requires_scipy
def test_interpolate_pd_compat():
shapes = [(8, 8), (1, 20), (20, 1), (100, 100)]
@@ -93,7 +92,6 @@ def test_interpolate_pd_compat():
np.testing.assert_allclose(actual.values, expected.values)
-@requires_np112
@requires_scipy
def test_scipy_methods_function():
for method in ['barycentric', 'krog', 'pchip', 'spline', 'akima']:
@@ -105,7 +103,6 @@ def test_scipy_methods_function():
assert (da.count('time') <= actual.count('time')).all()
-@requires_np112
@requires_scipy
def test_interpolate_pd_compat_non_uniform_index():
shapes = [(8, 8), (1, 20), (20, 1), (100, 100)]
@@ -134,7 +131,6 @@ def test_interpolate_pd_compat_non_uniform_index():
np.testing.assert_allclose(actual.values, expected.values)
-@requires_np112
@requires_scipy
def test_interpolate_pd_compat_polynomial():
shapes = [(8, 8), (1, 20), (20, 1), (100, 100)]
@@ -154,7 +150,6 @@ def test_interpolate_pd_compat_polynomial():
np.testing.assert_allclose(actual.values, expected.values)
-@requires_np112
@requires_scipy
def test_interpolate_unsorted_index_raises():
vals = np.array([1, 2, 3], dtype=np.float64)
@@ -195,7 +190,6 @@ def test_interpolate_2d_coord_raises():
da.interpolate_na(dim='a', use_coordinate='x')
-@requires_np112
@requires_scipy
def test_interpolate_kwargs():
da = xr.DataArray(np.array([4, 5, np.nan], dtype=np.float64), dims='x')
@@ -208,7 +202,6 @@ def test_interpolate_kwargs():
assert_equal(actual, expected)
-@requires_np112
def test_interpolate():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
@@ -222,7 +215,6 @@ def test_interpolate():
assert_equal(actual, expected)
-@requires_np112
def test_interpolate_nonans():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
@@ -231,7 +223,6 @@ def test_interpolate_nonans():
assert_equal(actual, expected)
-@requires_np112
@requires_scipy
def test_interpolate_allnans():
vals = np.full(6, np.nan, dtype=np.float64)
@@ -241,7 +232,6 @@ def test_interpolate_allnans():
assert_equal(actual, expected)
-@requires_np112
@requires_bottleneck
def test_interpolate_limits():
da = xr.DataArray(np.array([1, 2, np.nan, np.nan, np.nan, 6],
@@ -257,7 +247,6 @@ def test_interpolate_limits():
assert_equal(actual, expected)
-@requires_np112
@requires_scipy
def test_interpolate_methods():
for method in ['linear', 'nearest', 'zero', 'slinear', 'quadratic',
@@ -273,7 +262,6 @@ def test_interpolate_methods():
@requires_scipy
-@requires_np112
def test_interpolators():
for method, interpolator in [('linear', NumpyInterpolator),
('linear', ScipyInterpolator),
@@ -287,7 +275,6 @@ def test_interpolators():
assert pd.isnull(out).sum() == 0
-@requires_np112
def test_interpolate_use_coordinate():
xc = xr.Variable('x', [100, 200, 300, 400, 500, 600])
da = xr.DataArray(np.array([1, 2, np.nan, np.nan, np.nan, 6],
@@ -310,7 +297,6 @@ def test_interpolate_use_coordinate():
assert_equal(actual, expected)
-@requires_np112
@requires_dask
def test_interpolate_dask():
da, _ = make_interpolate_example_data((40, 40), 0.5)
@@ -328,7 +314,6 @@ def test_interpolate_dask():
assert_equal(actual, expected)
-@requires_np112
@requires_dask
def test_interpolate_dask_raises_for_invalid_chunk_dim():
da, _ = make_interpolate_example_data((40, 40), 0.5)
@@ -337,7 +322,6 @@ def test_interpolate_dask_raises_for_invalid_chunk_dim():
da.interpolate_na('time')
-@requires_np112
@requires_bottleneck
def test_ffill():
da = xr.DataArray(np.array([4, 5, np.nan], dtype=np.float64), dims='x')
@@ -346,7 +330,6 @@ def test_ffill():
assert_equal(actual, expected)
-@requires_np112
@requires_bottleneck
@requires_dask
def test_ffill_dask():
@@ -384,7 +367,6 @@ def test_bfill_dask():
@requires_bottleneck
-@requires_np112
def test_ffill_bfill_nonans():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
@@ -398,7 +380,6 @@ def test_ffill_bfill_nonans():
@requires_bottleneck
-@requires_np112
def test_ffill_bfill_allnans():
vals = np.full(6, np.nan, dtype=np.float64)
@@ -412,14 +393,12 @@ def test_ffill_bfill_allnans():
@requires_bottleneck
-@requires_np112
def test_ffill_functions(da):
result = da.ffill('time')
assert result.isnull().sum() == 0
@requires_bottleneck
-@requires_np112
def test_ffill_limit():
da = xr.DataArray(
[0, np.nan, np.nan, np.nan, np.nan, 3, 4, 5, np.nan, 6, 7],
@@ -433,7 +412,6 @@ def test_ffill_limit():
[0, 0, np.nan, np.nan, np.nan, 3, 4, 5, 5, 6, 7], dims='time')
-@requires_np112
def test_interpolate_dataset(ds):
actual = ds.interpolate_na(dim='time')
# no missing values in var1
@@ -444,12 +422,10 @@ def test_interpolate_dataset(ds):
@requires_bottleneck
-@requires_np112
def test_ffill_dataset(ds):
ds.ffill(dim='time')
@requires_bottleneck
-@requires_np112
def test_bfill_dataset(ds):
ds.ffill(dim='time')
diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py
index 986a2a93380..90d30946c9c 100644
--- a/xarray/tests/test_plot.py
+++ b/xarray/tests/test_plot.py
@@ -17,7 +17,8 @@
from . import (
TestCase, assert_array_equal, assert_equal, raises_regex,
- requires_matplotlib, requires_seaborn, requires_cftime)
+ requires_matplotlib, requires_matplotlib2, requires_seaborn,
+ requires_cftime)
# import mpl and change the backend before other mpl imports
try:
@@ -283,6 +284,7 @@ def test_convenient_facetgrid(self):
d[0].plot(x='x', y='y', col='z', ax=plt.gca())
@pytest.mark.slow
+ @requires_matplotlib2
def test_subplot_kws(self):
a = easy_array((10, 15, 4))
d = DataArray(a, dims=['y', 'x', 'z'])
@@ -295,12 +297,9 @@ def test_subplot_kws(self):
cmap='cool',
subplot_kws=dict(facecolor='r'))
for ax in g.axes.flat:
- try:
- # mpl V2
- assert ax.get_facecolor()[0:3] == \
- mpl.colors.to_rgb('r')
- except AttributeError:
- assert ax.get_axis_bgcolor() == 'r'
+ # mpl V2
+ assert ax.get_facecolor()[0:3] == \
+ mpl.colors.to_rgb('r')
@pytest.mark.slow
def test_plot_size(self):
@@ -462,7 +461,7 @@ def test_robust(self):
cmap_params = _determine_cmap_params(self.data, robust=True)
assert cmap_params['vmin'] == np.percentile(self.data, 2)
assert cmap_params['vmax'] == np.percentile(self.data, 98)
- assert cmap_params['cmap'].name == 'viridis'
+ assert cmap_params['cmap'] == 'viridis'
assert cmap_params['extend'] == 'both'
assert cmap_params['levels'] is None
assert cmap_params['norm'] is None
@@ -546,7 +545,7 @@ def test_divergentcontrol(self):
cmap_params = _determine_cmap_params(pos)
assert cmap_params['vmin'] == 0
assert cmap_params['vmax'] == 1
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
# Default with negative data will be a divergent cmap
cmap_params = _determine_cmap_params(neg)
@@ -558,17 +557,17 @@ def test_divergentcontrol(self):
cmap_params = _determine_cmap_params(neg, vmin=-0.1, center=False)
assert cmap_params['vmin'] == -0.1
assert cmap_params['vmax'] == 0.9
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
cmap_params = _determine_cmap_params(neg, vmax=0.5, center=False)
assert cmap_params['vmin'] == -0.1
assert cmap_params['vmax'] == 0.5
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
# Setting center=False too
cmap_params = _determine_cmap_params(neg, center=False)
assert cmap_params['vmin'] == -0.1
assert cmap_params['vmax'] == 0.9
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
# However, I should still be able to set center and have a div cmap
cmap_params = _determine_cmap_params(neg, center=0)
@@ -598,17 +597,17 @@ def test_divergentcontrol(self):
cmap_params = _determine_cmap_params(pos, vmin=0.1)
assert cmap_params['vmin'] == 0.1
assert cmap_params['vmax'] == 1
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
cmap_params = _determine_cmap_params(pos, vmax=0.5)
assert cmap_params['vmin'] == 0
assert cmap_params['vmax'] == 0.5
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
# If both vmin and vmax are provided, output is non-divergent
cmap_params = _determine_cmap_params(neg, vmin=-0.2, vmax=0.6)
assert cmap_params['vmin'] == -0.2
assert cmap_params['vmax'] == 0.6
- assert cmap_params['cmap'].name == "viridis"
+ assert cmap_params['cmap'] == "viridis"
@requires_matplotlib
diff --git a/xarray/tests/test_ufuncs.py b/xarray/tests/test_ufuncs.py
index 91ec1142950..195bb36e36e 100644
--- a/xarray/tests/test_ufuncs.py
+++ b/xarray/tests/test_ufuncs.py
@@ -1,6 +1,5 @@
from __future__ import absolute_import, division, print_function
-from distutils.version import LooseVersion
import pickle
import numpy as np
@@ -11,12 +10,7 @@
from . import (
assert_array_equal, assert_identical as assert_identical_, mock,
- raises_regex,
-)
-
-
-requires_numpy113 = pytest.mark.skipif(LooseVersion(np.__version__) < '1.13',
- reason='numpy 1.13 or newer required')
+ raises_regex, requires_np113)
def assert_identical(a, b):
@@ -27,7 +21,7 @@ def assert_identical(a, b):
assert_array_equal(a, b)
-@requires_numpy113
+@requires_np113
def test_unary():
args = [0,
np.zeros(2),
@@ -38,7 +32,7 @@ def test_unary():
assert_identical(a + 1, np.cos(a))
-@requires_numpy113
+@requires_np113
def test_binary():
args = [0,
np.zeros(2),
@@ -53,7 +47,7 @@ def test_binary():
assert_identical(t2 + 1, np.maximum(t2 + 1, t1))
-@requires_numpy113
+@requires_np113
def test_binary_out():
args = [1,
np.ones(2),
@@ -66,7 +60,7 @@ def test_binary_out():
assert_identical(actual_exponent, arg)
-@requires_numpy113
+@requires_np113
def test_groupby():
ds = xr.Dataset({'a': ('x', [0, 0, 0])}, {'c': ('x', [0, 0, 1])})
ds_grouped = ds.groupby('c')
@@ -89,7 +83,7 @@ def test_groupby():
np.maximum(ds.a.variable, ds_grouped)
-@requires_numpy113
+@requires_np113
def test_alignment():
ds1 = xr.Dataset({'a': ('x', [1, 2])}, {'x': [0, 1]})
ds2 = xr.Dataset({'a': ('x', [2, 3]), 'b': 4}, {'x': [1, 2]})
@@ -105,14 +99,14 @@ def test_alignment():
assert_identical_(actual, expected)
-@requires_numpy113
+@requires_np113
def test_kwargs():
x = xr.DataArray(0)
result = np.add(x, 1, dtype=np.float64)
assert result.dtype == np.float64
-@requires_numpy113
+@requires_np113
def test_xarray_defers_to_unrecognized_type():
class Other(object):
@@ -125,7 +119,7 @@ def __array_ufunc__(self, *args, **kwargs):
assert np.sin(xarray_obj, out=other) == 'other'
-@requires_numpy113
+@requires_np113
def test_xarray_handles_dask():
da = pytest.importorskip('dask.array')
x = xr.DataArray(np.ones((2, 2)), dims=['x', 'y'])
@@ -135,7 +129,7 @@ def test_xarray_handles_dask():
assert isinstance(result, xr.DataArray)
-@requires_numpy113
+@requires_np113
def test_dask_defers_to_xarray():
da = pytest.importorskip('dask.array')
x = xr.DataArray(np.ones((2, 2)), dims=['x', 'y'])
@@ -145,14 +139,14 @@ def test_dask_defers_to_xarray():
assert isinstance(result, xr.DataArray)
-@requires_numpy113
+@requires_np113
def test_gufunc_methods():
xarray_obj = xr.DataArray([1, 2, 3])
with raises_regex(NotImplementedError, 'reduce method'):
np.add.reduce(xarray_obj, 1)
-@requires_numpy113
+@requires_np113
def test_out():
xarray_obj = xr.DataArray([1, 2, 3])
@@ -166,7 +160,7 @@ def test_out():
assert_identical(other, np.array([1, 2, 3]))
-@requires_numpy113
+@requires_np113
def test_gufuncs():
xarray_obj = xr.DataArray([1, 2, 3])
fake_gufunc = mock.Mock(signature='(n)->()', autospec=np.sin)
diff --git a/xarray/tests/test_variable.py b/xarray/tests/test_variable.py
index c486a394ae6..290c7a6e308 100644
--- a/xarray/tests/test_variable.py
+++ b/xarray/tests/test_variable.py
@@ -1496,12 +1496,7 @@ def test_reduce_funcs(self):
assert_identical(v.cumprod(axis=0),
Variable('x', np.array([1, 1, 2, 6])))
assert_identical(v.var(), Variable([], 2.0 / 3))
-
- if LooseVersion(np.__version__) < '1.9':
- with pytest.raises(NotImplementedError):
- v.median()
- else:
- assert_identical(v.median(), Variable([], 2))
+ assert_identical(v.median(), Variable([], 2))
v = Variable('x', [True, False, False])
assert_identical(v.any(), Variable([], True))
@@ -1665,15 +1660,9 @@ def test_eq_all_dtypes(self):
super(TestVariableWithDask, self).test_eq_all_dtypes()
def test_getitem_fancy(self):
- import dask
- if LooseVersion(dask.__version__) <= LooseVersion('0.15.1'):
- pytest.xfail("vindex from latest dask is required")
super(TestVariableWithDask, self).test_getitem_fancy()
def test_getitem_1d_fancy(self):
- import dask
- if LooseVersion(dask.__version__) <= LooseVersion('0.15.1'):
- pytest.xfail("vindex from latest dask is required")
super(TestVariableWithDask, self).test_getitem_1d_fancy()
def test_getitem_with_mask_nd_indexer(self):