diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 56cd0649989..e828faabc27 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -1196,8 +1196,8 @@ def save_mfdataset( Save a dataset into one netCDF per year of data: - >>> years, datasets = zip(*ds.groupby('time.year')) - >>> paths = ['%s.nc' % y for y in years] + >>> years, datasets = zip(*ds.groupby("time.year")) + >>> paths = ["%s.nc" % y for y in years] >>> xr.save_mfdataset(datasets, paths) """ if mode == "w" and len(set(paths)) < len(paths): diff --git a/xarray/coding/cftime_offsets.py b/xarray/coding/cftime_offsets.py index eeb68508527..a2306331ca7 100644 --- a/xarray/coding/cftime_offsets.py +++ b/xarray/coding/cftime_offsets.py @@ -938,7 +938,7 @@ def cftime_range( This function returns a ``CFTimeIndex``, populated with ``cftime.datetime`` objects associated with the specified calendar type, e.g. - >>> xr.cftime_range(start='2000', periods=6, freq='2MS', calendar='noleap') + >>> xr.cftime_range(start="2000", periods=6, freq="2MS", calendar="noleap") CFTimeIndex([2000-01-01 00:00:00, 2000-03-01 00:00:00, 2000-05-01 00:00:00, 2000-07-01 00:00:00, 2000-09-01 00:00:00, 2000-11-01 00:00:00], dtype='object') diff --git a/xarray/coding/cftimeindex.py b/xarray/coding/cftimeindex.py index 99f90430e91..a2847232428 100644 --- a/xarray/coding/cftimeindex.py +++ b/xarray/coding/cftimeindex.py @@ -268,29 +268,32 @@ def _partial_date_slice(self, resolution, parsed): >>> from cftime import DatetimeNoLeap >>> import pandas as pd >>> import xarray as xr - >>> da = xr.DataArray([1, 2], - coords=[[DatetimeNoLeap(2001, 1, 1), - DatetimeNoLeap(2001, 2, 1)]], - dims=['time']) - >>> da.sel(time='2001-01-01') + >>> da = xr.DataArray( + ... [1, 2], + ... coords=[[DatetimeNoLeap(2001, 1, 1), DatetimeNoLeap(2001, 2, 1)]], + ... dims=["time"], + ... ) + >>> da.sel(time="2001-01-01") array([1]) Coordinates: * time (time) object 2001-01-01 00:00:00 - >>> da = xr.DataArray([1, 2], - coords=[[pd.Timestamp(2001, 1, 1), - pd.Timestamp(2001, 2, 1)]], - dims=['time']) - >>> da.sel(time='2001-01-01') + >>> da = xr.DataArray( + ... [1, 2], + ... coords=[[pd.Timestamp(2001, 1, 1), pd.Timestamp(2001, 2, 1)]], + ... dims=["time"], + ... ) + >>> da.sel(time="2001-01-01") array(1) Coordinates: time datetime64[ns] 2001-01-01 - >>> da = xr.DataArray([1, 2], - coords=[[pd.Timestamp(2001, 1, 1, 1), - pd.Timestamp(2001, 2, 1)]], - dims=['time']) - >>> da.sel(time='2001-01-01') + >>> da = xr.DataArray( + ... [1, 2], + ... coords=[[pd.Timestamp(2001, 1, 1, 1), pd.Timestamp(2001, 2, 1)]], + ... dims=["time"], + ... ) + >>> da.sel(time="2001-01-01") array([1]) Coordinates: @@ -396,10 +399,10 @@ def shift(self, n, freq): Examples -------- - >>> index = xr.cftime_range('2000', periods=1, freq='M') + >>> index = xr.cftime_range("2000", periods=1, freq="M") >>> index CFTimeIndex([2000-01-31 00:00:00], dtype='object') - >>> index.shift(1, 'M') + >>> index.shift(1, "M") CFTimeIndex([2000-02-29 00:00:00], dtype='object') """ from .cftime_offsets import to_offset @@ -479,7 +482,7 @@ def to_datetimeindex(self, unsafe=False): Examples -------- >>> import xarray as xr - >>> times = xr.cftime_range('2000', periods=2, calendar='gregorian') + >>> times = xr.cftime_range("2000", periods=2, calendar="gregorian") >>> times CFTimeIndex([2000-01-01 00:00:00, 2000-01-02 00:00:00], dtype='object') >>> times.to_datetimeindex() @@ -518,9 +521,10 @@ def strftime(self, date_format): Examples -------- - >>> rng = xr.cftime_range(start='2000', periods=5, freq='2MS', - ... 
calendar='noleap') - >>> rng.strftime('%B %d, %Y, %r') + >>> rng = xr.cftime_range( + ... start="2000", periods=5, freq="2MS", calendar="noleap" + ... ) + >>> rng.strftime("%B %d, %Y, %r") Index(['January 01, 2000, 12:00:00 AM', 'March 01, 2000, 12:00:00 AM', 'May 01, 2000, 12:00:00 AM', 'July 01, 2000, 12:00:00 AM', 'September 01, 2000, 12:00:00 AM'], diff --git a/xarray/coding/strings.py b/xarray/coding/strings.py index 6d383fcf318..35cc190ffe3 100644 --- a/xarray/coding/strings.py +++ b/xarray/coding/strings.py @@ -201,7 +201,7 @@ class StackedBytesArray(indexing.ExplicitlyIndexedNDArrayMixin): """Wrapper around array-like objects to create a new indexable object where values, when accessed, are automatically stacked along the last dimension. - >>> StackedBytesArray(np.array(['a', 'b', 'c']))[:] + >>> StackedBytesArray(np.array(["a", "b", "c"]))[:] array('abc', dtype='|S3') """ diff --git a/xarray/conventions.py b/xarray/conventions.py index a8b9906c153..df24d0d3d8d 100644 --- a/xarray/conventions.py +++ b/xarray/conventions.py @@ -19,7 +19,7 @@ class NativeEndiannessArray(indexing.ExplicitlyIndexedNDArrayMixin): big endian) into native endianness, so they can be used with Cython functions, such as those found in bottleneck and pandas. - >>> x = np.arange(5, dtype='>i2') + >>> x = np.arange(5, dtype=">i2") >>> x.dtype dtype('>i2') @@ -50,7 +50,7 @@ class BoolTypeArray(indexing.ExplicitlyIndexedNDArrayMixin): This is useful for decoding boolean arrays from integer typed netCDF variables. - >>> x = np.array([1, 0, 1, 1, 0], dtype='i1') + >>> x = np.array([1, 0, 1, 1, 0], dtype="i1") >>> x.dtype dtype('>i2') diff --git a/xarray/core/accessor_dt.py b/xarray/core/accessor_dt.py index de0e332b26c..2977596036c 100644 --- a/xarray/core/accessor_dt.py +++ b/xarray/core/accessor_dt.py @@ -250,8 +250,8 @@ class DatetimeAccessor(Properties): --------- >>> import xarray as xr >>> import pandas as pd - >>> dates = pd.date_range(start='2000/01/01', freq='D', periods=10) - >>> ts = xr.DataArray(dates, dims=('time')) + >>> dates = pd.date_range(start="2000/01/01", freq="D", periods=10) + >>> ts = xr.DataArray(dates, dims=("time")) >>> ts array(['2000-01-01T00:00:00.000000000', '2000-01-02T00:00:00.000000000', @@ -296,8 +296,8 @@ def strftime(self, date_format): Examples -------- - >>> rng = xr.Dataset({'time': datetime.datetime(2000, 1, 1)}) - >>> rng['time'].dt.strftime('%B %d, %Y, %r') + >>> rng = xr.Dataset({"time": datetime.datetime(2000, 1, 1)}) + >>> rng["time"].dt.strftime("%B %d, %Y, %r") array('January 01, 2000, 12:00:00 AM', dtype=object) """ @@ -400,7 +400,7 @@ class TimedeltaAccessor(Properties): >>> import pandas as pd >>> import xarray as xr >>> dates = pd.timedelta_range(start="1 day", freq="6H", periods=20) - >>> ts = xr.DataArray(dates, dims=('time')) + >>> ts = xr.DataArray(dates, dims=("time")) >>> ts array([ 86400000000000, 108000000000000, 129600000000000, 151200000000000, diff --git a/xarray/core/accessor_str.py b/xarray/core/accessor_str.py index 6a975b948eb..5502ba72855 100644 --- a/xarray/core/accessor_str.py +++ b/xarray/core/accessor_str.py @@ -67,7 +67,7 @@ class StringAccessor: Similar to pandas, fields can be accessed through the `.str` attribute for applicable DataArrays. 
- >>> da = xr.DataArray(['some', 'text', 'in', 'an', 'array']) + >>> da = xr.DataArray(["some", "text", "in", "an", "array"]) >>> ds.str.len() array([4, 4, 2, 2, 5]) diff --git a/xarray/core/alignment.py b/xarray/core/alignment.py index 908119f7995..c9af8dd16be 100644 --- a/xarray/core/alignment.py +++ b/xarray/core/alignment.py @@ -121,10 +121,16 @@ def align( -------- >>> import xarray as xr - >>> x = xr.DataArray([[25, 35], [10, 24]], dims=('lat', 'lon'), - ... coords={'lat': [35., 40.], 'lon': [100., 120.]}) - >>> y = xr.DataArray([[20, 5], [7, 13]], dims=('lat', 'lon'), - ... coords={'lat': [35., 42.], 'lon': [100., 120.]}) + >>> x = xr.DataArray( + ... [[25, 35], [10, 24]], + ... dims=("lat", "lon"), + ... coords={"lat": [35.0, 40.0], "lon": [100.0, 120.0]}, + ... ) + >>> y = xr.DataArray( + ... [[20, 5], [7, 13]], + ... dims=("lat", "lon"), + ... coords={"lat": [35.0, 42.0], "lon": [100.0, 120.0]}, + ... ) >>> x @@ -156,7 +162,7 @@ def align( * lat (lat) float64 35.0 * lon (lon) float64 100.0 120.0 - >>> a, b = xr.align(x, y, join='outer') + >>> a, b = xr.align(x, y, join="outer") >>> a array([[25., 35.], @@ -174,7 +180,7 @@ def align( * lat (lat) float64 35.0 40.0 42.0 * lon (lon) float64 100.0 120.0 - >>> a, b = xr.align(x, y, join='outer', fill_value=-999) + >>> a, b = xr.align(x, y, join="outer", fill_value=-999) >>> a array([[ 25, 35], @@ -192,7 +198,7 @@ def align( * lat (lat) float64 35.0 40.0 42.0 * lon (lon) float64 100.0 120.0 - >>> a, b = xr.align(x, y, join='left') + >>> a, b = xr.align(x, y, join="left") >>> a array([[25, 35], @@ -208,7 +214,7 @@ def align( * lat (lat) float64 35.0 40.0 * lon (lon) float64 100.0 120.0 - >>> a, b = xr.align(x, y, join='right') + >>> a, b = xr.align(x, y, join="right") >>> a array([[25., 35.], @@ -224,13 +230,13 @@ def align( * lat (lat) float64 35.0 42.0 * lon (lon) float64 100.0 120.0 - >>> a, b = xr.align(x, y, join='exact') + >>> a, b = xr.align(x, y, join="exact") Traceback (most recent call last): ... "indexes along dimension {!r} are not equal".format(dim) ValueError: indexes along dimension 'lat' are not equal - >>> a, b = xr.align(x, y, join='override') + >>> a, b = xr.align(x, y, join="override") >>> a array([[25, 35], @@ -674,8 +680,8 @@ def broadcast(*args, exclude=None): Broadcast two data arrays against one another to fill out their dimensions: - >>> a = xr.DataArray([1, 2, 3], dims='x') - >>> b = xr.DataArray([5, 6], dims='y') + >>> a = xr.DataArray([1, 2, 3], dims="x") + >>> b = xr.DataArray([5, 6], dims="y") >>> a array([1, 2, 3]) @@ -706,8 +712,8 @@ def broadcast(*args, exclude=None): Fill out the dimensions of all data variables in a dataset: - >>> ds = xr.Dataset({'a': a, 'b': b}) - >>> ds2, = xr.broadcast(ds) # use tuple unpacking to extract one dataset + >>> ds = xr.Dataset({"a": a, "b": b}) + >>> (ds2,) = xr.broadcast(ds) # use tuple unpacking to extract one dataset >>> ds2 Dimensions: (x: 3, y: 2) diff --git a/xarray/core/combine.py b/xarray/core/combine.py index 3f6e0e79351..1fa2df00352 100644 --- a/xarray/core/combine.py +++ b/xarray/core/combine.py @@ -412,7 +412,7 @@ def combine_nested( precipitation (x, y) float64 5.904 2.453 3.404 ... >>> ds_grid = [[x1y1, x1y2], [x2y1, x2y2]] - >>> combined = xr.combine_nested(ds_grid, concat_dim=['x', 'y']) + >>> combined = xr.combine_nested(ds_grid, concat_dim=["x", "y"]) Dimensions: (x: 4, y: 4) Dimensions without coordinates: x, y @@ -441,7 +441,7 @@ def combine_nested( precipitation (t) float64 5.904 2.453 3.404 ... 
>>> ds_grid = [[t1temp, t1precip], [t2temp, t2precip]] - >>> combined = xr.combine_nested(ds_grid, concat_dim=['t', None]) + >>> combined = xr.combine_nested(ds_grid, concat_dim=["t", None]) Dimensions: (t: 10) Dimensions without coordinates: t @@ -650,7 +650,7 @@ def combine_by_coords( temperature (y, x) float64 1.654 10.63 7.015 nan ... nan 12.46 2.22 15.96 precipitation (y, x) float64 0.2136 0.9974 0.7603 ... 0.6125 0.4654 0.5953 - >>> xr.combine_by_coords([x3, x1], join='override') + >>> xr.combine_by_coords([x3, x1], join="override") Dimensions: (x: 3, y: 4) Coordinates: diff --git a/xarray/core/common.py b/xarray/core/common.py index c80cb24c5b5..39aa7982091 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -418,9 +418,9 @@ def assign_coords(self, coords=None, **coords_kwargs): -------- Convert longitude coordinates from 0-359 to -180-179: - >>> da = xr.DataArray(np.random.rand(4), - ... coords=[np.array([358, 359, 0, 1])], - ... dims='lon') + >>> da = xr.DataArray( + ... np.random.rand(4), coords=[np.array([358, 359, 0, 1])], dims="lon", + ... ) >>> da array([0.28298 , 0.667347, 0.657938, 0.177683]) @@ -434,7 +434,7 @@ def assign_coords(self, coords=None, **coords_kwargs): The function also accepts dictionary arguments: - >>> da.assign_coords({'lon': (((da.lon + 180) % 360) - 180)}) + >>> da.assign_coords({"lon": (((da.lon + 180) % 360) - 180)}) array([0.28298 , 0.667347, 0.657938, 0.177683]) Coordinates: @@ -518,19 +518,13 @@ def pipe( You can write - >>> (ds.pipe(h) - ... .pipe(g, arg1=a) - ... .pipe(f, arg2=b, arg3=c) - ... ) + >>> (ds.pipe(h).pipe(g, arg1=a).pipe(f, arg2=b, arg3=c)) If you have a function that takes the data as (say) the second argument, pass a tuple indicating which keyword expects the data. For example, suppose ``f`` takes its data as ``arg2``: - >>> (ds.pipe(h) - ... .pipe(g, arg1=a) - ... .pipe((f, 'arg2'), arg1=a, arg3=c) - ... ) + >>> (ds.pipe(h).pipe(g, arg1=a).pipe((f, "arg2"), arg1=a, arg3=c)) Examples -------- @@ -539,7 +533,10 @@ def pipe( >>> import xarray as xr >>> x = xr.Dataset( ... { - ... "temperature_c": (("lat", "lon"), 20 * np.random.rand(4).reshape(2, 2)), + ... "temperature_c": ( + ... ("lat", "lon"), + ... 20 * np.random.rand(4).reshape(2, 2), + ... ), ... "precipitation": (("lat", "lon"), np.random.rand(4).reshape(2, 2)), ... }, ... coords={"lat": [10, 20], "lon": [150, 160]}, @@ -584,10 +581,9 @@ def pipe( precipitation (lat, lon) float64 2.731 2.719 2.848 2.467 >>> ( - ... x - ... .pipe(adder, arg=2) - ... .pipe(div, arg=2) - ... .pipe(sub_mult, sub_arg=2, mult_arg=2) + ... x.pipe(adder, arg=2) + ... .pipe(div, arg=2) + ... .pipe(sub_mult, sub_arg=2, mult_arg=2) ... ) Dimensions: (lat: 2, lon: 2) @@ -639,16 +635,17 @@ def groupby(self, group, squeeze: bool = True, restore_coord_dims: bool = None): -------- Calculate daily anomalies for daily data: - >>> da = xr.DataArray(np.linspace(0, 1826, num=1827), - ... coords=[pd.date_range('1/1/2000', '31/12/2004', - ... freq='D')], - ... dims='time') + >>> da = xr.DataArray( + ... np.linspace(0, 1826, num=1827), + ... coords=[pd.date_range("1/1/2000", "31/12/2004", freq="D")], + ... dims="time", + ... ) >>> da array([0.000e+00, 1.000e+00, 2.000e+00, ..., 1.824e+03, 1.825e+03, 1.826e+03]) Coordinates: * time (time) datetime64[ns] 2000-01-01 2000-01-02 2000-01-03 ... 
- >>> da.groupby('time.dayofyear') - da.groupby('time.dayofyear').mean('time') + >>> da.groupby("time.dayofyear") - da.groupby("time.dayofyear").mean("time") array([-730.8, -730.8, -730.8, ..., 730.2, 730.2, 730.5]) Coordinates: @@ -787,10 +784,15 @@ def rolling( -------- Create rolling seasonal average of monthly data e.g. DJF, JFM, ..., SON: - >>> da = xr.DataArray(np.linspace(0, 11, num=12), - ... coords=[pd.date_range('15/12/1999', - ... periods=12, freq=pd.DateOffset(months=1))], - ... dims='time') + >>> da = xr.DataArray( + ... np.linspace(0, 11, num=12), + ... coords=[ + ... pd.date_range( + ... "15/12/1999", periods=12, freq=pd.DateOffset(months=1), + ... ) + ... ], + ... dims="time", + ... ) >>> da array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11.]) @@ -804,7 +806,7 @@ def rolling( Remove the NaNs using ``dropna()``: - >>> da.rolling(time=3, center=True).mean().dropna('time') + >>> da.rolling(time=3, center=True).mean().dropna("time") array([ 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.]) Coordinates: @@ -906,10 +908,11 @@ def coarsen( -------- Coarsen the long time series by averaging over every four days. - >>> da = xr.DataArray(np.linspace(0, 364, num=364), - ... dims='time', - ... coords={'time': pd.date_range( - ... '15/12/1999', periods=364)}) + >>> da = xr.DataArray( + ... np.linspace(0, 364, num=364), + ... dims="time", + ... coords={"time": pd.date_range("15/12/1999", periods=364)}, + ... ) >>> da array([ 0. , 1.002755, 2.00551 , ..., 361.99449 , 362.997245, @@ -917,7 +920,7 @@ def coarsen( Coordinates: * time (time) datetime64[ns] 1999-12-15 1999-12-16 ... 2000-12-12 >>> - >>> da.coarsen(time=3, boundary='trim').mean() + >>> da.coarsen(time=3, boundary="trim").mean() array([ 1.002755, 4.011019, 7.019284, ..., 358.986226, 361.99449 ]) @@ -1000,10 +1003,15 @@ def resample( -------- Downsample monthly time-series data to seasonal data: - >>> da = xr.DataArray(np.linspace(0, 11, num=12), - ... coords=[pd.date_range('15/12/1999', - ... periods=12, freq=pd.DateOffset(months=1))], - ... dims='time') + >>> da = xr.DataArray( + ... np.linspace(0, 11, num=12), + ... coords=[ + ... pd.date_range( + ... "15/12/1999", periods=12, freq=pd.DateOffset(months=1), + ... ) + ... ], + ... dims="time", + ... ) >>> da array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11.]) @@ -1017,7 +1025,7 @@ def resample( Upsample monthly time-series data to daily data: - >>> da.resample(time='1D').interpolate('linear') + >>> da.resample(time="1D").interpolate("linear") array([ 0. , 0.032258, 0.064516, ..., 10.935484, 10.967742, 11. ]) Coordinates: @@ -1025,7 +1033,7 @@ def resample( Limit scope of upsampling method - >>> da.resample(time='1D').nearest(tolerance='1D') + >>> da.resample(time="1D").nearest(tolerance="1D") array([ 0., 0., nan, ..., nan, 11., 11.]) Coordinates: @@ -1118,7 +1126,7 @@ def where(self, cond, other=dtypes.NA, drop: bool = False): -------- >>> import numpy as np - >>> a = xr.DataArray(np.arange(25).reshape(5, 5), dims=('x', 'y')) + >>> a = xr.DataArray(np.arange(25).reshape(5, 5), dims=("x", "y")) >>> a array([[ 0, 1, 2, 3, 4], @@ -1227,7 +1235,7 @@ def isin(self, test_elements): Examples -------- - >>> array = xr.DataArray([1, 2, 3], dims='x') + >>> array = xr.DataArray([1, 2, 3], dims="x") >>> array.isin([1, 3]) array([ True, False, True]) @@ -1296,9 +1304,11 @@ def full_like(other, fill_value, dtype: DTypeLike = None): >>> import numpy as np >>> import xarray as xr - >>> x = xr.DataArray(np.arange(6).reshape(2, 3), - ... dims=['lat', 'lon'], - ... 
coords={'lat': [1, 2], 'lon': [0, 1, 2]}) + >>> x = xr.DataArray( + ... np.arange(6).reshape(2, 3), + ... dims=["lat", "lon"], + ... coords={"lat": [1, 2], "lon": [0, 1, 2]}, + ... ) >>> x array([[0, 1, 2], @@ -1410,9 +1420,11 @@ def zeros_like(other, dtype: DTypeLike = None): >>> import numpy as np >>> import xarray as xr - >>> x = xr.DataArray(np.arange(6).reshape(2, 3), - ... dims=['lat', 'lon'], - ... coords={'lat': [1, 2], 'lon': [0, 1, 2]}) + >>> x = xr.DataArray( + ... np.arange(6).reshape(2, 3), + ... dims=["lat", "lon"], + ... coords={"lat": [1, 2], "lon": [0, 1, 2]}, + ... ) >>> x array([[0, 1, 2], @@ -1468,9 +1480,11 @@ def ones_like(other, dtype: DTypeLike = None): >>> import numpy as np >>> import xarray as xr - >>> x = xr.DataArray(np.arange(6).reshape(2, 3), - ... dims=['lat', 'lon'], - ... coords={'lat': [1, 2], 'lon': [0, 1, 2]}) + >>> x = xr.DataArray( + ... np.arange(6).reshape(2, 3), + ... dims=["lat", "lon"], + ... coords={"lat": [1, 2], "lon": [0, 1, 2]}, + ... ) >>> x array([[0, 1, 2], @@ -1479,7 +1493,7 @@ def ones_like(other, dtype: DTypeLike = None): * lat (lat) int64 1 2 * lon (lon) int64 0 1 2 - >>> >>> xr.ones_like(x) + >>> xr.ones_like(x) array([[1, 1, 1], [1, 1, 1]]) diff --git a/xarray/core/computation.py b/xarray/core/computation.py index d2c5c32bc00..f99764448da 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -889,7 +889,7 @@ def apply_ufunc( You can now apply ``magnitude()`` to ``xr.DataArray`` and ``xr.Dataset`` objects, with automatically preserved dimensions and coordinates, e.g., - >>> array = xr.DataArray([1, 2, 3], coords=[('x', [0.1, 0.2, 0.3])]) + >>> array = xr.DataArray([1, 2, 3], coords=[("x", [0.1, 0.2, 0.3])]) >>> magnitude(array, -array) array([1.414214, 2.828427, 4.242641]) @@ -1093,10 +1093,9 @@ def dot(*arrays, dims=None, **kwargs): >>> import numpy as np >>> import xarray as xr - >>> da_a = xr.DataArray(np.arange(3 * 2).reshape(3, 2), dims=['a', 'b']) - >>> da_b = xr.DataArray(np.arange(3 * 2 * 2).reshape(3, 2, 2), - ... dims=['a', 'b', 'c']) - >>> da_c = xr.DataArray(np.arange(2 * 3).reshape(2, 3), dims=['c', 'd']) + >>> da_a = xr.DataArray(np.arange(3 * 2).reshape(3, 2), dims=["a", "b"]) + >>> da_b = xr.DataArray(np.arange(3 * 2 * 2).reshape(3, 2, 2), dims=["a", "b", "c"]) + >>> da_c = xr.DataArray(np.arange(2 * 3).reshape(2, 3), dims=["c", "d"]) >>> da_a @@ -1121,18 +1120,18 @@ def dot(*arrays, dims=None, **kwargs): [3, 4, 5]]) Dimensions without coordinates: c, d - >>> xr.dot(da_a, da_b, dims=['a', 'b']) + >>> xr.dot(da_a, da_b, dims=["a", "b"]) array([110, 125]) Dimensions without coordinates: c - >>> xr.dot(da_a, da_b, dims=['a']) + >>> xr.dot(da_a, da_b, dims=["a"]) array([[40, 46], [70, 79]]) Dimensions without coordinates: b, c - >>> xr.dot(da_a, da_b, da_c, dims=['b', 'c']) + >>> xr.dot(da_a, da_b, da_c, dims=["b", "c"]) array([[ 9, 14, 19], [ 93, 150, 207], @@ -1238,21 +1237,25 @@ def where(cond, x, y): -------- >>> import xarray as xr >>> import numpy as np - >>> x = xr.DataArray(0.1 * np.arange(10), dims=['lat'], - ... coords={'lat': np.arange(10)}, name='sst') + >>> x = xr.DataArray( + ... 0.1 * np.arange(10), + ... dims=["lat"], + ... coords={"lat": np.arange(10)}, + ... name="sst", + ... ) >>> x array([0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]) Coordinates: * lat (lat) int64 0 1 2 3 4 5 6 7 8 9 - >>> xr.where(x < 0.5, x, 100*x) + >>> xr.where(x < 0.5, x, 100 * x) array([ 0. , 0.1, 0.2, 0.3, 0.4, 50. , 60. , 70. , 80. , 90. 
]) Coordinates: * lat (lat) int64 0 1 2 3 4 5 6 7 8 9 - >>> >>> y = xr.DataArray( + >>> y = xr.DataArray( ... 0.1 * np.arange(9).reshape(3, 3), ... dims=["lat", "lon"], ... coords={"lat": np.arange(3), "lon": 10 + np.arange(3)}, @@ -1276,8 +1279,8 @@ def where(cond, x, y): * lat (lat) int64 0 1 2 * lon (lon) int64 10 11 12 - >>> cond = xr.DataArray([True, False], dims=['x']) - >>> x = xr.DataArray([1, 2], dims=['y']) + >>> cond = xr.DataArray([True, False], dims=["x"]) + >>> x = xr.DataArray([1, 2], dims=["y"]) >>> xr.where(cond, x, 0) array([[1, 2], diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 6782070da0b..b335eeb293b 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -875,8 +875,7 @@ def copy(self, deep: bool = True, data: Any = None) -> "DataArray": Shallow versus deep copy - >>> array = xr.DataArray([1, 2, 3], dims='x', - ... coords={'x': ['a', 'b', 'c']}) + >>> array = xr.DataArray([1, 2, 3], dims="x", coords={"x": ["a", "b", "c"]}) >>> array.copy() array([1, 2, 3]) @@ -1344,7 +1343,7 @@ def interp( Examples -------- - >>> da = xr.DataArray([1, 3], [('x', np.arange(2))]) + >>> da = xr.DataArray([1, 3], [("x", np.arange(2))]) >>> da.interp(x=0.5) array(2.0) @@ -1476,8 +1475,9 @@ def swap_dims(self, dims_dict: Mapping[Hashable, Hashable]) -> "DataArray": Examples -------- - >>> arr = xr.DataArray(data=[0, 1], dims="x", - ... coords={"x": ["a", "b"], "y": ("x", [0, 1])}) + >>> arr = xr.DataArray( + ... data=[0, 1], dims="x", coords={"x": ["a", "b"], "y": ("x", [0, 1])}, + ... ) >>> arr array([0, 1]) @@ -1592,12 +1592,11 @@ def set_index( Examples -------- - >>> arr = xr.DataArray(data=np.ones((2, 3)), - ... dims=['x', 'y'], - ... coords={'x': - ... range(2), 'y': - ... range(3), 'a': ('x', [3, 4]) - ... }) + >>> arr = xr.DataArray( + ... data=np.ones((2, 3)), + ... dims=["x", "y"], + ... coords={"x": range(2), "y": range(3), "a": ("x", [3, 4])}, + ... ) >>> arr array([[1., 1., 1.], @@ -1606,7 +1605,7 @@ def set_index( * x (x) int64 0 1 * y (y) int64 0 1 2 a (x) int64 3 4 - >>> arr.set_index(x='a') + >>> arr.set_index(x="a") array([[1., 1., 1.], [1., 1., 1.]]) @@ -1721,8 +1720,10 @@ def stack( Examples -------- - >>> arr = xr.DataArray(np.arange(6).reshape(2, 3), - ... coords=[('x', ['a', 'b']), ('y', [0, 1, 2])]) + >>> arr = xr.DataArray( + ... np.arange(6).reshape(2, 3), + ... coords=[("x", ["a", "b"]), ("y", [0, 1, 2])], + ... ) >>> arr array([[0, 1, 2], @@ -1730,8 +1731,8 @@ def stack( Coordinates: * x (x) |S1 'a' 'b' * y (y) int64 0 1 2 - >>> stacked = arr.stack(z=('x', 'y')) - >>> stacked.indexes['z'] + >>> stacked = arr.stack(z=("x", "y")) + >>> stacked.indexes["z"] MultiIndex(levels=[['a', 'b'], [0, 1, 2]], codes=[[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]], names=['x', 'y']) @@ -1771,8 +1772,10 @@ def unstack( Examples -------- - >>> arr = xr.DataArray(np.arange(6).reshape(2, 3), - ... coords=[('x', ['a', 'b']), ('y', [0, 1, 2])]) + >>> arr = xr.DataArray( + ... np.arange(6).reshape(2, 3), + ... coords=[("x", ["a", "b"]), ("y", [0, 1, 2])], + ... 
) >>> arr array([[0, 1, 2], @@ -1780,8 +1783,8 @@ def unstack( Coordinates: * x (x) |S1 'a' 'b' * y (y) int64 0 1 2 - >>> stacked = arr.stack(z=('x', 'y')) - >>> stacked.indexes['z'] + >>> stacked = arr.stack(z=("x", "y")) + >>> stacked.indexes["z"] MultiIndex(levels=[['a', 'b'], [0, 1, 2]], codes=[[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]], names=['x', 'y']) @@ -1820,9 +1823,11 @@ def to_unstacked_dataset(self, dim, level=0): Examples -------- >>> import xarray as xr - >>> arr = xr.DataArray(np.arange(6).reshape(2, 3), - ... coords=[('x', ['a', 'b']), ('y', [0, 1, 2])]) - >>> data = xr.Dataset({'a': arr, 'b': arr.isel(y=0)}) + >>> arr = xr.DataArray( + ... np.arange(6).reshape(2, 3), + ... coords=[("x", ["a", "b"]), ("y", [0, 1, 2])], + ... ) + >>> data = xr.Dataset({"a": arr, "b": arr.isel(y=0)}) >>> data Dimensions: (x: 2, y: 3) @@ -1832,12 +1837,12 @@ def to_unstacked_dataset(self, dim, level=0): Data variables: a (x, y) int64 0 1 2 3 4 5 b (x) int64 0 3 - >>> stacked = data.to_stacked_array("z", ['y']) - >>> stacked.indexes['z'] + >>> stacked = data.to_stacked_array("z", ["y"]) + >>> stacked.indexes["z"] MultiIndex(levels=[['a', 'b'], [0, 1, 2]], labels=[[0, 0, 0, 1], [0, 1, 2, -1]], names=['variable', 'y']) - >>> roundtripped = stacked.to_unstacked_dataset(dim='z') + >>> roundtripped = stacked.to_unstacked_dataset(dim="z") >>> data.identical(roundtripped) True @@ -2697,13 +2702,13 @@ def diff(self, dim: Hashable, n: int = 1, label: Hashable = "upper") -> "DataArr Examples -------- - >>> arr = xr.DataArray([5, 5, 6, 6], [[1, 2, 3, 4]], ['x']) - >>> arr.diff('x') + >>> arr = xr.DataArray([5, 5, 6, 6], [[1, 2, 3, 4]], ["x"]) + >>> arr.diff("x") array([0, 1, 0]) Coordinates: * x (x) int64 2 3 4 - >>> arr.diff('x', 2) + >>> arr.diff("x", 2) array([ 1, -1]) Coordinates: @@ -2753,7 +2758,7 @@ def shift( Examples -------- - >>> arr = xr.DataArray([5, 6, 7], dims='x') + >>> arr = xr.DataArray([5, 6, 7], dims="x") >>> arr.shift(x=1) array([ nan, 5., 6.]) @@ -2803,7 +2808,7 @@ def roll( Examples -------- - >>> arr = xr.DataArray([5, 6, 7], dims='x') + >>> arr = xr.DataArray([5, 6, 7], dims="x") >>> arr.roll(x=1) array([7, 5, 6]) @@ -2852,9 +2857,9 @@ def dot( -------- >>> da_vals = np.arange(6 * 5 * 4).reshape((6, 5, 4)) - >>> da = xr.DataArray(da_vals, dims=['x', 'y', 'z']) + >>> da = xr.DataArray(da_vals, dims=["x", "y", "z"]) >>> dm_vals = np.arange(4) - >>> dm = xr.DataArray(dm_vals, dims=['z']) + >>> dm = xr.DataArray(dm_vals, dims=["z"]) >>> dm.dims ('z') @@ -2914,9 +2919,11 @@ def sortby( Examples -------- - >>> da = xr.DataArray(np.random.rand(5), - ... coords=[pd.date_range('1/1/2000', periods=5)], - ... dims='time') + >>> da = xr.DataArray( + ... np.random.rand(5), + ... coords=[pd.date_range("1/1/2000", periods=5)], + ... dims="time", + ... ) >>> da array([ 0.965471, 0.615637, 0.26532 , 0.270962, 0.552878]) @@ -3057,8 +3064,8 @@ def rank( Examples -------- - >>> arr = xr.DataArray([5, 6, 7], dims='x') - >>> arr.rank('x') + >>> arr = xr.DataArray([5, 6, 7], dims="x") + >>> arr.rank("x") array([ 1., 2., 3.]) Dimensions without coordinates: x @@ -3098,8 +3105,11 @@ def differentiate( Examples -------- - >>> da = xr.DataArray(np.arange(12).reshape(4, 3), dims=['x', 'y'], - ... coords={'x': [0, 0.1, 1.1, 1.2]}) + >>> da = xr.DataArray( + ... np.arange(12).reshape(4, 3), + ... dims=["x", "y"], + ... coords={"x": [0, 0.1, 1.1, 1.2]}, + ... 
) >>> da array([[ 0, 1, 2], @@ -3110,7 +3120,7 @@ def differentiate( * x (x) float64 0.0 0.1 1.1 1.2 Dimensions without coordinates: y >>> - >>> da.differentiate('x') + >>> da.differentiate("x") array([[30. , 30. , 30. ], [27.545455, 27.545455, 27.545455], @@ -3152,8 +3162,11 @@ def integrate( Examples -------- - >>> da = xr.DataArray(np.arange(12).reshape(4, 3), dims=['x', 'y'], - ... coords={'x': [0, 0.1, 1.1, 1.2]}) + >>> da = xr.DataArray( + ... np.arange(12).reshape(4, 3), + ... dims=["x", "y"], + ... coords={"x": [0, 0.1, 1.1, 1.2]}, + ... ) >>> da array([[ 0, 1, 2], @@ -3164,7 +3177,7 @@ def integrate( * x (x) float64 0.0 0.1 1.1 1.2 Dimensions without coordinates: y >>> - >>> da.integrate('x') + >>> da.integrate("x") array([5.4, 6.6, 7.8]) Dimensions without coordinates: y diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index a4d20a79b7c..4c99425976a 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -1010,8 +1010,9 @@ def copy(self, deep: bool = False, data: Mapping = None) -> "Dataset": Shallow copy versus deep copy >>> da = xr.DataArray(np.random.randn(2, 3)) - >>> ds = xr.Dataset({'foo': da, 'bar': ('x', [-1, 2])}, - ... coords={'x': ['one', 'two']}) + >>> ds = xr.Dataset( + ... {"foo": da, "bar": ("x", [-1, 2])}, coords={"x": ["one", "two"]}, + ... ) >>> ds.copy() Dimensions: (dim_0: 2, dim_1: 3, x: 2) @@ -1023,7 +1024,7 @@ def copy(self, deep: bool = False, data: Mapping = None) -> "Dataset": bar (x) int64 -1 2 >>> ds_0 = ds.copy(deep=False) - >>> ds_0['foo'][0, 0] = 7 + >>> ds_0["foo"][0, 0] = 7 >>> ds_0 Dimensions: (dim_0: 2, dim_1: 3, x: 2) @@ -1048,7 +1049,9 @@ def copy(self, deep: bool = False, data: Mapping = None) -> "Dataset": structure of the original object, but with the new data. Original object is unaffected. - >>> ds.copy(data={'foo': np.arange(6).reshape(2, 3), 'bar': ['a', 'b']}) + >>> ds.copy( + ... data={"foo": np.arange(6).reshape(2, 3), "bar": ["a", "b"]} + ... ) Dimensions: (dim_0: 2, dim_1: 3, x: 2) Coordinates: @@ -2358,9 +2361,10 @@ def reindex( >>> x = xr.Dataset( ... { ... "temperature": ("station", 20 * np.random.rand(4)), - ... "pressure": ("station", 500 * np.random.rand(4)) + ... "pressure": ("station", 500 * np.random.rand(4)), ... }, - ... coords={"station": ["boston", "nyc", "seattle", "denver"]}) + ... coords={"station": ["boston", "nyc", "seattle", "denver"]}, + ... ) >>> x Dimensions: (station: 4) @@ -2375,8 +2379,8 @@ def reindex( Create a new index and reindex the dataset. By default values in the new index that do not have corresponding records in the dataset are assigned `NaN`. - >>> new_index = ['boston', 'austin', 'seattle', 'lincoln'] - >>> x.reindex({'station': new_index}) + >>> new_index = ["boston", "austin", "seattle", "lincoln"] + >>> x.reindex({"station": new_index}) Dimensions: (station: 4) Coordinates: @@ -2387,7 +2391,7 @@ def reindex( We can fill in the missing values by passing a value to the keyword `fill_value`. - >>> x.reindex({'station': new_index}, fill_value=0) + >>> x.reindex({"station": new_index}, fill_value=0) Dimensions: (station: 4) Coordinates: @@ -2399,7 +2403,7 @@ def reindex( Because the index is not monotonically increasing or decreasing, we cannot use arguments to the keyword method to fill the `NaN` values. - >>> x.reindex({'station': new_index}, method='nearest') + >>> x.reindex({"station": new_index}, method="nearest") Traceback (most recent call last): ... 
raise ValueError('index must be monotonic increasing or decreasing') @@ -2410,10 +2414,14 @@ def reindex( >>> x2 = xr.Dataset( ... { - ... "temperature": ("time", [15.57, 12.77, np.nan, 0.3081, 16.59, 15.12]), - ... "pressure": ("time", 500 * np.random.rand(6)) + ... "temperature": ( + ... "time", + ... [15.57, 12.77, np.nan, 0.3081, 16.59, 15.12], + ... ), + ... "pressure": ("time", 500 * np.random.rand(6)), ... }, - ... coords={"time": pd.date_range('01/01/2019', periods=6, freq='D')}) + ... coords={"time": pd.date_range("01/01/2019", periods=6, freq="D")}, + ... ) >>> x2 Dimensions: (time: 6) @@ -2425,8 +2433,8 @@ def reindex( Suppose we decide to expand the dataset to cover a wider date range. - >>> time_index2 = pd.date_range('12/29/2018', periods=10, freq='D') - >>> x2.reindex({'time': time_index2}) + >>> time_index2 = pd.date_range("12/29/2018", periods=10, freq="D") + >>> x2.reindex({"time": time_index2}) Dimensions: (time: 10) Coordinates: @@ -2441,7 +2449,7 @@ def reindex( For example, to back-propagate the last valid value to fill the `NaN` values, pass `bfill` as an argument to the `method` keyword. - >>> x3 = x2.reindex({'time': time_index2}, method='bfill') + >>> x3 = x2.reindex({"time": time_index2}, method="bfill") >>> x3 Dimensions: (time: 10) @@ -2885,8 +2893,10 @@ def swap_dims( Examples -------- - >>> ds = xr.Dataset(data_vars={"a": ("x", [5, 7]), "b": ("x", [0.1, 2.4])}, - ... coords={"x": ["a", "b"], "y": ("x", [0, 1])}) + >>> ds = xr.Dataset( + ... data_vars={"a": ("x", [5, 7]), "b": ("x", [0.1, 2.4])}, + ... coords={"x": ["a", "b"], "y": ("x", [0, 1])}, + ... ) >>> ds Dimensions: (x: 2) @@ -3127,13 +3137,12 @@ def set_index( Examples -------- - >>> arr = xr.DataArray(data=np.ones((2, 3)), - ... dims=['x', 'y'], - ... coords={'x': - ... range(2), 'y': - ... range(3), 'a': ('x', [3, 4]) - ... }) - >>> ds = xr.Dataset({'v': arr}) + >>> arr = xr.DataArray( + ... data=np.ones((2, 3)), + ... dims=["x", "y"], + ... coords={"x": range(2), "y": range(3), "a": ("x", [3, 4])}, + ... ) + >>> ds = xr.Dataset({"v": arr}) >>> ds Dimensions: (x: 2, y: 3) @@ -3143,7 +3152,7 @@ def set_index( a (x) int64 3 4 Data variables: v (x, y) float64 1.0 1.0 1.0 1.0 1.0 1.0 - >>> ds.set_index(x='a') + >>> ds.set_index(x="a") Dimensions: (x: 2, y: 3) Coordinates: @@ -3347,9 +3356,11 @@ def to_stacked_array( Examples -------- >>> data = xr.Dataset( - ... data_vars={'a': (('x', 'y'), [[0, 1, 2], [3, 4, 5]]), - ... 'b': ('x', [6, 7])}, - ... coords={'y': ['u', 'v', 'w']} + ... data_vars={ + ... "a": (("x", "y"), [[0, 1, 2], [3, 4, 5]]), + ... "b": ("x", [6, 7]), + ... }, + ... coords={"y": ["u", "v", "w"]}, ... 
) >>> data @@ -3362,7 +3373,7 @@ def to_stacked_array( a (x, y) int64 0 1 2 3 4 5 b (x) int64 6 7 - >>> data.to_stacked_array("z", sample_dims=['x']) + >>> data.to_stacked_array("z", sample_dims=["x"]) array([[0, 1, 2, 6], [3, 4, 5, 7]]) @@ -3733,9 +3744,9 @@ def drop_sel(self, labels=None, *, errors="raise", **labels_kwargs): Examples -------- >>> data = np.random.randn(2, 3) - >>> labels = ['a', 'b', 'c'] - >>> ds = xr.Dataset({'A': (['x', 'y'], data), 'y': labels}) - >>> ds.drop_sel(y=['a', 'c']) + >>> labels = ["a", "b", "c"] + >>> ds = xr.Dataset({"A": (["x", "y"], data), "y": labels}) + >>> ds.drop_sel(y=["a", "c"]) Dimensions: (x: 2, y: 1) Coordinates: @@ -3743,7 +3754,7 @@ def drop_sel(self, labels=None, *, errors="raise", **labels_kwargs): Dimensions without coordinates: x Data variables: A (x, y) float64 -0.3454 0.1734 - >>> ds.drop_sel(y='b') + >>> ds.drop_sel(y="b") Dimensions: (x: 2, y: 2) Coordinates: @@ -3948,9 +3959,10 @@ def fillna(self, value: Any) -> "Dataset": ... "A": ("x", [np.nan, 2, np.nan, 0]), ... "B": ("x", [3, 4, np.nan, 1]), ... "C": ("x", [np.nan, np.nan, np.nan, 5]), - ... "D": ("x", [np.nan, 3, np.nan, 4]) + ... "D": ("x", [np.nan, 3, np.nan, 4]), ... }, - ... coords={"x": [0, 1, 2, 3]}) + ... coords={"x": [0, 1, 2, 3]}, + ... ) >>> ds Dimensions: (x: 4) @@ -3977,7 +3989,7 @@ def fillna(self, value: Any) -> "Dataset": Replace all `NaN` elements in column ‘A’, ‘B’, ‘C’, and ‘D’, with 0, 1, 2, and 3 respectively. - >>> values = {'A': 0, 'B': 1, 'C': 2, 'D': 3} + >>> values = {"A": 0, "B": 1, "C": 2, "D": 3} >>> ds.fillna(value=values) Dimensions: (x: 4) @@ -4284,7 +4296,7 @@ def map( Examples -------- >>> da = xr.DataArray(np.random.randn(2, 3)) - >>> ds = xr.Dataset({'foo': da, 'bar': ('x', [-1, 2])}) + >>> ds = xr.Dataset({"foo": da, "bar": ("x", [-1, 2])}) >>> ds Dimensions: (dim_0: 2, dim_1: 3, x: 2) @@ -4371,7 +4383,10 @@ def assign( >>> import xarray as xr >>> x = xr.Dataset( ... { - ... "temperature_c": (("lat", "lon"), 20 * np.random.rand(4).reshape(2, 2)), + ... "temperature_c": ( + ... ("lat", "lon"), + ... 20 * np.random.rand(4).reshape(2, 2), + ... ), ... "precipitation": (("lat", "lon"), np.random.rand(4).reshape(2, 2)), ... }, ... 
coords={"lat": [10, 20], "lon": [150, 160]}, @@ -4388,7 +4403,7 @@ def assign( Where the value is a callable, evaluated on dataset: - >>> x.assign(temperature_f = lambda x: x.temperature_c * 9 / 5 + 32) + >>> x.assign(temperature_f=lambda x: x.temperature_c * 9 / 5 + 32) Dimensions: (lat: 2, lon: 2) Coordinates: @@ -4891,15 +4906,15 @@ def diff(self, dim, n=1, label="upper"): Examples -------- - >>> ds = xr.Dataset({'foo': ('x', [5, 5, 6, 6])}) - >>> ds.diff('x') + >>> ds = xr.Dataset({"foo": ("x", [5, 5, 6, 6])}) + >>> ds.diff("x") Dimensions: (x: 3) Coordinates: * x (x) int64 1 2 3 Data variables: foo (x) int64 0 1 0 - >>> ds.diff('x', 2) + >>> ds.diff("x", 2) Dimensions: (x: 2) Coordinates: @@ -4983,7 +4998,7 @@ def shift(self, shifts=None, fill_value=dtypes.NA, **shifts_kwargs): Examples -------- - >>> ds = xr.Dataset({'foo': ('x', list('abcde'))}) + >>> ds = xr.Dataset({"foo": ("x", list("abcde"))}) >>> ds.shift(x=2) Dimensions: (x: 5) @@ -5042,7 +5057,7 @@ def roll(self, shifts=None, roll_coords=None, **shifts_kwargs): Examples -------- - >>> ds = xr.Dataset({'foo': ('x', list('abcde'))}) + >>> ds = xr.Dataset({"foo": ("x", list("abcde"))}) >>> ds.roll(x=2) Dimensions: (x: 5) @@ -5555,19 +5570,23 @@ def filter_by_attrs(self, **kwargs): >>> precip = 10 * np.random.rand(2, 2, 3) >>> lon = [[-99.83, -99.32], [-99.79, -99.23]] >>> lat = [[42.25, 42.21], [42.63, 42.59]] - >>> dims = ['x', 'y', 'time'] - >>> temp_attr = dict(standard_name='air_potential_temperature') - >>> precip_attr = dict(standard_name='convective_precipitation_flux') - >>> ds = xr.Dataset({ - ... 'temperature': (dims, temp, temp_attr), - ... 'precipitation': (dims, precip, precip_attr)}, - ... coords={ - ... 'lon': (['x', 'y'], lon), - ... 'lat': (['x', 'y'], lat), - ... 'time': pd.date_range('2014-09-06', periods=3), - ... 'reference_time': pd.Timestamp('2014-09-05')}) + >>> dims = ["x", "y", "time"] + >>> temp_attr = dict(standard_name="air_potential_temperature") + >>> precip_attr = dict(standard_name="convective_precipitation_flux") + >>> ds = xr.Dataset( + ... { + ... "temperature": (dims, temp, temp_attr), + ... "precipitation": (dims, precip, precip_attr), + ... }, + ... coords={ + ... "lon": (["x", "y"], lon), + ... "lat": (["x", "y"], lat), + ... "time": pd.date_range("2014-09-06", periods=3), + ... "reference_time": pd.Timestamp("2014-09-05"), + ... }, + ... ) >>> # Get variables matching a specific standard_name. - >>> ds.filter_by_attrs(standard_name='convective_precipitation_flux') + >>> ds.filter_by_attrs(standard_name="convective_precipitation_flux") Dimensions: (time: 3, x: 2, y: 2) Coordinates: diff --git a/xarray/core/extensions.py b/xarray/core/extensions.py index 79abbccea39..e81070d18fd 100644 --- a/xarray/core/extensions.py +++ b/xarray/core/extensions.py @@ -110,8 +110,9 @@ def plot(self): Back in an interactive IPython session: - >>> ds = xarray.Dataset({'longitude': np.linspace(0, 10), - ... 'latitude': np.linspace(0, 20)}) + >>> ds = xarray.Dataset( + ... {"longitude": np.linspace(0, 10), "latitude": np.linspace(0, 20)} + ... 
) >>> ds.geo.center (5.0, 10.0) >>> ds.geo.plot() diff --git a/xarray/core/merge.py b/xarray/core/merge.py index 10c7804d718..1d1b8d39a20 100644 --- a/xarray/core/merge.py +++ b/xarray/core/merge.py @@ -678,7 +678,7 @@ def merge( var2 (lat, lon) float64 5.0 nan 6.0 nan nan nan 7.0 nan 8.0 var3 (time, lon) float64 0.0 nan 3.0 4.0 nan 9.0 - >>> xr.merge([x, y, z], compat='identical') + >>> xr.merge([x, y, z], compat="identical") Dimensions: (lat: 3, lon: 3, time: 2) Coordinates: @@ -690,7 +690,7 @@ def merge( var2 (lat, lon) float64 5.0 nan 6.0 nan nan nan 7.0 nan 8.0 var3 (time, lon) float64 0.0 nan 3.0 4.0 nan 9.0 - >>> xr.merge([x, y, z], compat='equals') + >>> xr.merge([x, y, z], compat="equals") Dimensions: (lat: 3, lon: 3, time: 2) Coordinates: @@ -702,7 +702,7 @@ def merge( var2 (lat, lon) float64 5.0 nan 6.0 nan nan nan 7.0 nan 8.0 var3 (time, lon) float64 0.0 nan 3.0 4.0 nan 9.0 - >>> xr.merge([x, y, z], compat='equals', fill_value=-999.) + >>> xr.merge([x, y, z], compat="equals", fill_value=-999.0) Dimensions: (lat: 3, lon: 3, time: 2) Coordinates: @@ -714,7 +714,7 @@ def merge( var2 (lat, lon) float64 5.0 -999.0 6.0 -999.0 ... -999.0 7.0 -999.0 8.0 var3 (time, lon) float64 0.0 -999.0 3.0 4.0 -999.0 9.0 - >>> xr.merge([x, y, z], join='override') + >>> xr.merge([x, y, z], join="override") Dimensions: (lat: 2, lon: 2, time: 2) Coordinates: @@ -726,7 +726,7 @@ def merge( var2 (lat, lon) float64 5.0 6.0 7.0 8.0 var3 (time, lon) float64 0.0 3.0 4.0 9.0 - >>> xr.merge([x, y, z], join='inner') + >>> xr.merge([x, y, z], join="inner") Dimensions: (lat: 1, lon: 1, time: 2) Coordinates: @@ -738,7 +738,7 @@ def merge( var2 (lat, lon) float64 5.0 var3 (time, lon) float64 0.0 4.0 - >>> xr.merge([x, y, z], compat='identical', join='inner') + >>> xr.merge([x, y, z], compat="identical", join="inner") Dimensions: (lat: 1, lon: 1, time: 2) Coordinates: @@ -750,7 +750,7 @@ def merge( var2 (lat, lon) float64 5.0 var3 (time, lon) float64 0.0 4.0 - >>> xr.merge([x, y, z], compat='broadcast_equals', join='outer') + >>> xr.merge([x, y, z], compat="broadcast_equals", join="outer") Dimensions: (lat: 3, lon: 3, time: 2) Coordinates: @@ -762,7 +762,7 @@ def merge( var2 (lat, lon) float64 5.0 nan 6.0 nan nan nan 7.0 nan 8.0 var3 (time, lon) float64 0.0 nan 3.0 4.0 nan 9.0 - >>> xr.merge([x, y, z], join='exact') + >>> xr.merge([x, y, z], join="exact") Traceback (most recent call last): ... ValueError: indexes along dimension 'lat' are not equal diff --git a/xarray/core/nputils.py b/xarray/core/nputils.py index cf189e471cc..5dd8219ebca 100644 --- a/xarray/core/nputils.py +++ b/xarray/core/nputils.py @@ -165,7 +165,7 @@ def _rolling_window(a, window, axis=-1): Examples -------- - >>> x=np.arange(10).reshape((2,5)) + >>> x = np.arange(10).reshape((2, 5)) >>> np.rolling_window(x, 3, axis=-1) array([[[0, 1, 2], [1, 2, 3], [2, 3, 4]], [[5, 6, 7], [6, 7, 8], [7, 8, 9]]]) diff --git a/xarray/core/options.py b/xarray/core/options.py index 15d05159d6d..5d81ca40a6e 100644 --- a/xarray/core/options.py +++ b/xarray/core/options.py @@ -108,7 +108,7 @@ class set_options: You can use ``set_options`` either as a context manager: - >>> ds = xr.Dataset({'x': np.arange(1000)}) + >>> ds = xr.Dataset({"x": np.arange(1000)}) >>> with xr.set_options(display_width=40): ... 
print(ds) diff --git a/xarray/core/parallel.py b/xarray/core/parallel.py index facfa06b23c..8429d0f71ad 100644 --- a/xarray/core/parallel.py +++ b/xarray/core/parallel.py @@ -162,18 +162,19 @@ def map_blocks( ``xr.map_blocks()`` allows for parallel operations with knowledge of ``xarray``, its indices, and its methods like ``.groupby()``. - >>> def calculate_anomaly(da, groupby_type='time.month'): + >>> def calculate_anomaly(da, groupby_type="time.month"): ... # Necessary workaround to xarray's check with zero dimensions ... # https://github.com/pydata/xarray/issues/3575 ... if sum(da.shape) == 0: ... return da ... gb = da.groupby(groupby_type) - ... clim = gb.mean(dim='time') + ... clim = gb.mean(dim="time") ... return gb - clim - >>> time = xr.cftime_range('1990-01', '1992-01', freq='M') + >>> time = xr.cftime_range("1990-01", "1992-01", freq="M") >>> np.random.seed(123) - >>> array = xr.DataArray(np.random.rand(len(time)), - ... dims="time", coords=[time]).chunk() + >>> array = xr.DataArray( + ... np.random.rand(len(time)), dims="time", coords=[time] + ... ).chunk() >>> xr.map_blocks(calculate_anomaly, array).compute() array([ 0.12894847, 0.11323072, -0.0855964 , -0.09334032, 0.26848862, @@ -187,7 +188,9 @@ def map_blocks( Note that one must explicitly use ``args=[]`` and ``kwargs={}`` to pass arguments to the function being applied in ``xr.map_blocks()``: - >>> xr.map_blocks(calculate_anomaly, array, kwargs={'groupby_type': 'time.year'}) + >>> xr.map_blocks( + ... calculate_anomaly, array, kwargs={"groupby_type": "time.year"}, + ... ) array([ 0.15361741, -0.25671244, -0.31600032, 0.008463 , 0.1766172 , -0.11974531, 0.43791243, 0.14197797, -0.06191987, -0.15073425, diff --git a/xarray/core/rolling.py b/xarray/core/rolling.py index 5f633abbde6..58f0b275b21 100644 --- a/xarray/core/rolling.py +++ b/xarray/core/rolling.py @@ -231,17 +231,17 @@ def construct(self, window_dim, stride=1, fill_value=dtypes.NA): Examples -------- - >>> da = xr.DataArray(np.arange(8).reshape(2, 4), dims=('a', 'b')) + >>> da = xr.DataArray(np.arange(8).reshape(2, 4), dims=("a", "b")) >>> rolling = da.rolling(b=3) - >>> rolling.construct('window_dim') + >>> rolling.construct("window_dim") array([[[np.nan, np.nan, 0], [np.nan, 0, 1], [0, 1, 2], [1, 2, 3]], [[np.nan, np.nan, 4], [np.nan, 4, 5], [4, 5, 6], [5, 6, 7]]]) Dimensions without coordinates: a, b, window_dim >>> rolling = da.rolling(b=3, center=True) - >>> rolling.construct('window_dim') + >>> rolling.construct("window_dim") array([[[np.nan, 0, 1], [0, 1, 2], [1, 2, 3], [2, 3, np.nan]], [[np.nan, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, np.nan]]]) @@ -279,9 +279,9 @@ def reduce(self, func, **kwargs): Examples -------- - >>> da = xr.DataArray(np.arange(8).reshape(2, 4), dims=('a', 'b')) + >>> da = xr.DataArray(np.arange(8).reshape(2, 4), dims=("a", "b")) >>> rolling = da.rolling(b=3) - >>> rolling.construct('window_dim') + >>> rolling.construct("window_dim") array([[[np.nan, np.nan, 0], [np.nan, 0, 1], [0, 1, 2], [1, 2, 3]], [[np.nan, np.nan, 4], [np.nan, 4, 5], [4, 5, 6], [5, 6, 7]]]) diff --git a/xarray/core/rolling_exp.py b/xarray/core/rolling_exp.py index ac6768e8a9c..6ef63e42291 100644 --- a/xarray/core/rolling_exp.py +++ b/xarray/core/rolling_exp.py @@ -94,8 +94,8 @@ def mean(self): Examples -------- - >>> da = xr.DataArray([1,1,2,2,2], dims='x') - >>> da.rolling_exp(x=2, window_type='span').mean() + >>> da = xr.DataArray([1, 1, 2, 2, 2], dims="x") + >>> da.rolling_exp(x=2, window_type="span").mean() array([1. , 1. 
, 1.692308, 1.9 , 1.966942]) Dimensions without coordinates: x diff --git a/xarray/core/variable.py b/xarray/core/variable.py index 435edb6f014..01f816941b5 100644 --- a/xarray/core/variable.py +++ b/xarray/core/variable.py @@ -843,7 +843,7 @@ def copy(self, deep=True, data=None): Shallow copy versus deep copy - >>> var = xr.Variable(data=[1, 2, 3], dims='x') + >>> var = xr.Variable(data=[1, 2, 3], dims="x") >>> var.copy() array([1, 2, 3]) @@ -1844,13 +1844,13 @@ def rolling_window( Examples -------- - >>> v=Variable(('a', 'b'), np.arange(8).reshape((2,4))) - >>> v.rolling_window(x, 'b', 3, 'window_dim') + >>> v = Variable(("a", "b"), np.arange(8).reshape((2, 4))) + >>> v.rolling_window("b", 3, "window_dim") array([[[nan, nan, 0], [nan, 0, 1], [0, 1, 2], [1, 2, 3]], [[nan, nan, 4], [nan, 4, 5], [4, 5, 6], [5, 6, 7]]]) - >>> v.rolling_window(x, 'b', 3, 'window_dim', center=True) + >>> v.rolling_window("b", 3, "window_dim", center=True) array([[[nan, 0, 1], [0, 1, 2], [1, 2, 3], [2, 3, nan]], [[nan, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, nan]]])