run black and blackdoc #4381

Merged · 1 commit · Aug 27, 2020
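Every hunk below applies the same kind of cleanup: docstrings whose closing quotes sat on a line of their own are collapsed onto a single line, and stray spaces after the opening quotes are dropped. The snippet below is a minimal, hypothetical sketch (not part of this PR) of reproducing that normalization with black's format_str API, assuming black >= 20.8b0 (the first release with docstring processing); the function f is made up, and its docstring text is borrowed from the lru_cache.py hunk below.

import black

# Source with a one-line docstring whose closing quotes sit on their own line,
# mirroring the "before" side of the hunks in this diff.
src = '''\
def f():
    """Shrink the cache if necessary, evicting the oldest items.
    """
    return None
'''

# black moves the closing triple-quote up onto the docstring line.
print(black.format_str(src, mode=black.FileMode()))
# Expected output:
# def f():
#     """Shrink the cache if necessary, evicting the oldest items."""
#     return None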
3 changes: 1 addition & 2 deletions xarray/backends/file_manager.py
@@ -314,8 +314,7 @@ def __hash__(self):


class DummyFileManager(FileManager):
"""FileManager that simply wraps an open file in the FileManager interface.
"""
"""FileManager that simply wraps an open file in the FileManager interface."""

def __init__(self, value):
self._value = value
3 changes: 1 addition & 2 deletions xarray/backends/h5netcdf_.py
@@ -67,8 +67,7 @@ def _h5netcdf_create_group(dataset, name):


class H5NetCDFStore(WritableCFDataStore):
"""Store for reading and writing data via h5netcdf
"""
"""Store for reading and writing data via h5netcdf"""

__slots__ = (
"autoclose",
3 changes: 1 addition & 2 deletions xarray/backends/lru_cache.py
@@ -55,8 +55,7 @@ def __getitem__(self, key: K) -> V:
return value

def _enforce_size_limit(self, capacity: int) -> None:
"""Shrink the cache if necessary, evicting the oldest items.
"""
"""Shrink the cache if necessary, evicting the oldest items."""
while len(self._cache) > capacity:
key, value = self._cache.popitem(last=False)
if self._on_evict is not None:
3 changes: 1 addition & 2 deletions xarray/backends/pseudonetcdf_.py
@@ -35,8 +35,7 @@ def _getitem(self, key):


class PseudoNetCDFDataStore(AbstractDataStore):
"""Store for accessing datasets via PseudoNetCDF
"""
"""Store for accessing datasets via PseudoNetCDF"""

@classmethod
def open(cls, filename, lock=None, mode=None, **format_kwargs):
3 changes: 1 addition & 2 deletions xarray/backends/pynio_.py
@@ -41,8 +41,7 @@ def _getitem(self, key):


class NioDataStore(AbstractDataStore):
"""Store for accessing datasets via PyNIO
"""
"""Store for accessing datasets via PyNIO"""

def __init__(self, filename, mode="r", lock=None, **kwargs):
import Nio
2 changes: 1 addition & 1 deletion xarray/backends/rasterio_.py
@@ -50,7 +50,7 @@ def shape(self):
return self._shape

def _get_indexer(self, key):
""" Get indexer for rasterio array.
"""Get indexer for rasterio array.

Parameter
---------
3 changes: 1 addition & 2 deletions xarray/backends/zarr.py
@@ -257,8 +257,7 @@ def encode_zarr_variable(var, needs_copy=True, name=None):


class ZarrStore(AbstractWritableDataStore):
"""Store for reading and writing data via zarr
"""
"""Store for reading and writing data via zarr"""

__slots__ = (
"append_dim",
6 changes: 2 additions & 4 deletions xarray/coding/cftime_offsets.py
@@ -221,8 +221,7 @@ def _adjust_n_years(other, n, month, reference_day):


def _shift_month(date, months, day_option="start"):
"""Shift the date to a month start or end a given number of months away.
"""
"""Shift the date to a month start or end a given number of months away."""
import cftime

delta_year = (date.month + months) // 12
@@ -354,8 +353,7 @@ def onOffset(self, date):


class QuarterOffset(BaseCFTimeOffset):
"""Quarter representation copied off of pandas/tseries/offsets.py
"""
"""Quarter representation copied off of pandas/tseries/offsets.py"""

_freq: ClassVar[str]
_default_month: ClassVar[int]
6 changes: 2 additions & 4 deletions xarray/coding/strings.py
@@ -145,8 +145,7 @@ def bytes_to_char(arr):


def _numpy_bytes_to_char(arr):
"""Like netCDF4.stringtochar, but faster and more flexible.
"""
"""Like netCDF4.stringtochar, but faster and more flexible."""
# ensure the array is contiguous
arr = np.array(arr, copy=False, order="C", dtype=np.string_)
return arr.reshape(arr.shape + (1,)).view("S1")
@@ -189,8 +188,7 @@ def char_to_bytes(arr):


def _numpy_char_to_bytes(arr):
"""Like netCDF4.chartostring, but faster and more flexible.
"""
"""Like netCDF4.chartostring, but faster and more flexible."""
# based on: http://stackoverflow.com/a/10984878/809705
arr = np.array(arr, copy=False, order="C")
dtype = "S" + str(arr.shape[-1])
6 changes: 2 additions & 4 deletions xarray/coding/variables.py
@@ -35,15 +35,13 @@ class VariableCoder:
def encode(
self, variable: Variable, name: Hashable = None
) -> Variable: # pragma: no cover
"""Convert an encoded variable to a decoded variable
"""
"""Convert an encoded variable to a decoded variable"""
raise NotImplementedError()

def decode(
self, variable: Variable, name: Hashable = None
) -> Variable: # pragma: no cover
"""Convert an decoded variable to a encoded variable
"""
"""Convert an decoded variable to a encoded variable"""
raise NotImplementedError()


29 changes: 10 additions & 19 deletions xarray/convert.py
@@ -55,14 +55,12 @@ def encode(var):


def _filter_attrs(attrs, ignored_attrs):
""" Return attrs that are not in ignored_attrs
"""
"""Return attrs that are not in ignored_attrs"""
return {k: v for k, v in attrs.items() if k not in ignored_attrs}


def from_cdms2(variable):
"""Convert a cdms2 variable into an DataArray
"""
"""Convert a cdms2 variable into an DataArray"""
values = np.asarray(variable)
name = variable.id
dims = variable.getAxisIds()
@@ -89,8 +87,7 @@ def from_cdms2(variable):


def to_cdms2(dataarray, copy=True):
"""Convert a DataArray into a cdms2 variable
"""
"""Convert a DataArray into a cdms2 variable"""
# we don't want cdms2 to be a hard dependency
import cdms2

@@ -151,14 +148,12 @@ def set_cdms2_attrs(var, attrs):


def _pick_attrs(attrs, keys):
""" Return attrs with keys in keys list
"""
"""Return attrs with keys in keys list"""
return {k: v for k, v in attrs.items() if k in keys}


def _get_iris_args(attrs):
""" Converts the xarray attrs into args that can be passed into Iris
"""
"""Converts the xarray attrs into args that can be passed into Iris"""
# iris.unit is deprecated in Iris v1.9
import cf_units

@@ -172,8 +167,7 @@ def _get_iris_args(attrs):

# TODO: Add converting bounds from xarray to Iris and back
def to_iris(dataarray):
""" Convert a DataArray into a Iris Cube
"""
"""Convert a DataArray into a Iris Cube"""
# Iris not a hard dependency
import iris
from iris.fileformats.netcdf import parse_cell_methods
@@ -213,8 +207,7 @@ def to_iris(dataarray):


def _iris_obj_to_attrs(obj):
""" Return a dictionary of attrs when given a Iris object
"""
"""Return a dictionary of attrs when given a Iris object"""
attrs = {"standard_name": obj.standard_name, "long_name": obj.long_name}
if obj.units.calendar:
attrs["calendar"] = obj.units.calendar
@@ -225,8 +218,7 @@ def _iris_obj_to_attrs(obj):


def _iris_cell_methods_to_str(cell_methods_obj):
""" Converts a Iris cell methods into a string
"""
"""Converts a Iris cell methods into a string"""
cell_methods = []
for cell_method in cell_methods_obj:
names = "".join(f"{n}: " for n in cell_method.coord_names)
@@ -242,7 +234,7 @@ def _iris_cell_methods_to_str(cell_methods_obj):


def _name(iris_obj, default="unknown"):
""" Mimicks `iris_obj.name()` but with different name resolution order.
"""Mimicks `iris_obj.name()` but with different name resolution order.

Similar to iris_obj.name() method, but using iris_obj.var_name first to
enable roundtripping.
@@ -251,8 +243,7 @@ def _name(iris_obj, default="unknown"):


def from_iris(cube):
""" Convert a Iris cube into an DataArray
"""
"""Convert a Iris cube into an DataArray"""
import iris.exceptions

from xarray.core.pycompat import dask_array_type
3 changes: 1 addition & 2 deletions xarray/core/accessor_dt.py
@@ -10,8 +10,7 @@


def _season_from_months(months):
"""Compute season (DJF, MAM, JJA, SON) from month ordinal
"""
"""Compute season (DJF, MAM, JJA, SON) from month ordinal"""
# TODO: Move "season" accessor upstream into pandas
seasons = np.array(["DJF", "MAM", "JJA", "SON"])
months = np.asarray(months)
51 changes: 22 additions & 29 deletions xarray/core/common.py
@@ -111,8 +111,7 @@ def wrapped_func(self, dim=None, **kwargs): # type: ignore


class AbstractArray(ImplementsArrayReduce):
"""Shared base class for DataArray and Variable.
"""
"""Shared base class for DataArray and Variable."""

__slots__ = ()

@@ -188,8 +187,7 @@ def sizes(self: Any) -> Mapping[Hashable, int]:


class AttrAccessMixin:
"""Mixin class that allows getting keys with attribute access
"""
"""Mixin class that allows getting keys with attribute access"""

__slots__ = ()

@@ -212,14 +210,12 @@ def __init_subclass__(cls):

@property
def _attr_sources(self) -> List[Mapping[Hashable, Any]]:
"""List of places to look-up items for attribute-style access
"""
"""List of places to look-up items for attribute-style access"""
return []

@property
def _item_sources(self) -> List[Mapping[Hashable, Any]]:
"""List of places to look-up items for key-autocompletion
"""
"""List of places to look-up items for key-autocompletion"""
return []

def __getattr__(self, name: str) -> Any:
@@ -239,8 +235,7 @@ def __getattr__(self, name: str) -> Any:
# runtime before every single assignment. All of this is just temporary until the
# FutureWarning can be changed into a hard crash.
def _setattr_dict(self, name: str, value: Any) -> None:
"""Deprecated third party subclass (see ``__init_subclass__`` above)
"""
"""Deprecated third party subclass (see ``__init_subclass__`` above)"""
object.__setattr__(self, name, value)
if name in self.__dict__:
# Custom, non-slotted attr, or improperly assigned variable?
@@ -304,8 +299,7 @@ def get_squeeze_dims(
dim: Union[Hashable, Iterable[Hashable], None] = None,
axis: Union[int, Iterable[int], None] = None,
) -> List[Hashable]:
"""Get a list of dimensions to squeeze out.
"""
"""Get a list of dimensions to squeeze out."""
if dim is not None and axis is not None:
raise ValueError("cannot use both parameters `axis` and `dim`")
if dim is None and axis is None:
@@ -374,8 +368,7 @@ def squeeze(
return self.isel(drop=drop, **{d: 0 for d in dims})

def get_index(self, key: Hashable) -> pd.Index:
"""Get an index for a dimension, with fall-back to a default RangeIndex
"""
"""Get an index for a dimension, with fall-back to a default RangeIndex"""
if key not in self.dims:
raise KeyError(key)

@@ -423,7 +416,9 @@ def assign_coords(self, coords=None, **coords_kwargs):
Convert longitude coordinates from 0-359 to -180-179:

>>> da = xr.DataArray(
... np.random.rand(4), coords=[np.array([358, 359, 0, 1])], dims="lon",
... np.random.rand(4),
... coords=[np.array([358, 359, 0, 1])],
... dims="lon",
... )
>>> da
<xarray.DataArray (lon: 4)>
@@ -830,7 +825,9 @@ def rolling(
... np.linspace(0, 11, num=12),
... coords=[
... pd.date_range(
... "15/12/1999", periods=12, freq=pd.DateOffset(months=1),
... "15/12/1999",
... periods=12,
... freq=pd.DateOffset(months=1),
... )
... ],
... dims="time",
@@ -1037,7 +1034,9 @@ def resample(
... np.linspace(0, 11, num=12),
... coords=[
... pd.date_range(
... "15/12/1999", periods=12, freq=pd.DateOffset(months=1),
... "15/12/1999",
... periods=12,
... freq=pd.DateOffset(months=1),
... )
... ],
... dims="time",
@@ -1242,8 +1241,7 @@ def where(self, cond, other=dtypes.NA, drop: bool = False):
return ops.where_method(self, cond, other)

def close(self: Any) -> None:
"""Close any files linked to this object
"""
"""Close any files linked to this object"""
if self._file_obj is not None:
self._file_obj.close()
self._file_obj = None
@@ -1503,8 +1501,7 @@ def full_like(other, fill_value, dtype: DTypeLike = None):


def _full_like_variable(other, fill_value, dtype: DTypeLike = None):
"""Inner function of full_like, where other must be a variable
"""
"""Inner function of full_like, where other must be a variable"""
from .variable import Variable

if fill_value is dtypes.NA:
@@ -1637,20 +1634,17 @@ def ones_like(other, dtype: DTypeLike = None):


def is_np_datetime_like(dtype: DTypeLike) -> bool:
"""Check if a dtype is a subclass of the numpy datetime types
"""
"""Check if a dtype is a subclass of the numpy datetime types"""
return np.issubdtype(dtype, np.datetime64) or np.issubdtype(dtype, np.timedelta64)


def is_np_timedelta_like(dtype: DTypeLike) -> bool:
"""Check whether dtype is of the timedelta64 dtype.
"""
"""Check whether dtype is of the timedelta64 dtype."""
return np.issubdtype(dtype, np.timedelta64)


def _contains_cftime_datetimes(array) -> bool:
"""Check if an array contains cftime.datetime objects
"""
"""Check if an array contains cftime.datetime objects"""
try:
from cftime import datetime as cftime_datetime
except ImportError:
@@ -1668,8 +1662,7 @@ def _contains_cftime_datetimes(array) -> bool:


def contains_cftime_datetimes(var) -> bool:
"""Check if an xarray.Variable contains cftime.datetime objects
"""
"""Check if an xarray.Variable contains cftime.datetime objects"""
return _contains_cftime_datetimes(var.data)


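The doctest hunks in xarray/core/common.py above presumably come from blackdoc, which applies black to the code examples inside docstrings: calls that already end in a trailing comma are exploded to one argument per line (black's magic trailing comma). Below is a minimal, hypothetical sketch of that behaviour using black's format_str API, again assuming black >= 20.8b0; the input line is adapted from the rolling/resample doctests above, not taken verbatim from this PR.

import black

# A call that already ends with a trailing comma, as in the old doctests.
src = 'pd.date_range("15/12/1999", periods=12, freq=pd.DateOffset(months=1),)\n'

# With the magic trailing comma, black keeps the call exploded even though it
# would fit on one line, matching the reformatted doctests in this diff.
print(black.format_str(src, mode=black.FileMode()))
# Expected output:
# pd.date_range(
#     "15/12/1999",
#     periods=12,
#     freq=pd.DateOffset(months=1),
# )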
3 changes: 1 addition & 2 deletions xarray/core/computation.py
@@ -592,8 +592,7 @@ def apply_variable_ufunc(
keep_attrs=False,
dask_gufunc_kwargs=None,
):
"""Apply a ndarray level function over Variable and/or ndarray objects.
"""
"""Apply a ndarray level function over Variable and/or ndarray objects."""
from .variable import Variable, as_compatible_data

dim_sizes = unified_dim_sizes(