diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 6acb231bcd5..9e7b1de889f 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -108,6 +108,11 @@ Breaking changes
 Deprecations
 ~~~~~~~~~~~~
 
+- The `lock` keyword argument to :py:func:`open_dataset` and :py:func:`open_dataarray` has now
+  been deprecated, and will give a warning if passed. From the next version it will
+  raise an error. This is part of the refactor to support external backends (:issue:`5073`).
+  By `Tom Nicholas `_.
+
 Bug fixes
 ~~~~~~~~~
 - Properly support :py:meth:`DataArray.ffill`, :py:meth:`DataArray.bfill`, :py:meth:`Dataset.ffill`, :py:meth:`Dataset.bfill` along chunked dimensions.
diff --git a/xarray/backends/api.py b/xarray/backends/api.py
index 01079025434..b858bfabdb4 100644
--- a/xarray/backends/api.py
+++ b/xarray/backends/api.py
@@ -1,4 +1,5 @@
 import os
+import warnings
 from glob import glob
 from io import BytesIO
 from numbers import Number
@@ -444,11 +445,6 @@ def open_dataset(
 
         - 'group': path to the netCDF4 group in the given file to open given as
           a str,supported by "netcdf4", "h5netcdf", "zarr".
-        - 'lock': resource lock to use when reading data from disk. Only
-          relevant when using dask or another form of parallelism. By default,
-          appropriate locks are chosen to safely read and write files with the
-          currently active dask scheduler. Supported by "netcdf4", "h5netcdf",
-          "pynio", "pseudonetcdf", "cfgrib".
 
         See engine open function for kwargs accepted by each specific engine.
 
@@ -474,6 +470,15 @@ def open_dataset(
             "all other options must be passed as keyword arguments"
         )
 
+    # TODO remove after v0.19
+    if kwargs.pop("lock", None):
+        warnings.warn(
+            "The kwarg 'lock' has been deprecated, and is now "
+            "ignored. In the future passing lock will "
+            "raise an error.",
+            DeprecationWarning,
+        )
+
     if cache is None:
         cache = chunks is None
 
@@ -628,11 +633,6 @@ def open_dataarray(
 
         - 'group': path to the netCDF4 group in the given file to open given as
           a str,supported by "netcdf4", "h5netcdf", "zarr".
-        - 'lock': resource lock to use when reading data from disk. Only
-          relevant when using dask or another form of parallelism. By default,
-          appropriate locks are chosen to safely read and write files with the
-          currently active dask scheduler. Supported by "netcdf4", "h5netcdf",
-          "pynio", "pseudonetcdf", "cfgrib".
 
         See engine open function for kwargs accepted by each specific engine.
 
@@ -655,6 +655,15 @@ def open_dataarray(
             "all other options must be passed as keyword arguments"
         )
 
+    # TODO remove after v0.19
+    if kwargs.pop("lock", None):
+        warnings.warn(
+            "The kwarg 'lock' has been deprecated, and is now "
+            "ignored. In the future passing lock will "
+            "raise an error.",
+            DeprecationWarning,
+        )
+
     dataset = open_dataset(
         filename_or_obj,
         decode_cf=decode_cf,
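
For context, not part of the patch itself: a minimal sketch of how the deprecated keyword behaves after this change. The temporary file, sample dataset, and variable names below are hypothetical; only open_dataset, the 'lock' keyword, and the DeprecationWarning come from the diff, and a netCDF-capable backend (e.g. netcdf4 or scipy) is assumed to be installed.

# Sketch: passing 'lock' still opens the file, but the argument is popped from
# **kwargs and ignored, and a DeprecationWarning is emitted.
import os
import tempfile
import warnings

import xarray as xr

# Write a tiny placeholder dataset so there is a real file to re-open.
path = os.path.join(tempfile.mkdtemp(), "example.nc")
xr.Dataset({"a": ("x", [1, 2, 3])}).to_netcdf(path)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # 'lock' no longer has any effect; the only result is the warning.
    ds = xr.open_dataset(path, lock=True)

assert any(issubclass(w.category, DeprecationWarning) for w in caught)
ds.close()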