
pass dask compute/persist args through from load/compute/persist #1543

Merged (10 commits) on Sep 5, 2017
1 change: 1 addition & 0 deletions ci/requirements-py27-cdat+pynio.yml
@@ -16,6 +16,7 @@ dependencies:
- pathlib2
- pynio
- pytest
- mock
- scipy
- seaborn
- toolz
1 change: 1 addition & 0 deletions ci/requirements-py27-min.yml
@@ -2,6 +2,7 @@ name: test_env
dependencies:
- python=2.7
- pytest
- mock
- numpy==1.11
- pandas==0.18.0
- pip:
1 change: 1 addition & 0 deletions ci/requirements-py27-windows.yml
@@ -11,6 +11,7 @@ dependencies:
- netcdf4
- pathlib2
- pytest
- mock
- numpy
- pandas
- scipy
10 changes: 8 additions & 2 deletions doc/installing.rst
@@ -73,6 +73,12 @@ pandas) installed first. Then, install xarray with pip::

$ pip install xarray

To run the test suite after installing xarray, install
`py.test <https://pytest.org>`__ (``pip install pytest``) and run
Testing
-------

To run the test suite after installing xarray, first install (via PyPI or conda):
- `py.test <https://pytest.org>`__: simple unit testing library
- `mock <https://pypi.python.org/pypi/mock>`__: additional testing library, required only on Python 2

and run
``py.test --pyargs xarray``.
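
The same invocation can also be triggered from Python; a minimal sketch, assuming pytest (and, on Python 2, the ``mock`` backport) are already installed::

    # Programmatic equivalent of ``py.test --pyargs xarray``.
    import pytest

    # --pyargs makes pytest treat 'xarray' as an importable package
    # rather than a file path; the return value is the exit code.
    exit_code = pytest.main(['--pyargs', 'xarray'])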
4 changes: 4 additions & 0 deletions doc/whats-new.rst
@@ -97,6 +97,10 @@ Enhancements
other means (:issue:`1459`).
By `Ryan May <https://github.com/dopplershift>`_.

- Support passing keyword arguments to the ``load``, ``compute``, and ``persist``
methods. Any keyword arguments supplied to these methods are passed on to
the corresponding dask function (:issue:`1523`).
By `Joe Hamman <https://github.com/jhamman>`_.
- Encoding attributes are now preserved when xarray objects are concatenated.
The encoding is copied from the first object (:issue:`1297`).
By `Joe Hamman <https://github.com/jhamman>`_ and
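To illustrate the new entry above, a minimal usage sketch; the dataset contents, chunk size, and the ``num_workers`` keyword are illustrative assumptions (any keyword accepted by the underlying dask function is forwarded unchanged)::

    import numpy as np
    import xarray as xr

    # A small chunked dataset backed by dask arrays.
    ds = xr.Dataset({'x': ('y', np.arange(1000))}).chunk({'y': 100})

    # Keyword arguments are forwarded to dask.array.compute / dask.persist.
    loaded = ds.compute(num_workers=4)     # new Dataset backed by numpy
    persisted = ds.persist(num_workers=4)  # still dask-backed, but evaluated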
2 changes: 2 additions & 0 deletions setup.py
@@ -37,6 +37,8 @@

INSTALL_REQUIRES = ['numpy >= 1.11', 'pandas >= 0.18.0']
TESTS_REQUIRE = ['pytest >= 2.7.1']
if sys.version_info[0] < 3:
TESTS_REQUIRE.append('mock')

DESCRIPTION = "N-D labeled arrays and datasets in Python"
LONG_DESCRIPTION = """
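For context, ``TESTS_REQUIRE`` is consumed by the ``setup()`` call further down in ``setup.py``; a minimal sketch of that wiring (the ``setup()`` arguments shown here are assumed for illustration, not copied from this diff)::

    import sys
    from setuptools import setup

    INSTALL_REQUIRES = ['numpy >= 1.11', 'pandas >= 0.18.0']
    TESTS_REQUIRE = ['pytest >= 2.7.1']
    if sys.version_info[0] < 3:
        # The mock backport is only needed on Python 2; Python 3 ships
        # unittest.mock in the standard library.
        TESTS_REQUIRE.append('mock')

    setup(name='xarray',
          install_requires=INSTALL_REQUIRES,
          tests_require=TESTS_REQUIRE)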
39 changes: 33 additions & 6 deletions xarray/core/dataarray.py
@@ -565,22 +565,31 @@ def reset_coords(self, names=None, drop=False, inplace=False):
dataset[self.name] = self.variable
return dataset

def load(self):
def load(self, **kwargs):
"""Manually trigger loading of this array's data from disk or a
remote source into memory and return this array.

Normally, it should not be necessary to call this method in user code,
because all xarray functions should either work on deferred data or
load data automatically. However, this method can be necessary when
working with many file objects on disk.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.array.compute``.

See Also
--------
dask.array.compute
"""
ds = self._to_temp_dataset().load()
ds = self._to_temp_dataset().load(**kwargs)
new = self._from_temp_dataset(ds)
self._variable = new._variable
self._coords = new._coords
return self

def compute(self):
def compute(self, **kwargs):
"""Manually trigger loading of this array's data from disk or a
remote source into memory and return a new array. The original is
left unaltered.
@@ -589,18 +598,36 @@ def compute(self):
because all xarray functions should either work on deferred data or
load data automatically. However, this method can be necessary when
working with many file objects on disk.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.array.compute``.

See Also
--------
dask.array.compute
"""
new = self.copy(deep=False)
return new.load()
return new.load(**kwargs)

def persist(self):
def persist(self, **kwargs):
""" Trigger computation in constituent dask arrays

This keeps them as dask arrays but encourages them to keep data in
memory. This is particularly useful when working on a distributed cluster.
When on a single machine, consider using ``.compute()`` instead.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.

See Also
--------
dask.persist
"""
ds = self._to_temp_dataset().persist()
ds = self._to_temp_dataset().persist(**kwargs)
return self._from_temp_dataset(ds)

def copy(self, deep=True):
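A quick sketch of the DataArray-level behavior these changes enable; the array, chunking, and ``num_workers`` keyword below are illustrative assumptions::

    import numpy as np
    import xarray as xr

    arr = xr.DataArray(np.arange(1000), dims='y').chunk({'y': 100})

    computed = arr.compute(num_workers=2)   # forwarded to dask.array.compute
    persisted = arr.persist(num_workers=2)  # forwarded to dask.persist
    arr.load(num_workers=2)                 # same forwarding, loads in place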
43 changes: 35 additions & 8 deletions xarray/core/dataset.py
@@ -445,14 +445,23 @@ def sizes(self):
"""
return self.dims

def load(self):
def load(self, **kwargs):
"""Manually trigger loading of this dataset's data from disk or a
remote source into memory and return this dataset.

Normally, it should not be necessary to call this method in user code,
because all xarray functions should either work on deferred data or
load data automatically. However, this method can be necessary when
working with many file objects on disk.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.array.compute``.

See Also
--------
dask.array.compute
"""
# access .data to coerce everything to numpy or dask arrays
lazy_data = {k: v._data for k, v in self.variables.items()
@@ -461,7 +470,7 @@ def load(self):
import dask.array as da

# evaluate all the dask arrays simultaneously
evaluated_data = da.compute(*lazy_data.values())
evaluated_data = da.compute(*lazy_data.values(), **kwargs)

for k, data in zip(lazy_data, evaluated_data):
self.variables[k].data = data
@@ -473,7 +482,7 @@

return self

def compute(self):
def compute(self, **kwargs):
"""Manually trigger loading of this dataset's data from disk or a
remote source into memory and return a new dataset. The original is
left unaltered.
@@ -482,11 +491,20 @@ def compute(self):
because all xarray functions should either work on deferred data or
load data automatically. However, this method can be necessary when
working with many file objects on disk.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.array.compute``.

See Also
--------
dask.array.compute
"""
new = self.copy(deep=False)
return new.load()
return new.load(**kwargs)

def _persist_inplace(self):
def _persist_inplace(self, **kwargs):
""" Persist all Dask arrays in memory """
# access .data to coerce everything to numpy or dask arrays
lazy_data = {k: v._data for k, v in self.variables.items()
@@ -495,24 +513,33 @@ def _persist_inplace(self):
import dask

# evaluate all the dask arrays simultaneously
evaluated_data = dask.persist(*lazy_data.values())
evaluated_data = dask.persist(*lazy_data.values(), **kwargs)

for k, data in zip(lazy_data, evaluated_data):
self.variables[k].data = data

return self

def persist(self):
def persist(self, **kwargs):
""" Trigger computation, keeping data as dask arrays

This operation can be used to trigger computation on underlying dask
arrays, similar to ``.compute()``. However this operation keeps the
data as dask arrays. This is particularly useful when using the
dask.distributed scheduler and you want to load a large amount of data
into distributed memory.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.persist``.

See Also
--------
dask.persist
"""
new = self.copy(deep=False)
return new._persist_inplace()
return new._persist_inplace(**kwargs)

@classmethod
def _construct_direct(cls, variables, coord_names, dims=None, attrs=None,
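The Dataset methods mirror the DataArray ones. Forwarded keywords matter most with ``persist``; a sketch using ``optimize_graph``, a keyword ``dask.persist`` is documented to accept (the data here is an illustrative assumption)::

    import numpy as np
    import xarray as xr

    ds = xr.Dataset({'x': ('y', np.arange(1000))}).chunk({'y': 100})

    # Still dask-backed afterwards, but computation has been triggered;
    # the keyword is passed straight through to dask.persist.
    ds_p = ds.persist(optimize_graph=False)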
28 changes: 24 additions & 4 deletions xarray/core/variable.py
@@ -307,29 +307,49 @@ def data(self, data):
def _indexable_data(self):
return orthogonally_indexable(self._data)

def load(self):
def load(self, **kwargs):
"""Manually trigger loading of this variable's data from disk or a
remote source into memory and return this variable.

Normally, it should not be necessary to call this method in user code,
because all xarray functions should either work on deferred data or
load data automatically.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.array.compute``.

See Also
--------
dask.array.compute
"""
if not isinstance(self._data, np.ndarray):
if isinstance(self._data, dask_array_type):
self._data = as_compatible_data(self._data.compute(**kwargs))
elif not isinstance(self._data, np.ndarray):
self._data = np.asarray(self._data)
return self

def compute(self):
def compute(self, **kwargs):
"""Manually trigger loading of this variable's data from disk or a
remote source into memory and return a new variable. The original is
left unaltered.

Normally, it should not be necessary to call this method in user code,
because all xarray functions should either work on deferred data or
load data automatically.

Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to ``dask.array.compute``.

See Also
--------
dask.array.compute
"""
new = self.copy(deep=False)
return new.load()
return new.load(**kwargs)

@property
def values(self):
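At the Variable level, the new branch hands ``**kwargs`` straight to the dask array's own ``compute`` method; a small behavioral sketch (values and chunking are illustrative)::

    import dask.array as da
    import numpy as np
    from xarray import Variable

    v = Variable('y', da.from_array(np.arange(6), chunks=(2,)))

    w = v.compute()  # new Variable backed by numpy; v itself stays lazy
    v.load()         # loads v in place; kwargs, if given, reach
                     # da.Array.compute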
5 changes: 5 additions & 0 deletions xarray/tests/__init__.py
@@ -20,6 +20,11 @@
except ImportError:
import unittest

try:
from unittest import mock
except ImportError:
import mock

try:
import scipy
has_scipy = True
45 changes: 44 additions & 1 deletion xarray/tests/test_dask.py
@@ -4,14 +4,15 @@
import pickle
import numpy as np
import pandas as pd
import pytest

import xarray as xr
from xarray import Variable, DataArray, Dataset
import xarray.ufuncs as xu
from xarray.core.pycompat import suppress
from . import TestCase, requires_dask

from xarray.tests import unittest
from xarray.tests import unittest, mock

with suppress(ImportError):
import dask
@@ -394,6 +395,47 @@ def test_from_dask_variable(self):
self.assertLazyAndIdentical(self.lazy_array, a)


@requires_dask
@pytest.mark.parametrize("method", ['load', 'compute'])
def test_dask_kwargs_variable(method):
x = Variable('y', da.from_array(np.arange(3), chunks=(2,)))
# args should be passed on to da.Array.compute()
with mock.patch.object(da.Array, 'compute',
return_value=np.arange(3)) as mock_compute:
getattr(x, method)(foo='bar')
mock_compute.assert_called_with(foo='bar')


@requires_dask
@pytest.mark.parametrize("method", ['load', 'compute', 'persist'])
def test_dask_kwargs_dataarray(method):
data = da.from_array(np.arange(3), chunks=(2,))
x = DataArray(data)
if method in ['load', 'compute']:
dask_func = 'dask.array.compute'
else:
dask_func = 'dask.persist'
# args should be passed on to "dask_func"
with mock.patch(dask_func) as mock_func:
getattr(x, method)(foo='bar')
mock_func.assert_called_with(data, foo='bar')


@requires_dask
@pytest.mark.parametrize("method", ['load', 'compute', 'persist'])
def test_dask_kwargs_dataset(method):
data = da.from_array(np.arange(3), chunks=(2,))
x = Dataset({'x': (('y'), data)})
if method in ['load', 'compute']:
dask_func = 'dask.array.compute'
else:
dask_func = 'dask.persist'
# args should be passed on to "dask_func"
with mock.patch(dask_func) as mock_func:
getattr(x, method)(foo='bar')
mock_func.assert_called_with(data, foo='bar')


kernel_call_count = 0
def kernel():
"""Dask kernel to test pickling/unpickling.
@@ -403,6 +445,7 @@ def kernel():
kernel_call_count += 1
return np.ones(1)


def build_dask_array():
global kernel_call_count
kernel_call_count = 0
Review comment (Contributor): Missing tests for Variable

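Responding to that comment, a sketch of the kind of extra Variable-level test that could be added, reusing the imports already present in ``test_dask.py`` (hypothetical, not part of this PR)::

    @requires_dask
    def test_variable_compute_returns_numpy_backed_copy():
        x = Variable('y', da.from_array(np.arange(3), chunks=(2,)))
        y = x.compute()
        # the original stays lazy; only the returned copy is loaded
        assert isinstance(x.data, da.Array)
        assert isinstance(y.data, np.ndarray)
        np.testing.assert_array_equal(y.values, np.arange(3))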