{full,zeros,ones}_like typing #6611
@@ -13,14 +13,15 @@
    Iterator,
    Mapping,
    TypeVar,
    Union,
    overload,
)

import numpy as np
import pandas as pd

from . import dtypes, duck_array_ops, formatting, formatting_html, ops
from .npcompat import DTypeLike
from .npcompat import DTypeLike, DTypeLikeSave
from .options import OPTIONS, _get_keep_attrs
from .pycompat import is_duck_dask_array
from .rolling_exp import RollingExp
@@ -1577,26 +1578,45 @@ def __getitem__(self, value):
        raise NotImplementedError()


DTypeMaybeMapping = Union[DTypeLikeSave, Mapping[Any, DTypeLikeSave]]


@overload
def full_like(
    other: Dataset,
    fill_value,
    dtype: DTypeLike | Mapping[Any, DTypeLike] = None,
) -> Dataset:
def full_like(other: DataArray, fill_value: Any, dtype: DTypeLikeSave) -> DataArray:
    ...


@overload
def full_like(other: Dataset, fill_value: Any, dtype: DTypeMaybeMapping) -> Dataset:
    ...


@overload
def full_like(other: DataArray, fill_value, dtype: DTypeLike = None) -> DataArray:
def full_like(other: Variable, fill_value: Any, dtype: DTypeLikeSave) -> Variable:
    ...


@overload
def full_like(other: Variable, fill_value, dtype: DTypeLike = None) -> Variable:
def full_like(
    other: Dataset | DataArray, fill_value: Any, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray:
    ...


def full_like(other, fill_value, dtype=None):
@overload
def full_like(
    other: Dataset | DataArray | Variable,
    fill_value: Any,
    dtype: DTypeMaybeMapping = None,
) -> Dataset | DataArray | Variable:
    ...


def full_like(
    other: Dataset | DataArray | Variable,
    fill_value: Any,
    dtype: DTypeMaybeMapping = None,
) -> Dataset | DataArray | Variable:
    """Return a new object with the same shape and type as a given object.

    Parameters
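To make what the new overloads buy concrete, here is a small usage sketch (the `da`/`ds` objects are made up for illustration and are not part of the PR); each call matches one specific overload, so a type checker sees `DataArray`, `Dataset`, and `Variable` respectively instead of a broad union:

```python
import numpy as np
import xarray as xr

da = xr.DataArray(np.arange(3), dims="x")
ds = da.to_dataset(name="a")

xr.full_like(da, 2, dtype=np.float64)        # matches the DataArray overload
xr.full_like(ds, 2, dtype={"a": "float32"})  # matches the Dataset overload (per-variable dtypes)
xr.full_like(da.variable, 2, dtype="int64")  # matches the Variable overload

# A dict-like dtype combined with a DataArray or Variable is rejected at
# runtime by the ValueError checks further down in this diff.
```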
@@ -1711,26 +1731,26 @@ def full_like(other, fill_value, dtype=None):
            f"fill_value must be scalar or, for datasets, a dict-like. Received {fill_value} instead."
        )

    if not isinstance(other, Dataset) and isinstance(dtype, Mapping):
        raise ValueError(
            "'dtype' cannot be dict-like when passing a DataArray or Variable"
        )

    if isinstance(other, Dataset):
        if not isinstance(fill_value, dict):
            fill_value = {k: fill_value for k in other.data_vars.keys()}

        dtype_: Mapping[Any, DTypeLikeSave]
        if not isinstance(dtype, Mapping):
            dtype_ = {k: dtype for k in other.data_vars.keys()}
        else:
            dtype_ = dtype

        data_vars = {
            k: _full_like_variable(v, fill_value.get(k, dtypes.NA), dtype_.get(k, None))
            k: _full_like_variable(
                v.variable, fill_value.get(k, dtypes.NA), dtype_.get(k, None)
            )
            for k, v in other.data_vars.items()
        }
        return Dataset(data_vars, coords=other.coords, attrs=other.attrs)
    elif isinstance(other, DataArray):
        if isinstance(dtype, Mapping):
            raise ValueError("'dtype' cannot be dict-like when passing a DataArray")
        return DataArray(
            _full_like_variable(other.variable, fill_value, dtype),
            dims=other.dims,
@@ -1739,12 +1759,16 @@ def full_like(other, fill_value, dtype=None):
            name=other.name,
        )
    elif isinstance(other, Variable):
        if isinstance(dtype, Mapping):
            raise ValueError("'dtype' cannot be dict-like when passing a Variable")
        return _full_like_variable(other, fill_value, dtype)
    else:
        raise TypeError("Expected DataArray, Dataset, or Variable")


def _full_like_variable(other, fill_value, dtype: DTypeLike = None):
def _full_like_variable(
    other: Variable, fill_value: Any, dtype: DTypeLike = None
) -> Variable:
    """Inner function of full_like, where other must be a variable"""
    from .variable import Variable

@@ -1765,7 +1789,38 @@ def _full_like_variable(other, fill_value, dtype: DTypeLike = None):
    return Variable(dims=other.dims, data=data, attrs=other.attrs)


def zeros_like(other, dtype: DTypeLike = None):
@overload
def zeros_like(other: DataArray, dtype: DTypeLikeSave) -> DataArray:
    ...


@overload
def zeros_like(other: Dataset, dtype: DTypeMaybeMapping) -> Dataset:
    ...


@overload
def zeros_like(other: Variable, dtype: DTypeLikeSave) -> Variable:

Inline review comment: For these ones which are generic over …

    ...


@overload
def zeros_like(
    other: Dataset | DataArray, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray:
Review thread on lines +1807 to +1810:

Do you know why this overload is required? I had thought that …

I tried to adjust these locally. An issue I hit is that so much of our typing is on … To the extent you know which parts are Perfect vs Not-perfect-but-required-to-pass: if you want to add a comment for the latter ones, that will make it easier for future travelers to know why things are as they are and hopefully change them.

I was struggling a lot with … This means that a "simple" … I tried using … So this was the only solution I could get to work.

Additionally, the code will actually create a plain e.g. DataArray, so typevars with bounds are actually wrong here.

Yeah. I think the proposed code is a big upgrade, and we can refine towards perfection in the future...

FWIW I found this SO answer helpful in clarifying the difference (I saw that I had upvoted it before), but I'm still not confident in how we should design these methods.
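To illustrate the TypeVar trouble described above, here is a hedged sketch; `T_DsOrDa`, `zeros_like_alt` and `takes_either` are invented names, not code from the PR. A TypeVar constrained to the concrete classes cannot be solved when the argument itself is typed as a union, which is exactly the case the explicit `Dataset | DataArray` overload covers; and because the implementation builds a plain `DataArray`/`Dataset`, a `bound=` TypeVar would additionally promise to return the caller's subclass, which the code does not do:

```python
from __future__ import annotations

from typing import TypeVar

from xarray import DataArray, Dataset

# Hypothetical alternative to the overloads: a constrained TypeVar.
T_DsOrDa = TypeVar("T_DsOrDa", Dataset, DataArray)


def zeros_like_alt(other: T_DsOrDa) -> T_DsOrDa:
    ...


def takes_either(obj: Dataset | DataArray) -> Dataset | DataArray:
    # mypy rejects this call: a constrained TypeVar must resolve to exactly one
    # of its constraints, and `Dataset | DataArray` is neither, so callers that
    # only know they hold "a Dataset or a DataArray" could not use the function.
    return zeros_like_alt(obj)
```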
    ...


@overload
def zeros_like(
    other: Dataset | DataArray | Variable, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray | Variable:
    ...


def zeros_like(
    other: Dataset | DataArray | Variable, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray | Variable:
    """Return a new object of zeros with the same shape and
    type as a given dataarray or dataset.

@@ -1821,7 +1876,38 @@ def zeros_like(other, dtype: DTypeLike = None):
    return full_like(other, 0, dtype)


def ones_like(other, dtype: DTypeLike = None):
@overload
def ones_like(other: DataArray, dtype: DTypeLikeSave) -> DataArray:
    ...


@overload
def ones_like(other: Dataset, dtype: DTypeMaybeMapping) -> Dataset:
    ...


@overload
def ones_like(other: Variable, dtype: DTypeLikeSave) -> Variable:
    ...


@overload
def ones_like(
    other: Dataset | DataArray, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray:
    ...


@overload
def ones_like(
    other: Dataset | DataArray | Variable, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray | Variable:
    ...


def ones_like(
    other: Dataset | DataArray | Variable, dtype: DTypeMaybeMapping = None
) -> Dataset | DataArray | Variable:
    """Return a new object of ones with the same shape and
    type as a given dataarray or dataset.
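As with `full_like`, a brief usage sketch of how the `zeros_like`/`ones_like` overloads are meant to resolve (the `ds` object is illustrative); both helpers forward to `full_like` with a fill value of 0 or 1:

```python
import numpy as np
import xarray as xr

ds = xr.Dataset({"a": ("x", np.arange(3)), "b": ("x", np.ones(3))})

xr.zeros_like(ds)                                        # matches the Dataset overload
xr.zeros_like(ds, dtype={"a": "float64", "b": "int32"})  # per-variable dtypes
xr.ones_like(ds["a"], dtype="float32")                   # matches the DataArray overload
```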
@@ -1905,7 +1905,7 @@ def polyval(
    coeffs = coeffs.reindex(
        {degree_dim: np.arange(max_deg + 1)}, fill_value=0, copy=False
    )
    coord = _ensure_numeric(coord)  # type: ignore # https://github.com/python/mypy/issues/1533 ?
    coord = _ensure_numeric(coord)

    # using Horner's method
    # https://en.wikipedia.org/wiki/Horner%27s_method
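For reference on the "Horner's method" comment shown in the context lines, this is the recurrence the function relies on; a minimal NumPy-only sketch with illustrative coefficients (the xarray code applies the same recurrence to `coeffs`/`coord` objects):

```python
import numpy as np

# Evaluate p(x) = c0 + c1*x + c2*x**2 + c3*x**3 with Horner's rule:
# p(x) = c0 + x*(c1 + x*(c2 + x*c3)), i.e. one multiply-add per coefficient.
coeffs = np.array([2.0, -1.0, 0.5, 3.0])  # c0..c3, illustrative values
x = np.linspace(0.0, 1.0, 5)

res = np.full_like(x, coeffs[-1])
for c in coeffs[-2::-1]:
    res = res * x + c

assert np.allclose(res, np.polyval(coeffs[::-1], x))
```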
@@ -1917,7 +1917,7 @@
    return res


def _ensure_numeric(data: T_Xarray) -> T_Xarray:
def _ensure_numeric(data: Dataset | DataArray) -> Dataset | DataArray:

Review thread on this line:

This is the kind of func that would be nice at some point to make generic; with the proposed code we lose whether it's a Dataset or a DataArray.

I failed to make it work with TypeVars since it is called with …
    """Converts all datetime64 variables to float64

    Parameters
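To show concretely what losing "whether it's a Dataset or a DataArray" means for callers, here is a hedged sketch; `ensure_numeric_union` and `caller` are stand-ins, not the PR's code:

```python
from __future__ import annotations

from xarray import DataArray, Dataset


def ensure_numeric_union(data: Dataset | DataArray) -> Dataset | DataArray:
    # Stand-in with the same shape of signature as the new _ensure_numeric.
    return data


def caller(da: DataArray) -> DataArray:
    converted = ensure_numeric_union(da)
    # Even though a DataArray went in, `converted` is typed as
    # `Dataset | DataArray`, so we must narrow before using DataArray-only API;
    # a generic (TypeVar-based) signature would have kept that precision.
    assert isinstance(converted, DataArray)
    return converted
```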