Commit 278d2e6

upgrade black version to 19.10b0 (pydata#3456)
1 parent cb5eef1 commit 278d2e6

15 files changed: +32 -32 lines changed
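
The bulk of this diff is mechanical: black 19.10b0 reformats single-element tuple unpacking targets such as `dim, = coord.dims` into the parenthesized form `(dim,) = coord.dims`. Both spellings are equivalent at runtime; the sketch below only illustrates the style difference and uses made-up variable names rather than xarray code.

    # Minimal illustration of the unpacking style this commit changes.
    # The dict below is a stand-in for illustration; it is not taken from xarray.
    data_vars = {"temperature": [1.0, 2.0, 3.0]}

    # Style left alone by older black releases:
    data_array, = data_vars.values()

    # Style produced by black 19.10b0 (what this commit applies):
    (data_array,) = data_vars.values()

    # Either form raises ValueError unless there is exactly one value,
    # which is why the pattern is used as a one-element assertion.
    assert data_array == [1.0, 2.0, 3.0]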

xarray/backends/api.py

Lines changed: 1 addition & 1 deletion
@@ -677,7 +677,7 @@ def open_dataarray(
             "then select the variable you want."
         )
     else:
-        data_array, = dataset.data_vars.values()
+        (data_array,) = dataset.data_vars.values()
 
     data_array._file_obj = dataset._file_obj
 
xarray/core/alignment.py

Lines changed: 1 addition & 1 deletion
@@ -252,7 +252,7 @@ def align(
 
     if not indexes and len(objects) == 1:
         # fast path for the trivial case
-        obj, = objects
+        (obj,) = objects
         return (obj.copy(deep=copy),)
 
     all_indexes = defaultdict(list)

xarray/core/combine.py

Lines changed: 1 addition & 1 deletion
@@ -954,7 +954,7 @@ def _auto_concat(
                     "supply the ``concat_dim`` argument "
                     "explicitly"
                 )
-            dim, = concat_dims
+            (dim,) = concat_dims
         return concat(
             datasets,
             dim=dim,

xarray/core/computation.py

Lines changed: 4 additions & 4 deletions
@@ -145,7 +145,7 @@ def result_name(objects: list) -> Any:
     names = {getattr(obj, "name", _DEFAULT_NAME) for obj in objects}
     names.discard(_DEFAULT_NAME)
     if len(names) == 1:
-        name, = names
+        (name,) = names
     else:
         name = None
     return name
@@ -187,7 +187,7 @@ def build_output_coords(
 
     if len(coords_list) == 1 and not exclude_dims:
         # we can skip the expensive merge
-        unpacked_coords, = coords_list
+        (unpacked_coords,) = coords_list
         merged_vars = dict(unpacked_coords.variables)
     else:
         # TODO: save these merged indexes, instead of re-computing them later
@@ -237,7 +237,7 @@ def apply_dataarray_vfunc(
             for variable, coords in zip(result_var, result_coords)
         )
     else:
-        coords, = result_coords
+        (coords,) = result_coords
         out = DataArray(result_var, coords, name=name, fastpath=True)
 
     return out
@@ -384,7 +384,7 @@ def apply_dataset_vfunc(
     if signature.num_outputs > 1:
         out = tuple(_fast_dataset(*args) for args in zip(result_vars, list_of_coords))
     else:
-        coord_vars, = list_of_coords
+        (coord_vars,) = list_of_coords
         out = _fast_dataset(result_vars, coord_vars)
 
     if keep_attrs and isinstance(first_obj, Dataset):

xarray/core/concat.py

Lines changed: 2 additions & 2 deletions
@@ -148,10 +148,10 @@ def _calc_concat_dim_coord(dim):
         dim = dim_name
     elif not isinstance(dim, DataArray):
         coord = as_variable(dim).to_index_variable()
-        dim, = coord.dims
+        (dim,) = coord.dims
     else:
         coord = dim
-        dim, = coord.dims
+        (dim,) = coord.dims
     return dim, coord
 
 
xarray/core/dataarray.py

Lines changed: 1 addition & 1 deletion
@@ -616,7 +616,7 @@ def _level_coords(self) -> Dict[Hashable, Hashable]:
             if var.ndim == 1 and isinstance(var, IndexVariable):
                 level_names = var.level_names
                 if level_names is not None:
-                    dim, = var.dims
+                    (dim,) = var.dims
                     level_coords.update({lname: dim for lname in level_names})
         return level_coords
 
xarray/core/dataset.py

Lines changed: 1 addition & 1 deletion
@@ -4066,7 +4066,7 @@ def reduce(
                     if len(reduce_dims) == 1:
                         # unpack dimensions for the benefit of functions
                         # like np.argmin which can't handle tuple arguments
-                        reduce_dims, = reduce_dims
+                        (reduce_dims,) = reduce_dims
                     elif len(reduce_dims) == var.ndim:
                         # prefer to aggregate over axis=None rather than
                         # axis=(0, 1) if they will be equivalent, because

xarray/core/groupby.py

Lines changed: 3 additions & 3 deletions
@@ -321,7 +321,7 @@ def __init__(
             raise ValueError("`group` must have a name")
 
         group, obj, stacked_dim, inserted_dims = _ensure_1d(group, obj)
-        group_dim, = group.dims
+        (group_dim,) = group.dims
 
         expected_size = obj.sizes[group_dim]
         if group.size != expected_size:
@@ -470,7 +470,7 @@ def _infer_concat_args(self, applied_example):
         else:
             coord = self._unique_coord
             positions = None
-        dim, = coord.dims
+        (dim,) = coord.dims
         if isinstance(coord, _DummyGroup):
             coord = None
         return coord, dim, positions
@@ -644,7 +644,7 @@ def _concat_shortcut(self, applied, dim, positions=None):
     def _restore_dim_order(self, stacked):
         def lookup_order(dimension):
             if dimension == self._group.name:
-                dimension, = self._group.dims
+                (dimension,) = self._group.dims
             if dimension in self._obj.dims:
                 axis = self._obj.get_axis_num(dimension)
             else:

xarray/core/indexing.py

Lines changed: 2 additions & 2 deletions
@@ -212,7 +212,7 @@ def get_dim_indexers(data_obj, indexers):
     level_indexers = defaultdict(dict)
     dim_indexers = {}
     for key, label in indexers.items():
-        dim, = data_obj[key].dims
+        (dim,) = data_obj[key].dims
         if key != dim:
             # assume here multi-index level indexer
             level_indexers[dim][key] = label
@@ -1368,7 +1368,7 @@ def __getitem__(
         if isinstance(key, tuple) and len(key) == 1:
            # unpack key so it can index a pandas.Index object (pandas.Index
            # objects don't like tuples)
-            key, = key
+            (key,) = key
 
         if getattr(key, "ndim", 0) > 1:  # Return np-array if multidimensional
             return NumpyIndexingAdapter(self.array.values)[indexer]

xarray/core/merge.py

Lines changed: 2 additions & 2 deletions
@@ -277,7 +277,7 @@ def append_all(variables, indexes):
 
 
 def collect_from_coordinates(
-    list_of_coords: "List[Coordinates]"
+    list_of_coords: "List[Coordinates]",
 ) -> Dict[Hashable, List[MergeElement]]:
     """Collect variables and indexes to be merged from Coordinate objects."""
     grouped: Dict[Hashable, List[Tuple[Variable, pd.Index]]] = {}
@@ -320,7 +320,7 @@ def merge_coordinates_without_align(
 
 
 def determine_coords(
-    list_of_mappings: Iterable["DatasetLike"]
+    list_of_mappings: Iterable["DatasetLike"],
 ) -> Tuple[Set[Hashable], Set[Hashable]]:
     """Given a list of dicts with xarray object values, identify coordinates.
 
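
The two xarray/core/merge.py hunks above show the other formatting change in this black release: when a signature's sole parameter is wrapped onto its own line, black 19.10b0 leaves a trailing comma after it. A minimal sketch with a hypothetical function (the name, parameter, and body are illustrative, not from xarray):

    from typing import Dict, Hashable, List

    # Older black output (no trailing comma after the lone argument):
    #
    #     def collect_names(
    #         list_of_mappings: List[Dict[Hashable, int]]
    #     ) -> List[Hashable]:
    #
    # black 19.10b0 adds the trailing comma, matching the merge.py hunks:
    def collect_names(
        list_of_mappings: List[Dict[Hashable, int]],
    ) -> List[Hashable]:
        """Collect every key seen across the input mappings (illustration only)."""
        return [key for mapping in list_of_mappings for key in mapping]

    print(collect_names([{"x": 1}, {"y": 2}]))  # ['x', 'y']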

0 commit comments