1 file changed: +3 −3 lines changed

@@ -136,10 +136,10 @@ def _get_optimal_chunks_for_groups(chunks, labels):
     return tuple(newchunks)


-def _unique(a):
+def _unique(a: np.ndarray):
     """Much faster to use pandas unique and sort the results.
     np.unique sorts before uniquifying and is slow."""
-    return np.sort(pd.unique(a))
+    return np.sort(pd.unique(a.reshape(-1)))


 @memoize
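
A note on this first hunk: pd.unique documents that it expects 1-d array-like input, so the added a.reshape(-1) lets _unique accept multi-dimensional label arrays, and the new annotation records that an ndarray is now expected. Below is a minimal standalone sketch of the changed helper with an illustrative 2-D input (the example array is made up for illustration, not taken from the diff):

    import numpy as np
    import pandas as pd

    def _unique(a: np.ndarray):
        """Much faster to use pandas unique and sort the results.
        np.unique sorts before uniquifying and is slow."""
        # Flatten first: pd.unique expects 1-D input.
        return np.sort(pd.unique(a.reshape(-1)))

    # Hypothetical 2-D label array; flattening makes pd.unique work here.
    labels = np.array([[3, 1, 3], [2, 1, 2]])
    print(_unique(labels))  # -> [1 2 3]
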
@@ -897,7 +897,7 @@ def _grouped_combine(
         # when there's only a single axis of reduction, we can just concatenate later,
         # reindexing is unnecessary
         # I bet we can minimize the amount of reindexing for mD reductions too, but it's complicated
-        unique_groups = _unique(tuple(flatten(deepmap(listify_groups, x_chunk))))
+        unique_groups = _unique(np.array(tuple(flatten(deepmap(listify_groups, x_chunk)))))
         unique_groups = unique_groups[~isnull(unique_groups)]
         if len(unique_groups) == 0:
             unique_groups = [np.nan]
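
The second hunk follows from that change: the call site in _grouped_combine previously passed a plain tuple, which has no reshape method, so the flattened group labels are now wrapped in np.array(...) before reaching _unique. A rough sketch of that call-site pattern, where a plain nested list stands in for deepmap(listify_groups, x_chunk) since those helpers are not part of this diff:

    import numpy as np
    import pandas as pd

    def _unique(a: np.ndarray):
        # Same helper as in the first hunk, minus the docstring.
        return np.sort(pd.unique(a.reshape(-1)))

    # Stand-in for the per-block group labels produced by deepmap(listify_groups, x_chunk).
    groups_per_block = [[1, 2], [2, 3], [3, 1]]
    flat = tuple(g for block in groups_per_block for g in block)

    # tuple -> ndarray so that reshape(-1) inside _unique works.
    unique_groups = _unique(np.array(flat))
    print(unique_groups)  # -> [1 2 3]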