Commit 10fcd0e

FIX-#2596: Revert fix for binary ops
Signed-off-by: Igoshev, Yaroslav <[email protected]>
1 parent 825228b commit 10fcd0e

4 files changed: +44, -48 lines

asv_bench/asv.conf.json

Lines changed: 24 additions & 8 deletions
@@ -2,57 +2,65 @@
     // The version of the config file format. Do not change, unless
     // you know what you are doing.
     "version": 1,
+
     // The name of the project being benchmarked
     "project": "modin",
+
     // The project's homepage
     "project_url": "https://modin.readthedocs.io/",
+
     // The URL or local path of the source code repository for the
     // project being benchmarked
     "repo": "..",
+
     // The Python project's subdirectory in your repo. If missing or
     // the empty string, the project is assumed to be located at the root
     // of the repository.
     // "repo_subdir": "",
+
     // Customizable commands for building, installing, and
     // uninstalling the project. See asv.conf.json documentation.
     //
-    "install_command": [
-        "in-dir={env_dir} python -mpip install {wheel_file}[ray]"
-    ],
+    "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}[ray]"],
     // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
     // "build_command": [
     //     "python setup.py build",
     //     "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
     // ],
+
     // List of branches to benchmark. If not provided, defaults to "master"
     // (for git) or "default" (for mercurial).
     // "branches": ["master"], // for git
     // "branches": ["default"], // for mercurial
+
     // The DVCS being used. If not set, it will be automatically
     // determined from "repo" by looking at the protocol in the URL
     // (if remote), or by looking for special directories, such as
     // ".git" (if local).
     // "dvcs": "git",
+
     // The tool to use to create environments. May be "conda",
     // "virtualenv" or other value depending on the plugins in use.
     // If missing or the empty string, the tool will be automatically
     // determined by looking for tools on the PATH environment
     // variable.
     "environment_type": "conda",
+
     // timeout in seconds for installing any dependencies in environment
     // defaults to 10 min
     //"install_timeout": 600,
+
     // the base URL to show a commit for the project.
     "show_commit_url": "https://github.com/modin-project/modin/commit/",
+
     // The Pythons you'd like to test against. If not provided, defaults
     // to the current version of Python used to run `asv`.
     // "pythons": ["3.7"],
+
     // The list of conda channel names to be searched for benchmark
     // dependency packages in the specified order
-    "conda_channels": [
-        "conda-forge",
-        "defaults"
-    ],
+    "conda_channels": ["conda-forge", "defaults"],
+
     // The matrix of dependencies to test. Each key is the name of a
     // package (in PyPI) and the values are version numbers. An empty
     // list or empty string indicates to just test against the default
@@ -98,24 +106,31 @@
     // // additional env if run on windows+conda
     // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
     // ],
+
     // The directory (relative to the current directory) that benchmarks are
     // stored in. If not provided, defaults to "benchmarks"
     // "benchmark_dir": "benchmarks",
+
     // The directory (relative to the current directory) to cache the Python
     // environments in. If not provided, defaults to "env"
     "env_dir": ".asv/env",
+
     // The directory (relative to the current directory) that raw benchmark
     // results are stored in. If not provided, defaults to "results".
     "results_dir": ".asv/results",
+
     // The directory (relative to the current directory) that the html tree
     // should be written to. If not provided, defaults to "html".
     "html_dir": ".asv/html",
+
     // The number of characters to retain in the commit hashes.
     // "hash_length": 8,
+
     // `asv` will cache results of the recent builds in each
     // environment, making them faster to install next time. This is
     // the number of builds to keep, per environment.
     // "build_cache_size": 2,
+
     // The commits after which the regression search in `asv publish`
     // should start looking for regressions. Dictionary whose keys are
     // regexps matching to benchmark names, and values corresponding to
@@ -128,6 +143,7 @@
     // "some_benchmark": "352cdf", // Consider regressions only after this commit
     // "another_benchmark": null, // Skip regression detection altogether
     // },
+
     // The thresholds for relative change in results, after which `asv
     // publish` starts reporting regressions. Dictionary of the same
     // form as in ``regressions_first_commits``, with values
@@ -138,4 +154,4 @@
     // "some_benchmark": 0.01, // Threshold of 1%
     // "another_benchmark": 0.5, // Threshold of 50%
     // },
-}
+}
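
The file above is asv's JSON-with-comments configuration, so a stock json.loads() call rejects it. As a quick sanity check that the collapsed one-line "install_command" and "conda_channels" entries remain valid JSON, here is a minimal sketch; the embedded snippet and the strip_full_line_comments helper are illustrative only (they assume full-line // comments, which is all this file uses) and are not part of the commit:

# Minimal sketch: asv.conf.json is JSON extended with "//" comments, so it
# cannot be passed to json.loads() directly. Dropping full-line comments is
# enough to verify the one-line values parse.
import json

ASV_CONF_SNIPPET = """\
{
    // The version of the config file format.
    "version": 1,

    "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}[ray]"],

    // The list of conda channel names to be searched
    "conda_channels": ["conda-forge", "defaults"]
}
"""

def strip_full_line_comments(text):
    # Keep only lines whose first non-blank characters are not "//".
    kept = [line for line in text.splitlines() if not line.lstrip().startswith("//")]
    return "\n".join(kept)

conf = json.loads(strip_full_line_comments(ASV_CONF_SNIPPET))
print(conf["install_command"])  # ['in-dir={env_dir} python -mpip install {wheel_file}[ray]']
print(conf["conda_channels"])   # ['conda-forge', 'defaults']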

modin/data_management/functions/binary_function.py

Lines changed: 1 addition & 28 deletions
@@ -24,7 +24,6 @@ def caller(query_compiler, other, *args, **kwargs):
             axis = kwargs.get("axis", 0)
             broadcast = kwargs.pop("broadcast", False)
             join_type = call_kwds.get("join_type", "outer")
-            squeeze_self = kwargs.pop("squeeze_self", None)
             if isinstance(other, type(query_compiler)):
                 if broadcast:
                     assert (
@@ -55,37 +54,11 @@ def caller(query_compiler, other, *args, **kwargs):
                    )
            else:
                if isinstance(other, (list, np.ndarray, pandas.Series)):
-                    if len(query_compiler.columns) == 1:
-
-                        def _apply_func(df):
-                            result = func(
-                                df.squeeze(axis=1) if squeeze_self else df,
-                                other,
-                                *args,
-                                **kwargs
-                            )
-                            return pandas.DataFrame(result)
-
-                        apply_func = _apply_func
-                        build_mapreduce_func = False
-                    else:
-
-                        def _apply_func(df):
-                            return func(
-                                df.squeeze(axis=1) if squeeze_self else df,
-                                other,
-                                *args,
-                                **kwargs
-                            )
-
-                        apply_func = _apply_func
-                        build_mapreduce_func = True
                    new_modin_frame = query_compiler._modin_frame._apply_full_axis(
                        axis,
-                        apply_func,
+                        lambda df: func(df, other, *args, **kwargs),
                        new_index=query_compiler.index,
                        new_columns=query_compiler.columns,
-                        build_mapreduce_func=build_mapreduce_func,
                    )
                else:
                    new_modin_frame = query_compiler._modin_frame._map(
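
With the squeeze/map-reduce branches removed, a binary operation whose right operand is a list, NumPy array, or pandas.Series goes through the single full-axis path, lambda df: func(df, other, *args, **kwargs). Below is a hedged user-level sketch of operations that exercise this path; it assumes a working Modin engine (e.g. Ray) and that results follow ordinary pandas broadcasting, and the internal dispatch is not observable from this code:

# Illustration only: binary ops where the right operand is a list,
# np.ndarray, or pandas.Series now take the single full-axis path shown
# in the diff above.
import numpy as np
import pandas
import modin.pandas as pd

s = pd.Series([1, 2, 3])

print(s.add([10, 20, 30]))             # list operand
print(s * np.array([2, 2, 2]))         # ndarray operand
print(s - pandas.Series([1, 1, 1]))    # pandas.Series operand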

modin/engines/base/frame/data.py

Lines changed: 1 addition & 6 deletions
@@ -1272,7 +1272,6 @@ def _apply_full_axis(
         new_index=None,
         new_columns=None,
         dtypes=None,
-        build_mapreduce_func=True,
     ):
         """
         Perform a function across an entire axis.
@@ -1310,7 +1309,6 @@
             new_columns=new_columns,
             dtypes=dtypes,
             other=None,
-            build_mapreduce_func=build_mapreduce_func,
         )

     def _apply_full_axis_select_indices(
@@ -1645,7 +1643,6 @@ def broadcast_apply_full_axis(
         apply_indices=None,
         enumerate_partitions=False,
         dtypes=None,
-        build_mapreduce_func=True,
     ):
         """Broadcast partitions of other dataframe partitions and apply a function along full axis.

@@ -1691,9 +1688,7 @@
             axis=axis,
             left=self._partitions,
             right=other,
-            apply_func=self._build_mapreduce_func(axis, func)
-            if build_mapreduce_func
-            else func,
+            apply_func=self._build_mapreduce_func(axis, func),
             apply_indices=apply_indices,
             enumerate_partitions=enumerate_partitions,
             keep_partitioning=True,
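
The net effect in broadcast_apply_full_axis is that the user function is now always wrapped by self._build_mapreduce_func(axis, func) before being shipped to partitions, since the build_mapreduce_func escape hatch is gone. The sketch below is a toy illustration of that always-wrap pattern with stand-in names; it is not Modin's real partition machinery:

# Simplified stand-ins, NOT Modin's internals: with the build_mapreduce_func
# flag removed, the full-axis path always wraps the user function before
# applying it to each partition.
import pandas

def build_mapreduce_func(axis, func):
    """Toy wrapper: run ``func`` on a whole partition, keep the result a DataFrame."""
    def wrapped(partition, *args, **kwargs):
        result = func(partition, *args, **kwargs)
        # Normalize the per-partition result to a DataFrame in this toy version.
        return pandas.DataFrame(result)
    return wrapped

def broadcast_apply_full_axis(axis, func, partitions):
    apply_func = build_mapreduce_func(axis, func)  # always wrapped now
    return [apply_func(part) for part in partitions]

parts = [pandas.DataFrame({"a": [1, 2]}), pandas.DataFrame({"a": [3, 4]})]
for frame in broadcast_apply_full_axis(0, lambda df: df * 2, parts):
    print(frame)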

modin/pandas/series.py

Lines changed: 18 additions & 6 deletions
@@ -142,12 +142,16 @@ def __radd__(self, left):
         return self.add(left)

     def __and__(self, other):
+        if isinstance(other, (list, np.ndarray, pandas.Series)):
+            return self._default_to_pandas(pandas.Series.__and__, other)
         new_self, new_other = self._prepare_inter_op(other)
-        return new_self._binary_op("__and__", new_other, axis=0, squeeze_self=True)
+        return super(Series, new_self).__and__(new_other)

     def __rand__(self, other):
+        if isinstance(other, (list, np.ndarray, pandas.Series)):
+            return self._default_to_pandas(pandas.Series.__rand__, other)
         new_self, new_other = self._prepare_inter_op(other)
-        return self._binary_op("__rand__", other, axis=0, squeeze_self=True)
+        return super(Series, new_self).__rand__(new_other)

     def __array__(self, dtype=None):
         return super(Series, self).__array__(dtype).flatten()
@@ -215,20 +219,28 @@ def __rmul__(self, left):
         return self.rmul(left)

     def __or__(self, other):
+        if isinstance(other, (list, np.ndarray, pandas.Series)):
+            return self._default_to_pandas(pandas.Series.__or__, other)
         new_self, new_other = self._prepare_inter_op(other)
-        return new_self._binary_op("__or__", new_other, axis=0, squeeze_self=True)
+        return super(Series, new_self).__or__(new_other)

     def __ror__(self, other):
+        if isinstance(other, (list, np.ndarray, pandas.Series)):
+            return self._default_to_pandas(pandas.Series.__ror__, other)
         new_self, new_other = self._prepare_inter_op(other)
-        return self._binary_op("__ror__", other, axis=0, squeeze_self=True)
+        return super(Series, new_self).__ror__(new_other)

     def __xor__(self, other):
+        if isinstance(other, (list, np.ndarray, pandas.Series)):
+            return self._default_to_pandas(pandas.Series.__xor__, other)
         new_self, new_other = self._prepare_inter_op(other)
-        return new_self._binary_op("__xor__", new_other, axis=0, squeeze_self=True)
+        return super(Series, new_self).__xor__(new_other)

     def __rxor__(self, other):
+        if isinstance(other, (list, np.ndarray, pandas.Series)):
+            return self._default_to_pandas(pandas.Series.__rxor__, other)
         new_self, new_other = self._prepare_inter_op(other)
-        return new_self._binary_op("__rxor__", new_other, axis=0, squeeze_self=True)
+        return super(Series, new_self).__rxor__(new_other)

     def __pow__(self, right):
         return self.pow(right)
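
After the revert, the logical operators on Series defer to pandas via _default_to_pandas whenever the other operand is a list, ndarray, or pandas.Series, and otherwise align both operands with _prepare_inter_op and call the parent-class operator. A short usage sketch follows; it assumes a working Modin engine (e.g. Ray) and that outputs match pandas:

# Behavior of the reverted logical operators, seen from user code.
import numpy as np
import modin.pandas as pd

a = pd.Series([True, False, True])
b = pd.Series([True, True, False])

# Series & Series: operands are aligned via _prepare_inter_op and the
# parent-class __and__ handles the operation.
print(a & b)

# Series with list / ndarray operands: these are now routed through
# _default_to_pandas, as in the diff above.
print(a | [False, False, True])
print(a ^ np.array([True, True, True]))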
