 )


-def _subprocess_run_print(args, **kwargs):
+def echo(echo_string: str):
     # Use subprocess for printing to reduce chance of printing out of sequence
     # with the subsequent calls.
-    subprocess.run(["echo", f"BM_RUNNER DEBUG: {' '.join(args)}"])
+    subprocess.run(["echo", f"BM_RUNNER DEBUG: {echo_string}"])
+
+
+def _subprocess_runner(args, asv=False, **kwargs):
+    # Avoid permanent modifications if the same arguments are used more than once.
+    args = args.copy()
+    kwargs = kwargs.copy()
+    if asv:
+        args.insert(0, "asv")
+        kwargs["cwd"] = BENCHMARKS_DIR
+    echo(" ".join(args))
     return subprocess.run(args, **kwargs)


-def _subprocess_run_asv(args, **kwargs):
-    args.insert(0, "asv")
-    kwargs["cwd"] = BENCHMARKS_DIR
-    return _subprocess_run_print(args, **kwargs)
+def _subprocess_runner_capture(args, **kwargs) -> str:
+    result = _subprocess_runner(args, capture_output=True, **kwargs)
+    return result.stdout.decode()


 def _check_requirements(package: str) -> None:
@@ -65,12 +74,12 @@ def _prep_data_gen_env() -> None:
     python_version = "3.11"
     data_gen_var = "DATA_GEN_PYTHON"
     if data_gen_var in environ:
-        print("Using existing data generation environment.")
+        echo("Using existing data generation environment.")
     else:
-        print("Setting up the data generation environment ...")
+        echo("Setting up the data generation environment ...")
         # Get Nox to build an environment for the `tests` session, but don't
         # run the session. Will re-use a cached environment if appropriate.
-        _subprocess_run_print(
+        _subprocess_runner(
             [
                 "nox",
                 f"--noxfile={root_dir / 'noxfile.py'}",
@@ -86,18 +95,18 @@ def _prep_data_gen_env() -> None:
         ).resolve()
         environ[data_gen_var] = str(data_gen_python)

-        print("Installing Mule into data generation environment ...")
+        echo("Installing Mule into data generation environment ...")
         mule_dir = data_gen_python.parents[1] / "resources" / "mule"
         if not mule_dir.is_dir():
-            _subprocess_run_print(
+            _subprocess_runner(
                 [
                     "git",
                     "clone",
                     "https://github.com/metomi/mule.git",
                     str(mule_dir),
                 ]
             )
-        _subprocess_run_print(
+        _subprocess_runner(
             [
                 str(data_gen_python),
                 "-m",
@@ -107,7 +116,7 @@ def _prep_data_gen_env() -> None:
             ]
         )

-        print("Data generation environment ready.")
+        echo("Data generation environment ready.")


 def _setup_common() -> None:
@@ -116,10 +125,10 @@ def _setup_common() -> None:

     _prep_data_gen_env()

-    print("Setting up ASV ...")
-    _subprocess_run_asv(["machine", "--yes"])
+    echo("Setting up ASV ...")
+    _subprocess_runner(["machine", "--yes"], asv=True)

-    print("Setup complete.")
+    echo("Setup complete.")


 def _asv_compare(*commits: str, overnight_mode: bool = False) -> None:
@@ -132,17 +141,15 @@ def _asv_compare(*commits: str, overnight_mode: bool = False) -> None:
         asv_command = (
             f"compare {before} {after} --factor={COMPARE_FACTOR} --split"
         )
-        _subprocess_run_asv(asv_command.split(" "))
+        _subprocess_runner(asv_command.split(" "), asv=True)

         if overnight_mode:
             # Record performance shifts.
             # Run the command again but limited to only showing performance
             # shifts.
-            shifts = _subprocess_run_asv(
-                [*asv_command.split(" "), "--only-changed"],
-                capture_output=True,
-                text=True,
-            ).stdout
+            shifts = _subprocess_runner_capture(
+                [*asv_command.split(" "), "--only-changed"], asv=True
+            )
             if shifts:
                 # Write the shifts report to a file.
                 # Dir is used by .github/workflows/benchmarks.yml,
@@ -221,13 +228,11 @@ def func(args: argparse.Namespace) -> None:

         commit_range = f"{args.first_commit}^^.."
         asv_command = ASV_HARNESS.format(posargs=commit_range)
-        _subprocess_run_asv([*asv_command.split(" "), *args.asv_args])
+        _subprocess_runner([*asv_command.split(" "), *args.asv_args], asv=True)

         # git rev-list --first-parent is the command ASV uses.
         git_command = f"git rev-list --first-parent {commit_range}"
-        commit_string = _subprocess_run_print(
-            git_command.split(" "), capture_output=True, text=True
-        ).stdout
+        commit_string = _subprocess_runner_capture(git_command.split(" "))
         commit_list = commit_string.rstrip().split("\n")
         _asv_compare(*reversed(commit_list), overnight_mode=True)

@@ -260,16 +265,16 @@ def func(args: argparse.Namespace) -> None:
         _setup_common()

         git_command = f"git merge-base HEAD {args.base_branch}"
-        merge_base = _subprocess_run_print(
-            git_command.split(" "), capture_output=True, text=True
-        ).stdout[:8]
+        merge_base = _subprocess_runner_capture(git_command.split(" "))[:8]

         with NamedTemporaryFile("w") as hashfile:
             hashfile.writelines([merge_base, "\n", "HEAD"])
             hashfile.flush()
             commit_range = f"HASHFILE:{hashfile.name}"
             asv_command = ASV_HARNESS.format(posargs=commit_range)
-            _subprocess_run_asv([*asv_command.split(" "), *args.asv_args])
+            _subprocess_runner(
+                [*asv_command.split(" "), *args.asv_args], asv=True
+            )

         _asv_compare(merge_base, "HEAD")

@@ -326,14 +331,14 @@ def csperf(
         asv_command = asv_command.replace(" --strict", "")
         # Only do a single round.
         asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command)
-        _subprocess_run_asv([*asv_command.split(" "), *args.asv_args])
+        _subprocess_runner([*asv_command.split(" "), *args.asv_args], asv=True)

         asv_command = f"publish {commit_range} --html-dir={publish_subdir}"
-        _subprocess_run_asv(asv_command.split(" "))
+        _subprocess_runner(asv_command.split(" "), asv=True)

         # Print completion message.
         location = BENCHMARKS_DIR / ".asv"
-        print(
+        echo(
             f'New ASV results for "{run_type}".\n'
             f'See "{publish_subdir}",'
             f'\nor JSON files under "{location / "results"}".'
@@ -380,7 +385,7 @@ def add_arguments(self) -> None:
     @staticmethod
     def func(args: argparse.Namespace) -> None:
         _setup_common()
-        _subprocess_run_asv([args.asv_sub_command, *args.asv_args])
+        _subprocess_runner([args.asv_sub_command, *args.asv_args], asv=True)


 def main():
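
For context, a minimal usage sketch of the two consolidated helpers (not part of the diff; the first call appears verbatim in the change above, the git arguments in the second are illustrative only):

    _subprocess_runner(["machine", "--yes"], asv=True)
    # Echoes "BM_RUNNER DEBUG: asv machine --yes", then runs `asv machine --yes`
    # with cwd=BENCHMARKS_DIR.

    commit_string = _subprocess_runner_capture(
        "git rev-list --first-parent HEAD".split(" ")  # illustrative arguments
    )
    # Stdout is captured and decoded to str, so call sites no longer pass
    # capture_output=True / text=True themselves.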