
Commit d696a3b

Move timing tests to their own file (#42)
1 parent 89e264d commit d696a3b

File tree

Lib/test/test_new_pyc.py
Tools/pyco/perf_micro.py

2 files changed: +203 -125 lines changed

Lib/test/test_new_pyc.py

Lines changed: 0 additions & 125 deletions
@@ -1,9 +1,7 @@
 """Test for new PYC format"""
 
 import dis
-import gc
 import marshal
-import time
 import unittest
 
 from test import test_tools
@@ -92,128 +90,5 @@ def test_consts(self):
         assert (num, f"hello {num}") in fco.__code__.co_consts
 
 
-class TestNewPycSpeed(unittest.TestCase):
-
-    @classmethod
-    def setUpClass(cls):
-        cls.results = {}
-
-    @classmethod
-    def tearDownClass(cls):
-        print(f"{' ':25}{'load+exec':>15}{'steady state':>15}")
-        for t, r in sorted(cls.results.items(), key=lambda kv: -kv[1][0]):
-            print(f"{t:25}{r[0]:15.3f}{r[1]:15.3f}")
-        print()
-        cls.results = {}
-
-    def setUp(self):
-        while gc.collect():
-            pass
-
-    def do_test_speed(self, body, call=False):
-        NUM_FUNCS = 100
-        functions = [
-            f"def f{num}(a, b):\n{body}"
-            for num in range(NUM_FUNCS)
-        ]
-        if call:
-            functions.extend([
-                f"\nf{num}(0, 0)\n"
-                for num in range(NUM_FUNCS)]
-            )
-        source = "\n\n".join(functions)
-        self.do_test_speed_for_source(source)
-
-    def do_test_speed_for_source(self, source):
-        print()
-        print(f"Starting speed test: {self._testMethodName}")
-        def helper(data, label):
-            timings = {}
-            t0 = time.perf_counter()
-            codes = []
-            for _ in range(1000):
-                code = marshal.loads(data)
-                codes.append(code)
-            t1 = time.perf_counter()
-            print(f"{label} load: {t1-t0:.3f}")
-            timings['load'] = t1-t0
-            timings['execs'] = []
-            for i in range(4):
-                t3 = time.perf_counter()
-                for code in codes:
-                    exec(code, {})
-                t4 = time.perf_counter()
-                print(f"{label} exec #{i+1}: {t4-t3:.3f}")
-                timings['execs'].append(t4-t3)
-            print(f" {label} total: {t4-t0:.3f}")
-            return timings
-
-        code = compile(source, "<old>", "exec")
-        data = marshal.dumps(code)
-        classic_timings = helper(data, "Classic")
-
-        t0 = time.perf_counter()
-        data = pyco.serialize_source(source, "<new>")
-        t1 = time.perf_counter()
-        print(f"PYCO: {t1-t0:.3f}")
-        assert data.startswith(b"PYC.")
-        new_timings = helper(data, "New PYC")
-
-        if classic_timings and new_timings:
-            def comparison(title, f):
-                tc = f(classic_timings)
-                tn = f(new_timings)
-                print(f">> {title} ratio: {tn/tc:.2f} "
-                      f"(new is {100*(tc/tn-1):.0f}% faster)")
-                return tn/tc
-
-            print("Classic-to-new comparison:")
-            self.results[self._testMethodName.lstrip('test_speed_')] = [
-                comparison('load+exec', lambda t: t['load'] + t['execs'][0]),
-                comparison('steady state', lambda t: t['execs'][-1])
-            ]
-            print()
-
-    def test_speed_few_locals(self):
-        body = " a, b = b, a\n"*100
-        self.do_test_speed(body)
-
-    def test_speed_few_locals_with_call(self):
-        body = " a, b = b, a\n"*100
-        self.do_test_speed(body, call=True)
-
-    def test_speed_many_locals(self):
-        body = [" a0, b0 = 1, 1"]
-        for i in range(300):
-            body.append(f" a{i+1}, b{i+1} = b{i}, a{i}")
-        self.do_test_speed('\n'.join(body))
-
-    def test_speed_many_locals_with_call(self):
-        body = [" a0, b0 = 1, 1"]
-        for i in range(100):
-            body.append(f" a{i+1}, b{i+1} = b{i}, a{i}")
-        self.do_test_speed('\n'.join(body), call=True)
-
-    def test_speed_many_constants(self):
-        body = [" a0, b0 = 1, 1"]
-        for i in range(300):
-            body.append(f" a{i+1}, b{i+1} = b{i}+{i}, a{i}+{float(i)}")
-        self.do_test_speed('\n'.join(body))
-
-    def test_speed_many_globals(self):
-        NUM_FUNCS = 100
-        GLOBALS_PER_FUNC = 100
-        source = []
-        for f_index in range(NUM_FUNCS):
-            for g_index in range(GLOBALS_PER_FUNC):
-                source.append(f"a_{f_index}_{g_index} = 1")
-            source.append(f"def f{f_index}():")
-            source.append(f" return 0+\\")
-            for g_index in range(GLOBALS_PER_FUNC):
-                source.append(f" a_{f_index}_{g_index}+\\")
-            source.append(f" 0")
-        self.do_test_speed_for_source('\n'.join(source))
-
-
 if __name__ == "__main__":
     unittest.main()

Tools/pyco/perf_micro.py

Lines changed: 203 additions & 0 deletions
@@ -0,0 +1,203 @@
+import csv
+import gc
+import itertools
+import marshal
+import time
+
+from argparse import ArgumentParser
+from collections import namedtuple
+from test import test_tools
+
+test_tools.skip_if_missing("pyco")
+with test_tools.imports_under_tool("pyco"):
+    import pyco
+
+
+_LOAD_EXEC = "load+exec"
+_STEADY_STATE = "steady-state"
+
+
+def speed_comparison(source: str, test_name: str):
+    print()
+    print(f"Starting speed test: {test_name}")
+
+    def helper(data, label):
+        timings = {}
+        t0 = time.perf_counter()
+        codes = []
+        for _ in range(1000):
+            code = marshal.loads(data)
+            codes.append(code)
+        t1 = time.perf_counter()
+        print(f"{label} load: {t1-t0:.3f}")
+        timings["load"] = t1 - t0
+        timings["execs"] = []
+        for i in range(4):
+            t3 = time.perf_counter()
+            for code in codes:
+                exec(code, {})
+            t4 = time.perf_counter()
+            print(f"{label} exec #{i+1}: {t4-t3:.3f}")
+            timings["execs"].append(t4 - t3)
+        print(f" {label} total: {t4-t0:.3f}")
+        return timings
+
+    code = compile(source, "<old>", "exec")
+    data = marshal.dumps(code)
+    classic_timings = helper(data, "Classic")
+
+    t0 = time.perf_counter()
+    data = pyco.serialize_source(source, "<new>")
+    t1 = time.perf_counter()
+    print(f"PYCO: {t1-t0:.3f}")
+    assert data.startswith(b"PYC.")
+    new_timings = helper(data, "New PYC")
+
+    if classic_timings and new_timings:
+
+        def comparison(title, f):
+            tc = f(classic_timings)
+            tn = f(new_timings)
+            print(
+                f">> {title} ratio: {tn/tc:.2f} "
+                f"(new is {100*(tc/tn-1):.0f}% faster)"
+            )
+            return tn / tc
+
+        print("Classic-to-new comparison:")
+
+        def load_plus_exec_time(t):
+            return t["load"] + t["execs"][0]
+
+        def last_exec_time(t):
+            return t["execs"][-1]
+
+        result = {
+            _LOAD_EXEC: comparison(_LOAD_EXEC, load_plus_exec_time),
+            _STEADY_STATE: comparison(_STEADY_STATE, last_exec_time),
+        }
+        print()
+        return result
+
+
+SpeedTestParams = namedtuple(
+    "SpeedTestParams",
+    [
+        "num_funcs",
+        "func_length",
+        "num_vars",
+        "is_locals",
+        "is_unique_names",
+        "is_vary_constants",
+        "is_call",
+    ],
+)
+
+
+def test_name(p: SpeedTestParams):
+    nfuncs = p.num_funcs
+    nvars = p.num_vars
+    scope = "locals " if p.is_locals else "globals"
+    shared = "unique" if p.is_unique_names else "shared"
+    is_call = "call" if p.is_call else ""
+    consts = "consts" if p.is_vary_constants else ""
+    return (
+        f" {shared:>6}{is_call:>5}{scope:>7}{consts:>7}"
+        f" {nfuncs:>4} funcs, {nvars:>4} vars"
+    )
+
+
+class SpeedTestBuilder:
+    def __init__(self, params: SpeedTestParams):
+        self.params = params
+
+    def function_template(self):
+        p = self.params
+        FUNC_INDEX = "FUNC_INDEX" if p.is_unique_names else ""
+        # variables used in the function:
+        vars = [f"v_{FUNC_INDEX}_{i}" for i in range(p.num_vars)]
+        if p.is_vary_constants:
+            init_vars = [f"{var} = {i}" for (i, var) in enumerate(vars)]
+        else:
+            init_vars = [f"{var} = 1" for var in vars]
+
+        source = []
+        if not p.is_locals:
+            # define globals in module scope:
+            source.extend(init_vars)
+        # define the function
+        source.append(f"def f_FUNC_INDEX():")
+        if p.is_locals:
+            # define locals in the function:
+            source.extend(f" {l}" for l in init_vars)
+
+        body = []
+        assert p.func_length > 1
+        body.append(f" return 0+\\")
+        while len(body) < p.func_length:
+            body.extend(f" {var}+ \\" for var in vars)
+        body = body[: p.func_length - 1]
+        body.append(f" 0")
+
+        source.extend(body)
+        if p.is_call:
+            source.append("f_FUNC_INDEX()")
+        return "\n".join(source)
+
+    def get_source(self):
+        template = self.function_template()
+        source = [f"# {test_name(self.params)}"]
+        for i in range(self.params.num_funcs):
+            source.append(template.replace("FUNC_INDEX", str(i)))
+        return "\n".join(source)
+
+
+def run_tests():
+    results = {}
+    for params in itertools.product(
+        [100],  # num_funcs
+        [100],  # func_length
+        [10, 100],  # num_vars
+        [True, False],  # is_locals
+        [True, False],  # is_unique_names
+        [True, False],  # is_vary_constants
+        [False],  # is_call (True chokes on a memory leak?)
+    ):
+        p = SpeedTestParams(*params)
+        while gc.collect():
+            pass
+        builder = SpeedTestBuilder(p)
+        results[p] = speed_comparison(builder.get_source(), test_name(p))
+    return results
+
+
+def write_csv(results: dict, filename: str):
+    with open(filename, "w", newline="") as f:
+        writer = None
+        for p, r in results.items():
+            if writer is None:
+                fieldnames = list(p._asdict().keys()) + list(r.keys())
+                csv.writer(f).writerow(fieldnames)
+                writer = csv.DictWriter(f, fieldnames=fieldnames)
+            writer.writerow(p._asdict() | r)
+    print(f"Results were written to {filename}")
+
+
+def print_summary(results: dict):
+    print(f"{' ':50}{_LOAD_EXEC:>15}{'steady state':>15}")
+    for p, r in sorted(results.items(), key=lambda kv: -kv[1][_LOAD_EXEC]):
+        name = test_name(p)
+        print(f"{name:50}{r[_LOAD_EXEC]:15.3f}{r[_STEADY_STATE]:15.3f}")
+    print()
+
+
+if __name__ == "__main__":
+    parser = ArgumentParser(description="Run pyco perf micro-benchmarks.")
+    parser.add_argument('-f', help='file for csv output')
+    args = parser.parse_args()
+    filename = getattr(args, 'f', None)
+
+    results = run_tests()
+    if filename is not None:
+        write_csv(results, filename)
+    print_summary(results)
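
Usage note (not part of this commit): perf_micro.py is a stand-alone script rather than a unittest case, so a typical run from a CPython checkout would be something like "python Tools/pyco/perf_micro.py -f results.csv", where -f is the CSV-output option defined by the ArgumentParser above. The benchmark can also be driven programmatically; the sketch below is illustrative only and assumes Tools/pyco is on sys.path so that the module imports directly.

    # Hypothetical driver, assuming Tools/pyco is importable (not part of the diff).
    from perf_micro import run_tests, write_csv, print_summary

    results = run_tests()              # one ratio dict per SpeedTestParams combination
    write_csv(results, "results.csv")  # optional; equivalent to the -f command-line option
    print_summary(results)             # table sorted by the load+exec ratio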
