Commit 8fc836b

add Tools/pyco/perf_micro.py
1 parent 89e264d commit 8fc836b

2 files changed: +159 -52 lines changed

Lib/test/test_new_pyc.py

Lines changed: 69 additions & 52 deletions
@@ -91,6 +91,69 @@ def test_consts(self):
             fco = ns[f"f{num}"]
             assert (num, f"hello {num}") in fco.__code__.co_consts
 
+_LOAD_EXEC = 'load+exec'
+_STEADY_STATE = 'steady-state'
+
+def speed_comparison(source, test_name):
+    print()
+    print(f"Starting speed test: {test_name}")
+    def helper(data, label):
+        timings = {}
+        t0 = time.perf_counter()
+        codes = []
+        for _ in range(1000):
+            code = marshal.loads(data)
+            codes.append(code)
+        t1 = time.perf_counter()
+        print(f"{label} load: {t1-t0:.3f}")
+        timings['load'] = t1-t0
+        timings['execs'] = []
+        for i in range(4):
+            t3 = time.perf_counter()
+            for code in codes:
+                exec(code, {})
+            t4 = time.perf_counter()
+            print(f"{label} exec #{i+1}: {t4-t3:.3f}")
+            timings['execs'].append(t4-t3)
+        print(f" {label} total: {t4-t0:.3f}")
+        return timings
+
+    code = compile(source, "<old>", "exec")
+    data = marshal.dumps(code)
+    classic_timings = helper(data, "Classic")
+
+    t0 = time.perf_counter()
+    data = pyco.serialize_source(source, "<new>")
+    t1 = time.perf_counter()
+    print(f"PYCO: {t1-t0:.3f}")
+    assert data.startswith(b"PYC.")
+    new_timings = helper(data, "New PYC")
+
+    if classic_timings and new_timings:
+        def comparison(title, f):
+            tc = f(classic_timings)
+            tn = f(new_timings)
+            print(f">> {title} ratio: {tn/tc:.2f} "
+                  f"(new is {100*(tn/tc-1):.0f}% faster)")
+            return tn/tc
+
+        print("Classic-to-new comparison:")
+        result = {
+            _LOAD_EXEC: comparison(_LOAD_EXEC, lambda t: t['load'] + t['execs'][0]),
+            _STEADY_STATE: comparison(_STEADY_STATE, lambda t: t['execs'][-1])
+        }
+        print()
+        return result
+
+
+class TestSpeedComparisonApi(unittest.TestCase):
+    def test_speed_comparison_api(self):
+        # Note: if this test needs to change then
+        # Tools/pyco/perf_micro.py probably does too.
+        r = speed_comparison("x=1", "test")
+        self.assertIsInstance(r, dict)
+        self.assertCountEqual(r.keys(), [_LOAD_EXEC, _STEADY_STATE])
+
 
 class TestNewPycSpeed(unittest.TestCase):
 
@@ -100,9 +163,9 @@ def setUpClass(cls):
 
     @classmethod
     def tearDownClass(cls):
-        print(f"{' ':25}{'load+exec':>15}{'steady state':>15}")
-        for t, r in sorted(cls.results.items(), key=lambda kv: -kv[1][0]):
-            print(f"{t:25}{r[0]:15.3f}{r[1]:15.3f}")
+        print(f"{' ':25}{_LOAD_EXEC:>15}{'steady state':>15}")
+        for t, r in sorted(cls.results.items(), key=lambda kv: -kv[1][_LOAD_EXEC]):
+            print(f"{t:25}{r[_LOAD_EXEC]:15.3f}{r[_STEADY_STATE]:15.3f}")
         print()
         cls.results = {}
 
@@ -124,55 +187,9 @@ def do_test_speed(self, body, call=False):
         source = "\n\n".join(functions)
         self.do_test_speed_for_source(source)
 
-    def do_test_speed_for_source(self, source):
-        print()
-        print(f"Starting speed test: {self._testMethodName}")
-        def helper(data, label):
-            timings = {}
-            t0 = time.perf_counter()
-            codes = []
-            for _ in range(1000):
-                code = marshal.loads(data)
-                codes.append(code)
-            t1 = time.perf_counter()
-            print(f"{label} load: {t1-t0:.3f}")
-            timings['load'] = t1-t0
-            timings['execs'] = []
-            for i in range(4):
-                t3 = time.perf_counter()
-                for code in codes:
-                    exec(code, {})
-                t4 = time.perf_counter()
-                print(f"{label} exec #{i+1}: {t4-t3:.3f}")
-                timings['execs'].append(t4-t3)
-            print(f" {label} total: {t4-t0:.3f}")
-            return timings
-
-        code = compile(source, "<old>", "exec")
-        data = marshal.dumps(code)
-        classic_timings = helper(data, "Classic")
-
-        t0 = time.perf_counter()
-        data = pyco.serialize_source(source, "<new>")
-        t1 = time.perf_counter()
-        print(f"PYCO: {t1-t0:.3f}")
-        assert data.startswith(b"PYC.")
-        new_timings = helper(data, "New PYC")
-
-        if classic_timings and new_timings:
-            def comparison(title, f):
-                tc = f(classic_timings)
-                tn = f(new_timings)
-                print(f">> {title} ratio: {tn/tc:.2f} "
-                      f"(new is {100*(tc/tn-1):.0f}% faster)")
-                return tn/tc
-
-            print("Classic-to-new comparison:")
-            self.results[self._testMethodName.lstrip('test_speed_')] = [
-                comparison('load+exec', lambda t: t['load'] + t['execs'][0]),
-                comparison('steady state', lambda t: t['execs'][-1])
-            ]
-            print()
+    def do_test_speed_for_source(self, source, test_name=None):
+        test_name = test_name or self._testMethodName.lstrip('test_speed_')
+        self.results[test_name] = speed_comparison(source, test_name)
 
     def test_speed_few_locals(self):
         body = " a, b = b, a\n"*100
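For reference, a minimal usage sketch of the speed_comparison() helper added above (illustrative, not part of the commit). It assumes the patched test.test_new_pyc module and the pyco serializer it relies on are importable; the source string, test name, and printed ratio values are made up.

# Illustrative sketch only, not part of the commit. Assumes the patched
# test.test_new_pyc module (and the pyco serializer it uses) is importable.
from test.test_new_pyc import speed_comparison, _LOAD_EXEC, _STEADY_STATE

# speed_comparison() serializes the source both ways (classic marshal and
# the new PYC format), times 1000 loads and 4 exec passes for each, and
# returns the new/classic timing ratios keyed by the two constants.
ratios = speed_comparison("x = 1\ny = x + 1\n", "inline-example")
print(ratios[_LOAD_EXEC])      # e.g. 0.85  (load plus first exec pass)
print(ratios[_STEADY_STATE])   # e.g. 1.02  (last exec pass only)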

Tools/pyco/perf_micro.py

Lines changed: 90 additions & 0 deletions
@@ -0,0 +1,90 @@
+
+import csv
+import gc
+import itertools
+from collections import namedtuple
+
+from test.test_new_pyc import speed_comparison
+
+SpeedTestParams = namedtuple(
+    'SpeedTestParams',
+    ['num_funcs', 'func_length', 'num_vars',
+     'is_locals', 'is_unique_names', 'is_vary_constants', 'is_call'])
+
+class SpeedTestBuilder:
+    def __init__(self, params: SpeedTestParams):
+        self.params = params
+
+    def get_test_name(self):
+        p = self.params
+        nfuncs = p.num_funcs
+        nvars = p.num_vars
+        scope = "locals" if p.is_locals else "globals"
+        shared = "unique" if p.is_unique_names else "shared"
+        is_call = " is_call" if p.is_call else ""
+        consts = " consts" if p.is_vary_constants else ""
+        return f" {shared}{is_call} {scope}{consts} {nfuncs} funcs, {nvars} vars"
+
+    def function_template(self):
+        p = self.params
+        FUNC_INDEX = "FUNC_INDEX" if p.is_unique_names else ""
+        # variables used in the function:
+        vars = [f"v_{FUNC_INDEX}_{i}" for i in range(p.num_vars)]
+        init_vars = [f"{var} = {i if p.is_vary_constants else 1}"
+                     for (i, var) in enumerate(vars)]
+
+        source = []
+        if not p.is_locals:
+            # define globals in module scope:
+            source.extend(init_vars)
+        # define the function
+        source.append(f"def f_FUNC_INDEX():")
+        if p.is_locals:
+            # define locals in the function:
+            source.extend(f" {l}" for l in init_vars)
+
+        body = []
+        assert p.func_length > 1
+        body.append(f" return 0+\\")
+        while len(body) < p.func_length:
+            body.extend(f" {var}+ \\" for var in vars)
+        body = body[:p.func_length-1]
+        body.append(f" 0")
+
+        source.extend(body)
+        if p.is_call:
+            source.append("f_FUNC_INDEX()")
+        return '\n'.join(source)
+
+    def get_source(self):
+        template = self.function_template()
+        source = [f"# {self.get_test_name()}"]
+        for i in range(self.params.num_funcs):
+            source.append(template.replace("FUNC_INDEX", str(i)))
+        return '\n'.join(source)
+
+if __name__ == '__main__':
+    results = {}
+    for params in itertools.product(
+        [100],          # num_funcs
+        [100],          # func_length
+        [10, 50],       # num_vars
+        [True, False],  # is_locals
+        [True, False],  # is_unique_names
+        [True, False],  # is_vary_constants
+        [False],        # is_call
+    ):
+        p = SpeedTestParams(*params)
+        while gc.collect():
+            pass
+        builder = SpeedTestBuilder(p)
+        results[p] = speed_comparison(builder.get_source(), builder.get_test_name())
+
+    with open('perf_micro.csv', 'w', newline='') as f:
+        writer = None
+        for p, r in results.items():
+            if writer is None:
+                fieldnames = list(p._asdict().keys())+list(r.keys())
+                csv.writer(f).writerow(fieldnames)
+                writer = csv.DictWriter(f, fieldnames=fieldnames)
+            writer.writerow(p._asdict()|r)
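A rough sketch of how the new Tools/pyco/perf_micro.py pieces fit together (illustrative, not part of the commit): SpeedTestBuilder turns a SpeedTestParams tuple into synthetic benchmark source, and the __main__ block feeds each generated module to speed_comparison() and writes one CSV row per parameter combination. The sketch below assumes Tools/pyco is on sys.path so perf_micro is importable; the small parameter values are made up.

# Illustrative only; assumes it is run with Tools/pyco on sys.path.
from perf_micro import SpeedTestParams, SpeedTestBuilder

p = SpeedTestParams(num_funcs=2, func_length=3, num_vars=2,
                    is_locals=True, is_unique_names=True,
                    is_vary_constants=False, is_call=False)
builder = SpeedTestBuilder(p)
print(builder.get_test_name())   # " unique locals 2 funcs, 2 vars"
print(builder.get_source())      # two small generated functions, ready to pass
                                 # to speed_comparison() as in the __main__ loop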
