@@ -256,6 +256,8 @@ def _build(sources: List[BuildSource],
256
256
graph = dispatch (sources , manager , stdout )
257
257
if not options .fine_grained_incremental :
258
258
TypeState .reset_all_subtype_caches ()
259
+ if options .timing_stats is not None :
260
+ dump_timing_stats (options .timing_stats , graph )
259
261
return BuildResult (manager , graph )
260
262
finally :
261
263
t0 = time .time ()
@@ -1808,6 +1810,9 @@ class State:
1808
1810
1809
1811
fine_grained_deps_loaded = False
1810
1812
1813
+ # Cumulative time spent on this file (for profiling stats)
1814
+ time_spent : int = 0
1815
+
1811
1816
def __init__ (self ,
1812
1817
id : Optional [str ],
1813
1818
path : Optional [str ],
@@ -2034,6 +2039,8 @@ def parse_file(self) -> None:
2034
2039
else :
2035
2040
manager .log ("Using cached AST for %s (%s)" % (self .xpath , self .id ))
2036
2041
2042
+ t0 = time .perf_counter_ns ()
2043
+
2037
2044
with self .wrap_context ():
2038
2045
source = self .source
2039
2046
self .source = None # We won't need it again.
@@ -2079,6 +2086,8 @@ def parse_file(self) -> None:
2079
2086
self .tree .ignored_lines ,
2080
2087
self .ignore_all or self .options .ignore_errors )
2081
2088
2089
+ self .time_spent += time .perf_counter_ns () - t0
2090
+
2082
2091
if not cached :
2083
2092
# Make a copy of any errors produced during parse time so that
2084
2093
# fine-grained mode can repeat them when the module is
@@ -2113,6 +2122,9 @@ def semantic_analysis_pass1(self) -> None:
2113
2122
"""
2114
2123
options = self .options
2115
2124
assert self .tree is not None
2125
+
2126
+ t0 = time .perf_counter_ns ()
2127
+
2116
2128
# Do the first pass of semantic analysis: analyze the reachability
2117
2129
# of blocks and import statements. We must do this before
2118
2130
# processing imports, since this may mark some import statements as
@@ -2131,6 +2143,7 @@ def semantic_analysis_pass1(self) -> None:
2131
2143
if options .allow_redefinition :
2132
2144
# Perform more renaming across the AST to allow variable redefinitions
2133
2145
self .tree .accept (VariableRenameVisitor ())
2146
+ self .time_spent += time .perf_counter_ns () - t0
2134
2147
2135
2148
def add_dependency (self , dep : str ) -> None :
2136
2149
if dep not in self .dependencies_set :
@@ -2188,8 +2201,10 @@ def compute_dependencies(self) -> None:
2188
2201
def type_check_first_pass(self) -> None:
    """Run the first pass of type checking over this file.

    Skipped entirely when only semantic analysis was requested.
    The elapsed wall time is accumulated into self.time_spent
    (nanoseconds, for the --timing-stats report).
    """
    if self.options.semantic_analysis_only:
        return
    start = time.perf_counter_ns()
    with self.wrap_context():
        self.type_checker().check_first_pass()
    elapsed = time.perf_counter_ns() - start
    self.time_spent += elapsed
2193
2208
2194
2209
def type_checker (self ) -> TypeChecker :
2195
2210
if not self ._type_checker :
@@ -2207,14 +2222,17 @@ def type_map(self) -> Dict[Expression, Type]:
2207
2222
def type_check_second_pass(self) -> bool:
    """Run the second pass of type checking over this file.

    Returns False when the pass was skipped (semantic-analysis-only
    mode) or produced no further work; otherwise returns the result
    of TypeChecker.check_second_pass().

    BUG FIX: the time-accounting statement previously sat *after* an
    unconditional `return` inside the `with` block, making it dead
    code — the second pass never contributed to self.time_spent.
    Capture the result, record the elapsed time, then return.
    """
    if self.options.semantic_analysis_only:
        return False
    t0 = time.perf_counter_ns()
    # Default in case wrap_context() suppresses an error before the
    # checker runs to completion.
    result = False
    with self.wrap_context():
        result = self.type_checker().check_second_pass()
    self.time_spent += time.perf_counter_ns() - t0
    return result
2212
2229
2213
2230
def finish_passes (self ) -> None :
2214
2231
assert self .tree is not None , "Internal error: method must be called on parsed file only"
2215
2232
manager = self .manager
2216
2233
if self .options .semantic_analysis_only :
2217
2234
return
2235
+ t0 = time .perf_counter_ns ()
2218
2236
with self .wrap_context ():
2219
2237
# Some tests (and tools) want to look at the set of all types.
2220
2238
options = manager .options
@@ -2237,6 +2255,7 @@ def finish_passes(self) -> None:
2237
2255
self .free_state ()
2238
2256
if not manager .options .fine_grained_incremental and not manager .options .preserve_asts :
2239
2257
free_tree (self .tree )
2258
+ self .time_spent += time .perf_counter_ns () - t0
2240
2259
2241
2260
def free_state (self ) -> None :
2242
2261
if self ._type_checker :
@@ -2771,6 +2790,16 @@ def dumps(self) -> str:
2771
2790
json .dumps (self .deps ))
2772
2791
2773
2792
2793
def dump_timing_stats(path: str, graph: "Graph") -> None:
    """Dump cumulative per-module timing stats to the file at `path`.

    One line per module, sorted by module id, in the format
    '<module id> <time spent in ns>'.
    """
    with open(path, 'w') as f:
        # Iterate sorted items directly instead of sorting the keys and
        # re-indexing the dict for each one.
        for _, state in sorted(graph.items()):
            f.write(f'{state.id} {state.time_spent}\n')
2802
+
2774
2803
def dump_graph (graph : Graph , stdout : Optional [TextIO ] = None ) -> None :
2775
2804
"""Dump the graph as a JSON string to stdout.
2776
2805
@@ -3091,6 +3120,8 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
3091
3120
manager .log ("No fresh SCCs left in queue" )
3092
3121
3093
3122
3123
+
3124
+
3094
3125
def order_ascc (graph : Graph , ascc : AbstractSet [str ], pri_max : int = PRI_ALL ) -> List [str ]:
3095
3126
"""Come up with the ideal processing order within an SCC.
3096
3127
0 commit comments