From 5427601e9ed173ad8e36adf02e5883729753af20 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= <john.kare.alsaker@gmail.com>
Date: Tue, 7 Jan 2020 21:34:08 +0100
Subject: [PATCH 1/2] Change -Z time event naming scheme and make them generic
 activities
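
Rename the events emitted by `-Z time` to identifier-style names (for
example "validate HIR map" becomes "validate_HIR_map") and route them
through the self-profiler's generic activity machinery. The dedicated
SPARSE_PASS and GENERIC_PASS event kinds and filters go away:
`Session::time`/`Session::timer` now forward to the new
`verbose_generic_activity`, and call sites that build their event name
at runtime use `extra_verbose_generic_activity` instead.

A rough sketch of the resulting call-site pattern (illustrative only;
it assumes a rustc `Session` value `sess` and a lint `pass` in scope,
as at the call sites touched below):

    // Statically named step: recorded as a GenericActivity in the
    // self-profile and printed to stdout under -Ztime / -Ztime-passes.
    sess.time("type_collecting", || {
        // ... the timed work goes here ...
    });

    // Dynamically named step (e.g. per lint pass): uses the "extra
    // verbose" variant, which for now only prints to stdout (see the
    // FIXME in librustc_data_structures/profiling.rs).
    sess.prof
        .extra_verbose_generic_activity(&format!("running lint: {}", pass.name()))
        .run(|| {
            // ... run the pass here ...
        });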

---
 src/librustc/hir/map/mod.rs                   |   2 +-
 src/librustc/ty/query/on_disk_cache.rs        |   6 +-
 src/librustc_codegen_llvm/back/lto.rs         |  14 +-
 src/librustc_codegen_llvm/back/write.rs       |  20 ++-
 src/librustc_codegen_llvm/lib.rs              |   4 +-
 src/librustc_codegen_ssa/back/link.rs         |   4 +-
 src/librustc_codegen_ssa/back/write.rs        |   2 +-
 src/librustc_codegen_ssa/base.rs              |  10 +-
 src/librustc_data_structures/profiling.rs     | 139 ++++++------------
 src/librustc_driver/lib.rs                    |   2 +-
 src/librustc_incremental/persist/load.rs      |   2 +-
 src/librustc_incremental/persist/save.rs      |  13 +-
 src/librustc_interface/passes.rs              | 121 +++++++--------
 src/librustc_interface/queries.rs             |   2 +-
 src/librustc_lint/early.rs                    |  23 +--
 src/librustc_lint/late.rs                     |  25 +++-
 src/librustc_mir/monomorphize/collector.rs    |   8 +-
 src/librustc_mir/monomorphize/partitioning.rs |  14 +-
 src/librustc_session/utils.rs                 |   8 +-
 src/librustc_typeck/coherence/mod.rs          |   4 +-
 src/librustc_typeck/lib.rs                    |  16 +-
 21 files changed, 203 insertions(+), 236 deletions(-)

diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs
index e18ab7848feaa..46c5ee272d235 100644
--- a/src/librustc/hir/map/mod.rs
+++ b/src/librustc/hir/map/mod.rs
@@ -1270,7 +1270,7 @@ pub fn map_crate<'hir>(
         definitions,
     };
 
-    sess.time("validate HIR map", || {
+    sess.time("validate_HIR_map", || {
         hir_id_validator::check_crate(&map);
     });
 
diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs
index d01927b91b954..5d968811addd8 100644
--- a/src/librustc/ty/query/on_disk_cache.rs
+++ b/src/librustc/ty/query/on_disk_cache.rs
@@ -198,7 +198,7 @@ impl<'sess> OnDiskCache<'sess> {
             // Encode query results.
             let mut query_result_index = EncodedQueryResultIndex::new();
 
-            tcx.sess.time("encode query results", || {
+            tcx.sess.time("encode_query_results", || {
                 let enc = &mut encoder;
                 let qri = &mut query_result_index;
 
@@ -1053,8 +1053,8 @@ where
     Q: super::config::QueryDescription<'tcx, Value: Encodable>,
     E: 'a + TyEncoder,
 {
-    let desc = &format!("encode_query_results for {}", ::std::any::type_name::<Q>());
-    let _timer = tcx.sess.prof.generic_pass(desc);
+    let desc = &format!("encode_query_results_for_{}", ::std::any::type_name::<Q>());
+    let _timer = tcx.sess.prof.extra_verbose_generic_activity(desc);
 
     let shards = Q::query_cache(tcx).lock_shards();
     assert!(shards.iter().all(|shard| shard.active.is_empty()));
diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs
index 3134ae5922c9a..b5895b53698dc 100644
--- a/src/librustc_codegen_llvm/back/lto.rs
+++ b/src/librustc_codegen_llvm/back/lto.rs
@@ -120,12 +120,13 @@ fn prepare_lto(
                 info!("adding bytecode {}", name);
                 let bc_encoded = data.data();
 
-                let (bc, id) = cgcx.prof.generic_pass(&format!("decode {}", name)).run(|| {
-                    match DecodedBytecode::new(bc_encoded) {
+                let (bc, id) = cgcx
+                    .prof
+                    .extra_verbose_generic_activity(&format!("decode {}", name))
+                    .run(|| match DecodedBytecode::new(bc_encoded) {
                         Ok(b) => Ok((b.bytecode(), b.identifier().to_string())),
                         Err(e) => Err(diag_handler.fatal(&e)),
-                    }
-                })?;
+                    })?;
                 let bc = SerializedModule::FromRlib(bc);
                 upstream_modules.push((bc, CString::new(id).unwrap()));
             }
@@ -280,8 +281,9 @@ fn fat_lto(
         // save and persist everything with the original module.
         let mut linker = Linker::new(llmod);
         for (bc_decoded, name) in serialized_modules {
+            let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_link_module");
             info!("linking {:?}", name);
-            cgcx.prof.generic_pass(&format!("ll link {:?}", name)).run(|| {
+            cgcx.prof.extra_verbose_generic_activity(&format!("ll link {:?}", name)).run(|| {
                 let data = bc_decoded.data();
                 linker.add(&data).map_err(|()| {
                     let msg = format!("failed to load bc of {:?}", name);
@@ -633,7 +635,7 @@ pub(crate) fn run_pass_manager(
         }
 
         cgcx.prof
-            .generic_pass("LTO passes")
+            .extra_verbose_generic_activity("LTO_passes")
             .run(|| llvm::LLVMRunPassManager(pm, module.module_llvm.llmod()));
 
         llvm::LLVMDisposePassManager(pm);
diff --git a/src/librustc_codegen_llvm/back/write.rs b/src/librustc_codegen_llvm/back/write.rs
index afdfb36c2a9f6..4be7b84660d09 100644
--- a/src/librustc_codegen_llvm/back/write.rs
+++ b/src/librustc_codegen_llvm/back/write.rs
@@ -424,13 +424,23 @@ pub(crate) unsafe fn optimize(
 
         // Finally, run the actual optimization passes
         {
+            let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_function_passes");
             let desc = &format!("llvm function passes [{}]", module_name.unwrap());
-            let _timer = if config.time_module { Some(cgcx.prof.generic_pass(desc)) } else { None };
+            let _timer = if config.time_module {
+                Some(cgcx.prof.extra_verbose_generic_activity(desc))
+            } else {
+                None
+            };
             llvm::LLVMRustRunFunctionPassManager(fpm, llmod);
         }
         {
+            let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_module_passes");
             let desc = &format!("llvm module passes [{}]", module_name.unwrap());
-            let _timer = if config.time_module { Some(cgcx.prof.generic_pass(desc)) } else { None };
+            let _timer = if config.time_module {
+                Some(cgcx.prof.extra_verbose_generic_activity(desc))
+            } else {
+                None
+            };
             llvm::LLVMRunPassManager(mpm, llmod);
         }
 
@@ -556,7 +566,11 @@ pub(crate) unsafe fn codegen(
 
         {
             let desc = &format!("codegen passes [{}]", module_name.unwrap());
-            let _timer = if config.time_module { Some(cgcx.prof.generic_pass(desc)) } else { None };
+            let _timer = if config.time_module {
+                Some(cgcx.prof.extra_verbose_generic_activity(desc))
+            } else {
+                None
+            };
 
             if config.emit_ir {
                 let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_ir");
diff --git a/src/librustc_codegen_llvm/lib.rs b/src/librustc_codegen_llvm/lib.rs
index 349cff79c7843..35c71a6675683 100644
--- a/src/librustc_codegen_llvm/lib.rs
+++ b/src/librustc_codegen_llvm/lib.rs
@@ -283,7 +283,7 @@ impl CodegenBackend for LlvmCodegenBackend {
             rustc_codegen_ssa::back::write::dump_incremental_data(&codegen_results);
         }
 
-        sess.time("serialize work products", move || {
+        sess.time("serialize_work_products", move || {
             rustc_incremental::save_work_product_index(sess, &dep_graph, work_products)
         });
 
@@ -300,7 +300,7 @@ impl CodegenBackend for LlvmCodegenBackend {
 
         // Run the linker on any artifacts that resulted from the LLVM run.
         // This should produce either a finished executable or library.
-        sess.time("linking", || {
+        sess.time("link_crate", || {
             use crate::back::archive::LlvmArchiveBuilder;
             use rustc_codegen_ssa::back::link::link_binary;
 
diff --git a/src/librustc_codegen_ssa/back/link.rs b/src/librustc_codegen_ssa/back/link.rs
index a53402ebb5c7d..253225f308e88 100644
--- a/src/librustc_codegen_ssa/back/link.rs
+++ b/src/librustc_codegen_ssa/back/link.rs
@@ -577,7 +577,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
     let mut i = 0;
     loop {
         i += 1;
-        prog = sess.time("running linker", || exec_linker(sess, &mut cmd, out_filename, tmpdir));
+        prog = sess.time("run_linker", || exec_linker(sess, &mut cmd, out_filename, tmpdir));
         let output = match prog {
             Ok(ref output) => output,
             Err(_) => break,
@@ -1562,7 +1562,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
         let name = cratepath.file_name().unwrap().to_str().unwrap();
         let name = &name[3..name.len() - 5]; // chop off lib/.rlib
 
-        sess.prof.generic_pass(&format!("altering {}.rlib", name)).run(|| {
+        sess.prof.extra_verbose_generic_activity(&format!("altering {}.rlib", name)).run(|| {
             let mut archive = <B as ArchiveBuilder>::new(sess, &dst, Some(cratepath));
             archive.update_symbols();
 
diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs
index 32ba2dd65a3fa..1ce0a29d55d9d 100644
--- a/src/librustc_codegen_ssa/back/write.rs
+++ b/src/librustc_codegen_ssa/back/write.rs
@@ -1511,7 +1511,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
         llvm_start_time: &mut Option<VerboseTimingGuard<'a>>,
     ) {
         if config.time_module && llvm_start_time.is_none() {
-            *llvm_start_time = Some(prof.generic_pass("LLVM passes"));
+            *llvm_start_time = Some(prof.extra_verbose_generic_activity("LLVM_passes"));
         }
     }
 }
diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs
index ededb36c7127f..c838109072775 100644
--- a/src/librustc_codegen_ssa/base.rs
+++ b/src/librustc_codegen_ssa/base.rs
@@ -566,7 +566,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
             cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string();
         let mut modules = backend.new_metadata(tcx, &llmod_id);
         tcx.sess
-            .time("write allocator module", || backend.codegen_allocator(tcx, &mut modules, kind));
+            .time("write_allocator_module", || backend.codegen_allocator(tcx, &mut modules, kind));
 
         Some(ModuleCodegen { name: llmod_id, module_llvm: modules, kind: ModuleKind::Allocator })
     } else {
@@ -582,7 +582,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
         let metadata_cgu_name =
             cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("metadata")).to_string();
         let mut metadata_llvm_module = backend.new_metadata(tcx, &metadata_cgu_name);
-        tcx.sess.time("write compressed metadata", || {
+        tcx.sess.time("write_compressed_metadata", || {
             backend.write_compressed_metadata(
                 tcx,
                 &ongoing_codegen.metadata,
@@ -652,7 +652,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 
     // Since the main thread is sometimes blocked during codegen, we keep track
     // -Ztime-passes output manually.
-    print_time_passes_entry(tcx.sess.time_passes(), "codegen to LLVM IR", total_codegen_time);
+    print_time_passes_entry(tcx.sess.time_passes(), "codegen_to_LLVM_IR", total_codegen_time);
 
     ::rustc_incremental::assert_module_sources::assert_module_sources(tcx);
 
@@ -712,9 +712,9 @@ impl<B: ExtraBackendMethods> Drop for AbortCodegenOnDrop<B> {
 }
 
 fn assert_and_save_dep_graph(tcx: TyCtxt<'_>) {
-    tcx.sess.time("assert dep graph", || ::rustc_incremental::assert_dep_graph(tcx));
+    tcx.sess.time("assert_dep_graph", || ::rustc_incremental::assert_dep_graph(tcx));
 
-    tcx.sess.time("serialize dep graph", || ::rustc_incremental::save_dep_graph(tcx));
+    tcx.sess.time("serialize_dep_graph", || ::rustc_incremental::save_dep_graph(tcx));
 }
 
 impl CrateInfo {
diff --git a/src/librustc_data_structures/profiling.rs b/src/librustc_data_structures/profiling.rs
index 840a2603deb79..b779bcbd55d70 100644
--- a/src/librustc_data_structures/profiling.rs
+++ b/src/librustc_data_structures/profiling.rs
@@ -8,8 +8,6 @@ use std::thread::ThreadId;
 use std::time::{Duration, Instant};
 use std::u32;
 
-use crate::cold_path;
-
 use measureme::StringId;
 
 /// MmapSerializationSink is faster on macOS and Linux
@@ -44,15 +42,11 @@ bitflags::bitflags! {
         const QUERY_CACHE_HITS   = 1 << 2;
         const QUERY_BLOCKED      = 1 << 3;
         const INCR_CACHE_LOADS   = 1 << 4;
-        const SPARSE_PASS   = 1 << 5;
-        const GENERIC_PASS   = 1 << 6;
 
         const DEFAULT = Self::GENERIC_ACTIVITIES.bits |
                         Self::QUERY_PROVIDERS.bits |
                         Self::QUERY_BLOCKED.bits |
-                        Self::INCR_CACHE_LOADS.bits |
-                        Self::SPARSE_PASS.bits |
-                        Self::GENERIC_PASS.bits;
+                        Self::INCR_CACHE_LOADS.bits;
 
         // empty() and none() aren't const-fns unfortunately
         const NONE = 0;
@@ -63,8 +57,6 @@ bitflags::bitflags! {
 const EVENT_FILTERS_BY_NAME: &[(&str, EventFilter)] = &[
     ("none", EventFilter::NONE),
     ("all", EventFilter::ALL),
-    ("sparse-pass", EventFilter::SPARSE_PASS),
-    ("generic-pass", EventFilter::GENERIC_PASS),
     ("generic-activity", EventFilter::GENERIC_ACTIVITIES),
     ("query-provider", EventFilter::QUERY_PROVIDERS),
     ("query-cache-hit", EventFilter::QUERY_CACHE_HITS),
@@ -89,101 +81,82 @@ pub struct SelfProfilerRef {
     // actually enabled.
     event_filter_mask: EventFilter,
 
-    // Print sparse passes to stdout
-    verbose_sparse: bool,
+    // Print verbose generic activities to stdout
+    print_verbose_generic_activities: bool,
 
-    // Print generic passes to stdout
-    verbose_generic: bool,
+    // Print extra verbose generic activities to stdout
+    print_extra_verbose_generic_activities: bool,
 }
 
 impl SelfProfilerRef {
     pub fn new(
         profiler: Option<Arc<SelfProfiler>>,
-        verbose_sparse: bool,
-        verbose_generic: bool,
+        print_verbose_generic_activities: bool,
+        print_extra_verbose_generic_activities: bool,
     ) -> SelfProfilerRef {
         // If there is no SelfProfiler then the filter mask is set to NONE,
         // ensuring that nothing ever tries to actually access it.
-        let mut event_filter_mask =
+        let event_filter_mask =
             profiler.as_ref().map(|p| p.event_filter_mask).unwrap_or(EventFilter::NONE);
 
-        if verbose_sparse {
-            event_filter_mask |= EventFilter::SPARSE_PASS;
-        }
-
-        if verbose_generic {
-            event_filter_mask |= EventFilter::GENERIC_PASS;
+        SelfProfilerRef {
+            profiler,
+            event_filter_mask,
+            print_verbose_generic_activities,
+            print_extra_verbose_generic_activities,
         }
-
-        SelfProfilerRef { profiler, event_filter_mask, verbose_sparse, verbose_generic }
     }
 
+    // This shim makes sure that calls only get executed if the filter mask
+    // lets them pass. It also contains some trickery to make sure that
+    // code is optimized for non-profiling compilation sessions, i.e. anything
+    // past the filter check is never inlined so it doesn't clutter the fast
+    // path.
     #[inline(always)]
     fn exec<F>(&self, event_filter: EventFilter, f: F) -> TimingGuard<'_>
     where
         F: for<'a> FnOnce(&'a SelfProfiler) -> TimingGuard<'a>,
     {
-        self.handle_event(
-            event_filter,
-            || f(self.profiler.as_ref().unwrap()),
-            || TimingGuard::none(),
-        )
-    }
+        #[inline(never)]
+        fn cold_call<F>(profiler_ref: &SelfProfilerRef, f: F) -> TimingGuard<'_>
+        where
+            F: for<'a> FnOnce(&'a SelfProfiler) -> TimingGuard<'a>,
+        {
+            let profiler = profiler_ref.profiler.as_ref().unwrap();
+            f(&**profiler)
+        }
 
-    // This shim makes sure that cold calls only get executed if the filter mask
-    // lets them pass. It also contains some trickery to make sure that
-    // code is optimized for non-profiling compilation sessions, i.e. anything
-    // past the filter check is never inlined so it doesn't clutter the fast
-    // path.
-    #[inline(always)]
-    fn handle_event<R>(
-        &self,
-        event_filter: EventFilter,
-        cold: impl FnOnce() -> R,
-        hot: impl FnOnce() -> R,
-    ) -> R {
         if unlikely!(self.event_filter_mask.contains(event_filter)) {
-            cold_path(|| cold())
+            cold_call(self, f)
         } else {
-            hot()
+            TimingGuard::none()
         }
     }
 
-    /// Start profiling a sparse pass. Profiling continues until the
+    /// Start profiling a verbose generic activity. Profiling continues until the
     /// VerboseTimingGuard returned from this call is dropped.
     #[inline(always)]
-    pub fn sparse_pass<'a>(&'a self, event_id: &'a str) -> VerboseTimingGuard<'a> {
-        self.handle_event(
-            EventFilter::SPARSE_PASS,
-            || {
-                VerboseTimingGuard::start(
-                    self.profiler
-                        .as_ref()
-                        .map(|profiler| (&**profiler, profiler.sparse_pass_event_kind)),
-                    event_id,
-                    self.verbose_sparse,
-                )
-            },
-            || VerboseTimingGuard::none(),
+    pub fn verbose_generic_activity<'a>(&'a self, event_id: &'a str) -> VerboseTimingGuard<'a> {
+        VerboseTimingGuard::start(
+            event_id,
+            self.print_verbose_generic_activities,
+            self.generic_activity(event_id),
         )
     }
 
-    /// Start profiling a generic pass. Profiling continues until the
+    /// Start profiling an extra verbose generic activity. Profiling continues until the
     /// VerboseTimingGuard returned from this call is dropped.
     #[inline(always)]
-    pub fn generic_pass<'a>(&'a self, event_id: &'a str) -> VerboseTimingGuard<'a> {
-        self.handle_event(
-            EventFilter::GENERIC_PASS,
-            || {
-                VerboseTimingGuard::start(
-                    self.profiler
-                        .as_ref()
-                        .map(|profiler| (&**profiler, profiler.generic_pass_event_kind)),
-                    event_id,
-                    self.verbose_generic,
-                )
-            },
-            || VerboseTimingGuard::none(),
+    pub fn extra_verbose_generic_activity<'a>(
+        &'a self,
+        event_id: &'a str,
+    ) -> VerboseTimingGuard<'a> {
+        // FIXME: This does not yet emit a measureme event
+        // because callers encode arguments into `event_id`.
+        VerboseTimingGuard::start(
+            event_id,
+            self.print_extra_verbose_generic_activities,
+            TimingGuard::none(),
         )
     }
 
@@ -267,8 +240,6 @@ pub struct SelfProfiler {
     profiler: Profiler,
     event_filter_mask: EventFilter,
     query_event_kind: StringId,
-    sparse_pass_event_kind: StringId,
-    generic_pass_event_kind: StringId,
     generic_activity_event_kind: StringId,
     incremental_load_result_event_kind: StringId,
     query_blocked_event_kind: StringId,
@@ -289,8 +260,6 @@ impl SelfProfiler {
         let profiler = Profiler::new(&path)?;
 
         let query_event_kind = profiler.alloc_string("Query");
-        let sparse_pass_event_kind = profiler.alloc_string("SparsePass");
-        let generic_pass_event_kind = profiler.alloc_string("GenericPass");
         let generic_activity_event_kind = profiler.alloc_string("GenericActivity");
         let incremental_load_result_event_kind = profiler.alloc_string("IncrementalLoadResult");
         let query_blocked_event_kind = profiler.alloc_string("QueryBlocked");
@@ -333,8 +302,6 @@ impl SelfProfiler {
             profiler,
             event_filter_mask,
             query_event_kind,
-            sparse_pass_event_kind,
-            generic_pass_event_kind,
             generic_activity_event_kind,
             incremental_load_result_event_kind,
             query_blocked_event_kind,
@@ -386,19 +353,11 @@ pub struct VerboseTimingGuard<'a> {
 }
 
 impl<'a> VerboseTimingGuard<'a> {
-    pub fn start(
-        profiler: Option<(&'a SelfProfiler, StringId)>,
-        event_id: &'a str,
-        verbose: bool,
-    ) -> Self {
-        let _guard = profiler.map_or(TimingGuard::none(), |(profiler, event_kind)| {
-            let event = profiler.profiler.alloc_string(event_id);
-            TimingGuard::start(profiler, event_kind, event)
-        });
+    pub fn start(event_id: &'a str, verbose: bool, _guard: TimingGuard<'a>) -> Self {
         VerboseTimingGuard {
             event_id,
             _guard,
-            start: if verbose { Some(Instant::now()) } else { None },
+            start: if unlikely!(verbose) { Some(Instant::now()) } else { None },
         }
     }
 
@@ -407,10 +366,6 @@ impl<'a> VerboseTimingGuard<'a> {
         let _timer = self;
         f()
     }
-
-    fn none() -> Self {
-        VerboseTimingGuard { event_id: "", start: None, _guard: TimingGuard::none() }
-    }
 }
 
 impl Drop for VerboseTimingGuard<'_> {
diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs
index 8fbdf44de048d..a5277bcd120ed 100644
--- a/src/librustc_driver/lib.rs
+++ b/src/librustc_driver/lib.rs
@@ -369,7 +369,7 @@ pub fn run_compiler(
                 queries.global_ctxt()?.peek_mut().enter(|tcx| {
                     let result = tcx.analysis(LOCAL_CRATE);
 
-                    sess.time("save analysis", || {
+                    sess.time("save_analysis", || {
                         save::process_crate(
                             tcx,
                             &expanded_crate,
diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs
index cb06dae6ac919..0732ddd3261a4 100644
--- a/src/librustc_incremental/persist/load.rs
+++ b/src/librustc_incremental/persist/load.rs
@@ -159,7 +159,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
     }
 
     MaybeAsync::Async(std::thread::spawn(move || {
-        let _prof_timer = prof.generic_pass("background load prev dep-graph");
+        let _prof_timer = prof.generic_activity("incr_comp_load_dep_graph");
 
         match load_data(report_incremental_info, &path) {
             LoadResult::DataOutOfDate => LoadResult::DataOutOfDate,
diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs
index 9487f02aa8e08..588e639f28946 100644
--- a/src/librustc_incremental/persist/save.rs
+++ b/src/librustc_incremental/persist/save.rs
@@ -32,15 +32,15 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
         join(
             move || {
                 if tcx.sess.opts.debugging_opts.incremental_queries {
-                    sess.time("persist query result cache", || {
+                    sess.time("incr_comp_persist_result_cache", || {
                         save_in(sess, query_cache_path, |e| encode_query_cache(tcx, e));
                     });
                 }
             },
             || {
-                sess.time("persist dep-graph", || {
+                sess.time("incr_comp_persist_dep_graph", || {
                     save_in(sess, dep_graph_path, |e| {
-                        sess.time("encode dep-graph", || encode_dep_graph(tcx, e))
+                        sess.time("incr_comp_encode_dep_graph", || encode_dep_graph(tcx, e))
                     });
                 });
             },
@@ -142,7 +142,8 @@ fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
     tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();
 
     // Encode the graph data.
-    let serialized_graph = tcx.sess.time("getting serialized graph", || tcx.dep_graph.serialize());
+    let serialized_graph =
+        tcx.sess.time("incr_comp_serialize_dep_graph", || tcx.dep_graph.serialize());
 
     if tcx.sess.opts.debugging_opts.incremental_info {
         #[derive(Clone)]
@@ -223,7 +224,7 @@ fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
         println!("[incremental]");
     }
 
-    tcx.sess.time("encoding serialized graph", || {
+    tcx.sess.time("incr_comp_encode_serialized_dep_graph", || {
         serialized_graph.encode(encoder).unwrap();
     });
 }
@@ -244,7 +245,7 @@ fn encode_work_product_index(
 }
 
 fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
-    tcx.sess.time("serialize query result cache", || {
+    tcx.sess.time("incr_comp_serialize_result_cache", || {
         tcx.serialize_query_result_cache(encoder).unwrap();
     })
 }
diff --git a/src/librustc_interface/passes.rs b/src/librustc_interface/passes.rs
index 6e776e7d55424..a7e174f04553c 100644
--- a/src/librustc_interface/passes.rs
+++ b/src/librustc_interface/passes.rs
@@ -54,7 +54,7 @@ use std::rc::Rc;
 use std::{env, fs, iter, mem};
 
 pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
-    let krate = sess.time("parsing", || match input {
+    let krate = sess.time("parse_crate", || match input {
         Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
         Input::Str { input, name } => {
             parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess)
@@ -155,7 +155,7 @@ pub fn register_plugins<'a>(
     mut krate: ast::Crate,
     crate_name: &str,
 ) -> Result<(ast::Crate, Lrc<lint::LintStore>)> {
-    krate = sess.time("attributes injection", || {
+    krate = sess.time("attributes_injection", || {
         rustc_builtin_macros::cmdline_attrs::inject(
             krate,
             &sess.parse_sess,
@@ -180,9 +180,7 @@ pub fn register_plugins<'a>(
     rustc_incremental::prepare_session_directory(sess, &crate_name, disambiguator);
 
     if sess.opts.incremental.is_some() {
-        sess.time("garbage-collect incremental cache directory", || {
-            let _prof_timer =
-                sess.prof.generic_activity("incr_comp_garbage_collect_session_directories");
+        sess.time("incr_comp_garbage_collect_session_directories", || {
             if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
                 warn!(
                     "Error while trying to garbage collect incremental \
@@ -193,7 +191,7 @@ pub fn register_plugins<'a>(
         });
     }
 
-    sess.time("recursion limit", || {
+    sess.time("recursion_limit", || {
         middle::recursion_limit::update_limits(sess, &krate);
     });
 
@@ -204,8 +202,8 @@ pub fn register_plugins<'a>(
     register_lints(&sess, &mut lint_store);
 
     let registrars =
-        sess.time("plugin loading", || plugin::load::load_plugins(sess, metadata_loader, &krate));
-    sess.time("plugin registration", || {
+        sess.time("plugin_loading", || plugin::load::load_plugins(sess, metadata_loader, &krate));
+    sess.time("plugin_registration", || {
         let mut registry = plugin::Registry { lint_store: &mut lint_store };
         for registrar in registrars {
             registrar(&mut registry);
@@ -223,7 +221,7 @@ fn configure_and_expand_inner<'a>(
     resolver_arenas: &'a ResolverArenas<'a>,
     metadata_loader: &'a MetadataLoaderDyn,
 ) -> Result<(ast::Crate, Resolver<'a>)> {
-    sess.time("pre-AST-expansion lint checks", || {
+    sess.time("pre_AST_expansion_lint_checks", || {
         rustc_lint::check_ast_crate(
             sess,
             lint_store,
@@ -237,7 +235,7 @@ fn configure_and_expand_inner<'a>(
     let mut resolver = Resolver::new(sess, &krate, crate_name, metadata_loader, &resolver_arenas);
     rustc_builtin_macros::register_builtin_macros(&mut resolver, sess.edition());
 
-    krate = sess.time("crate injection", || {
+    krate = sess.time("crate_injection", || {
         let alt_std_name = sess.opts.alt_std_name.as_ref().map(|s| Symbol::intern(s));
         let (krate, name) = rustc_builtin_macros::standard_library_imports::inject(
             krate,
@@ -254,7 +252,7 @@ fn configure_and_expand_inner<'a>(
     util::check_attr_crate_type(&krate.attrs, &mut resolver.lint_buffer());
 
     // Expand all macros
-    krate = sess.time("expansion", || {
+    krate = sess.time("macro_expand_crate", || {
         // Windows dlls do not have rpaths, so they don't know how to find their
         // dependencies. It's up to us to tell the system where to find all the
         // dependent dlls. Note that this uses cfg!(windows) as opposed to
@@ -299,11 +297,11 @@ fn configure_and_expand_inner<'a>(
         let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
 
         // Expand macros now!
-        let krate = sess.time("expand crate", || ecx.monotonic_expander().expand_crate(krate));
+        let krate = sess.time("expand_crate", || ecx.monotonic_expander().expand_crate(krate));
 
         // The rest is error reporting
 
-        sess.time("check unused macros", || {
+        sess.time("check_unused_macros", || {
             ecx.check_unused_macros();
         });
 
@@ -322,7 +320,7 @@ fn configure_and_expand_inner<'a>(
         krate
     });
 
-    sess.time("maybe building test harness", || {
+    sess.time("maybe_building_test_harness", || {
         rustc_builtin_macros::test_harness::inject(
             &sess.parse_sess,
             &mut resolver,
@@ -346,7 +344,7 @@ fn configure_and_expand_inner<'a>(
         util::ReplaceBodyWithLoop::new(&mut resolver).visit_crate(&mut krate);
     }
 
-    let has_proc_macro_decls = sess.time("AST validation", || {
+    let has_proc_macro_decls = sess.time("AST_validation", || {
         ast_validation::check_crate(sess, &krate, &mut resolver.lint_buffer())
     });
 
@@ -368,7 +366,7 @@ fn configure_and_expand_inner<'a>(
         msg.warn("The generated documentation may be incorrect");
         msg.emit()
     } else {
-        krate = sess.time("maybe creating a macro crate", || {
+        krate = sess.time("maybe_create_a_macro_crate", || {
             let num_crate_types = crate_types.len();
             let is_test_crate = sess.opts.test;
             rustc_builtin_macros::proc_macro_harness::inject(
@@ -398,12 +396,10 @@ fn configure_and_expand_inner<'a>(
         println!("{}", json::as_json(&krate));
     }
 
-    sess.time("name resolution", || {
-        resolver.resolve_crate(&krate);
-    });
+    resolver.resolve_crate(&krate);
 
     // Needs to go *after* expansion to be able to check the results of macro expansion.
-    sess.time("complete gated feature checking", || {
+    sess.time("complete_gated_feature_checking", || {
         syntax::feature_gate::check_crate(
             &krate,
             &sess.parse_sess,
@@ -432,24 +428,22 @@ pub fn lower_to_hir<'res, 'tcx>(
     arena: &'tcx Arena<'tcx>,
 ) -> Result<map::Forest<'tcx>> {
     // Lower AST to HIR.
-    let hir_forest = sess.time("lowering AST -> HIR", || {
-        let hir_crate = rustc_ast_lowering::lower_crate(
-            sess,
-            &dep_graph,
-            &krate,
-            resolver,
-            rustc_parse::nt_to_tokenstream,
-            arena,
-        );
+    let hir_crate = rustc_ast_lowering::lower_crate(
+        sess,
+        &dep_graph,
+        &krate,
+        resolver,
+        rustc_parse::nt_to_tokenstream,
+        arena,
+    );
 
-        if sess.opts.debugging_opts.hir_stats {
-            hir_stats::print_hir_stats(&hir_crate);
-        }
+    if sess.opts.debugging_opts.hir_stats {
+        hir_stats::print_hir_stats(&hir_crate);
+    }
 
-        map::Forest::new(hir_crate, &dep_graph)
-    });
+    let hir_forest = map::Forest::new(hir_crate, &dep_graph);
 
-    sess.time("early lint checks", || {
+    sess.time("early_lint_checks", || {
         rustc_lint::check_ast_crate(
             sess,
             lint_store,
@@ -723,12 +717,9 @@ pub fn create_global_ctxt<'tcx>(
     let defs = mem::take(&mut resolver_outputs.definitions);
 
     // Construct the HIR map.
-    let hir_map = sess.time("indexing HIR", || {
-        map::map_crate(sess, &*resolver_outputs.cstore, &hir_forest, defs)
-    });
+    let hir_map = map::map_crate(sess, &*resolver_outputs.cstore, &hir_forest, defs);
 
-    let query_result_on_disk_cache =
-        sess.time("load query result cache", || rustc_incremental::load_query_result_cache(sess));
+    let query_result_on_disk_cache = rustc_incremental::load_query_result_cache(sess);
 
     let codegen_backend = compiler.codegen_backend();
     let mut local_providers = ty::query::Providers::default();
@@ -761,7 +752,7 @@ pub fn create_global_ctxt<'tcx>(
 
     // Do some initialization of the DepGraph that can only be done with the tcx available.
     ty::tls::enter_global(&gcx, |tcx| {
-        tcx.sess.time("dep graph tcx init", || rustc_incremental::dep_graph_tcx_init(tcx));
+        tcx.sess.time("dep_graph_tcx_init", || rustc_incremental::dep_graph_tcx_init(tcx));
     });
 
     QueryContext(gcx)
@@ -775,17 +766,17 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
     let sess = tcx.sess;
     let mut entry_point = None;
 
-    sess.time("misc checking 1", || {
+    sess.time("misc_checking_1", || {
         parallel!(
             {
                 entry_point = sess
-                    .time("looking for entry point", || rustc_passes::entry::find_entry_point(tcx));
+                    .time("looking_for_entry_point", || rustc_passes::entry::find_entry_point(tcx));
 
-                sess.time("looking for plugin registrar", || {
+                sess.time("looking_for_plugin_registrar", || {
                     plugin::build::find_plugin_registrar(tcx)
                 });
 
-                sess.time("looking for derive registrar", || proc_macro_decls::find(tcx));
+                sess.time("looking_for_derive_registrar", || proc_macro_decls::find(tcx));
             },
             {
                 par_iter(&tcx.hir().krate().modules).for_each(|(&module, _)| {
@@ -802,17 +793,17 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
     // passes are timed inside typeck
     typeck::check_crate(tcx)?;
 
-    sess.time("misc checking 2", || {
+    sess.time("misc_checking_2", || {
         parallel!(
             {
-                sess.time("match checking", || {
+                sess.time("match_checking", || {
                     tcx.par_body_owners(|def_id| {
                         tcx.ensure().check_match(def_id);
                     });
                 });
             },
             {
-                sess.time("liveness checking + intrinsic checking", || {
+                sess.time("liveness_and_intrinsic_checking", || {
                     par_iter(&tcx.hir().krate().modules).for_each(|(&module, _)| {
                         // this must run before MIR dump, because
                         // "not all control paths return a value" is reported here.
@@ -828,21 +819,21 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
         );
     });
 
-    sess.time("MIR borrow checking", || {
+    sess.time("MIR_borrow_checking", || {
         tcx.par_body_owners(|def_id| tcx.ensure().mir_borrowck(def_id));
     });
 
-    sess.time("dumping Chalk-like clauses", || {
+    sess.time("dumping_chalk_like_clauses", || {
         rustc_traits::lowering::dump_program_clauses(tcx);
     });
 
-    sess.time("MIR effect checking", || {
+    sess.time("MIR_effect_checking", || {
         for def_id in tcx.body_owners() {
             mir::transform::check_unsafety::check_unsafety(tcx, def_id)
         }
     });
 
-    sess.time("layout testing", || layout_test::test_layout(tcx));
+    sess.time("layout_testing", || layout_test::test_layout(tcx));
 
     // Avoid overwhelming user with errors if borrow checking failed.
     // I'm not sure how helpful this is, to be honest, but it avoids a
@@ -853,28 +844,25 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
         return Err(ErrorReported);
     }
 
-    sess.time("misc checking 3", || {
+    sess.time("misc_checking_3", || {
         parallel!(
             {
-                sess.time("privacy access levels", || {
-                    tcx.ensure().privacy_access_levels(LOCAL_CRATE);
-                });
+                tcx.ensure().privacy_access_levels(LOCAL_CRATE);
+
                 parallel!(
                     {
-                        sess.time("private in public", || {
-                            tcx.ensure().check_private_in_public(LOCAL_CRATE);
-                        });
+                        tcx.ensure().check_private_in_public(LOCAL_CRATE);
                     },
                     {
-                        sess.time("death checking", || rustc_passes::dead::check_crate(tcx));
+                        sess.time("death_checking", || rustc_passes::dead::check_crate(tcx));
                     },
                     {
-                        sess.time("unused lib feature checking", || {
+                        sess.time("unused_lib_feature_checking", || {
                             rustc_passes::stability::check_unused_or_stable_features(tcx)
                         });
                     },
                     {
-                        sess.time("lint checking", || {
+                        sess.time("lint_checking", || {
                             rustc_lint::check_crate(tcx, || {
                                 rustc_lint::BuiltinCombinedLateLintPass::new()
                             });
@@ -883,7 +871,7 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
                 );
             },
             {
-                sess.time("privacy checking modules", || {
+                sess.time("privacy_checking_modules", || {
                     par_iter(&tcx.hir().krate().modules).for_each(|(&module, _)| {
                         tcx.ensure().check_mod_privacy(tcx.hir().local_def_id(module));
                     });
@@ -926,6 +914,8 @@ fn encode_and_write_metadata(
         MetadataKind::Uncompressed | MetadataKind::Compressed => tcx.encode_metadata(),
     };
 
+    let _prof_timer = tcx.sess.prof.generic_activity("write_crate_metadata");
+
     let need_metadata_file = tcx.sess.opts.output_types.contains_key(&OutputType::Metadata);
     if need_metadata_file {
         let crate_name = &tcx.crate_name(LOCAL_CRATE).as_str();
@@ -968,10 +958,9 @@ pub fn start_codegen<'tcx>(
         tcx.print_debug_stats();
     }
 
-    let (metadata, need_metadata_module) =
-        tcx.sess.time("metadata encoding and writing", || encode_and_write_metadata(tcx, outputs));
+    let (metadata, need_metadata_module) = encode_and_write_metadata(tcx, outputs);
 
-    let codegen = tcx.sess.time("codegen", move || {
+    let codegen = tcx.sess.time("codegen_crate", move || {
         codegen_backend.codegen_crate(tcx, metadata, need_metadata_module)
     });
 
diff --git a/src/librustc_interface/queries.rs b/src/librustc_interface/queries.rs
index 2de0e1ecccc83..6033569d765b4 100644
--- a/src/librustc_interface/queries.rs
+++ b/src/librustc_interface/queries.rs
@@ -195,7 +195,7 @@ impl<'tcx> Queries<'tcx> {
                 None => DepGraph::new_disabled(),
                 Some(future) => {
                     let (prev_graph, prev_work_products) =
-                        self.session().time("blocked while dep-graph loading finishes", || {
+                        self.session().time("blocked_on_dep_graph_loading", || {
                             future
                                 .open()
                                 .unwrap_or_else(|e| rustc_incremental::LoadResult::Error {
diff --git a/src/librustc_lint/early.rs b/src/librustc_lint/early.rs
index a099568545943..67c0c98b20380 100644
--- a/src/librustc_lint/early.rs
+++ b/src/librustc_lint/early.rs
@@ -350,16 +350,19 @@ pub fn check_ast_crate<T: EarlyLintPass>(
         }
     } else {
         for pass in &mut passes {
-            buffered = sess.time(&format!("running lint: {}", pass.name()), || {
-                early_lint_crate(
-                    sess,
-                    lint_store,
-                    krate,
-                    EarlyLintPassObjects { lints: slice::from_mut(pass) },
-                    buffered,
-                    pre_expansion,
-                )
-            });
+            buffered = sess
+                .prof
+                .extra_verbose_generic_activity(&format!("running lint: {}", pass.name()))
+                .run(|| {
+                    early_lint_crate(
+                        sess,
+                        lint_store,
+                        krate,
+                        EarlyLintPassObjects { lints: slice::from_mut(pass) },
+                        buffered,
+                        pre_expansion,
+                    )
+                });
         }
     }
 
diff --git a/src/librustc_lint/late.rs b/src/librustc_lint/late.rs
index a37e3be9ced92..07c3b95d69972 100644
--- a/src/librustc_lint/late.rs
+++ b/src/librustc_lint/late.rs
@@ -434,18 +434,27 @@ fn late_lint_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tcx>, b
         late_lint_pass_crate(tcx, builtin_lints);
     } else {
         for pass in &mut passes {
-            tcx.sess.time(&format!("running late lint: {}", pass.name()), || {
-                late_lint_pass_crate(tcx, LateLintPassObjects { lints: slice::from_mut(pass) });
-            });
+            tcx.sess
+                .prof
+                .extra_verbose_generic_activity(&format!("running late lint: {}", pass.name()))
+                .run(|| {
+                    late_lint_pass_crate(tcx, LateLintPassObjects { lints: slice::from_mut(pass) });
+                });
         }
 
         let mut passes: Vec<_> =
             tcx.lint_store.late_module_passes.iter().map(|pass| (pass)()).collect();
 
         for pass in &mut passes {
-            tcx.sess.time(&format!("running late module lint: {}", pass.name()), || {
-                late_lint_pass_crate(tcx, LateLintPassObjects { lints: slice::from_mut(pass) });
-            });
+            tcx.sess
+                .prof
+                .extra_verbose_generic_activity(&format!(
+                    "running late module lint: {}",
+                    pass.name()
+                ))
+                .run(|| {
+                    late_lint_pass_crate(tcx, LateLintPassObjects { lints: slice::from_mut(pass) });
+                });
         }
     }
 }
@@ -457,13 +466,13 @@ pub fn check_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(
 ) {
     join(
         || {
-            tcx.sess.time("crate lints", || {
+            tcx.sess.time("crate_lints", || {
                 // Run whole crate non-incremental lints
                 late_lint_crate(tcx, builtin_lints());
             });
         },
         || {
-            tcx.sess.time("module lints", || {
+            tcx.sess.time("module_lints", || {
                 // Run per-module lints
                 par_iter(&tcx.hir().krate().modules).for_each(|(&module, _)| {
                     tcx.ensure().lint_mod(tcx.hir().local_def_id(module));
diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs
index 0943be9d95c38..41fbfd22e50af 100644
--- a/src/librustc_mir/monomorphize/collector.rs
+++ b/src/librustc_mir/monomorphize/collector.rs
@@ -283,10 +283,8 @@ pub fn collect_crate_mono_items(
 ) -> (FxHashSet<MonoItem<'_>>, InliningMap<'_>) {
     let _prof_timer = tcx.prof.generic_activity("monomorphization_collector");
 
-    let roots = tcx.sess.time("collecting roots", || {
-        let _prof_timer = tcx.prof.generic_activity("monomorphization_collector_root_collections");
-        collect_roots(tcx, mode)
-    });
+    let roots =
+        tcx.sess.time("monomorphization_collector_root_collections", || collect_roots(tcx, mode));
 
     debug!("building mono item graph, beginning at roots");
 
@@ -297,7 +295,7 @@ pub fn collect_crate_mono_items(
         let visited: MTRef<'_, _> = &mut visited;
         let inlining_map: MTRef<'_, _> = &mut inlining_map;
 
-        tcx.sess.time("collecting mono items", || {
+        tcx.sess.time("monomorphization_collector_graph_walk", || {
             par_iter(roots).for_each(|root| {
                 let mut recursion_depths = DefIdMap::default();
                 collect_items_rec(tcx, root, visited, &mut recursion_depths, inlining_map);
diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs
index c004417b3373f..8fa41cab19076 100644
--- a/src/librustc_mir/monomorphize/partitioning.rs
+++ b/src/librustc_mir/monomorphize/partitioning.rs
@@ -865,9 +865,7 @@ fn collect_and_partition_mono_items(
         }
     };
 
-    let (items, inlining_map) = tcx.sess.time("monomorphization collection", || {
-        collector::collect_crate_mono_items(tcx, collection_mode)
-    });
+    let (items, inlining_map) = collector::collect_crate_mono_items(tcx, collection_mode);
 
     tcx.sess.abort_if_errors();
 
@@ -879,12 +877,10 @@ fn collect_and_partition_mono_items(
         PartitioningStrategy::FixedUnitCount(tcx.sess.codegen_units())
     };
 
-    let codegen_units = tcx.sess.time("codegen unit partitioning", || {
-        partition(tcx, items.iter().cloned(), strategy, &inlining_map)
-            .into_iter()
-            .map(Arc::new)
-            .collect::<Vec<_>>()
-    });
+    let codegen_units = partition(tcx, items.iter().cloned(), strategy, &inlining_map)
+        .into_iter()
+        .map(Arc::new)
+        .collect::<Vec<_>>();
 
     let mono_items: DefIdSet = items
         .iter()
diff --git a/src/librustc_session/utils.rs b/src/librustc_session/utils.rs
index 7806f5e8753f2..fda11b647490f 100644
--- a/src/librustc_session/utils.rs
+++ b/src/librustc_session/utils.rs
@@ -2,11 +2,11 @@ use crate::session::Session;
 use rustc_data_structures::profiling::VerboseTimingGuard;
 
 impl Session {
-    pub fn timer<'a>(&'a self, what: &'a str) -> VerboseTimingGuard<'a> {
-        self.prof.sparse_pass(what)
+    pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
+        self.prof.verbose_generic_activity(what)
     }
-    pub fn time<R>(&self, what: &str, f: impl FnOnce() -> R) -> R {
-        self.prof.sparse_pass(what).run(f)
+    pub fn time<R>(&self, what: &'static str, f: impl FnOnce() -> R) -> R {
+        self.prof.verbose_generic_activity(what).run(f)
     }
 }
 
diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs
index 1a008ef23fc76..ca10601f4135b 100644
--- a/src/librustc_typeck/coherence/mod.rs
+++ b/src/librustc_typeck/coherence/mod.rs
@@ -146,8 +146,8 @@ pub fn check_coherence(tcx: TyCtxt<'_>) {
         tcx.ensure().coherent_trait(trait_def_id);
     }
 
-    tcx.sess.time("unsafety checking", || unsafety::check(tcx));
-    tcx.sess.time("orphan checking", || orphan::check(tcx));
+    tcx.sess.time("unsafety_checking", || unsafety::check(tcx));
+    tcx.sess.time("orphan_checking", || orphan::check(tcx));
 
     // these queries are executed for side-effects (error reporting):
     tcx.ensure().crate_inherent_impls(LOCAL_CRATE);
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index bc48e046a0cb0..0d9940cbf92c7 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -309,7 +309,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorReported> {
     // have valid types and not error
     // FIXME(matthewjasper) We shouldn't need to do this.
     tcx.sess.track_errors(|| {
-        tcx.sess.time("type collecting", || {
+        tcx.sess.time("type_collecting", || {
             for &module in tcx.hir().krate().modules.keys() {
                 tcx.ensure().collect_mod_item_types(tcx.hir().local_def_id(module));
             }
@@ -318,35 +318,35 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorReported> {
 
     if tcx.features().rustc_attrs {
         tcx.sess.track_errors(|| {
-            tcx.sess.time("outlives testing", || outlives::test::test_inferred_outlives(tcx));
+            tcx.sess.time("outlives_testing", || outlives::test::test_inferred_outlives(tcx));
         })?;
     }
 
     tcx.sess.track_errors(|| {
-        tcx.sess.time("impl wf inference", || impl_wf_check::impl_wf_check(tcx));
+        tcx.sess.time("impl_wf_inference", || impl_wf_check::impl_wf_check(tcx));
     })?;
 
     tcx.sess.track_errors(|| {
-        tcx.sess.time("coherence checking", || coherence::check_coherence(tcx));
+        tcx.sess.time("coherence_checking", || coherence::check_coherence(tcx));
     })?;
 
     if tcx.features().rustc_attrs {
         tcx.sess.track_errors(|| {
-            tcx.sess.time("variance testing", || variance::test::test_variance(tcx));
+            tcx.sess.time("variance_testing", || variance::test::test_variance(tcx));
         })?;
     }
 
     tcx.sess.track_errors(|| {
-        tcx.sess.time("wf checking", || check::check_wf_new(tcx));
+        tcx.sess.time("wf_checking", || check::check_wf_new(tcx));
     })?;
 
-    tcx.sess.time("item-types checking", || {
+    tcx.sess.time("item_types_checking", || {
         for &module in tcx.hir().krate().modules.keys() {
             tcx.ensure().check_mod_item_types(tcx.hir().local_def_id(module));
         }
     });
 
-    tcx.sess.time("item-bodies checking", || tcx.typeck_item_bodies(LOCAL_CRATE));
+    tcx.sess.time("item_bodies_checking", || tcx.typeck_item_bodies(LOCAL_CRATE));
 
     check_unused::check_crate(tcx);
     check_for_entry_fn(tcx);

From 7db4b7efa28ae62cfc95f34b6ffdad81f2e59b09 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= <john.kare.alsaker@gmail.com>
Date: Wed, 8 Jan 2020 21:32:00 +0100
Subject: [PATCH 2/2] More comments
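
Spell out in the doc comments when the verbose timing guards print to
stdout: verbose generic activities print under -Ztime or -Ztime-passes,
while extra verbose ones print only under -Ztime-passes. Roughly (a
sketch; `sess` is a rustc `Session` as in the compiler sources, and the
event ids are placeholders):

    // Printed with -Ztime or -Ztime-passes, and also recorded as a
    // measureme GenericActivity event.
    let _a = sess.prof.verbose_generic_activity("some_named_step");

    // Printed only with -Ztime-passes.
    let _b = sess.prof.extra_verbose_generic_activity("some_other_step");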

---
 src/librustc_data_structures/profiling.rs | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/librustc_data_structures/profiling.rs b/src/librustc_data_structures/profiling.rs
index b779bcbd55d70..a9d3a2668aab1 100644
--- a/src/librustc_data_structures/profiling.rs
+++ b/src/librustc_data_structures/profiling.rs
@@ -134,7 +134,9 @@ impl SelfProfilerRef {
     }
 
     /// Start profiling a verbose generic activity. Profiling continues until the
-    /// VerboseTimingGuard returned from this call is dropped.
+    /// VerboseTimingGuard returned from this call is dropped. In addition to recording
+    /// a measureme event, "verbose" generic activities also print a timing entry to
+    /// stdout if the compiler is invoked with -Ztime or -Ztime-passes.
     #[inline(always)]
     pub fn verbose_generic_activity<'a>(&'a self, event_id: &'a str) -> VerboseTimingGuard<'a> {
         VerboseTimingGuard::start(
@@ -145,7 +147,9 @@ impl SelfProfilerRef {
     }
 
     /// Start profiling an extra verbose generic activity. Profiling continues until the
-    /// VerboseTimingGuard returned from this call is dropped.
+    /// VerboseTimingGuard returned from this call is dropped. In addition to recording
+    /// a measureme event, "extra verbose" generic activities also print a timing entry to
+    /// stdout if the compiler is invoked with -Ztime-passes.
     #[inline(always)]
     pub fn extra_verbose_generic_activity<'a>(
         &'a self,