diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs
index 07c6c85b89dab..210bdd8d5ddca 100644
--- a/src/librustc/dep_graph/graph.rs
+++ b/src/librustc/dep_graph/graph.rs
@@ -9,8 +9,7 @@
 // except according to those terms.
 
 use errors::DiagnosticBuilder;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
-                                           StableHashingContextProvider};
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use rustc_data_structures::sync::Lrc;
@@ -20,7 +19,7 @@ use std::hash::Hash;
 use ty::TyCtxt;
 use util::common::{ProfileQueriesMsg, profq_msg};
 
-use ich::Fingerprint;
+use ich::{StableHashingContext, StableHashingContextProvider, Fingerprint};
 
 use super::debug::EdgeFilter;
 use super::dep_node::{DepNode, DepKind, WorkProductId};
@@ -189,21 +188,21 @@ impl DepGraph {
     ///   `arg` parameter.
     ///
     /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/incremental-compilation.html
-    pub fn with_task<C, A, R, HCX>(&self,
+    pub fn with_task<'gcx, C, A, R>(&self,
                                    key: DepNode,
                                    cx: C,
                                    arg: A,
                                    task: fn(C, A) -> R)
                                    -> (R, DepNodeIndex)
-        where C: DepGraphSafe + StableHashingContextProvider<ContextType=HCX>,
-              R: HashStable<HCX>,
+        where C: DepGraphSafe + StableHashingContextProvider<'gcx>,
+              R: HashStable<StableHashingContext<'gcx>>,
     {
         self.with_task_impl(key, cx, arg, task,
             |data, key| data.borrow_mut().push_task(key),
             |data, key| data.borrow_mut().pop_task(key))
     }
 
-    fn with_task_impl<C, A, R, HCX>(&self,
+    fn with_task_impl<'gcx, C, A, R>(&self,
                                     key: DepNode,
                                     cx: C,
                                     arg: A,
@@ -211,25 +210,27 @@ impl DepGraph {
                                     push: fn(&RefCell<CurrentDepGraph>, DepNode),
                                     pop: fn(&RefCell<CurrentDepGraph>, DepNode) -> DepNodeIndex)
                                     -> (R, DepNodeIndex)
-        where C: DepGraphSafe + StableHashingContextProvider<ContextType=HCX>,
-              R: HashStable<HCX>,
+        where C: DepGraphSafe + StableHashingContextProvider<'gcx>,
+              R: HashStable<StableHashingContext<'gcx>>,
     {
         if let Some(ref data) = self.data {
             push(&data.current, key);
-            if cfg!(debug_assertions) {
-                profq_msg(ProfileQueriesMsg::TaskBegin(key.clone()))
-            };
 
             // In incremental mode, hash the result of the task. We don't
             // do anything with the hash yet, but we are computing it
             // anyway so that
             //  - we make sure that the infrastructure works and
             //  - we can get an idea of the runtime cost.
-            let mut hcx = cx.create_stable_hashing_context();
+            let mut hcx = cx.get_stable_hashing_context();
+
+            if cfg!(debug_assertions) {
+                profq_msg(hcx.sess(), ProfileQueriesMsg::TaskBegin(key.clone()))
+            };
 
             let result = task(cx, arg);
+
             if cfg!(debug_assertions) {
-                profq_msg(ProfileQueriesMsg::TaskEnd)
+                profq_msg(hcx.sess(), ProfileQueriesMsg::TaskEnd)
             };
 
             let dep_node_index = pop(&data.current, key);
@@ -274,7 +275,7 @@ impl DepGraph {
             (result, dep_node_index)
         } else {
             if key.kind.fingerprint_needed_for_crate_hash() {
-                let mut hcx = cx.create_stable_hashing_context();
+                let mut hcx = cx.get_stable_hashing_context();
                 let result = task(cx, arg);
                 let mut stable_hasher = StableHasher::new();
                 result.hash_stable(&mut hcx, &mut stable_hasher);
@@ -314,14 +315,14 @@ impl DepGraph {
 
     /// Execute something within an "eval-always" task, which is a task
     /// that runs whenever anything changes.
-    pub fn with_eval_always_task<C, A, R, HCX>(&self,
+    pub fn with_eval_always_task<'gcx, C, A, R>(&self,
                                    key: DepNode,
                                    cx: C,
                                    arg: A,
                                    task: fn(C, A) -> R)
                                    -> (R, DepNodeIndex)
-        where C: DepGraphSafe + StableHashingContextProvider<ContextType=HCX>,
-              R: HashStable<HCX>,
+        where C: DepGraphSafe + StableHashingContextProvider<'gcx>,
+              R: HashStable<StableHashingContext<'gcx>>,
     {
         self.with_task_impl(key, cx, arg, task,
             |data, key| data.borrow_mut().push_eval_always_task(key),
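
For orientation, the shape of these task wrappers is unchanged by this patch: run the task, then hash its result to obtain a fingerprint. A standalone sketch of that flow with toy types — `DefaultHasher` stands in for the `StableHasher` that rustc feeds from the context returned by `get_stable_hashing_context`:

```rust
// Toy sketch of the with_task flow: execute the task, then fingerprint the
// result. Not rustc code; DefaultHasher just keeps the example self-contained.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn with_task<C, A, R: Hash>(cx: C, arg: A, task: fn(C, A) -> R) -> (R, u64) {
    let result = task(cx, arg);
    let mut hasher = DefaultHasher::new();
    result.hash(&mut hasher);
    (result, hasher.finish())
}

// A task must be a plain `fn(C, A) -> R`, mirroring the signature above.
fn double(_cx: (), x: u32) -> u32 {
    x * 2
}

fn main() {
    let (result, fingerprint) = with_task((), 21, double);
    println!("result = {}, fingerprint = {:x}", result, fingerprint);
}
```
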
diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs
index 6ae588b2a07b6..33e0d0e694497 100644
--- a/src/librustc/ich/hcx.rs
+++ b/src/librustc/ich/hcx.rs
@@ -30,7 +30,7 @@ use syntax::symbol::Symbol;
 use syntax_pos::{Span, DUMMY_SP};
 use syntax_pos::hygiene;
 
-use rustc_data_structures::stable_hasher::{HashStable, StableHashingContextProvider,
+use rustc_data_structures::stable_hasher::{HashStable,
                                            StableHasher, StableHasherResult,
                                            ToStableHashKey};
 use rustc_data_structures::accumulate_vec::AccumulateVec;
@@ -192,17 +192,33 @@ impl<'a> StableHashingContext<'a> {
     }
 }
 
-impl<'a, 'gcx, 'lcx> StableHashingContextProvider for TyCtxt<'a, 'gcx, 'lcx> {
-    type ContextType = StableHashingContext<'a>;
-    fn create_stable_hashing_context(&self) -> Self::ContextType {
-        (*self).create_stable_hashing_context()
+/// Something that can provide a stable hashing context.
+pub trait StableHashingContextProvider<'a> {
+    fn get_stable_hashing_context(&self) -> StableHashingContext<'a>;
+}
+
+impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a>
+for &'b T {
+    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
+        (**self).get_stable_hashing_context()
     }
 }
 
+impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a>
+for &'b mut T {
+    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
+        (**self).get_stable_hashing_context()
+    }
+}
+
+impl<'a, 'gcx, 'lcx> StableHashingContextProvider<'a> for TyCtxt<'a, 'gcx, 'lcx> {
+    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
+        (*self).create_stable_hashing_context()
+    }
+}
 
-impl<'a> StableHashingContextProvider for StableHashingContext<'a> {
-    type ContextType = StableHashingContext<'a>;
-    fn create_stable_hashing_context(&self) -> Self::ContextType {
+impl<'a> StableHashingContextProvider<'a> for StableHashingContext<'a> {
+    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
         self.clone()
     }
 }
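
The change above replaces the old associated `ContextType` with a lifetime parameter on the trait, so callers such as `DepGraph::with_task` can name the concrete `StableHashingContext<'gcx>` in their bounds; the blanket impls keep references to a provider working as providers. A minimal standalone sketch of the same pattern with toy names (not rustc types):

```rust
// Toy version of the trait refactoring: the context lifetime is a parameter
// of the trait instead of an associated type, and references forward to the
// underlying provider (`&mut T` works the same way in the real code).
struct Context<'a> {
    session: &'a str, // stands in for the data the real context borrows
}

trait ContextProvider<'a> {
    fn get_context(&self) -> Context<'a>;
}

impl<'a, 'b, T: ContextProvider<'a>> ContextProvider<'a> for &'b T {
    fn get_context(&self) -> Context<'a> {
        (**self).get_context()
    }
}

struct Owner<'a> {
    session: &'a str,
}

impl<'a> ContextProvider<'a> for Owner<'a> {
    fn get_context(&self) -> Context<'a> {
        Context { session: self.session }
    }
}

// A generic consumer can now spell the concrete context type in its bound,
// the way `with_task` names `StableHashingContext<'gcx>`.
fn describe<'a, C: ContextProvider<'a>>(cx: C) -> &'a str {
    cx.get_context().session
}

fn main() {
    let data = String::from("session data");
    let owner = Owner { session: &data };
    println!("{}", describe(&owner)); // `&Owner` works via the blanket impl
    println!("{}", describe(owner));
}
```
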
diff --git a/src/librustc/ich/mod.rs b/src/librustc/ich/mod.rs
index ce1bd07b14ce0..1b77a2e7c82b9 100644
--- a/src/librustc/ich/mod.rs
+++ b/src/librustc/ich/mod.rs
@@ -12,7 +12,7 @@
 
 pub use self::fingerprint::Fingerprint;
 pub use self::caching_codemap_view::CachingCodemapView;
-pub use self::hcx::{StableHashingContext, NodeIdHashingMode,
+pub use self::hcx::{StableHashingContextProvider, StableHashingContext, NodeIdHashingMode,
                     hash_stable_trait_impls, compute_ignored_attr_names};
 mod fingerprint;
 mod caching_codemap_view;
diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs
index cdbbcf6a8dd1b..01e1037b6222a 100644
--- a/src/librustc/session/mod.rs
+++ b/src/librustc/session/mod.rs
@@ -24,8 +24,9 @@ use session::config::{DebugInfoLevel, OutputType};
 use ty::tls;
 use util::nodemap::{FxHashMap, FxHashSet};
 use util::common::{duration_to_secs_str, ErrorReported};
+use util::common::ProfileQueriesMsg;
 
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{Lrc, Lock};
 
 use syntax::ast::NodeId;
 use errors::{self, DiagnosticBuilder, DiagnosticId};
@@ -53,6 +54,7 @@ use std::io::Write;
 use std::path::{Path, PathBuf};
 use std::sync::{Once, ONCE_INIT};
 use std::time::Duration;
+use std::sync::mpsc;
 
 mod code_stats;
 pub mod config;
@@ -126,6 +128,9 @@ pub struct Session {
     /// A cache of attributes ignored by StableHashingContext
     pub ignored_attr_names: FxHashSet<Symbol>,
 
+    /// Used by -Z profile-queries in util::common
+    pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
+
     /// Some measurements that are being gathered during compilation.
     pub perf_stats: PerfStats,
 
@@ -1131,6 +1136,7 @@ pub fn build_session_(
         imported_macro_spans: RefCell::new(HashMap::new()),
         incr_comp_session: RefCell::new(IncrCompSession::NotInitialized),
         ignored_attr_names: ich::compute_ignored_attr_names(),
+        profile_channel: Lock::new(None),
         perf_stats: PerfStats {
             svh_time: Cell::new(Duration::from_secs(0)),
             incr_comp_hashes_time: Cell::new(Duration::from_secs(0)),
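
This new field replaces the thread-local sender that `util::common` held until now (removed further down in this patch): the sender lives in a lockable slot on the `Session`, so any code holding `&Session` can emit profile-queries messages. A hedged standalone sketch of the pattern, with `Mutex` standing in for rustc's `Lock` and `String` for `ProfileQueriesMsg`:

```rust
// Toy sketch of the channel slot on the session. Not the rustc definitions.
use std::sync::mpsc::{channel, Sender};
use std::sync::Mutex;

struct Session {
    profile_channel: Mutex<Option<Sender<String>>>,
}

fn profq_set_chan(sess: &Session, s: Sender<String>) -> bool {
    let mut chan = sess.profile_channel.lock().unwrap();
    if chan.is_none() {
        *chan = Some(s);
        true
    } else {
        false
    }
}

fn profq_msg(sess: &Session, msg: String) {
    let chan = sess.profile_channel.lock().unwrap();
    if let Some(s) = chan.as_ref() {
        s.send(msg).unwrap();
    }
}

fn main() {
    let sess = Session { profile_channel: Mutex::new(None) };
    let (tx, rx) = channel();
    assert!(profq_set_chan(&sess, tx));
    profq_msg(&sess, "TaskBegin".to_string());
    println!("received: {}", rx.recv().unwrap());
}
```
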
diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs
index fcc69f3b2c39e..68d1088890205 100644
--- a/src/librustc/ty/maps/plumbing.rs
+++ b/src/librustc/ty/maps/plumbing.rs
@@ -164,8 +164,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 macro_rules! profq_msg {
     ($tcx:expr, $msg:expr) => {
         if cfg!(debug_assertions) {
-            if  $tcx.sess.profile_queries() {
-                profq_msg($msg)
+            if $tcx.sess.profile_queries() {
+                profq_msg($tcx.sess, $msg)
             }
         }
     }
diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs
index bdb120ea59c8a..e1ae41f24721a 100644
--- a/src/librustc/util/common.rs
+++ b/src/librustc/util/common.rs
@@ -26,6 +26,7 @@ use ty::maps::{QueryMsg};
 use dep_graph::{DepNode};
 use proc_macro;
 use lazy_static;
+use session::Session;
 
 // The name of the associated type for `Fn` return types
 pub const FN_OUTPUT_NAME: &'static str = "Output";
@@ -55,9 +56,6 @@ pub fn install_panic_hook() {
     lazy_static::initialize(&DEFAULT_HOOK);
 }
 
-/// Initialized for -Z profile-queries
-thread_local!(static PROFQ_CHAN: RefCell<Option<Sender<ProfileQueriesMsg>>> = RefCell::new(None));
-
 /// Parameters to the `Dump` variant of type `ProfileQueriesMsg`.
 #[derive(Clone,Debug)]
 pub struct ProfQDumpParams {
@@ -97,29 +95,23 @@ pub enum ProfileQueriesMsg {
 }
 
 /// If enabled, send a message to the profile-queries thread
-pub fn profq_msg(msg: ProfileQueriesMsg) {
-    PROFQ_CHAN.with(|sender|{
-        if let Some(s) = sender.borrow().as_ref() {
-            s.send(msg).unwrap()
-        } else {
-            // Do nothing.
-            //
-            // FIXME(matthewhammer): Multi-threaded translation phase triggers the panic below.
-            // From backtrace: rustc_trans::back::write::spawn_work::{{closure}}.
-            //
-            // panic!("no channel on which to send profq_msg: {:?}", msg)
-        }
-    })
+pub fn profq_msg(sess: &Session, msg: ProfileQueriesMsg) {
+    if let Some(s) = sess.profile_channel.borrow().as_ref() {
+        s.send(msg).unwrap()
+    } else {
+        // Do nothing
+    }
 }
 
 /// Set the channel for profile-queries messages
-pub fn profq_set_chan(s: Sender<ProfileQueriesMsg>) -> bool {
-    PROFQ_CHAN.with(|chan|{
-        if chan.borrow().is_none() {
-            *chan.borrow_mut() = Some(s);
-            true
-        } else { false }
-    })
+pub fn profq_set_chan(sess: &Session, s: Sender<ProfileQueriesMsg>) -> bool {
+    let mut channel = sess.profile_channel.borrow_mut();
+    if channel.is_none() {
+        *channel = Some(s);
+        true
+    } else {
+        false
+    }
 }
 
 /// Read the current depth of `time()` calls. This is used to
@@ -135,7 +127,13 @@ pub fn set_time_depth(depth: usize) {
     TIME_DEPTH.with(|slot| slot.set(depth));
 }
 
-pub fn time<T, F>(do_it: bool, what: &str, f: F) -> T where
+pub fn time<T, F>(sess: &Session, what: &str, f: F) -> T where
+    F: FnOnce() -> T,
+{
+    time_ext(sess.time_passes(), Some(sess), what, f)
+}
+
+pub fn time_ext<T, F>(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T where
     F: FnOnce() -> T,
 {
     if !do_it { return f(); }
@@ -146,15 +144,19 @@ pub fn time<T, F>(do_it: bool, what: &str, f: F) -> T where
         r
     });
 
-    if cfg!(debug_assertions) {
-        profq_msg(ProfileQueriesMsg::TimeBegin(what.to_string()))
-    };
+    if let Some(sess) = sess {
+        if cfg!(debug_assertions) {
+            profq_msg(sess, ProfileQueriesMsg::TimeBegin(what.to_string()))
+        }
+    }
     let start = Instant::now();
     let rv = f();
     let dur = start.elapsed();
-    if cfg!(debug_assertions) {
-        profq_msg(ProfileQueriesMsg::TimeEnd)
-    };
+    if let Some(sess) = sess {
+        if cfg!(debug_assertions) {
+            profq_msg(sess, ProfileQueriesMsg::TimeEnd)
+        }
+    }
 
     print_time_passes_entry_internal(what, dur);
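
With this split, `time` becomes the convenient entry point that derives the flag from the session, while `time_ext` keeps a flag-plus-optional-session form for call sites (background loading, codegen workers) that have no `&Session` at hand. A standalone sketch of the split using toy types rather than the real rustc definitions:

```rust
// Toy sketch of the time / time_ext split. Not the rustc implementation.
use std::time::Instant;

struct Session {
    time_passes: bool,
}

fn time<T>(sess: &Session, what: &str, f: impl FnOnce() -> T) -> T {
    // Common case: the flag comes from the session itself.
    time_ext(sess.time_passes, Some(sess), what, f)
}

fn time_ext<T>(do_it: bool, _sess: Option<&Session>, what: &str, f: impl FnOnce() -> T) -> T {
    // Extended form: usable where no session is available (worker threads).
    if !do_it {
        return f();
    }
    let start = Instant::now();
    let rv = f();
    println!("time: {:?}\t{}", start.elapsed(), what);
    rv
}

fn main() {
    let sess = Session { time_passes: true };
    let n = time(&sess, "summing", || (1..=1000u64).sum::<u64>());
    let m = time_ext(true, None, "no session here", || n * 2);
    println!("{} {}", n, m);
}
```
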
 
diff --git a/src/librustc_data_structures/stable_hasher.rs b/src/librustc_data_structures/stable_hasher.rs
index 70733bc6aeda0..a8f689e5c81a3 100644
--- a/src/librustc_data_structures/stable_hasher.rs
+++ b/src/librustc_data_structures/stable_hasher.rs
@@ -165,29 +165,6 @@ impl<W> Hasher for StableHasher<W> {
     }
 }
 
-
-/// Something that can provide a stable hashing context.
-pub trait StableHashingContextProvider {
-    type ContextType;
-    fn create_stable_hashing_context(&self) -> Self::ContextType;
-}
-
-impl<'a, T: StableHashingContextProvider> StableHashingContextProvider for &'a T {
-    type ContextType = T::ContextType;
-
-    fn create_stable_hashing_context(&self) -> Self::ContextType {
-        (**self).create_stable_hashing_context()
-    }
-}
-
-impl<'a, T: StableHashingContextProvider> StableHashingContextProvider for &'a mut T {
-    type ContextType = T::ContextType;
-
-    fn create_stable_hashing_context(&self) -> Self::ContextType {
-        (**self).create_stable_hashing_context()
-    }
-}
-
 /// Something that implements `HashStable<CTX>` can be hashed in a way that is
 /// stable across multiple compilation sessions.
 pub trait HashStable<CTX> {
diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs
index f020f86b6860b..485ee1130d306 100644
--- a/src/librustc_driver/driver.rs
+++ b/src/librustc_driver/driver.rs
@@ -89,7 +89,7 @@ pub fn compile_input(trans: Box<TransCrate>,
     }
 
     if sess.profile_queries() {
-        profile::begin();
+        profile::begin(sess);
     }
 
     // We need nested scopes here, because the intermediate results can keep
@@ -181,7 +181,7 @@ pub fn compile_input(trans: Box<TransCrate>,
         let arenas = AllArenas::new();
 
         // Construct the HIR map
-        let hir_map = time(sess.time_passes(),
+        let hir_map = time(sess,
                            "indexing hir",
                            || hir_map::map_crate(sess, cstore, &mut hir_forest, &defs));
 
@@ -517,10 +517,10 @@ pub fn phase_1_parse_input<'a>(control: &CompileController,
     sess.diagnostic().set_continue_after_error(control.continue_parse_after_error);
 
     if sess.profile_queries() {
-        profile::begin();
+        profile::begin(sess);
     }
 
-    let krate = time(sess.time_passes(), "parsing", || {
+    let krate = time(sess, "parsing", || {
         match *input {
             Input::File(ref file) => {
                 parse::parse_crate_from_file(file, &sess.parse_sess)
@@ -645,8 +645,6 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
                                        -> Result<InnerExpansionResult<'a>, CompileIncomplete>
     where F: FnOnce(&ast::Crate) -> CompileResult,
 {
-    let time_passes = sess.time_passes();
-
     let (mut krate, features) = syntax::config::features(krate, &sess.parse_sess,
                                                          sess.opts.test,
                                                          sess.opts.debugging_opts.epoch);
@@ -664,7 +662,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     );
 
     if sess.opts.incremental.is_some() {
-        time(time_passes, "garbage collect incremental cache directory", || {
+        time(sess, "garbage collect incremental cache directory", || {
             if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
                 warn!("Error while trying to garbage collect incremental \
                        compilation cache directory: {}", e);
@@ -674,22 +672,22 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
 
     // If necessary, compute the dependency graph (in the background).
     let future_dep_graph = if sess.opts.build_dep_graph() {
-        Some(rustc_incremental::load_dep_graph(sess, time_passes))
+        Some(rustc_incremental::load_dep_graph(sess))
     } else {
         None
     };
 
-    time(time_passes, "recursion limit", || {
+    time(sess, "recursion limit", || {
         middle::recursion_limit::update_limits(sess, &krate);
     });
 
-    krate = time(time_passes, "crate injection", || {
+    krate = time(sess, "crate injection", || {
         let alt_std_name = sess.opts.alt_std_name.clone();
         syntax::std_inject::maybe_inject_crates_ref(krate, alt_std_name)
     });
 
     let mut addl_plugins = Some(addl_plugins);
-    let registrars = time(time_passes, "plugin loading", || {
+    let registrars = time(sess, "plugin loading", || {
         plugin::load::load_plugins(sess,
                                    &cstore,
                                    &krate,
@@ -699,7 +697,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
 
     let mut registry = registry.unwrap_or(Registry::new(sess, krate.span));
 
-    time(time_passes, "plugin registration", || {
+    time(sess, "plugin registration", || {
         if sess.features_untracked().rustc_diagnostic_macros {
             registry.register_macro("__diagnostic_used",
                                     diagnostics::plugin::expand_diagnostic_used);
@@ -752,7 +750,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     resolver.whitelisted_legacy_custom_derives = whitelisted_legacy_custom_derives;
     syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features_untracked().quote);
 
-    krate = time(time_passes, "expansion", || {
+    krate = time(sess, "expansion", || {
         // Windows dlls do not have rpaths, so they don't know how to find their
         // dependencies. It's up to us to tell the system where to find all the
         // dependent dlls. Note that this uses cfg!(windows) as opposed to
@@ -814,7 +812,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         krate
     });
 
-    krate = time(time_passes, "maybe building test harness", || {
+    krate = time(sess, "maybe building test harness", || {
         syntax::test::modify_for_testing(&sess.parse_sess,
                                          &mut resolver,
                                          sess.opts.test,
@@ -833,7 +831,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     // bunch of checks in the `modify` function below. For now just skip this
     // step entirely if we're rustdoc as it's not too useful anyway.
     if !sess.opts.actually_rustdoc {
-        krate = time(time_passes, "maybe creating a macro crate", || {
+        krate = time(sess, "maybe creating a macro crate", || {
             let crate_types = sess.crate_types.borrow();
             let num_crate_types = crate_types.len();
             let is_proc_macro_crate = crate_types.contains(&config::CrateTypeProcMacro);
@@ -848,7 +846,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         });
     }
 
-    krate = time(time_passes, "creating allocators", || {
+    krate = time(sess, "creating allocators", || {
         allocator::expand::modify(&sess.parse_sess,
                                   &mut resolver,
                                   krate,
@@ -869,11 +867,11 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         println!("{}", json::as_json(&krate));
     }
 
-    time(time_passes,
+    time(sess,
          "AST validation",
          || ast_validation::check_crate(sess, &krate));
 
-    time(time_passes, "name resolution", || -> CompileResult {
+    time(sess, "name resolution", || -> CompileResult {
         resolver.resolve_crate(&krate);
         Ok(())
     })?;
@@ -883,7 +881,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     }
 
     // Needs to go *after* expansion to be able to check the results of macro expansion.
-    time(time_passes, "complete gated feature checking", || {
+    time(sess, "complete gated feature checking", || {
         sess.track_errors(|| {
             syntax::feature_gate::check_crate(&krate,
                                               &sess.parse_sess,
@@ -898,7 +896,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     let dep_graph = match future_dep_graph {
         None => DepGraph::new_disabled(),
         Some(future) => {
-            let prev_graph = time(time_passes, "blocked while dep-graph loading finishes", || {
+            let prev_graph = time(sess, "blocked while dep-graph loading finishes", || {
                 future.open()
                       .expect("Could not join with background dep_graph thread")
                       .open(sess)
@@ -906,7 +904,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
             DepGraph::new(prev_graph)
         }
     };
-    let hir_forest = time(time_passes, "lowering ast -> hir", || {
+    let hir_forest = time(sess, "lowering ast -> hir", || {
         let hir_crate = lower_crate(sess, cstore, &dep_graph, &krate, &mut resolver);
 
         if sess.opts.debugging_opts.hir_stats {
@@ -916,7 +914,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         hir_map::Forest::new(hir_crate, &dep_graph)
     });
 
-    time(time_passes,
+    time(sess,
          "early lint checks",
          || lint::check_ast_crate(sess, &krate));
 
@@ -973,22 +971,20 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
                             mpsc::Receiver<Box<Any + Send>>,
                             CompileResult) -> R
 {
-    let time_passes = sess.time_passes();
-
-    let query_result_on_disk_cache = time(time_passes,
+    let query_result_on_disk_cache = time(sess,
         "load query result cache",
         || rustc_incremental::load_query_result_cache(sess));
 
-    time(time_passes,
+    time(sess,
          "looking for entry point",
          || middle::entry::find_entry_point(sess, &hir_map));
 
-    sess.plugin_registrar_fn.set(time(time_passes, "looking for plugin registrar", || {
+    sess.plugin_registrar_fn.set(time(sess, "looking for plugin registrar", || {
         plugin::build::find_plugin_registrar(sess.diagnostic(), &hir_map)
     }));
     sess.derive_registrar_fn.set(derive_registrar::find(&hir_map));
 
-    time(time_passes,
+    time(sess,
          "loop checking",
          || loops::check_crate(sess, &hir_map));
 
@@ -1020,11 +1016,11 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
         // tcx available.
         rustc_incremental::dep_graph_tcx_init(tcx);
 
-        time(sess.time_passes(), "attribute checking", || {
+        time(sess, "attribute checking", || {
             hir::check_attr::check_crate(tcx)
         });
 
-        time(time_passes,
+        time(sess,
              "stability checking",
              || stability::check_unstable_api_usage(tcx));
 
@@ -1037,18 +1033,18 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
             }
         }
 
-        time(time_passes,
+        time(sess,
              "rvalue promotion",
              || rvalue_promotion::check_crate(tcx));
 
         analysis.access_levels =
-            time(time_passes, "privacy checking", || rustc_privacy::check_crate(tcx));
+            time(sess, "privacy checking", || rustc_privacy::check_crate(tcx));
 
-        time(time_passes,
+        time(sess,
              "intrinsic checking",
              || middle::intrinsicck::check_crate(tcx));
 
-        time(time_passes,
+        time(sess,
              "match checking",
              || mir::matchck_crate(tcx));
 
@@ -1056,19 +1052,19 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
         // "not all control paths return a value" is reported here.
         //
         // maybe move the check to a MIR pass?
-        time(time_passes,
+        time(sess,
              "liveness checking",
              || middle::liveness::check_crate(tcx));
 
-        time(time_passes,
+        time(sess,
              "borrow checking",
              || borrowck::check_crate(tcx));
 
-        time(time_passes,
+        time(sess,
              "MIR borrow checking",
              || for def_id in tcx.body_owners() { tcx.mir_borrowck(def_id); });
 
-        time(time_passes,
+        time(sess,
              "MIR effect checking",
              || for def_id in tcx.body_owners() {
                  mir::transform::check_unsafety::check_unsafety(tcx, def_id)
@@ -1083,13 +1079,13 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
             return Ok(f(tcx, analysis, rx, sess.compile_status()));
         }
 
-        time(time_passes, "death checking", || middle::dead::check_crate(tcx));
+        time(sess, "death checking", || middle::dead::check_crate(tcx));
 
-        time(time_passes, "unused lib feature checking", || {
+        time(sess, "unused lib feature checking", || {
             stability::check_unused_or_stable_features(tcx)
         });
 
-        time(time_passes, "lint checking", || lint::check_crate(tcx));
+        time(sess, "lint checking", || lint::check_crate(tcx));
 
         return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
     })
@@ -1101,18 +1097,16 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(trans: &TransCrate,
                                            tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            rx: mpsc::Receiver<Box<Any + Send>>)
                                            -> Box<Any> {
-    let time_passes = tcx.sess.time_passes();
-
-    time(time_passes,
+    time(tcx.sess,
          "resolving dependency formats",
          || ::rustc::middle::dependency_format::calculate(tcx));
 
     let translation =
-        time(time_passes, "translation", move || {
+        time(tcx.sess, "translation", move || {
             trans.trans_crate(tcx, rx)
         });
     if tcx.sess.profile_queries() {
-        profile::dump("profile_queries".to_string())
+        profile::dump(&tcx.sess, "profile_queries".to_string())
     }
 
     translation
diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs
index 8c0e89716cffd..f6aa58213fc90 100644
--- a/src/librustc_driver/lib.rs
+++ b/src/librustc_driver/lib.rs
@@ -914,7 +914,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
 pub fn enable_save_analysis(control: &mut CompileController) {
     control.keep_ast = true;
     control.after_analysis.callback = box |state| {
-        time(state.session.time_passes(), "save analysis", || {
+        time(state.session, "save analysis", || {
             save::process_crate(state.tcx.unwrap(),
                                 state.expanded_crate.unwrap(),
                                 state.analysis.unwrap(),
diff --git a/src/librustc_driver/profile/mod.rs b/src/librustc_driver/profile/mod.rs
index 061077d05a438..a362556717bdd 100644
--- a/src/librustc_driver/profile/mod.rs
+++ b/src/librustc_driver/profile/mod.rs
@@ -8,6 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use rustc::session::Session;
 use rustc::util::common::{ProfQDumpParams, ProfileQueriesMsg, profq_msg, profq_set_chan};
 use std::sync::mpsc::{Receiver};
 use std::io::{Write};
@@ -17,11 +18,11 @@ use std::time::{Duration, Instant};
 pub mod trace;
 
 /// begin a profile thread, if not already running
-pub fn begin() {
+pub fn begin(sess: &Session) {
     use std::thread;
     use std::sync::mpsc::{channel};
     let (tx, rx) = channel();
-    if profq_set_chan(tx) {
+    if profq_set_chan(sess, tx) {
         thread::spawn(move||profile_queries_thread(rx));
     }
 }
@@ -30,7 +31,7 @@ pub fn begin() {
 /// wait for this dump to complete.
 ///
 /// wraps the RPC (send/recv channel logic) of requesting a dump.
-pub fn dump(path:String) {
+pub fn dump(sess: &Session, path: String) {
     use std::sync::mpsc::{channel};
     let (tx, rx) = channel();
     let params = ProfQDumpParams{
@@ -39,7 +40,7 @@ pub fn dump(path:String) {
         // is written; false for now
         dump_profq_msg_log:true,
     };
-    profq_msg(ProfileQueriesMsg::Dump(params));
+    profq_msg(sess, ProfileQueriesMsg::Dump(params));
     let _ = rx.recv().unwrap();
 }
 
diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs
index 0e6d328d947b2..38468e29427b0 100644
--- a/src/librustc_incremental/persist/load.rs
+++ b/src/librustc_incremental/persist/load.rs
@@ -14,7 +14,7 @@ use rustc::dep_graph::{PreviousDepGraph, SerializedDepGraph};
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
 use rustc::ty::maps::OnDiskCache;
-use rustc::util::common::time;
+use rustc::util::common::time_ext;
 use rustc_serialize::Decodable as RustcDecodable;
 use rustc_serialize::opaque::Decoder;
 use std::path::Path;
@@ -147,12 +147,14 @@ impl<T> MaybeAsync<T> {
 }
 
 /// Launch a thread and load the dependency graph in the background.
-pub fn load_dep_graph(sess: &Session, time_passes: bool) ->
+pub fn load_dep_graph(sess: &Session) ->
     MaybeAsync<LoadResult<PreviousDepGraph>>
 {
     // Since `sess` isn't `Sync`, we perform all accesses to `sess`
     // before we fire the background thread.
 
+    let time_passes = sess.time_passes();
+
     if sess.opts.incremental.is_none() {
         // No incremental compilation.
         return MaybeAsync::Sync(LoadResult::Ok {
@@ -167,7 +169,7 @@ pub fn load_dep_graph(sess: &Session, time_passes: bool) ->
     let expected_hash = sess.opts.dep_tracking_hash();
 
     MaybeAsync::Async(std::thread::spawn(move || {
-        time(time_passes, "background load prev dep-graph", move || {
+        time_ext(time_passes, None, "background load prev dep-graph", move || {
             match load_data(report_incremental_info, &path) {
                 LoadResult::DataOutOfDate => LoadResult::DataOutOfDate,
                 LoadResult::Error { message } => LoadResult::Error { message },
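
The `let time_passes = sess.time_passes;` line pulled inside `load_dep_graph` preserves the rule stated in the comment above: read everything needed from the session before the background thread starts, because the session cannot be shared across threads. A toy sketch of that pattern (names are illustrative, not the rustc types):

```rust
// Toy sketch: copy plain data out of a non-Sync session before spawning,
// since a `&Session` cannot cross into the background thread.
use std::rc::Rc;
use std::thread;

struct Session {
    time_passes: bool,
    _not_send: Rc<()>, // makes this toy Session non-Send/non-Sync, like rustc's
}

fn load_dep_graph(sess: &Session) -> thread::JoinHandle<&'static str> {
    // Read what we need from `sess` *before* the thread starts.
    let time_passes = sess.time_passes;
    thread::spawn(move || {
        if time_passes {
            println!("time: background load prev dep-graph");
        }
        "loaded"
    })
}

fn main() {
    let sess = Session { time_passes: true, _not_send: Rc::new(()) };
    let handle = load_dep_graph(&sess);
    println!("{}", handle.join().unwrap());
}
```
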
diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs
index d44d1d6f26024..ca1e3563089db 100644
--- a/src/librustc_incremental/persist/save.rs
+++ b/src/librustc_incremental/persist/save.rs
@@ -33,14 +33,14 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
             return;
         }
 
-        time(sess.time_passes(), "persist query result cache", || {
+        time(sess, "persist query result cache", || {
             save_in(sess,
                     query_cache_path(sess),
                     |e| encode_query_cache(tcx, e));
         });
 
         if tcx.sess.opts.debugging_opts.incremental_queries {
-            time(sess.time_passes(), "persist dep-graph", || {
+            time(sess, "persist dep-graph", || {
                 save_in(sess,
                         dep_graph_path(sess),
                         |e| encode_dep_graph(tcx, e));
diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs
index 636b3984117d8..8984a6bfd60ee 100644
--- a/src/librustc_trans/back/link.rs
+++ b/src/librustc_trans/back/link.rs
@@ -690,7 +690,7 @@ fn link_natively(sess: &Session,
     let mut i = 0;
     loop {
         i += 1;
-        prog = time(sess.time_passes(), "running linker", || {
+        prog = time(sess, "running linker", || {
             exec_linker(sess, &mut cmd, tmpdir)
         });
         let output = match prog {
@@ -1317,7 +1317,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker,
         let name = cratepath.file_name().unwrap().to_str().unwrap();
         let name = &name[3..name.len() - 5]; // chop off lib/.rlib
 
-        time(sess.time_passes(), &format!("altering {}.rlib", name), || {
+        time(sess, &format!("altering {}.rlib", name), || {
             let cfg = archive_config(sess, &dst, Some(cratepath));
             let mut archive = ArchiveBuilder::new(cfg);
             archive.update_symbols();
diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs
index f79651cef3ec8..2a473f1ecbcc5 100644
--- a/src/librustc_trans/back/lto.rs
+++ b/src/librustc_trans/back/lto.rs
@@ -19,7 +19,7 @@ use llvm;
 use rustc::hir::def_id::LOCAL_CRATE;
 use rustc::middle::exported_symbols::SymbolExportLevel;
 use rustc::session::config::{self, Lto};
-use rustc::util::common::time;
+use rustc::util::common::time_ext;
 use time_graph::Timeline;
 use {ModuleTranslation, ModuleLlvm, ModuleKind, ModuleSource};
 
@@ -172,7 +172,7 @@ pub(crate) fn run(cgcx: &CodegenContext,
                 info!("adding bytecode {}", name);
                 let bc_encoded = data.data();
 
-                let (bc, id) = time(cgcx.time_passes, &format!("decode {}", name), || {
+                let (bc, id) = time_ext(cgcx.time_passes, None, &format!("decode {}", name), || {
                     match DecodedBytecode::new(bc_encoded) {
                         Ok(b) => Ok((b.bytecode(), b.identifier().to_string())),
                         Err(e) => Err(diag_handler.fatal(&e)),
@@ -253,7 +253,7 @@ fn fat_lto(cgcx: &CodegenContext,
     let mut linker = Linker::new(llmod);
     for (bc_decoded, name) in serialized_modules {
         info!("linking {:?}", name);
-        time(cgcx.time_passes, &format!("ll link {:?}", name), || {
+        time_ext(cgcx.time_passes, None, &format!("ll link {:?}", name), || {
             let data = bc_decoded.data();
             linker.add(&data).map_err(|()| {
                 let msg = format!("failed to load bc of {:?}", name);
@@ -498,7 +498,7 @@ fn run_pass_manager(cgcx: &CodegenContext,
         assert!(!pass.is_null());
         llvm::LLVMRustAddPass(pm, pass);
 
-        time(cgcx.time_passes, "LTO passes", ||
+        time_ext(cgcx.time_passes, None, "LTO passes", ||
              llvm::LLVMRunPassManager(pm, llmod));
 
         llvm::LLVMDisposePassManager(pm);
diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs
index c0561ff0c1731..7651a8e748e3c 100644
--- a/src/librustc_trans/back/write.rs
+++ b/src/librustc_trans/back/write.rs
@@ -31,7 +31,8 @@ use {CrateTranslation, ModuleSource, ModuleTranslation, CompiledModule, ModuleKi
 use CrateInfo;
 use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc::ty::TyCtxt;
-use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
+use rustc::util::common::{time_ext, time_depth, set_time_depth, print_time_passes_entry};
+use rustc::util::common::path2cstr;
 use rustc::util::fs::{link_or_copy};
 use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
 use errors::emitter::{Emitter};
@@ -563,11 +564,19 @@ unsafe fn optimize(cgcx: &CodegenContext,
         diag_handler.abort_if_errors();
 
         // Finally, run the actual optimization passes
-        time(config.time_passes, &format!("llvm function passes [{}]", module_name.unwrap()), ||
-             llvm::LLVMRustRunFunctionPassManager(fpm, llmod));
+        time_ext(config.time_passes,
+                 None,
+                 &format!("llvm function passes [{}]", module_name.unwrap()),
+                 || {
+            llvm::LLVMRustRunFunctionPassManager(fpm, llmod)
+        });
         timeline.record("fpm");
-        time(config.time_passes, &format!("llvm module passes [{}]", module_name.unwrap()), ||
-             llvm::LLVMRunPassManager(mpm, llmod));
+        time_ext(config.time_passes,
+                 None,
+                 &format!("llvm module passes [{}]", module_name.unwrap()),
+                 || {
+            llvm::LLVMRunPassManager(mpm, llmod)
+        });
 
         // Deallocate managers that we're now done with
         llvm::LLVMDisposePassManager(fpm);
@@ -682,7 +691,7 @@ unsafe fn codegen(cgcx: &CodegenContext,
         }
     }
 
-    time(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()),
+    time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
          || -> Result<(), FatalError> {
         if config.emit_ir {
             let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs
index 49a5b7ac8b907..76e05ae7dcb80 100644
--- a/src/librustc_trans/base.rs
+++ b/src/librustc_trans/base.rs
@@ -712,7 +712,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // Translate the metadata.
     let llmod_id = "metadata";
     let (metadata_llcx, metadata_llmod, metadata) =
-        time(tcx.sess.time_passes(), "write metadata", || {
+        time(tcx.sess, "write metadata", || {
             write_metadata(tcx, llmod_id, &link_meta)
         });
 
@@ -790,7 +790,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 llcx,
                 tm: create_target_machine(tcx.sess),
             };
-            time(tcx.sess.time_passes(), "write allocator module", || {
+            time(tcx.sess, "write allocator module", || {
                 allocator::trans(tcx, &modules, kind)
             });
 
@@ -924,11 +924,11 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 }
 
 fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    time(tcx.sess.time_passes(),
+    time(tcx.sess,
          "assert dep graph",
          || rustc_incremental::assert_dep_graph(tcx));
 
-    time(tcx.sess.time_passes(),
+    time(tcx.sess,
          "serialize dep graph",
          || rustc_incremental::save_dep_graph(tcx));
 }
@@ -939,7 +939,6 @@ fn collect_and_partition_translation_items<'a, 'tcx>(
 ) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>)
 {
     assert_eq!(cnum, LOCAL_CRATE);
-    let time_passes = tcx.sess.time_passes();
 
     let collection_mode = match tcx.sess.opts.debugging_opts.print_trans_items {
         Some(ref s) => {
@@ -968,7 +967,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(
     };
 
     let (items, inlining_map) =
-        time(time_passes, "translation item collection", || {
+        time(tcx.sess, "translation item collection", || {
             collector::collect_crate_mono_items(tcx, collection_mode)
     });
 
@@ -982,7 +981,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(
         PartitioningStrategy::FixedUnitCount(tcx.sess.codegen_units())
     };
 
-    let codegen_units = time(time_passes, "codegen unit partitioning", || {
+    let codegen_units = time(tcx.sess, "codegen unit partitioning", || {
         partitioning::partition(tcx,
                                 items.iter().cloned(),
                                 strategy,
diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs
index 74df5127269a7..39eb38658fee9 100644
--- a/src/librustc_trans/lib.rs
+++ b/src/librustc_trans/lib.rs
@@ -238,7 +238,7 @@ impl TransCrate for LlvmTransCrate {
             back::write::dump_incremental_data(&trans);
         }
 
-        time(sess.time_passes(),
+        time(sess,
              "serialize work products",
              move || rustc_incremental::save_work_products(sess, &dep_graph));
 
@@ -251,7 +251,7 @@ impl TransCrate for LlvmTransCrate {
 
         // Run the linker on any artifacts that resulted from the LLVM run.
         // This should produce either a finished executable or library.
-        time(sess.time_passes(), "linking", || {
+        time(sess, "linking", || {
             back::link::link_binary(sess, &trans, outputs, &trans.crate_name.as_str());
         });
 
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index 40385cabf5661..49a23f14338b4 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -315,41 +315,39 @@ pub fn provide(providers: &mut Providers) {
 pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                              -> Result<(), CompileIncomplete>
 {
-    let time_passes = tcx.sess.time_passes();
-
     // this ensures that later parts of type checking can assume that items
     // have valid types and not error
     tcx.sess.track_errors(|| {
-        time(time_passes, "type collecting", ||
+        time(tcx.sess, "type collecting", ||
              collect::collect_item_types(tcx));
 
     })?;
 
     tcx.sess.track_errors(|| {
-        time(time_passes, "outlives testing", ||
+        time(tcx.sess, "outlives testing", ||
             outlives::test::test_inferred_outlives(tcx));
     })?;
 
     tcx.sess.track_errors(|| {
-        time(time_passes, "impl wf inference", ||
+        time(tcx.sess, "impl wf inference", ||
              impl_wf_check::impl_wf_check(tcx));
     })?;
 
     tcx.sess.track_errors(|| {
-      time(time_passes, "coherence checking", ||
+      time(tcx.sess, "coherence checking", ||
           coherence::check_coherence(tcx));
     })?;
 
     tcx.sess.track_errors(|| {
-        time(time_passes, "variance testing", ||
+        time(tcx.sess, "variance testing", ||
              variance::test::test_variance(tcx));
     })?;
 
-    time(time_passes, "wf checking", || check::check_wf_new(tcx))?;
+    time(tcx.sess, "wf checking", || check::check_wf_new(tcx))?;
 
-    time(time_passes, "item-types checking", || check::check_item_types(tcx))?;
+    time(tcx.sess, "item-types checking", || check::check_item_types(tcx))?;
 
-    time(time_passes, "item-bodies checking", || check::check_item_bodies(tcx))?;
+    time(tcx.sess, "item-bodies checking", || check::check_item_bodies(tcx))?;
 
     check_unused::check_crate(tcx);
     check_for_entry_fn(tcx);