Use measureme in self-profiler
author Wesley Wiser <wwiser@gmail.com>
Thu, 4 Apr 2019 23:41:49 +0000 (19:41 -0400)
committer Wesley Wiser <wwiser@gmail.com>
Sat, 13 Apr 2019 00:27:29 +0000 (20:27 -0400)
Related to #58372
Related to #58967
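
This commit replaces the hand-rolled JSON event recording in
src/librustc/util/profiling.rs with the measureme crate: query names become a
generated QueryName enum whose variants are registered up front as reserved
string ids, and each event is written through measureme's Profiler instead of
being buffered per thread and dumped at exit. A minimal sketch of the recording
pattern adopted here, using only the measureme 0.2 calls that appear in the
diff below (the "type_of" query name and the fixed thread id are illustrative):

    use measureme::{MmapSerializationSink, Profiler, StringId, TimestampKind};
    use std::error::Error;
    use std::path::Path;

    fn sketch() -> Result<(), Box<dyn Error>> {
        // One profiler per process, writing to a pid-named file (as in SelfProfiler::new).
        let filename = format!("pid-{}.rustc_profile", std::process::id());
        let profiler: Profiler<MmapSerializationSink> = Profiler::new(Path::new(&filename))?;

        // Event kinds are interned once up front...
        let query_event_kind: StringId = profiler.alloc_string("Query");
        // ...and each query name gets a reserved id so events can refer to it
        // without re-interning (rustc derives the id from the QueryName discriminant).
        let query_name_id = StringId::reserved(0);
        profiler.alloc_string_with_reserved_id(query_name_id, "type_of");

        // Recording an event is then a single call tagged Start, End, or Instant.
        let thread_id = 0_u64; // rustc transmutes std::thread::ThreadId to u64 here
        profiler.record_event(query_event_kind, query_name_id, thread_id, TimestampKind::Start);
        profiler.record_event(query_event_kind, query_name_id, thread_id, TimestampKind::End);
        Ok(())
    }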

16 files changed:
Cargo.lock
src/librustc/Cargo.toml
src/librustc/session/mod.rs
src/librustc/ty/query/config.rs
src/librustc/ty/query/mod.rs
src/librustc/ty/query/plumbing.rs
src/librustc/util/profiling.rs
src/librustc_codegen_llvm/back/lto.rs
src/librustc_codegen_llvm/back/write.rs
src/librustc_codegen_llvm/lib.rs
src/librustc_codegen_ssa/back/write.rs
src/librustc_codegen_ssa/base.rs
src/librustc_interface/interface.rs
src/librustc_interface/passes.rs
src/librustc_typeck/lib.rs
src/tools/tidy/src/deps.rs

index 8fd365b8462b7f1eea9e17086902aa9513fd0124..9841f9d502cf8d8f9c15664a0c1821d467a651ad 100644 (file)
@@ -1474,6 +1474,16 @@ dependencies = [
  "toml-query 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "measureme"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "memchr"
 version = "2.1.1"
@@ -2326,6 +2336,7 @@ dependencies = [
  "jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "polonius-engine 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -4106,6 +4117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
 "checksum mdbook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "90b5a8d7e341ceee5db3882a06078d42661ddcfa2b3687319cc5da76ec4e782f"
 "checksum mdbook 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba0d44cb4089c741b9a91f3e5218298a40699c2f3a070a85014eed290c60819"
+"checksum measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "36bb2b263a6795d352035024d6b30ce465bb79a5e5280d74c3b5f8464c657bcc"
 "checksum memchr 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0a3eb002f0535929f1199681417029ebea04aadc0c7a4224b46be99c7f5d6a16"
 "checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff"
 "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
index 31e10c19c7a60599351cd664673d9cb8f0fa60dd..a668ebacabfd5a9768f06eb08cd4f3273a08ae27 100644 (file)
@@ -36,6 +36,7 @@ byteorder = { version = "1.1", features = ["i128"]}
 chalk-engine = { version = "0.9.0", default-features=false }
 rustc_fs_util = { path = "../librustc_fs_util" }
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
+measureme = "0.2.1"
 
 # Note that these dependencies are a lie, they're just here to get linkage to
 # work.
index 9cd6c30e272f50970a9de3370c8af0055e526e79..a1966c02683e982c296df9c3ec01db40f4eb9f3a 100644 (file)
@@ -46,8 +46,6 @@
 use std::time::Duration;
 use std::sync::{Arc, mpsc};
 
-use parking_lot::Mutex as PlMutex;
-
 mod code_stats;
 pub mod config;
 pub mod filesearch;
@@ -130,7 +128,7 @@ pub struct Session {
     pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
 
     /// Used by -Z self-profile
-    pub self_profiling: Option<Arc<PlMutex<SelfProfiler>>>,
+    pub self_profiling: Option<Arc<SelfProfiler>>,
 
     /// Some measurements that are being gathered during compilation.
     pub perf_stats: PerfStats,
@@ -838,19 +836,17 @@ pub fn incr_comp_session_dir_opt(&self) -> Option<cell::Ref<'_, PathBuf>> {
 
     #[inline(never)]
     #[cold]
-    fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         match &self.self_profiling {
             None => bug!("profiler_active() called but there was no profiler active"),
             Some(profiler) => {
-                let mut p = profiler.lock();
-
-                f(&mut p);
+                f(&profiler);
             }
         }
     }
 
     #[inline(always)]
-    pub fn profiler<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    pub fn profiler<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         if unlikely!(self.self_profiling.is_some()) {
             self.profiler_active(f)
         }
@@ -1138,7 +1134,19 @@ fn build_session_(
     driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
 ) -> Session {
     let self_profiler =
-        if sopts.debugging_opts.self_profile { Some(Arc::new(PlMutex::new(SelfProfiler::new()))) }
+        if sopts.debugging_opts.self_profile {
+            let profiler = SelfProfiler::new();
+            match profiler {
+                Ok(profiler) => {
+                    crate::ty::query::QueryName::register_with_profiler(&profiler);
+                    Some(Arc::new(profiler))
+                },
+                Err(e) => {
+                    early_warn(sopts.error_format, &format!("failed to create profiler: {}", e));
+                    None
+                }
+            }
+        }
         else { None };
 
     let host_triple = TargetTriple::from_triple(config::host_triple());
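
The PlMutex around SelfProfiler goes away because measureme's Profiler records
events through &self, so Session only needs an Arc<SelfProfiler> and can keep
the same cold/hot split in its profiler() helper. A compilable stand-in for
that dispatch pattern (simplified: bug!/unlikely! replaced with plain panics
and checks, and the recording method stubbed out):

    use std::sync::Arc;

    struct SelfProfiler;

    impl SelfProfiler {
        // Stand-in for the real recording methods, which now take &self
        // because measureme handles synchronization internally.
        fn start_activity(&self, _label: &str) {}
    }

    struct Session {
        self_profiling: Option<Arc<SelfProfiler>>,
    }

    impl Session {
        #[inline(never)]
        #[cold]
        fn profiler_active<F: FnOnce(&SelfProfiler)>(&self, f: F) {
            match &self.self_profiling {
                None => panic!("profiler_active() called but there was no profiler active"),
                Some(profiler) => f(&profiler),
            }
        }

        #[inline(always)]
        fn profiler<F: FnOnce(&SelfProfiler)>(&self, f: F) {
            // Cheap Option check in the common (no -Z self-profile) case; the
            // closure receives a shared reference, so no lock is required.
            if self.self_profiling.is_some() {
                self.profiler_active(f)
            }
        }
    }

    fn demo(sess: &Session) {
        sess.profiler(|p| p.start_activity("codegen crate"));
    }
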
index 73b79027972427a4ad8dcd47f7b2a2b038807b90..0eda92ea8b822b6816d6fdce975e0d42e6809830 100644 (file)
@@ -3,7 +3,7 @@
 use crate::hir::def_id::{CrateNum, DefId};
 use crate::ty::TyCtxt;
 use crate::ty::query::queries;
-use crate::ty::query::Query;
+use crate::ty::query::{Query, QueryName};
 use crate::ty::query::QueryCache;
 use crate::ty::query::plumbing::CycleError;
 use crate::util::profiling::ProfileCategory;
@@ -18,7 +18,7 @@
 // Query configuration and description traits.
 
 pub trait QueryConfig<'tcx> {
-    const NAME: &'static str;
+    const NAME: QueryName;
     const CATEGORY: ProfileCategory;
 
     type Key: Eq + Hash + Clone + Debug;
index c4bc35fff66b8ec11aac57b16440d8c1ff907b62..cd29ca818908ca988882bf9ee257ef2ae3e3f0a3 100644 (file)
@@ -41,7 +41,6 @@
 use crate::util::nodemap::{DefIdSet, DefIdMap, ItemLocalSet};
 use crate::util::common::{ErrorReported};
 use crate::util::profiling::ProfileCategory::*;
-use crate::session::Session;
 
 use rustc_data_structures::svh::Svh;
 use rustc_data_structures::bit_set::BitSet;
index a03cfd19b9165c6f17d7fb8b54594290a4847acb..d671b58470c6a5736cd322ab16923fa687145329 100644 (file)
@@ -114,7 +114,7 @@ pub(super) fn try_get(
             let mut lock = cache.borrow_mut();
             if let Some(value) = lock.results.get(key) {
                 profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
-                tcx.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
+                tcx.sess.profiler(|p| p.record_query_hit(Q::NAME));
                 let result = (value.value.clone(), value.index);
                 #[cfg(debug_assertions)]
                 {
@@ -130,7 +130,7 @@ pub(super) fn try_get(
                             //in another thread has completed. Record how long we wait in the
                             //self-profiler
                             #[cfg(parallel_compiler)]
-                            tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME, Q::CATEGORY));
+                            tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME));
 
                             job.clone()
                         },
@@ -172,7 +172,7 @@ pub(super) fn try_get(
             #[cfg(parallel_compiler)]
             {
                 let result = job.r#await(tcx, span);
-                tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME, Q::CATEGORY));
+                tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME));
 
                 if let Err(cycle) = result {
                     return TryGetJob::Cycle(Q::handle_cycle_error(tcx, cycle));
@@ -358,14 +358,14 @@ pub(super) fn get_query<Q: QueryDescription<'gcx>>(
         key: Q::Key)
     -> Q::Value {
         debug!("ty::query::get_query<{}>(key={:?}, span={:?})",
-               Q::NAME,
+               Q::NAME.as_str(),
                key,
                span);
 
         profq_msg!(self,
             ProfileQueriesMsg::QueryBegin(
                 span.data(),
-                profq_query_msg!(Q::NAME, self, key),
+                profq_query_msg!(Q::NAME.as_str(), self, key),
             )
         );
 
@@ -389,7 +389,7 @@ pub(super) fn get_query<Q: QueryDescription<'gcx>>(
 
         if dep_node.kind.is_anon() {
             profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
-            self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.start_query(Q::NAME));
 
             let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
                 self.start_query(job.job.clone(), diagnostics, |tcx| {
@@ -399,7 +399,7 @@ pub(super) fn get_query<Q: QueryDescription<'gcx>>(
                 })
             });
 
-            self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.end_query(Q::NAME));
             profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
             self.dep_graph.read_index(dep_node_index);
@@ -474,14 +474,14 @@ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'gcx>>(
 
         let result = if let Some(result) = result {
             profq_msg!(self, ProfileQueriesMsg::CacheHit);
-            self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.record_query_hit(Q::NAME));
 
             result
         } else {
             // We could not load a result from the on-disk cache, so
             // recompute.
 
-            self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.start_query(Q::NAME));
 
             // The dep-graph for this computation is already in
             // place
@@ -489,7 +489,7 @@ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'gcx>>(
                 Q::compute(self, key)
             });
 
-            self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.end_query(Q::NAME));
             result
         };
 
@@ -552,7 +552,7 @@ fn force_query_with_job<Q: QueryDescription<'gcx>>(
                 key, dep_node);
 
         profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
-        self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
+        self.sess.profiler(|p| p.start_query(Q::NAME));
 
         let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
             self.start_query(job.job.clone(), diagnostics, |tcx| {
@@ -572,7 +572,7 @@ fn force_query_with_job<Q: QueryDescription<'gcx>>(
             })
         });
 
-        self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
+        self.sess.profiler(|p| p.end_query(Q::NAME));
         profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
         if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {
@@ -619,7 +619,7 @@ pub(super) fn ensure_query<Q: QueryDescription<'gcx>>(self, key: Q::Key) -> () {
             let _ = self.get_query::<Q>(DUMMY_SP, key);
         } else {
             profq_msg!(self, ProfileQueriesMsg::CacheHit);
-            self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
+            self.sess.profiler(|p| p.record_query_hit(Q::NAME));
         }
     }
 
@@ -632,7 +632,8 @@ fn force_query<Q: QueryDescription<'gcx>>(
     ) {
         profq_msg!(
             self,
-            ProfileQueriesMsg::QueryBegin(span.data(), profq_query_msg!(Q::NAME, self, key))
+            ProfileQueriesMsg::QueryBegin(span.data(),
+                                          profq_query_msg!(Q::NAME.as_str(), self, key))
         );
 
         // We may be concurrently trying both execute and force a query
@@ -725,18 +726,6 @@ pub fn new(
                 }
             }
 
-            pub fn record_computed_queries(&self, sess: &Session) {
-                sess.profiler(|p| {
-                    $(
-                        p.record_computed_queries(
-                            <queries::$name<'_> as QueryConfig<'_>>::NAME,
-                            <queries::$name<'_> as QueryConfig<'_>>::CATEGORY,
-                            self.$name.lock().results.len()
-                        );
-                    )*
-                });
-            }
-
             #[cfg(parallel_compiler)]
             pub fn collect_active_jobs(&self) -> Vec<Lrc<QueryJob<$tcx>>> {
                 let mut jobs = Vec::new();
@@ -854,6 +843,24 @@ fn stats<'tcx, Q: QueryConfig<'tcx>>(
             }
         }
 
+        #[allow(nonstandard_style)]
+        #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+        pub enum QueryName {
+            $($name),*
+        }
+
+        impl QueryName {
+            pub fn register_with_profiler(profiler: &crate::util::profiling::SelfProfiler) {
+                $(profiler.register_query_name(QueryName::$name);)*
+            }
+
+            pub fn as_str(&self) -> &'static str {
+                match self {
+                    $(QueryName::$name => stringify!($name),)*
+                }
+            }
+        }
+
         #[allow(nonstandard_style)]
         #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
         pub enum Query<$tcx> {
@@ -894,6 +901,12 @@ pub fn default_span(&self, tcx: TyCtxt<'_, $tcx, '_>, span: Span) -> Span {
                     $(Query::$name(key) => key.default_span(tcx),)*
                 }
             }
+
+            pub fn query_name(&self) -> QueryName {
+                match self {
+                    $(Query::$name(_) => QueryName::$name,)*
+                }
+            }
         }
 
         impl<'a, $tcx> HashStable<StableHashingContext<'a>> for Query<$tcx> {
@@ -930,7 +943,7 @@ pub fn $name<F: FnOnce() -> R, R>(f: F) -> R {
             type Key = $K;
             type Value = $V;
 
-            const NAME: &'static str = stringify!($name);
+            const NAME: QueryName = QueryName::$name;
             const CATEGORY: ProfileCategory = $category;
         }
 
index 2739a30a291352e79bdca861ae5b6ea1d2335002..aabf9a401c690e7998d547634fda5d01e35945d3 100644 (file)
@@ -1,14 +1,20 @@
 use std::borrow::Cow;
-use std::fs;
-use std::io::{BufWriter, Write};
-use std::mem;
+use std::error::Error;
+use std::mem::{self, Discriminant};
 use std::process;
 use std::thread::ThreadId;
-use std::time::{Duration, Instant, SystemTime};
+use std::u32;
 
-use crate::session::config::Options;
+use crate::ty::query::QueryName;
 
-use rustc_data_structures::fx::FxHashMap;
+use measureme::{StringId, TimestampKind};
+
+/// MmapSerializationSink is faster on macOS and Linux
+/// but FileSerializationSink is faster on Windows
+#[cfg(not(windows))]
+type Profiler = measureme::Profiler<measureme::MmapSerializationSink>;
+#[cfg(windows)]
+type Profiler = measureme::Profiler<measureme::FileSerializationSink>;
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)]
 pub enum ProfileCategory {
@@ -35,409 +41,129 @@ pub enum ProfilerEvent {
     QueryBlockedEnd { query_name: &'static str, category: ProfileCategory, time: u64 },
 }
 
-impl ProfilerEvent {
-    fn timestamp(&self) -> u64 {
-        use self::ProfilerEvent::*;
-
-        match self {
-            QueryStart { time, .. } |
-            QueryEnd { time, .. } |
-            GenericActivityStart { time, .. } |
-            GenericActivityEnd { time, .. } |
-            QueryCacheHit { time, .. } |
-            QueryCount { time, .. } |
-            IncrementalLoadResultStart { time, .. } |
-            IncrementalLoadResultEnd { time, .. } |
-            QueryBlockedStart { time, .. } |
-            QueryBlockedEnd { time, .. } => *time
-        }
-    }
-}
-
 fn thread_id_to_u64(tid: ThreadId) -> u64 {
     unsafe { mem::transmute::<ThreadId, u64>(tid) }
 }
 
 pub struct SelfProfiler {
-    events: FxHashMap<ThreadId, Vec<ProfilerEvent>>,
-    start_time: SystemTime,
-    start_instant: Instant,
+    profiler: Profiler,
+    query_event_kind: StringId,
+    generic_activity_event_kind: StringId,
+    incremental_load_result_event_kind: StringId,
+    query_blocked_event_kind: StringId,
+    query_cache_hit_event_kind: StringId,
 }
 
 impl SelfProfiler {
-    pub fn new() -> SelfProfiler {
-        let profiler = SelfProfiler {
-            events: Default::default(),
-            start_time: SystemTime::now(),
-            start_instant: Instant::now(),
+    pub fn new() -> Result<SelfProfiler, Box<dyn Error>> {
+        let filename = format!("pid-{}.rustc_profile", process::id());
+        let path = std::path::Path::new(&filename);
+        let profiler = Profiler::new(path)?;
+
+        let query_event_kind = profiler.alloc_string("Query");
+        let generic_activity_event_kind = profiler.alloc_string("GenericActivity");
+        let incremental_load_result_event_kind = profiler.alloc_string("IncrementalLoadResult");
+        let query_blocked_event_kind = profiler.alloc_string("QueryBlocked");
+        let query_cache_hit_event_kind = profiler.alloc_string("QueryCacheHit");
+
+        Ok(SelfProfiler {
+            profiler,
+            query_event_kind,
+            generic_activity_event_kind,
+            incremental_load_result_event_kind,
+            query_blocked_event_kind,
+            query_cache_hit_event_kind,
+        })
+    }
+
+    fn get_query_name_string_id(query_name: QueryName) -> StringId {
+        let discriminant = unsafe {
+            mem::transmute::<Discriminant<QueryName>, u64>(mem::discriminant(&query_name))
         };
 
-        profiler
+        StringId::reserved(discriminant as u32)
+    }
+
+    pub fn register_query_name(&self, query_name: QueryName) {
+        let id = SelfProfiler::get_query_name_string_id(query_name);
+
+        self.profiler.alloc_string_with_reserved_id(id, query_name.as_str());
     }
 
     #[inline]
     pub fn start_activity(
-        &mut self,
-        category: ProfileCategory,
+        &self,
         label: impl Into<Cow<'static, str>>,
     ) {
-        self.record(ProfilerEvent::GenericActivityStart {
-            category,
-            label: label.into(),
-            time: self.get_time_from_start(),
-        })
+        self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::Start);
     }
 
     #[inline]
     pub fn end_activity(
-        &mut self,
-        category: ProfileCategory,
+        &self,
         label: impl Into<Cow<'static, str>>,
     ) {
-        self.record(ProfilerEvent::GenericActivityEnd {
-            category,
-            label: label.into(),
-            time: self.get_time_from_start(),
-        })
+        self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    pub fn record_computed_queries(
-        &mut self,
-        query_name: &'static str,
-        category: ProfileCategory,
-        count: usize)
-        {
-        self.record(ProfilerEvent::QueryCount {
-            query_name,
-            category,
-            count,
-            time: self.get_time_from_start(),
-        })
+    pub fn record_query_hit(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_cache_hit_event_kind, TimestampKind::Instant);
     }
 
     #[inline]
-    pub fn record_query_hit(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryCacheHit {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+    pub fn start_query(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_event_kind, TimestampKind::Start);
     }
 
     #[inline]
-    pub fn start_query(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryStart {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        });
+    pub fn end_query(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    pub fn end_query(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryEnd {
+    pub fn incremental_load_result_start(&self, query_name: QueryName) {
+        self.record_query(
             query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+            self.incremental_load_result_event_kind,
+            TimestampKind::Start
+        );
     }
 
     #[inline]
-    pub fn incremental_load_result_start(&mut self, query_name: &'static str) {
-        self.record(ProfilerEvent::IncrementalLoadResultStart {
-            query_name,
-            time: self.get_time_from_start(),
-        })
+    pub fn incremental_load_result_end(&self, query_name: QueryName) {
+        self.record_query(query_name, self.incremental_load_result_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    pub fn incremental_load_result_end(&mut self, query_name: &'static str) {
-        self.record(ProfilerEvent::IncrementalLoadResultEnd {
-            query_name,
-            time: self.get_time_from_start(),
-        })
-    }
-
-    #[inline]
-    pub fn query_blocked_start(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryBlockedStart {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+    pub fn query_blocked_start(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::Start);
     }
 
     #[inline]
-    pub fn query_blocked_end(&mut self, query_name: &'static str, category: ProfileCategory) {
-        self.record(ProfilerEvent::QueryBlockedEnd {
-            query_name,
-            category,
-            time: self.get_time_from_start(),
-        })
+    pub fn query_blocked_end(&self, query_name: QueryName) {
+        self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::End);
     }
 
     #[inline]
-    fn record(&mut self, event: ProfilerEvent) {
-        let thread_id = std::thread::current().id();
-        let events = self.events.entry(thread_id).or_default();
+    fn record(&self, event_id: &str, event_kind: StringId, timestamp_kind: TimestampKind) {
+        let thread_id = thread_id_to_u64(std::thread::current().id());
 
-        events.push(event);
+        let event_id = self.profiler.alloc_string(event_id);
+        self.profiler.record_event(event_kind, event_id, thread_id, timestamp_kind);
     }
 
     #[inline]
-    fn get_time_from_start(&self) -> u64 {
-        let duration = Instant::now() - self.start_instant;
-        duration.as_nanos() as u64
-    }
-
-    pub fn dump_raw_events(&self, opts: &Options) {
-        use self::ProfilerEvent::*;
-
-        let pid = process::id();
-
-        let filename =
-            format!("{}.profile_events.json", opts.crate_name.clone().unwrap_or_default());
-
-        let mut file = BufWriter::new(fs::File::create(filename).unwrap());
-
-        let threads: Vec<_> =
-            self.events
-                .keys()
-                .into_iter()
-                .map(|tid| format!("{}", thread_id_to_u64(*tid)))
-                .collect();
-
-        write!(file,
-            "{{\
-                \"processes\": {{\
-                    \"{}\": {{\
-                        \"threads\": [{}],\
-                        \"crate_name\": \"{}\",\
-                        \"opt_level\": \"{:?}\",\
-                        \"incremental\": {}\
-                    }}\
-                }},\
-                \"events\": [\
-             ",
-            pid,
-            threads.join(","),
-            opts.crate_name.clone().unwrap_or_default(),
-            opts.optimize,
-            if opts.incremental.is_some() { "true" } else { "false" },
-        ).unwrap();
-
-        let mut is_first = true;
-        for (thread_id, events) in &self.events {
-            let thread_id = thread_id_to_u64(*thread_id);
-
-            for event in events {
-                if is_first {
-                    is_first = false;
-                } else {
-                    writeln!(file, ",").unwrap();
-                }
-
-                let (secs, nanos) = {
-                    let time = self.start_time + Duration::from_nanos(event.timestamp());
-                    let time_since_unix =
-                        time.duration_since(SystemTime::UNIX_EPOCH).unwrap_or_default();
-                    (time_since_unix.as_secs(), time_since_unix.subsec_nanos())
-                };
+    fn record_query(
+        &self,
+        query_name: QueryName,
+        event_kind: StringId,
+        timestamp_kind: TimestampKind,
+    ) {
+        let dep_node_name = SelfProfiler::get_query_name_string_id(query_name);
 
-                match event {
-                    QueryStart { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{ \
-                                \"QueryStart\": {{ \
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryEnd { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryEnd\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    GenericActivityStart { category, label, time: _ } =>
-                        write!(file,
-                            "{{
-                                \"GenericActivityStart\": {{\
-                                    \"category\": \"{:?}\",\
-                                    \"label\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            category,
-                            label,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    GenericActivityEnd { category, label, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"GenericActivityEnd\": {{\
-                                    \"category\": \"{:?}\",\
-                                    \"label\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            category,
-                            label,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryCacheHit { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryCacheHit\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryCount { query_name, category, count, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryCount\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"count\": {},\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            count,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    IncrementalLoadResultStart { query_name, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"IncrementalLoadResultStart\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    IncrementalLoadResultEnd { query_name, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"IncrementalLoadResultEnd\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryBlockedStart { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryBlockedStart\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                    QueryBlockedEnd { query_name, category, time: _ } =>
-                        write!(file,
-                            "{{\
-                                \"QueryBlockedEnd\": {{\
-                                    \"query_name\": \"{}\",\
-                                    \"category\": \"{:?}\",\
-                                    \"time\": {{\
-                                        \"secs\": {},\
-                                        \"nanos\": {}\
-                                    }},\
-                                    \"thread_id\": {}\
-                                }}\
-                            }}",
-                            query_name,
-                            category,
-                            secs,
-                            nanos,
-                            thread_id,
-                        ).unwrap(),
-                }
-            }
-        }
+        let thread_id = thread_id_to_u64(std::thread::current().id());
 
-        write!(file, "] }}").unwrap();
+        self.profiler.record_event(event_kind, dep_node_name, thread_id, timestamp_kind);
     }
 }
index 84c652ff238afb4914a1f96e12cc814287554eb9..348f1a121269ed965413ec39e6396c9e93c7e888 100644 (file)
@@ -15,7 +15,6 @@
 use rustc::middle::exported_symbols::SymbolExportLevel;
 use rustc::session::config::{self, Lto};
 use rustc::util::common::time_ext;
-use rustc::util::profiling::ProfileCategory;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
 
@@ -67,8 +66,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         .iter()
         .filter_map(symbol_filter)
         .collect::<Vec<CString>>();
-    let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                       "generate_symbol_white_list_for_thinlto");
+    let _timer = cgcx.profile_activity("generate_symbol_white_list_for_thinlto");
     info!("{} symbols to preserve in this crate", symbol_white_list.len());
 
     // If we're performing LTO for the entire crate graph, then for each of our
@@ -97,8 +95,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         }
 
         for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                               format!("load: {}", path.display()));
+            let _timer = cgcx.profile_activity(format!("load: {}", path.display()));
             let exported_symbols = cgcx.exported_symbols
                 .as_ref().expect("needs exported symbols for LTO");
             symbol_white_list.extend(
@@ -727,8 +724,7 @@ pub unsafe fn optimize_thin_module(
         // Like with "fat" LTO, get some better optimizations if landing pads
         // are disabled by removing all landing pads.
         if cgcx.no_landing_pads {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                               "LLVM_remove_landing_pads");
+            let _timer = cgcx.profile_activity("LLVM_remove_landing_pads");
             llvm::LLVMRustMarkAllFunctionsNounwind(llmod);
             save_temp_bitcode(&cgcx, &module, "thin-lto-after-nounwind");
         }
index f0ed201ad5c27f155767aa3d0dbd34484f9a9c4d..4d52df91a8d91b16303dc4d45beffb13c97e31dc 100644 (file)
@@ -18,7 +18,6 @@
 use rustc::ty::TyCtxt;
 use rustc_codegen_ssa::{ModuleCodegen, CompiledModule};
 use rustc::util::common::time_ext;
-use rustc::util::profiling::ProfileCategory;
 use rustc_fs_util::{path_to_c_string, link_or_copy};
 use rustc_data_structures::small_c_str::SmallCStr;
 use errors::{Handler, FatalError};
@@ -414,7 +413,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
 
         // Finally, run the actual optimization passes
         {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_function_passes");
+            let _timer = cgcx.profile_activity("LLVM_function_passes");
             time_ext(config.time_passes,
                         None,
                         &format!("llvm function passes [{}]", module_name.unwrap()),
@@ -423,7 +422,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
             });
         }
         {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_module_passes");
+            let _timer = cgcx.profile_activity("LLVM_module_passes");
             time_ext(config.time_passes,
                     None,
                     &format!("llvm module passes [{}]", module_name.unwrap()),
@@ -445,7 +444,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
                   config: &ModuleConfig)
     -> Result<CompiledModule, FatalError>
 {
-    let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "codegen");
+    let _timer = cgcx.profile_activity("codegen");
     {
         let llmod = module.module_llvm.llmod();
         let llcx = &*module.module_llvm.llcx;
@@ -496,12 +495,12 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
 
 
         if write_bc || config.emit_bc_compressed || config.embed_bitcode {
-            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_make_bitcode");
+            let _timer = cgcx.profile_activity("LLVM_make_bitcode");
             let thin = ThinBuffer::new(llmod);
             let data = thin.data();
 
             if write_bc {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_bitcode");
+                let _timer = cgcx.profile_activity("LLVM_emit_bitcode");
                 if let Err(e) = fs::write(&bc_out, data) {
                     let msg = format!("failed to write bytecode to {}: {}", bc_out.display(), e);
                     diag_handler.err(&msg);
@@ -509,13 +508,12 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
             }
 
             if config.embed_bitcode {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_embed_bitcode");
+                let _timer = cgcx.profile_activity("LLVM_embed_bitcode");
                 embed_bitcode(cgcx, llcx, llmod, Some(data));
             }
 
             if config.emit_bc_compressed {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
-                                                   "LLVM_compress_bitcode");
+                let _timer = cgcx.profile_activity("LLVM_compress_bitcode");
                 let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION);
                 let data = bytecode::encode(&module.name, data);
                 if let Err(e) = fs::write(&dst, data) {
@@ -530,7 +528,7 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
         time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
             || -> Result<(), FatalError> {
             if config.emit_ir {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_ir");
+                let _timer = cgcx.profile_activity("LLVM_emit_ir");
                 let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
                 let out_c = path_to_c_string(&out);
 
@@ -577,7 +575,7 @@ extern "C" fn demangle_callback(input_ptr: *const c_char,
             }
 
             if config.emit_asm || asm_to_obj {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_asm");
+                let _timer = cgcx.profile_activity("LLVM_emit_asm");
                 let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
 
                 // We can't use the same module for asm and binary output, because that triggers
@@ -595,13 +593,13 @@ extern "C" fn demangle_callback(input_ptr: *const c_char,
             }
 
             if write_obj {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_obj");
+                let _timer = cgcx.profile_activity("LLVM_emit_obj");
                 with_codegen(tm, llmod, config.no_builtins, |cpm| {
                     write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
                                       llvm::FileType::ObjectFile)
                 })?;
             } else if asm_to_obj {
-                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_asm_to_obj");
+                let _timer = cgcx.profile_activity("LLVM_asm_to_obj");
                 let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
                 run_assembler(cgcx, diag_handler, &assembly, &obj_out);
 
index 0aae6b46e3def279749084bc64dcc1b24180fa75..da91217e95e3d80522a81ed18ed4d426c30609ab 100644 (file)
@@ -65,7 +65,6 @@
 use rustc::session::Session;
 use rustc::session::config::{OutputFilenames, OutputType, PrintRequest, OptLevel};
 use rustc::ty::{self, TyCtxt};
-use rustc::util::profiling::ProfileCategory;
 use rustc::util::common::ErrorReported;
 use rustc_mir::monomorphize;
 use rustc_codegen_ssa::ModuleCodegen;
@@ -330,12 +329,12 @@ fn join_codegen_and_link(
 
         // Run the linker on any artifacts that resulted from the LLVM run.
         // This should produce either a finished executable or library.
-        sess.profiler(|p| p.start_activity(ProfileCategory::Linking, "link_crate"));
+        sess.profiler(|p| p.start_activity("link_crate"));
         time(sess, "linking", || {
             back::link::link_binary(sess, &codegen_results,
                                     outputs, &codegen_results.crate_name.as_str());
         });
-        sess.profiler(|p| p.end_activity(ProfileCategory::Linking, "link_crate"));
+        sess.profiler(|p| p.end_activity("link_crate"));
 
         // Now that we won't touch anything in the incremental compilation directory
         // any more, we can finalize it (which involves renaming it)
index fa8c4177eafe23fb900cad53b86c625bcc2ffb0d..f5affad4cce0c8f22c90c242bb50d05721bf40f5 100644 (file)
@@ -18,7 +18,7 @@
 use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc::ty::TyCtxt;
 use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
-use rustc::util::profiling::{ProfileCategory, SelfProfiler};
+use rustc::util::profiling::SelfProfiler;
 use rustc_fs_util::link_or_copy;
 use rustc_data_structures::svh::Svh;
 use rustc_errors::{Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
@@ -29,7 +29,6 @@
 use syntax_pos::MultiSpan;
 use syntax_pos::symbol::Symbol;
 use jobserver::{Client, Acquired};
-use parking_lot::Mutex as PlMutex;
 
 use std::any::Any;
 use std::borrow::Cow;
@@ -198,25 +197,21 @@ fn clone(&self) -> Self {
 }
 
 pub struct ProfileGenericActivityTimer {
-    profiler: Option<Arc<PlMutex<SelfProfiler>>>,
-    category: ProfileCategory,
+    profiler: Option<Arc<SelfProfiler>>,
     label: Cow<'static, str>,
 }
 
 impl ProfileGenericActivityTimer {
     pub fn start(
-        profiler: Option<Arc<PlMutex<SelfProfiler>>>,
-        category: ProfileCategory,
+        profiler: Option<Arc<SelfProfiler>>,
         label: Cow<'static, str>,
     ) -> ProfileGenericActivityTimer {
         if let Some(profiler) = &profiler {
-            let mut p = profiler.lock();
-            p.start_activity(category, label.clone());
+            profiler.start_activity(label.clone());
         }
 
         ProfileGenericActivityTimer {
             profiler,
-            category,
             label,
         }
     }
@@ -225,8 +220,7 @@ pub fn start(
 impl Drop for ProfileGenericActivityTimer {
     fn drop(&mut self) {
         if let Some(profiler) = &self.profiler {
-            let mut p = profiler.lock();
-            p.end_activity(self.category, self.label.clone());
+            profiler.end_activity(self.label.clone());
         }
     }
 }
@@ -237,7 +231,7 @@ pub struct CodegenContext<B: WriteBackendMethods> {
     // Resources needed when running LTO
     pub backend: B,
     pub time_passes: bool,
-    pub profiler: Option<Arc<PlMutex<SelfProfiler>>>,
+    pub profiler: Option<Arc<SelfProfiler>>,
     pub lto: Lto,
     pub no_landing_pads: bool,
     pub save_temps: bool,
@@ -291,19 +285,17 @@ pub fn config(&self, kind: ModuleKind) -> &ModuleConfig {
 
     #[inline(never)]
     #[cold]
-    fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         match &self.profiler {
             None => bug!("profiler_active() called but there was no profiler active"),
             Some(profiler) => {
-                let mut p = profiler.lock();
-
-                f(&mut p);
+                f(&*profiler);
             }
         }
     }
 
     #[inline(always)]
-    pub fn profile<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
+    pub fn profile<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
         if unlikely!(self.profiler.is_some()) {
             self.profiler_active(f)
         }
@@ -311,10 +303,9 @@ pub fn profile<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
 
     pub fn profile_activity(
         &self,
-        category: ProfileCategory,
         label: impl Into<Cow<'static, str>>,
     ) -> ProfileGenericActivityTimer {
-        ProfileGenericActivityTimer::start(self.profiler.clone(), category, label.into())
+        ProfileGenericActivityTimer::start(self.profiler.clone(), label.into())
     }
 }
 
@@ -324,7 +315,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
     needs_thin_lto: Vec<(String, B::ThinBuffer)>,
     import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>
 ) -> Vec<(WorkItem<B>, u64)> {
-    cgcx.profile(|p| p.start_activity(ProfileCategory::Linking, "codegen_run_lto"));
+    cgcx.profile(|p| p.start_activity("codegen_run_lto"));
 
     let (lto_modules, copy_jobs) = if !needs_fat_lto.is_empty() {
         assert!(needs_thin_lto.is_empty());
@@ -351,7 +342,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
         }), 0)
     })).collect();
 
-    cgcx.profile(|p| p.end_activity(ProfileCategory::Linking, "codegen_run_lto"));
+    cgcx.profile(|p| p.end_activity("codegen_run_lto"));
 
     result
 }
@@ -1655,9 +1646,9 @@ fn drop(&mut self) {
         // surface that there was an error in this worker.
         bomb.result = {
             let label = work.name();
-            cgcx.profile(|p| p.start_activity(ProfileCategory::Codegen, label.clone()));
+            cgcx.profile(|p| p.start_activity(label.clone()));
             let result = execute_work_item(&cgcx, work).ok();
-            cgcx.profile(|p| p.end_activity(ProfileCategory::Codegen, label));
+            cgcx.profile(|p| p.end_activity(label));
 
             result
         };
index 27e3e306699053ee4bc74818cc72a50576ee105e..6cb54831a074368b2a8d9ef98dcbfdb837cde457 100644 (file)
@@ -25,7 +25,6 @@
 use rustc::ty::query::Providers;
 use rustc::middle::cstore::{self, LinkagePreference};
 use rustc::util::common::{time, print_time_passes_entry};
-use rustc::util::profiling::ProfileCategory;
 use rustc::session::config::{self, EntryFnType, Lto};
 use rustc::session::Session;
 use rustc_mir::monomorphize::item::DefPathBasedNames;
@@ -539,7 +538,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx);
 
     // Codegen the metadata.
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate metadata"));
+    tcx.sess.profiler(|p| p.start_activity("codegen crate metadata"));
 
     let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE,
                                                             &["crate"],
@@ -549,7 +548,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let metadata = time(tcx.sess, "write metadata", || {
         backend.write_metadata(tcx, &mut metadata_llvm_module)
     });
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate metadata"));
+    tcx.sess.profiler(|p| p.end_activity("codegen crate metadata"));
 
     let metadata_module = ModuleCodegen {
         name: metadata_cgu_name,
@@ -662,14 +661,12 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 
         match cgu_reuse {
             CguReuse::No => {
-                tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen,
-                                                       format!("codegen {}", cgu.name())));
+                tcx.sess.profiler(|p| p.start_activity(format!("codegen {}", cgu.name())));
                 let start_time = Instant::now();
                 let stats = backend.compile_codegen_unit(tcx, *cgu.name());
                 all_stats.extend(stats);
                 total_codegen_time += start_time.elapsed();
-                tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen,
-                                                     format!("codegen {}", cgu.name())));
+                tcx.sess.profiler(|p| p.end_activity(format!("codegen {}", cgu.name())));
                 false
             }
             CguReuse::PreLto => {
index 74085123f89ee345d075f83d5492d52778a9edb0..23575e51dfcd9c52d75b76eb067dc6070d9b242a 100644 (file)
@@ -123,10 +123,6 @@ pub fn run_compiler_in_existing_thread_pool<F, R>(config: Config, f: F) -> R
         profile::dump(&compiler.sess, "profile_queries".to_string())
     }
 
-    if compiler.sess.opts.debugging_opts.self_profile {
-        compiler.sess.profiler(|p| p.dump_raw_events(&compiler.sess.opts));
-    }
-
     r
 }
 
index 1547e15fd48c579733e8d0ed9828100496a5c312..3fd684f5d97e99e5f3f6ab38bd2fd8661e6ed8b3 100644 (file)
@@ -69,7 +69,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
         .set_continue_after_error(sess.opts.debugging_opts.continue_parse_after_error);
     hygiene::set_default_edition(sess.edition());
 
-    sess.profiler(|p| p.start_activity(ProfileCategory::Parsing, "parsing"));
+    sess.profiler(|p| p.start_activity("parsing"));
     let krate = time(sess, "parsing", || match *input {
         Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
         Input::Str {
@@ -77,7 +77,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
             ref name,
         } => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
     })?;
-    sess.profiler(|p| p.end_activity(ProfileCategory::Parsing, "parsing"));
+    sess.profiler(|p| p.end_activity("parsing"));
 
     sess.diagnostic().set_continue_after_error(true);
 
@@ -375,7 +375,7 @@ fn configure_and_expand_inner<'a>(
     syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts);
 
     // Expand all macros
-    sess.profiler(|p| p.start_activity(ProfileCategory::Expansion, "macro expansion"));
+    sess.profiler(|p| p.start_activity("macro expansion"));
     krate = time(sess, "expansion", || {
         // Windows dlls do not have rpaths, so they don't know how to find their
         // dependencies. It's up to us to tell the system where to find all the
@@ -450,7 +450,7 @@ fn configure_and_expand_inner<'a>(
         }
         krate
     });
-    sess.profiler(|p| p.end_activity(ProfileCategory::Expansion, "macro expansion"));
+    sess.profiler(|p| p.end_activity("macro expansion"));
 
     time(sess, "maybe building test harness", || {
         syntax::test::modify_for_testing(
@@ -869,8 +869,6 @@ pub fn create_global_ctxt(
         yield BoxedGlobalCtxt::initial_yield(());
         box_region_allow_access!(for('gcx), (&'gcx GlobalCtxt<'gcx>), (gcx));
 
-        gcx.queries.record_computed_queries(sess);
-
         if sess.opts.debugging_opts.query_stats {
             gcx.queries.print_stats();
         }
@@ -1022,9 +1020,9 @@ pub fn start_codegen<'tcx>(
         ::rustc::middle::dependency_format::calculate(tcx)
     });
 
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate"));
+    tcx.sess.profiler(|p| p.start_activity("codegen crate"));
     let codegen = time(tcx.sess, "codegen", move || codegen_backend.codegen_crate(tcx, rx));
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate"));
+    tcx.sess.profiler(|p| p.end_activity("codegen crate"));
 
     if log_enabled!(::log::Level::Info) {
         println!("Post-codegen");
index 710c84a6bc980f04b855cb5a84f221a6d4606868..21d1af229ddc2815a4536ed9499cff0adf4378e6 100644 (file)
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::query::Providers;
 use rustc::util;
-use rustc::util::profiling::ProfileCategory;
 use syntax_pos::Span;
 use util::common::time;
 
@@ -319,7 +318,7 @@ pub fn provide(providers: &mut Providers<'_>) {
 pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                              -> Result<(), ErrorReported>
 {
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::TypeChecking, "type-check crate"));
+    tcx.sess.profiler(|p| p.start_activity("type-check crate"));
 
     // this ensures that later parts of type checking can assume that items
     // have valid types and not error
@@ -370,7 +369,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     check_unused::check_crate(tcx);
     check_for_entry_fn(tcx);
 
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::TypeChecking, "type-check crate"));
+    tcx.sess.profiler(|p| p.end_activity("type-check crate"));
 
     if tcx.sess.err_count() == 0 {
         Ok(())
index 30fe327cac4ff59d0abc4dfd5c2f082d62804f62..411961d70bf88afb30cd59583e3a541a16c40719 100644 (file)
@@ -96,6 +96,7 @@
     Crate("lock_api"),
     Crate("log"),
     Crate("log_settings"),
+    Crate("measureme"),
     Crate("memchr"),
     Crate("memmap"),
     Crate("memoffset"),