git.lizzy.rs Git - rust.git/commitdiff
Replace TimeLine with SelfProfiler
authorWesley Wiser <wwiser@gmail.com>
Wed, 13 Feb 2019 13:13:30 +0000 (08:13 -0500)
committerWesley Wiser <wwiser@gmail.com>
Sun, 10 Mar 2019 15:10:55 +0000 (11:10 -0400)
12 files changed:
src/librustc/lib.rs
src/librustc/util/profiling.rs
src/librustc/util/time_graph.rs [deleted file]
src/librustc_codegen_llvm/back/lto.rs
src/librustc_codegen_llvm/back/write.rs
src/librustc_codegen_llvm/lib.rs
src/librustc_codegen_ssa/back/lto.rs
src/librustc_codegen_ssa/back/write.rs
src/librustc_codegen_ssa/base.rs
src/librustc_codegen_ssa/traits/write.rs
src/librustc_interface/passes.rs
src/librustc_typeck/lib.rs

index 6adfaa53946ac29615932017d041d8d49fd83840..681dffc0116e3d2941165dcc3f19e28c9254993b 100644 (file)
@@ -136,7 +136,6 @@ pub mod util {
     pub mod common;
     pub mod ppaux;
     pub mod nodemap;
-    pub mod time_graph;
     pub mod profiling;
     pub mod bug;
 }
index c134d48f987be5ded1c711a6b0f8786545d30c8a..2739a30a291352e79bdca861ae5b6ea1d2335002 100644 (file)
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::fs;
 use std::io::{BufWriter, Write};
 use std::mem;
@@ -20,12 +21,12 @@ pub enum ProfileCategory {
     Other,
 }
 
-#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Eq, PartialEq)]
 pub enum ProfilerEvent {
     QueryStart { query_name: &'static str, category: ProfileCategory, time: u64 },
     QueryEnd { query_name: &'static str, category: ProfileCategory, time: u64 },
-    GenericActivityStart { category: ProfileCategory, time: u64 },
-    GenericActivityEnd { category: ProfileCategory, time: u64 },
+    GenericActivityStart { category: ProfileCategory, label: Cow<'static, str>, time: u64 },
+    GenericActivityEnd { category: ProfileCategory, label: Cow<'static, str>, time: u64 },
     IncrementalLoadResultStart { query_name: &'static str, time: u64 },
     IncrementalLoadResultEnd { query_name: &'static str, time: u64 },
     QueryCacheHit { query_name: &'static str, category: ProfileCategory, time: u64 },
@@ -75,17 +76,27 @@ pub fn new() -> SelfProfiler {
     }
 
     #[inline]
-    pub fn start_activity(&mut self, category: ProfileCategory) {
+    pub fn start_activity(
+        &mut self,
+        category: ProfileCategory,
+        label: impl Into<Cow<'static, str>>,
+    ) {
         self.record(ProfilerEvent::GenericActivityStart {
             category,
+            label: label.into(),
             time: self.get_time_from_start(),
         })
     }
 
     #[inline]
-    pub fn end_activity(&mut self, category: ProfileCategory) {
+    pub fn end_activity(
+        &mut self,
+        category: ProfileCategory,
+        label: impl Into<Cow<'static, str>>,
+    ) {
         self.record(ProfilerEvent::GenericActivityEnd {
             category,
+            label: label.into(),
             time: self.get_time_from_start(),
         })
     }
@@ -273,11 +284,12 @@ pub fn dump_raw_events(&self, opts: &Options) {
                             nanos,
                             thread_id,
                         ).unwrap(),
-                    GenericActivityStart { category, time: _ } =>
+                    GenericActivityStart { category, label, time: _ } =>
                         write!(file,
                             "{{
                                 \"GenericActivityStart\": {{\
                                     \"category\": \"{:?}\",\
+                                    \"label\": \"{}\",\
                                     \"time\": {{\
                                         \"secs\": {},\
                                         \"nanos\": {}\
@@ -286,15 +298,17 @@ pub fn dump_raw_events(&self, opts: &Options) {
                                 }}\
                             }}",
                             category,
+                            label,
                             secs,
                             nanos,
                             thread_id,
                         ).unwrap(),
-                    GenericActivityEnd { category, time: _ } =>
+                    GenericActivityEnd { category, label, time: _ } =>
                         write!(file,
                             "{{\
                                 \"GenericActivityEnd\": {{\
                                     \"category\": \"{:?}\",\
+                                    \"label\": \"{}\",\
                                     \"time\": {{\
                                         \"secs\": {},\
                                         \"nanos\": {}\
@@ -303,6 +317,7 @@ pub fn dump_raw_events(&self, opts: &Options) {
                                 }}\
                             }}",
                             category,
+                            label,
                             secs,
                             nanos,
                             thread_id,
@@ -418,7 +433,7 @@ pub fn dump_raw_events(&self, opts: &Options) {
                             secs,
                             nanos,
                             thread_id,
-                        ).unwrap()
+                        ).unwrap(),
                 }
             }
         }
diff --git a/src/librustc/util/time_graph.rs b/src/librustc/util/time_graph.rs
deleted file mode 100644 (file)
index 4dd383f..0000000
+++ /dev/null
@@ -1,268 +0,0 @@
-use rustc_data_structures::fx::FxHashMap;
-use std::fs::File;
-use std::io::prelude::*;
-use std::marker::PhantomData;
-use std::mem;
-use std::sync::{Arc, Mutex};
-use std::time::Instant;
-
-const OUTPUT_WIDTH_IN_PX: u64 = 1000;
-const TIME_LINE_HEIGHT_IN_PX: u64 = 20;
-const TIME_LINE_HEIGHT_STRIDE_IN_PX: usize = 30;
-
-#[derive(Clone)]
-struct Timing {
-    start: Instant,
-    end: Instant,
-    work_package_kind: WorkPackageKind,
-    name: String,
-    events: Vec<(String, Instant)>,
-}
-
-#[derive(Clone, Copy, Hash, Eq, PartialEq, Debug)]
-pub struct TimelineId(pub usize);
-
-#[derive(Clone)]
-struct PerThread {
-    timings: Vec<Timing>,
-    open_work_package: Option<(Instant, WorkPackageKind, String)>,
-}
-
-#[derive(Clone)]
-pub struct TimeGraph {
-    data: Arc<Mutex<FxHashMap<TimelineId, PerThread>>>,
-}
-
-#[derive(Clone, Copy)]
-pub struct WorkPackageKind(pub &'static [&'static str]);
-
-pub struct Timeline {
-    token: Option<RaiiToken>,
-}
-
-struct RaiiToken {
-    graph: TimeGraph,
-    timeline: TimelineId,
-    events: Vec<(String, Instant)>,
-    // The token must not be Send:
-    _marker: PhantomData<*const ()>
-}
-
-
-impl Drop for RaiiToken {
-    fn drop(&mut self) {
-        self.graph.end(self.timeline, mem::replace(&mut self.events, Vec::new()));
-    }
-}
-
-impl TimeGraph {
-    pub fn new() -> TimeGraph {
-        TimeGraph {
-            data: Arc::new(Mutex::new(FxHashMap::default()))
-        }
-    }
-
-    pub fn start(&self,
-                 timeline: TimelineId,
-                 work_package_kind: WorkPackageKind,
-                 name: &str) -> Timeline {
-        {
-            let mut table = self.data.lock().unwrap();
-
-            let data = table.entry(timeline).or_insert(PerThread {
-                timings: Vec::new(),
-                open_work_package: None,
-            });
-
-            assert!(data.open_work_package.is_none());
-            data.open_work_package = Some((Instant::now(), work_package_kind, name.to_string()));
-        }
-
-        Timeline {
-            token: Some(RaiiToken {
-                graph: self.clone(),
-                timeline,
-                events: Vec::new(),
-                _marker: PhantomData,
-            }),
-        }
-    }
-
-    fn end(&self, timeline: TimelineId, events: Vec<(String, Instant)>) {
-        let end = Instant::now();
-
-        let mut table = self.data.lock().unwrap();
-        let data = table.get_mut(&timeline).unwrap();
-
-        if let Some((start, work_package_kind, name)) = data.open_work_package.take() {
-            data.timings.push(Timing {
-                start,
-                end,
-                work_package_kind,
-                name,
-                events,
-            });
-        } else {
-            bug!("end timing without start?")
-        }
-    }
-
-    pub fn dump(&self, output_filename: &str) {
-        let table = self.data.lock().unwrap();
-
-        for data in table.values() {
-            assert!(data.open_work_package.is_none());
-        }
-
-        let mut threads: Vec<PerThread> =
-            table.values().map(|data| data.clone()).collect();
-
-        threads.sort_by_key(|timeline| timeline.timings[0].start);
-
-        let earliest_instant = threads[0].timings[0].start;
-        let latest_instant = threads.iter()
-                                       .map(|timeline| timeline.timings
-                                                               .last()
-                                                               .unwrap()
-                                                               .end)
-                                       .max()
-                                       .unwrap();
-        let max_distance = distance(earliest_instant, latest_instant);
-
-        let mut file = File::create(format!("{}.html", output_filename)).unwrap();
-
-        writeln!(file, "
-            <html>
-            <head>
-                <style>
-                    #threads a {{
-                        position: absolute;
-                        overflow: hidden;
-                    }}
-                    #threads {{
-                        height: {total_height}px;
-                        width: {width}px;
-                    }}
-
-                    .timeline {{
-                        display: none;
-                        width: {width}px;
-                        position: relative;
-                    }}
-
-                    .timeline:target {{
-                        display: block;
-                    }}
-
-                    .event {{
-                        position: absolute;
-                    }}
-                </style>
-            </head>
-            <body>
-                <div id='threads'>
-        ",
-            total_height = threads.len() * TIME_LINE_HEIGHT_STRIDE_IN_PX,
-            width = OUTPUT_WIDTH_IN_PX,
-        ).unwrap();
-
-        let mut color = 0;
-        for (line_index, thread) in threads.iter().enumerate() {
-            let line_top = line_index * TIME_LINE_HEIGHT_STRIDE_IN_PX;
-
-            for span in &thread.timings {
-                let start = distance(earliest_instant, span.start);
-                let end = distance(earliest_instant, span.end);
-
-                let start = normalize(start, max_distance, OUTPUT_WIDTH_IN_PX);
-                let end = normalize(end, max_distance, OUTPUT_WIDTH_IN_PX);
-
-                let colors = span.work_package_kind.0;
-
-                writeln!(file, "<a href='#timing{}'
-                                   style='top:{}px; \
-                                          left:{}px; \
-                                          width:{}px; \
-                                          height:{}px; \
-                                          background:{};'>{}</a>",
-                    color,
-                    line_top,
-                    start,
-                    end - start,
-                    TIME_LINE_HEIGHT_IN_PX,
-                    colors[color % colors.len()],
-                    span.name,
-                    ).unwrap();
-
-                color += 1;
-            }
-        }
-
-        writeln!(file, "
-            </div>
-        ").unwrap();
-
-        let mut idx = 0;
-        for thread in threads.iter() {
-            for timing in &thread.timings {
-                let colors = timing.work_package_kind.0;
-                let height = TIME_LINE_HEIGHT_STRIDE_IN_PX * timing.events.len();
-                writeln!(file, "<div class='timeline'
-                                     id='timing{}'
-                                     style='background:{};height:{}px;'>",
-                         idx,
-                         colors[idx % colors.len()],
-                         height).unwrap();
-                idx += 1;
-                let max = distance(timing.start, timing.end);
-                for (i, &(ref event, time)) in timing.events.iter().enumerate() {
-                    let i = i as u64;
-                    let time = distance(timing.start, time);
-                    let at = normalize(time, max, OUTPUT_WIDTH_IN_PX);
-                    writeln!(file, "<span class='event'
-                                          style='left:{}px;\
-                                                 top:{}px;'>{}</span>",
-                             at,
-                             TIME_LINE_HEIGHT_IN_PX * i,
-                             event).unwrap();
-                }
-                writeln!(file, "</div>").unwrap();
-            }
-        }
-
-        writeln!(file, "
-            </body>
-            </html>
-        ").unwrap();
-    }
-}
-
-impl Timeline {
-    pub fn noop() -> Timeline {
-        Timeline { token: None }
-    }
-
-    /// Record an event which happened at this moment on this timeline.
-    ///
-    /// Events are displayed in the eventual HTML output where you can click on
-    /// a particular timeline and it'll expand to all of the events that
-    /// happened on that timeline. This can then be used to drill into a
-    /// particular timeline and see what events are happening and taking the
-    /// most time.
-    pub fn record(&mut self, name: &str) {
-        if let Some(ref mut token) = self.token {
-            token.events.push((name.to_string(), Instant::now()));
-        }
-    }
-}
-
-fn distance(zero: Instant, x: Instant) -> u64 {
-
-    let duration = x.duration_since(zero);
-    (duration.as_secs() * 1_000_000_000 + duration.subsec_nanos() as u64) // / div
-}
-
-fn normalize(distance: u64, max: u64, max_pixels: u64) -> u64 {
-    (max_pixels * distance) / max
-}
-
index 944569c8b744bad4cf1081a822dabe5972780d32..84c652ff238afb4914a1f96e12cc814287554eb9 100644 (file)
@@ -3,7 +3,6 @@
     to_llvm_opt_settings};
 use crate::llvm::archive_ro::ArchiveRO;
 use crate::llvm::{self, True, False};
-use crate::time_graph::Timeline;
 use crate::{ModuleLlvm, LlvmCodegenBackend};
 use rustc_codegen_ssa::back::symbol_export;
 use rustc_codegen_ssa::back::write::{ModuleConfig, CodegenContext, FatLTOInput};
@@ -16,6 +15,7 @@
 use rustc::middle::exported_symbols::SymbolExportLevel;
 use rustc::session::config::{self, Lto};
 use rustc::util::common::time_ext;
+use rustc::util::profiling::ProfileCategory;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
 
@@ -37,7 +37,6 @@ pub fn crate_type_allows_lto(crate_type: config::CrateType) -> bool {
 }
 
 fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
-               timeline: &mut Timeline,
                diag_handler: &Handler)
     -> Result<(Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>), FatalError>
 {
@@ -68,7 +67,8 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         .iter()
         .filter_map(symbol_filter)
         .collect::<Vec<CString>>();
-    timeline.record("whitelist");
+    let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
+                                       "generate_symbol_white_list_for_thinlto");
     info!("{} symbols to preserve in this crate", symbol_white_list.len());
 
     // If we're performing LTO for the entire crate graph, then for each of our
@@ -97,6 +97,8 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         }
 
         for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
+            let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
+                                               format!("load: {}", path.display()));
             let exported_symbols = cgcx.exported_symbols
                 .as_ref().expect("needs exported symbols for LTO");
             symbol_white_list.extend(
@@ -121,7 +123,6 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
                 let bc = SerializedModule::FromRlib(bc);
                 upstream_modules.push((bc, CString::new(id).unwrap()));
             }
-            timeline.record(&format!("load: {}", path.display()));
         }
     }
 
@@ -132,12 +133,11 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
 /// for further optimization.
 pub(crate) fn run_fat(cgcx: &CodegenContext<LlvmCodegenBackend>,
                       modules: Vec<FatLTOInput<LlvmCodegenBackend>>,
-                      cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-                      timeline: &mut Timeline)
+                      cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>)
     -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError>
 {
     let diag_handler = cgcx.create_diag_handler();
-    let (symbol_white_list, upstream_modules) = prepare_lto(cgcx, timeline, &diag_handler)?;
+    let (symbol_white_list, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
     let symbol_white_list = symbol_white_list.iter()
                                              .map(|c| c.as_ptr())
                                              .collect::<Vec<_>>();
@@ -148,7 +148,6 @@ pub(crate) fn run_fat(cgcx: &CodegenContext<LlvmCodegenBackend>,
         cached_modules,
         upstream_modules,
         &symbol_white_list,
-        timeline,
     )
 }
 
@@ -157,12 +156,11 @@ pub(crate) fn run_fat(cgcx: &CodegenContext<LlvmCodegenBackend>,
 /// can simply be copied over from the incr. comp. cache.
 pub(crate) fn run_thin(cgcx: &CodegenContext<LlvmCodegenBackend>,
                        modules: Vec<(String, ThinBuffer)>,
-                       cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-                       timeline: &mut Timeline)
+                       cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>)
     -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError>
 {
     let diag_handler = cgcx.create_diag_handler();
-    let (symbol_white_list, upstream_modules) = prepare_lto(cgcx, timeline, &diag_handler)?;
+    let (symbol_white_list, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
     let symbol_white_list = symbol_white_list.iter()
                                              .map(|c| c.as_ptr())
                                              .collect::<Vec<_>>();
@@ -175,8 +173,7 @@ pub(crate) fn run_thin(cgcx: &CodegenContext<LlvmCodegenBackend>,
              modules,
              upstream_modules,
              cached_modules,
-             &symbol_white_list,
-             timeline)
+             &symbol_white_list)
 }
 
 pub(crate) fn prepare_thin(
@@ -192,8 +189,7 @@ fn fat_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
            mut modules: Vec<FatLTOInput<LlvmCodegenBackend>>,
            cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
            mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
-           symbol_white_list: &[*const libc::c_char],
-           timeline: &mut Timeline)
+           symbol_white_list: &[*const libc::c_char])
     -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError>
 {
     info!("going for a fat lto");
@@ -303,7 +299,6 @@ fn fat_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
                     write::llvm_err(&diag_handler, &msg)
                 })
             })?;
-            timeline.record(&format!("link {:?}", name));
             serialized_bitcode.push(bc_decoded);
         }
         drop(linker);
@@ -325,7 +320,6 @@ fn fat_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
             }
             save_temp_bitcode(&cgcx, &module, "lto.after-nounwind");
         }
-        timeline.record("passes");
     }
 
     Ok(LtoModuleCodegen::Fat {
@@ -395,8 +389,7 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
             modules: Vec<(String, ThinBuffer)>,
             serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
             cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-            symbol_white_list: &[*const libc::c_char],
-            timeline: &mut Timeline)
+            symbol_white_list: &[*const libc::c_char])
     -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError>
 {
     unsafe {
@@ -422,7 +415,6 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
             });
             thin_buffers.push(buffer);
             module_names.push(cname);
-            timeline.record(&name);
         }
 
         // FIXME: All upstream crates are deserialized internally in the
@@ -475,7 +467,6 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         })?;
 
         info!("thin LTO data created");
-        timeline.record("data");
 
         let import_map = if cgcx.incr_comp_session_dir.is_some() {
             ThinLTOImports::from_thin_lto_data(data)
@@ -486,7 +477,6 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
             ThinLTOImports::default()
         };
         info!("thin LTO import map loaded");
-        timeline.record("import-map-loaded");
 
         let data = ThinData(data);
 
@@ -691,7 +681,6 @@ fn drop(&mut self) {
 pub unsafe fn optimize_thin_module(
     thin_module: &mut ThinModule<LlvmCodegenBackend>,
     cgcx: &CodegenContext<LlvmCodegenBackend>,
-    timeline: &mut Timeline
 ) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
     let tm = (cgcx.tm_factory.0)().map_err(|e| {
@@ -738,9 +727,10 @@ pub unsafe fn optimize_thin_module(
         // Like with "fat" LTO, get some better optimizations if landing pads
         // are disabled by removing all landing pads.
         if cgcx.no_landing_pads {
+            let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
+                                               "LLVM_remove_landing_pads");
             llvm::LLVMRustMarkAllFunctionsNounwind(llmod);
             save_temp_bitcode(&cgcx, &module, "thin-lto-after-nounwind");
-            timeline.record("nounwind");
         }
 
         // Up next comes the per-module local analyses that we do for Thin LTO.
@@ -756,25 +746,21 @@ pub unsafe fn optimize_thin_module(
             return Err(write::llvm_err(&diag_handler, msg))
         }
         save_temp_bitcode(cgcx, &module, "thin-lto-after-rename");
-        timeline.record("rename");
         if !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) {
             let msg = "failed to prepare thin LTO module";
             return Err(write::llvm_err(&diag_handler, msg))
         }
         save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve");
-        timeline.record("resolve");
         if !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) {
             let msg = "failed to prepare thin LTO module";
             return Err(write::llvm_err(&diag_handler, msg))
         }
         save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize");
-        timeline.record("internalize");
         if !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod) {
             let msg = "failed to prepare thin LTO module";
             return Err(write::llvm_err(&diag_handler, msg))
         }
         save_temp_bitcode(cgcx, &module, "thin-lto-after-import");
-        timeline.record("import");
 
         // Ok now this is a bit unfortunate. This is also something you won't
         // find upstream in LLVM's ThinLTO passes! This is a hack for now to
@@ -807,7 +793,6 @@ pub unsafe fn optimize_thin_module(
         // fixed in LLVM.
         llvm::LLVMRustThinLTOPatchDICompileUnit(llmod, cu1);
         save_temp_bitcode(cgcx, &module, "thin-lto-after-patch");
-        timeline.record("patch");
 
         // Alright now that we've done everything related to the ThinLTO
         // analysis it's time to run some optimizations! Here we use the same
@@ -818,7 +803,6 @@ pub unsafe fn optimize_thin_module(
         let config = cgcx.config(module.kind);
         run_pass_manager(cgcx, &module, config, true);
         save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
-        timeline.record("thin-done");
     }
     Ok(module)
 }
index db5430a4219a0acea6d5f90f655360642fd76c36..dc4dd4e39e17b502da7a0213e9ef92dceb03d150 100644 (file)
@@ -3,7 +3,6 @@
 use crate::back::lto::ThinBuffer;
 use crate::base;
 use crate::consts;
-use crate::time_graph::Timeline;
 use crate::llvm::{self, DiagnosticInfo, PassManager, SMDiagnostic};
 use crate::llvm_util;
 use crate::ModuleLlvm;
@@ -19,6 +18,7 @@
 use rustc::ty::TyCtxt;
 use rustc_codegen_ssa::{ModuleCodegen, CompiledModule};
 use rustc::util::common::time_ext;
+use rustc::util::profiling::ProfileCategory;
 use rustc_fs_util::{path_to_c_string, link_or_copy};
 use rustc_data_structures::small_c_str::SmallCStr;
 use errors::{Handler, FatalError};
@@ -305,8 +305,7 @@ fn drop(&mut self) {
 pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
                    diag_handler: &Handler,
                    module: &ModuleCodegen<ModuleLlvm>,
-                   config: &ModuleConfig,
-                   timeline: &mut Timeline)
+                   config: &ModuleConfig)
     -> Result<(), FatalError>
 {
     let llmod = module.module_llvm.llmod();
@@ -415,19 +414,24 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
         diag_handler.abort_if_errors();
 
         // Finally, run the actual optimization passes
-        time_ext(config.time_passes,
-                 None,
-                 &format!("llvm function passes [{}]", module_name.unwrap()),
-                 || {
-            llvm::LLVMRustRunFunctionPassManager(fpm, llmod)
-        });
-        timeline.record("fpm");
-        time_ext(config.time_passes,
-                 None,
-                 &format!("llvm module passes [{}]", module_name.unwrap()),
-                 || {
-            llvm::LLVMRunPassManager(mpm, llmod)
-        });
+        {
+            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_function_passes");
+            time_ext(config.time_passes,
+                        None,
+                        &format!("llvm function passes [{}]", module_name.unwrap()),
+                        || {
+                llvm::LLVMRustRunFunctionPassManager(fpm, llmod)
+            });
+        }
+        {
+            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_module_passes");
+            time_ext(config.time_passes,
+                    None,
+                    &format!("llvm module passes [{}]", module_name.unwrap()),
+                    || {
+                llvm::LLVMRunPassManager(mpm, llmod)
+            });
+        }
 
         // Deallocate managers that we're now done with
         llvm::LLVMDisposePassManager(fpm);
@@ -439,11 +443,10 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
 pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
                   diag_handler: &Handler,
                   module: ModuleCodegen<ModuleLlvm>,
-                  config: &ModuleConfig,
-                  timeline: &mut Timeline)
+                  config: &ModuleConfig)
     -> Result<CompiledModule, FatalError>
 {
-    timeline.record("codegen");
+    let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "codegen");
     {
         let llmod = module.module_llvm.llmod();
         let llcx = &*module.module_llvm.llcx;
@@ -494,29 +497,30 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
 
 
         if write_bc || config.emit_bc_compressed || config.embed_bitcode {
+            let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_make_bitcode");
             let thin = ThinBuffer::new(llmod);
             let data = thin.data();
-            timeline.record("make-bc");
 
             if write_bc {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_bitcode");
                 if let Err(e) = fs::write(&bc_out, data) {
                     diag_handler.err(&format!("failed to write bytecode: {}", e));
                 }
-                timeline.record("write-bc");
             }
 
             if config.embed_bitcode {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_embed_bitcode");
                 embed_bitcode(cgcx, llcx, llmod, Some(data));
-                timeline.record("embed-bc");
             }
 
             if config.emit_bc_compressed {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
+                                                   "LLVM_compress_bitcode");
                 let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION);
                 let data = bytecode::encode(&module.name, data);
                 if let Err(e) = fs::write(&dst, data) {
                     diag_handler.err(&format!("failed to write bytecode: {}", e));
                 }
-                timeline.record("compress-bc");
             }
         } else if config.embed_bitcode_marker {
             embed_bitcode(cgcx, llcx, llmod, None);
@@ -525,6 +529,7 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
         time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
             || -> Result<(), FatalError> {
             if config.emit_ir {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_ir");
                 let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
                 let out = path_to_c_string(&out);
 
@@ -563,10 +568,10 @@ extern "C" fn demangle_callback(input_ptr: *const c_char,
                     llvm::LLVMRustPrintModule(cpm, llmod, out.as_ptr(), demangle_callback);
                     llvm::LLVMDisposePassManager(cpm);
                 });
-                timeline.record("ir");
             }
 
             if config.emit_asm || asm_to_obj {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_asm");
                 let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
 
                 // We can't use the same module for asm and binary output, because that triggers
@@ -581,19 +586,18 @@ extern "C" fn demangle_callback(input_ptr: *const c_char,
                     write_output_file(diag_handler, tm, cpm, llmod, &path,
                                       llvm::FileType::AssemblyFile)
                 })?;
-                timeline.record("asm");
             }
 
             if write_obj {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_obj");
                 with_codegen(tm, llmod, config.no_builtins, |cpm| {
                     write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
                                       llvm::FileType::ObjectFile)
                 })?;
-                timeline.record("obj");
             } else if asm_to_obj {
+                let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_asm_to_obj");
                 let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
                 run_assembler(cgcx, diag_handler, &assembly, &obj_out);
-                timeline.record("asm_to_obj");
 
                 if !config.emit_asm && !cgcx.save_temps {
                     drop(fs::remove_file(&assembly));
index 7b2b9ec24ea0f2d964b43e9d17e05b785d3a9907..0a295c202e655418ed8fcb87d697dc941e63c0b2 100644 (file)
@@ -53,7 +53,6 @@
 use rustc_codegen_ssa::CompiledModule;
 use errors::{FatalError, Handler};
 use rustc::dep_graph::WorkProduct;
-use rustc::util::time_graph::Timeline;
 use syntax_pos::symbol::InternedString;
 use rustc::mir::mono::Stats;
 pub use llvm_util::target_features;
@@ -66,7 +65,6 @@
 use rustc::session::Session;
 use rustc::session::config::{OutputFilenames, OutputType, PrintRequest, OptLevel};
 use rustc::ty::{self, TyCtxt};
-use rustc::util::time_graph;
 use rustc::util::profiling::ProfileCategory;
 use rustc::util::common::ErrorReported;
 use rustc_mir::monomorphize;
@@ -167,42 +165,37 @@ fn run_fat_lto(
         cgcx: &CodegenContext<Self>,
         modules: Vec<FatLTOInput<Self>>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-        timeline: &mut Timeline
     ) -> Result<LtoModuleCodegen<Self>, FatalError> {
-        back::lto::run_fat(cgcx, modules, cached_modules, timeline)
+        back::lto::run_fat(cgcx, modules, cached_modules)
     }
     fn run_thin_lto(
         cgcx: &CodegenContext<Self>,
         modules: Vec<(String, Self::ThinBuffer)>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-        timeline: &mut Timeline
     ) -> Result<(Vec<LtoModuleCodegen<Self>>, Vec<WorkProduct>), FatalError> {
-        back::lto::run_thin(cgcx, modules, cached_modules, timeline)
+        back::lto::run_thin(cgcx, modules, cached_modules)
     }
     unsafe fn optimize(
         cgcx: &CodegenContext<Self>,
         diag_handler: &Handler,
         module: &ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-        timeline: &mut Timeline
     ) -> Result<(), FatalError> {
-        back::write::optimize(cgcx, diag_handler, module, config, timeline)
+        back::write::optimize(cgcx, diag_handler, module, config)
     }
     unsafe fn optimize_thin(
         cgcx: &CodegenContext<Self>,
         thin: &mut ThinModule<Self>,
-        timeline: &mut Timeline
     ) -> Result<ModuleCodegen<Self::Module>, FatalError> {
-        back::lto::optimize_thin_module(thin, cgcx, timeline)
+        back::lto::optimize_thin_module(thin, cgcx)
     }
     unsafe fn codegen(
         cgcx: &CodegenContext<Self>,
         diag_handler: &Handler,
         module: ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-        timeline: &mut Timeline
     ) -> Result<CompiledModule, FatalError> {
-        back::write::codegen(cgcx, diag_handler, module, config, timeline)
+        back::write::codegen(cgcx, diag_handler, module, config)
     }
     fn prepare_thin(
         module: ModuleCodegen<Self::Module>
@@ -336,12 +329,12 @@ fn join_codegen_and_link(
 
         // Run the linker on any artifacts that resulted from the LLVM run.
         // This should produce either a finished executable or library.
-        sess.profiler(|p| p.start_activity(ProfileCategory::Linking));
+        sess.profiler(|p| p.start_activity(ProfileCategory::Linking, "link_crate"));
         time(sess, "linking", || {
             back::link::link_binary(sess, &codegen_results,
                                     outputs, &codegen_results.crate_name.as_str());
         });
-        sess.profiler(|p| p.end_activity(ProfileCategory::Linking));
+        sess.profiler(|p| p.end_activity(ProfileCategory::Linking, "link_crate"));
 
         // Now that we won't touch anything in the incremental compilation directory
         // any more, we can finalize it (which involves renaming it)
index 7f0eba7b0850bc5fd886d0c046e28bb717f94b88..47e5d9af33ba40d2e9c39300ef30825ddbe178af 100644 (file)
@@ -2,7 +2,6 @@
 use crate::traits::*;
 use crate::ModuleCodegen;
 
-use rustc::util::time_graph::Timeline;
 use rustc_errors::FatalError;
 
 use std::sync::Arc;
@@ -67,7 +66,6 @@ pub fn name(&self) -> &str {
     pub unsafe fn optimize(
         &mut self,
         cgcx: &CodegenContext<B>,
-        timeline: &mut Timeline
     ) -> Result<ModuleCodegen<B::Module>, FatalError> {
         match *self {
             LtoModuleCodegen::Fat { ref mut module, .. } => {
@@ -75,11 +73,10 @@ pub unsafe fn optimize(
                 {
                     let config = cgcx.config(module.kind);
                     B::run_lto_pass_manager(cgcx, &module, config, false);
-                    timeline.record("fat-done");
                 }
                 Ok(module)
             }
-            LtoModuleCodegen::Thin(ref mut thin) => B::optimize_thin(cgcx, thin, timeline),
+            LtoModuleCodegen::Thin(ref mut thin) => B::optimize_thin(cgcx, thin),
         }
     }
 
index c84b38cde81853f39aae0869024b6f26e700c361..859dfb99d92b8598306d5af13c6fdfef1c218ed8 100644 (file)
 use rustc::session::config::{self, OutputFilenames, OutputType, Passes, Sanitizer, Lto};
 use rustc::session::Session;
 use rustc::util::nodemap::FxHashMap;
-use rustc::util::time_graph::{self, TimeGraph, Timeline};
 use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc::ty::TyCtxt;
 use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
-use rustc::util::profiling::SelfProfiler;
+use rustc::util::profiling::{ProfileCategory, SelfProfiler};
 use rustc_fs_util::link_or_copy;
 use rustc_data_structures::svh::Svh;
 use rustc_errors::{Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
@@ -33,6 +32,7 @@
 use parking_lot::Mutex as PlMutex;
 
 use std::any::Any;
+use std::borrow::Cow;
 use std::fs;
 use std::io;
 use std::mem;
@@ -197,6 +197,40 @@ fn clone(&self) -> Self {
     }
 }
 
+pub struct ProfileGenericActivityTimer {
+    profiler: Option<Arc<PlMutex<SelfProfiler>>>,
+    category: ProfileCategory,
+    label: Cow<'static, str>,
+}
+
+impl ProfileGenericActivityTimer {
+    pub fn start(
+        profiler: Option<Arc<PlMutex<SelfProfiler>>>,
+        category: ProfileCategory,
+        label: Cow<'static, str>,
+    ) -> ProfileGenericActivityTimer {
+        if let Some(profiler) = &profiler {
+            let mut p = profiler.lock();
+            p.start_activity(category, label.clone());
+        }
+
+        ProfileGenericActivityTimer {
+            profiler,
+            category,
+            label,
+        }
+    }
+}
+
+impl Drop for ProfileGenericActivityTimer {
+    fn drop(&mut self) {
+        if let Some(profiler) = &self.profiler {
+            let mut p = profiler.lock();
+            p.end_activity(self.category, self.label.clone());
+        }
+    }
+}
+
 /// Additional resources used by optimize_and_codegen (not module specific)
 #[derive(Clone)]
 pub struct CodegenContext<B: WriteBackendMethods> {
@@ -238,9 +272,6 @@ pub struct CodegenContext<B: WriteBackendMethods> {
     pub cgu_reuse_tracker: CguReuseTracker,
     // Channel back to the main control thread to send messages to
     pub coordinator_send: Sender<Box<dyn Any + Send>>,
-    // A reference to the TimeGraph so we can register timings. None means that
-    // measuring is disabled.
-    pub time_graph: Option<TimeGraph>,
     // The assembler command if no_integrated_as option is enabled, None otherwise
     pub assembler_cmd: Option<Arc<AssemblerCommand>>
 }
@@ -277,6 +308,14 @@ pub fn profile<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
             self.profiler_active(f)
         }
     }
+
+    pub fn profile_activity(
+        &self,
+        category: ProfileCategory,
+        label: impl Into<Cow<'static, str>>,
+    ) -> ProfileGenericActivityTimer {
+        ProfileGenericActivityTimer::start(self.profiler.clone(), category, label.into())
+    }
 }
 
 fn generate_lto_work<B: ExtraBackendMethods>(
@@ -285,11 +324,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
     needs_thin_lto: Vec<(String, B::ThinBuffer)>,
     import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>
 ) -> Vec<(WorkItem<B>, u64)> {
-    let mut timeline = cgcx.time_graph.as_ref().map(|tg| {
-        tg.start(CODEGEN_WORKER_TIMELINE,
-                 CODEGEN_WORK_PACKAGE_KIND,
-                 "generate lto")
-    }).unwrap_or(Timeline::noop());
+    cgcx.profile(|p| p.start_activity(ProfileCategory::Linking, "codegen_run_lto"));
 
     let (lto_modules, copy_jobs) = if !needs_fat_lto.is_empty() {
         assert!(needs_thin_lto.is_empty());
@@ -297,17 +332,16 @@ fn generate_lto_work<B: ExtraBackendMethods>(
             cgcx,
             needs_fat_lto,
             import_only_modules,
-            &mut timeline,
         )
         .unwrap_or_else(|e| e.raise());
         (vec![lto_module], vec![])
     } else {
         assert!(needs_fat_lto.is_empty());
-        B::run_thin_lto(cgcx, needs_thin_lto, import_only_modules, &mut timeline)
+        B::run_thin_lto(cgcx, needs_thin_lto, import_only_modules)
             .unwrap_or_else(|e| e.raise())
     };
 
-    lto_modules.into_iter().map(|module| {
+    let result = lto_modules.into_iter().map(|module| {
         let cost = module.cost();
         (WorkItem::LTO(module), cost)
     }).chain(copy_jobs.into_iter().map(|wp| {
@@ -315,7 +349,11 @@ fn generate_lto_work<B: ExtraBackendMethods>(
             name: wp.cgu_name.clone(),
             source: wp,
         }), 0)
-    })).collect()
+    })).collect();
+
+    cgcx.profile(|p| p.end_activity(ProfileCategory::Linking, "codegen_run_lto"));
+
+    result
 }
 
 pub struct CompiledModules {
@@ -345,7 +383,6 @@ fn need_pre_lto_bitcode_for_incr_comp(sess: &Session) -> bool {
 pub fn start_async_codegen<B: ExtraBackendMethods>(
     backend: B,
     tcx: TyCtxt<'_, '_, '_>,
-    time_graph: Option<TimeGraph>,
     metadata: EncodedMetadata,
     coordinator_receive: Receiver<Box<dyn Any + Send>>,
     total_cgus: usize
@@ -469,7 +506,6 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
                                                   coordinator_receive,
                                                   total_cgus,
                                                   sess.jobserver.clone(),
-                                                  time_graph.clone(),
                                                   Arc::new(modules_config),
                                                   Arc::new(metadata_config),
                                                   Arc::new(allocator_config));
@@ -483,7 +519,6 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
         linker_info,
         crate_info,
 
-        time_graph,
         coordinator_send: tcx.tx_to_llvm_workers.lock().clone(),
         codegen_worker_receive,
         shared_emitter_main,
@@ -728,19 +763,18 @@ pub enum FatLTOInput<B: WriteBackendMethods> {
 fn execute_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     work_item: WorkItem<B>,
-    timeline: &mut Timeline
 ) -> Result<WorkItemResult<B>, FatalError> {
     let module_config = cgcx.config(work_item.module_kind());
 
     match work_item {
         WorkItem::Optimize(module) => {
-            execute_optimize_work_item(cgcx, module, module_config, timeline)
+            execute_optimize_work_item(cgcx, module, module_config)
         }
         WorkItem::CopyPostLtoArtifacts(module) => {
-            execute_copy_from_cache_work_item(cgcx, module, module_config, timeline)
+            execute_copy_from_cache_work_item(cgcx, module, module_config)
         }
         WorkItem::LTO(module) => {
-            execute_lto_work_item(cgcx, module, module_config, timeline)
+            execute_lto_work_item(cgcx, module, module_config)
         }
     }
 }
@@ -756,12 +790,11 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     module: ModuleCodegen<B::Module>,
     module_config: &ModuleConfig,
-    timeline: &mut Timeline
 ) -> Result<WorkItemResult<B>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
 
     unsafe {
-        B::optimize(cgcx, &diag_handler, &module, module_config, timeline)?;
+        B::optimize(cgcx, &diag_handler, &module, module_config)?;
     }
 
     // After we've done the initial round of optimizations we need to
@@ -818,7 +851,7 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
     Ok(match lto_type {
         ComputedLtoType::No => {
             let module = unsafe {
-                B::codegen(cgcx, &diag_handler, module, module_config, timeline)?
+                B::codegen(cgcx, &diag_handler, module, module_config)?
             };
             WorkItemResult::Compiled(module)
         }
@@ -854,7 +887,6 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     module: CachedModuleCodegen,
     module_config: &ModuleConfig,
-    _: &mut Timeline
 ) -> Result<WorkItemResult<B>, FatalError> {
     let incr_comp_session_dir = cgcx.incr_comp_session_dir
                                     .as_ref()
@@ -916,13 +948,12 @@ fn execute_lto_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     mut module: lto::LtoModuleCodegen<B>,
     module_config: &ModuleConfig,
-    timeline: &mut Timeline
 ) -> Result<WorkItemResult<B>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
 
     unsafe {
-        let module = module.optimize(cgcx, timeline)?;
-        let module = B::codegen(cgcx, &diag_handler, module, module_config, timeline)?;
+        let module = module.optimize(cgcx)?;
+        let module = B::codegen(cgcx, &diag_handler, module, module_config)?;
         Ok(WorkItemResult::Compiled(module))
     }
 }
@@ -977,7 +1008,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
     coordinator_receive: Receiver<Box<dyn Any + Send>>,
     total_cgus: usize,
     jobserver: Client,
-    time_graph: Option<TimeGraph>,
     modules_config: Arc<ModuleConfig>,
     metadata_config: Arc<ModuleConfig>,
     allocator_config: Arc<ModuleConfig>
@@ -1065,7 +1095,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
         cgu_reuse_tracker: sess.cgu_reuse_tracker.clone(),
         coordinator_send,
         diag_emitter: shared_emitter.clone(),
-        time_graph,
         output_filenames: tcx.output_filenames(LOCAL_CRATE),
         regular_module_config: modules_config,
         metadata_module_config: metadata_config,
@@ -1570,12 +1599,6 @@ fn maybe_start_llvm_timer(config: &ModuleConfig,
 }
 
 pub const CODEGEN_WORKER_ID: usize = ::std::usize::MAX;
-pub const CODEGEN_WORKER_TIMELINE: time_graph::TimelineId =
-    time_graph::TimelineId(CODEGEN_WORKER_ID);
-pub const CODEGEN_WORK_PACKAGE_KIND: time_graph::WorkPackageKind =
-    time_graph::WorkPackageKind(&["#DE9597", "#FED1D3", "#FDC5C7", "#B46668", "#88494B"]);
-const LLVM_WORK_PACKAGE_KIND: time_graph::WorkPackageKind =
-    time_graph::WorkPackageKind(&["#7DB67A", "#C6EEC4", "#ACDAAA", "#579354", "#3E6F3C"]);
 
 fn spawn_work<B: ExtraBackendMethods>(
     cgcx: CodegenContext<B>,
@@ -1625,13 +1648,12 @@ fn drop(&mut self) {
         // as a diagnostic was already sent off to the main thread - just
         // surface that there was an error in this worker.
         bomb.result = {
-            let timeline = cgcx.time_graph.as_ref().map(|tg| {
-                tg.start(time_graph::TimelineId(cgcx.worker),
-                         LLVM_WORK_PACKAGE_KIND,
-                         &work.name())
-            });
-            let mut timeline = timeline.unwrap_or(Timeline::noop());
-            execute_work_item(&cgcx, work, &mut timeline).ok()
+            let label = work.name();
+            cgcx.profile(|p| p.start_activity(ProfileCategory::Codegen, label.clone()));
+            let result = execute_work_item(&cgcx, work).ok();
+            cgcx.profile(|p| p.end_activity(ProfileCategory::Codegen, label));
+
+            result
         };
     });
 }
@@ -1785,7 +1807,6 @@ pub struct OngoingCodegen<B: ExtraBackendMethods> {
     pub windows_subsystem: Option<String>,
     pub linker_info: LinkerInfo,
     pub crate_info: CrateInfo,
-    pub time_graph: Option<TimeGraph>,
     pub coordinator_send: Sender<Box<dyn Any + Send>>,
     pub codegen_worker_receive: Receiver<Message<B>>,
     pub shared_emitter_main: SharedEmitterMain,
@@ -1814,10 +1835,6 @@ pub fn join(
 
         sess.abort_if_errors();
 
-        if let Some(time_graph) = self.time_graph {
-            time_graph.dump(&format!("{}-timings", self.crate_name));
-        }
-
         let work_products =
             copy_all_cgu_workproducts_to_incr_comp_cache_dir(sess,
                                                              &compiled_modules);
index 39ce15e4772967b9e4512738022deef64f54ad47..48743be3a2551375d48fe74ae674c6306265a949 100644 (file)
@@ -29,7 +29,6 @@
 use rustc::session::config::{self, EntryFnType, Lto};
 use rustc::session::Session;
 use rustc_mir::monomorphize::item::DefPathBasedNames;
-use rustc::util::time_graph;
 use rustc_mir::monomorphize::Instance;
 use rustc_mir::monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
 use rustc::util::nodemap::FxHashMap;
@@ -528,11 +527,6 @@ fn create_entry_fn<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
 }
 
 pub const CODEGEN_WORKER_ID: usize = ::std::usize::MAX;
-pub const CODEGEN_WORKER_TIMELINE: time_graph::TimelineId =
-    time_graph::TimelineId(CODEGEN_WORKER_ID);
-pub const CODEGEN_WORK_PACKAGE_KIND: time_graph::WorkPackageKind =
-    time_graph::WorkPackageKind(&["#DE9597", "#FED1D3", "#FDC5C7", "#B46668", "#88494B"]);
-
 
 pub fn codegen_crate<B: ExtraBackendMethods>(
     backend: B,
@@ -545,7 +539,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx);
 
     // Codegen the metadata.
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen));
+    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate metadata"));
 
     let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE,
                                                             &["crate"],
@@ -555,7 +549,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let metadata = time(tcx.sess, "write metadata", || {
         backend.write_metadata(tcx, &mut metadata_llvm_module)
     });
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen));
+    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate metadata"));
 
     let metadata_module = ModuleCodegen {
         name: metadata_cgu_name,
@@ -563,19 +557,12 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
         kind: ModuleKind::Metadata,
     };
 
-    let time_graph = if tcx.sess.opts.debugging_opts.codegen_time_graph {
-        Some(time_graph::TimeGraph::new())
-    } else {
-        None
-    };
-
     // Skip crate items and just output metadata in -Z no-codegen mode.
     if tcx.sess.opts.debugging_opts.no_codegen ||
        !tcx.sess.opts.output_types.should_codegen() {
         let ongoing_codegen = start_async_codegen(
             backend,
             tcx,
-            time_graph,
             metadata,
             rx,
             1);
@@ -609,7 +596,6 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
     let ongoing_codegen = start_async_codegen(
         backend.clone(),
         tcx,
-        time_graph.clone(),
         metadata,
         rx,
         codegen_units.len());
@@ -676,15 +662,14 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 
         match cgu_reuse {
             CguReuse::No => {
-                let _timing_guard = time_graph.as_ref().map(|time_graph| {
-                    time_graph.start(CODEGEN_WORKER_TIMELINE,
-                                     CODEGEN_WORK_PACKAGE_KIND,
-                                     &format!("codegen {}", cgu.name()))
-                });
+                tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen,
+                                                       format!("codegen {}", cgu.name())));
                 let start_time = Instant::now();
                 let stats = backend.compile_codegen_unit(tcx, *cgu.name());
                 all_stats.extend(stats);
                 total_codegen_time += start_time.elapsed();
+                tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen,
+                                                     format!("codegen {}", cgu.name())));
                 false
             }
             CguReuse::PreLto => {
index d8fb7c608c8afb72890d0219217034c5a56f8bbd..23bb7179557b910540b148dbaad92e497fa6af21 100644 (file)
@@ -3,7 +3,6 @@
 use crate::{CompiledModule, ModuleCodegen};
 
 use rustc::dep_graph::WorkProduct;
-use rustc::util::time_graph::Timeline;
 use rustc_errors::{FatalError, Handler};
 
 pub trait WriteBackendMethods: 'static + Sized + Clone {
@@ -20,7 +19,6 @@ fn run_fat_lto(
         cgcx: &CodegenContext<Self>,
         modules: Vec<FatLTOInput<Self>>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-        timeline: &mut Timeline,
     ) -> Result<LtoModuleCodegen<Self>, FatalError>;
     /// Performs thin LTO by performing necessary global analysis and returning two
     /// lists, one of the modules that need optimization and another for modules that
@@ -29,7 +27,6 @@ fn run_thin_lto(
         cgcx: &CodegenContext<Self>,
         modules: Vec<(String, Self::ThinBuffer)>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-        timeline: &mut Timeline,
     ) -> Result<(Vec<LtoModuleCodegen<Self>>, Vec<WorkProduct>), FatalError>;
     fn print_pass_timings(&self);
     unsafe fn optimize(
@@ -37,19 +34,16 @@ unsafe fn optimize(
         diag_handler: &Handler,
         module: &ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-        timeline: &mut Timeline,
     ) -> Result<(), FatalError>;
     unsafe fn optimize_thin(
         cgcx: &CodegenContext<Self>,
         thin: &mut ThinModule<Self>,
-        timeline: &mut Timeline,
     ) -> Result<ModuleCodegen<Self::Module>, FatalError>;
     unsafe fn codegen(
         cgcx: &CodegenContext<Self>,
         diag_handler: &Handler,
         module: ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-        timeline: &mut Timeline,
     ) -> Result<CompiledModule, FatalError>;
     fn prepare_thin(
         module: ModuleCodegen<Self::Module>
index d61ccd5605b60a276b3c14ee826bfd9e13dc7851..5bb47bda17b330902442dd3735d8ea326d4ed813 100644 (file)
@@ -67,7 +67,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
         .set_continue_after_error(sess.opts.debugging_opts.continue_parse_after_error);
     hygiene::set_default_edition(sess.edition());
 
-    sess.profiler(|p| p.start_activity(ProfileCategory::Parsing));
+    sess.profiler(|p| p.start_activity(ProfileCategory::Parsing, "parsing"));
     let krate = time(sess, "parsing", || match *input {
         Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
         Input::Str {
@@ -75,7 +75,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
             ref name,
         } => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
     })?;
-    sess.profiler(|p| p.end_activity(ProfileCategory::Parsing));
+    sess.profiler(|p| p.end_activity(ProfileCategory::Parsing, "parsing"));
 
     sess.diagnostic().set_continue_after_error(true);
 
@@ -374,7 +374,7 @@ fn configure_and_expand_inner<'a>(
     syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts);
 
     // Expand all macros
-    sess.profiler(|p| p.start_activity(ProfileCategory::Expansion));
+    sess.profiler(|p| p.start_activity(ProfileCategory::Expansion, "macro expansion"));
     krate = time(sess, "expansion", || {
         // Windows dlls do not have rpaths, so they don't know how to find their
         // dependencies. It's up to us to tell the system where to find all the
@@ -449,7 +449,7 @@ fn configure_and_expand_inner<'a>(
         }
         krate
     });
-    sess.profiler(|p| p.end_activity(ProfileCategory::Expansion));
+    sess.profiler(|p| p.end_activity(ProfileCategory::Expansion, "macro expansion"));
 
     time(sess, "maybe building test harness", || {
         syntax::test::modify_for_testing(
@@ -1018,9 +1018,9 @@ pub fn start_codegen<'tcx>(
         ::rustc::middle::dependency_format::calculate(tcx)
     });
 
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen));
+    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate"));
     let codegen = time(tcx.sess, "codegen", move || codegen_backend.codegen_crate(tcx, rx));
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen));
+    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate"));
 
     if log_enabled!(::log::Level::Info) {
         println!("Post-codegen");
index ebb617c23c6ca72ee85d3f84a411d3b8b2928b16..cbed7d26a9950b413a68448e16c37179d05efe4a 100644 (file)
@@ -317,7 +317,7 @@ pub fn provide(providers: &mut Providers<'_>) {
 pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                              -> Result<(), ErrorReported>
 {
-    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::TypeChecking));
+    tcx.sess.profiler(|p| p.start_activity(ProfileCategory::TypeChecking, "type-check crate"));
 
     // this ensures that later parts of type checking can assume that items
     // have valid types and not error
@@ -368,7 +368,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     check_unused::check_crate(tcx);
     check_for_entry_fn(tcx);
 
-    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::TypeChecking));
+    tcx.sess.profiler(|p| p.end_activity(ProfileCategory::TypeChecking, "type-check crate"));
 
     if tcx.sess.err_count() == 0 {
         Ok(())