"dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
query_dep_graph: bool = (false, parse_bool, [UNTRACKED],
"enable queries of the dependency graph for regression testing"),
+ profile_queries: bool = (false, parse_bool, [UNTRACKED],
+ "trace and profile the queries of the incremental compilation framework"),
no_analysis: bool = (false, parse_bool, [UNTRACKED],
"parse and expand the source, but run no analysis"),
extra_plugins: Vec<String> = (Vec::new(), parse_list, [TRACKED],
}
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }
+ pub fn profile_queries(&self) -> bool { self.opts.debugging_opts.profile_queries }
pub fn count_llvm_insns(&self) -> bool {
self.opts.debugging_opts.count_llvm_insns
}
use ty::subst::Substs;
use ty::fast_reject::SimplifiedType;
use util::nodemap::{DefIdSet, NodeSet};
+use util::common::{profq_msg, ProfileQueriesMsg};
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::FxHashMap;
impl<'tcx> QueryDescription for queries::lint_levels<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("computing the lint levels for items in this crate")
+}
+
+// If enabled, send a message to the profile-queries thread
+// (compiled in only under cfg!(debug_assertions); the -Z profile-queries
+// flag is then checked at runtime on each invocation)
+macro_rules! profq_msg {
+ ($tcx:expr, $msg:expr) => {
+ if cfg!(debug_assertions) {
+ if $tcx.sess.opts.debugging_opts.profile_queries {
+ profq_msg($msg)
+ }
+ }
}
}
$($(#[$attr])* $name($K)),*
}
+ #[allow(bad_style)]
+ #[derive(Clone, Debug, PartialEq, Eq)]
+ pub enum QueryMsg {
+ $($name(String)),*
+ }
+
impl<$tcx> Query<$tcx> {
pub fn describe(&self, tcx: TyCtxt) -> String {
match *self {
key,
span);
+ profq_msg!(tcx,
+ ProfileQueriesMsg::QueryBegin(span.clone(),
+ QueryMsg::$name(format!("{:?}", key))));
+
if let Some(&(ref result, dep_node_index)) = tcx.maps.$name.borrow().map.get(&key) {
tcx.dep_graph.read_index(dep_node_index);
+ profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
return Ok(f(result));
}
+ // else, we are going to run the provider:
+ profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin);
// FIXME(eddyb) Get more valid Span's on queries.
// def_span guard is necessary to prevent a recursive loop,
tcx.dep_graph.with_task(dep_node, tcx, key, run_provider)
}
})?;
+ profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd);
tcx.dep_graph.read_index(dep_node_index);
use std::path::Path;
use std::time::{Duration, Instant};
+use std::sync::mpsc::{Sender};
+use syntax_pos::{Span};
+use ty::maps::{QueryMsg};
+
// The name of the associated type for `Fn` return types
pub const FN_OUTPUT_NAME: &'static str = "Output";
thread_local!(static TIME_DEPTH: Cell<usize> = Cell::new(0));
+/// Initialized for -Z profile-queries
+thread_local!(static PROFQ_CHAN: RefCell<Option<Sender<ProfileQueriesMsg>>> = RefCell::new(None));
+
+/// Parameters to the `Dump` variant of type `ProfileQueriesMsg`.
+/// Constructed by `profile::dump` and consumed by the profiling thread.
+#[derive(Clone,Debug)]
+pub struct ProfQDumpParams {
+ /// A base path for the files we will dump (e.g., `<path>.html`, `<path>.counts.txt`)
+ pub path:String,
+ /// To ensure that the compiler waits for us to finish our dumps;
+ /// the profiling thread sends `()` on this channel when the dump is complete
+ pub ack:Sender<()>,
+ /// toggle dumping a log file with every `ProfileQueriesMsg`
+ pub dump_profq_msg_log:bool,
+}
+
+/// A sequence of these messages induce a trace of query-based incremental compilation.
+/// FIXME(matthewhammer): Determine whether we should include cycle detection here or not.
+///
+/// Expected grammar (enforced by the parser in `profile_queries_thread`):
+/// each `QueryBegin` is followed by either a `CacheHit`, or a (possibly
+/// nested) `ProviderBegin`..`ProviderEnd` bracket.
+#[derive(Clone,Debug)]
+pub enum ProfileQueriesMsg {
+ /// begin a new query
+ QueryBegin(Span,QueryMsg),
+ /// query is satisfied by using an already-known value for the given key
+ CacheHit,
+ /// query requires running a provider; providers may nest, permitting queries to nest.
+ ProviderBegin,
+ /// query is satisfied by a provider terminating with a value
+ ProviderEnd,
+ /// dump a record of the queries to the given path
+ Dump(ProfQDumpParams),
+ /// halt the profiling/monitoring background thread
+ Halt
+}
+
+/// If enabled, send a message to the profile-queries thread
+///
+/// Panics if no channel has been installed beforehand via `profq_set_chan`
+/// (the channel lives in the thread-local `PROFQ_CHAN`).
+pub fn profq_msg(msg: ProfileQueriesMsg) {
+ PROFQ_CHAN.with(|sender|{
+ if let Some(s) = sender.borrow().as_ref() {
+ s.send(msg).unwrap()
+ } else {
+ // sending without an initialized channel is a logic error
+ panic!("no channel on which to send profq_msg: {:?}", msg)
+ }
+ })
+}
+
+/// Set channel for profile queries channel
+///
+/// Returns `true` if the channel was installed, `false` if one was
+/// already present (in which case `s` is dropped unused).
+pub fn profq_set_chan(s: Sender<ProfileQueriesMsg>) -> bool {
+ PROFQ_CHAN.with(|chan|{
+ if chan.borrow().is_none() {
+ *chan.borrow_mut() = Some(s);
+ true
+ } else { false }
+ })
+}
+
/// Read the current depth of `time()` calls. This is used to
/// encourage indentation across threads.
pub fn time_depth() -> usize {
use derive_registrar;
+use profile;
+
pub fn compile_input(sess: &Session,
cstore: &CStore,
input: &Input,
sess.abort_if_errors();
}
+ if sess.opts.debugging_opts.profile_queries {
+ profile::begin();
+ }
+
// We need nested scopes here, because the intermediate results can keep
// large chunks of memory alive and we want to free them as soon as
// possible to keep the peak memory usage low
-> PResult<'a, ast::Crate> {
sess.diagnostic().set_continue_after_error(control.continue_parse_after_error);
+ if sess.opts.debugging_opts.profile_queries {
+ profile::begin();
+ }
+
let krate = time(sess.time_passes(), "parsing", || {
match *input {
Input::File(ref file) => {
"translation",
move || trans::trans_crate(tcx, analysis, incremental_hashes_map, output_filenames));
+ if tcx.sess.opts.debugging_opts.profile_queries {
+ profile::dump("profile_queries".to_string())
+ }
+
translation
}
#[cfg(test)]
mod test;
+pub mod profile;
pub mod driver;
pub mod pretty;
pub mod target_features;
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::common::{ProfQDumpParams, ProfileQueriesMsg, profq_msg, profq_set_chan};
+use std::sync::mpsc::{Receiver};
+use std::io::{Write};
+
+pub mod trace;
+
+/// begin a profile thread, if not already running
+///
+/// `profq_set_chan` returns `false` when a channel is already installed,
+/// so at most one background thread is spawned per calling thread
+/// (the channel is stored in a thread-local).
+pub fn begin() {
+ use std::thread;
+ use std::sync::mpsc::{channel};
+ let (tx, rx) = channel();
+ if profq_set_chan(tx) {
+ thread::spawn(move||profile_queries_thread(rx));
+ }
+}
+
+/// dump files with profiling information to the given base path, and
+/// wait for this dump to complete.
+///
+/// wraps the RPC (send/recv channel logic) of requesting a dump.
+pub fn dump(path:String) {
+ use std::sync::mpsc::{channel};
+ // one-shot ack channel: the profiling thread sends () when the dump is done
+ let (tx, rx) = channel();
+ let params = ProfQDumpParams{
+ path, ack:tx,
+ // FIXME: Add another compiler flag to toggle whether this log
+ // is written; false for now
+ dump_profq_msg_log:false,
+ };
+ profq_msg(ProfileQueriesMsg::Dump(params));
+ // block until the profiling thread acknowledges the finished dump
+ let _ = rx.recv().unwrap();
+}
+
+// profiling thread; retains state (in local variables) and dump traces, upon request.
+//
+// Parses the incoming `ProfileQueriesMsg` stream with a small state
+// machine: `frame.parse_st` holds the parse state of the innermost
+// (current) provider, while `stack` holds the frames of enclosing
+// providers, pushed at ProviderBegin and popped at ProviderEnd.
+fn profile_queries_thread(r:Receiver<ProfileQueriesMsg>) {
+ use self::trace::*;
+ use std::fs::File;
+ use std::time::{Instant};
+
+ // full log of every message received; replayed into `.log.txt` on Dump
+ let mut profq_msgs : Vec<ProfileQueriesMsg> = vec![];
+ let mut frame : StackFrame = StackFrame{ parse_st:ParseState::NoQuery, traces:vec![] };
+ let mut stack : Vec<StackFrame> = vec![];
+ loop {
+ let msg = r.recv();
+ if let Err(_recv_err) = msg {
+ // FIXME: Perhaps do something smarter than simply quitting?
+ // (recv fails once all senders have been dropped)
+ break
+ };
+ let msg = msg.unwrap();
+ debug!("profile_queries_thread: {:?}", msg);
+
+ // Meta-level versus _actual_ queries messages
+ match msg {
+ ProfileQueriesMsg::Halt => return,
+ ProfileQueriesMsg::Dump(params) => {
+ // a dump is only valid between queries: no partial parse in progress
+ assert!(stack.len() == 0);
+ assert!(frame.parse_st == trace::ParseState::NoQuery);
+ {
+ // write log of all messages
+ if params.dump_profq_msg_log {
+ let mut log_file =
+ File::create(format!("{}.log.txt", params.path)).unwrap();
+ for m in profq_msgs.iter() {
+ writeln!(&mut log_file, "{:?}", m).unwrap()
+ };
+ }
+
+ // write HTML file, and counts file
+ let html_path = format!("{}.html", params.path);
+ let mut html_file = File::create(&html_path).unwrap();
+
+ let counts_path = format!("{}.counts.txt", params.path);
+ let mut counts_file = File::create(&counts_path).unwrap();
+
+ write!(html_file, "<html>\n").unwrap();
+ write!(html_file,
+ "<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n",
+ "profile_queries.css").unwrap();
+ write!(html_file, "<style>\n").unwrap();
+ trace::write_style(&mut html_file);
+ write!(html_file, "</style>\n").unwrap();
+ write!(html_file, "</head>\n").unwrap();
+ write!(html_file, "<body>\n").unwrap();
+ trace::write_traces(&mut html_file, &mut counts_file, &frame.traces);
+ write!(html_file, "</body>\n</html>\n").unwrap();
+
+ // Tell main thread that we are done, e.g., so it can exit
+ params.ack.send(()).unwrap();
+ }
+ continue
+ }
+ // Actual query message:
+ msg => {
+ // Record msg in our log
+ profq_msgs.push(msg.clone());
+ // Respond to the message, knowing that we've already handled Halt and Dump, above.
+ match (frame.parse_st.clone(), msg) {
+ (_,ProfileQueriesMsg::Halt) => unreachable!(),
+ (_,ProfileQueriesMsg::Dump(_)) => unreachable!(),
+
+ // Parse State: NoQuery
+ (ParseState::NoQuery,
+ ProfileQueriesMsg::QueryBegin(span,querymsg)) => {
+ // start timing now; resolved at CacheHit or ProviderEnd
+ let start = Instant::now();
+ frame.parse_st = ParseState::HaveQuery
+ (Query{span:span, msg:querymsg}, start)
+ },
+ (ParseState::NoQuery,
+ ProfileQueriesMsg::CacheHit) => {
+ panic!("parse error: unexpected CacheHit; expected QueryBegin")
+ },
+ (ParseState::NoQuery,
+ ProfileQueriesMsg::ProviderBegin) => {
+ panic!("parse error: expected QueryBegin before beginning a provider")
+ },
+ (ParseState::NoQuery,
+ ProfileQueriesMsg::ProviderEnd) => {
+ // provider finished: its traces become the extent of the
+ // query recorded in the popped (enclosing) frame
+ let provider_extent = frame.traces;
+ match stack.pop() {
+ None =>
+ panic!("parse error: expected a stack frame; found an empty stack"),
+ Some(old_frame) => {
+ match old_frame.parse_st {
+ ParseState::NoQuery =>
+ panic!("parse error: expected a stack frame for a query"),
+ ParseState::HaveQuery(q,start) => {
+ let duration = start.elapsed();
+ frame = StackFrame{
+ parse_st:ParseState::NoQuery,
+ traces:old_frame.traces
+ };
+ let trace = Rec {
+ effect: Effect::QueryBegin(q, CacheCase::Miss),
+ extent: Box::new(provider_extent),
+ start: start,
+ duration: duration,
+ };
+ frame.traces.push( trace );
+ }
+ }
+ }
+ }
+ }
+
+ // Parse State: HaveQuery
+ (ParseState::HaveQuery(q,start),
+ ProfileQueriesMsg::CacheHit) => {
+ // cache hit: record a leaf trace (empty extent) for this query
+ let duration = start.elapsed();
+ let trace : Rec = Rec{
+ effect: Effect::QueryBegin(q, CacheCase::Hit),
+ extent: Box::new(vec![]),
+ start: start,
+ duration: duration,
+ };
+ frame.traces.push( trace );
+ frame.parse_st = ParseState::NoQuery;
+ },
+ (ParseState::HaveQuery(_,_),
+ ProfileQueriesMsg::ProviderBegin) => {
+ // provider begins: nest by saving the current frame
+ stack.push(frame);
+ frame = StackFrame{parse_st:ParseState::NoQuery, traces:vec![]};
+ },
+ (ParseState::HaveQuery(q,_),
+ ProfileQueriesMsg::ProviderEnd) => {
+ panic!("parse error: unexpected ProviderEnd; \
+ expected something else to follow BeginQuery for {:?}", q)
+ },
+ (ParseState::HaveQuery(q1,_),
+ ProfileQueriesMsg::QueryBegin(span2,querymsg2)) => {
+ panic!("parse error: unexpected QueryBegin; \
+ earlier query is unfinished: {:?} and now {:?}",
+ q1, Query{span:span2, msg:querymsg2})
+ },
+ }
+ }
+ }
+ }
+}
--- /dev/null
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::*;
+use syntax_pos::Span;
+use rustc::ty::maps::QueryMsg;
+use std::fs::File;
+use std::time::{Duration, Instant};
+use std::collections::hash_map::HashMap;
+
+/// A query invocation: the span it covers and the query-specific message.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Query {
+ pub span: Span,
+ pub msg: QueryMsg,
+}
+/// The effect recorded for one trace node (currently only query begins).
+pub enum Effect {
+ QueryBegin(Query, CacheCase),
+}
+/// Whether a query was answered from cache or had to run its provider.
+pub enum CacheCase {
+ Hit, Miss
+}
+/// Recursive trace structure
+pub struct Rec {
+ pub effect: Effect,
+ pub start: Instant,
+ pub duration: Duration,
+ /// sub-traces generated while this effect's provider was running
+ pub extent: Box<Vec<Rec>>,
+}
+/// State for parsing recursive trace structure
+#[derive(Clone, Eq, PartialEq)]
+pub enum ParseState {
+ NoQuery,
+ HaveQuery(Query, Instant),
+}
+/// One level of the parser's stack: parse state plus traces completed so far.
+pub struct StackFrame {
+ pub parse_st: ParseState,
+ pub traces: Vec<Rec>,
+}
+/// Per-query-constructor aggregate: invocation count and total duration.
+pub struct QueryMetric {
+ pub count: usize,
+ pub duration: Duration,
+}
+
+/// Extract the constructor name of a `QueryMsg` from its `Debug` rendering,
+/// i.e., the text before the first '(' or '{'.
+pub fn cons_of_query_msg(q: &trace::Query) -> String {
+ let s = format!("{:?}", q.msg);
+ let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
+ assert!(cons.len() > 0 && cons[0] != "");
+ cons[0].to_string()
+}
+
+// First return value is text; second return value is a CSS class
+// (the class combines the query constructor name with "hit"/"miss")
+pub fn html_of_effect(eff: &Effect) -> (String, String) {
+ match *eff {
+ Effect::QueryBegin(ref qmsg, ref cc) => {
+ let cons = cons_of_query_msg(qmsg);
+ (cons.clone(),
+ format!("{} {}",
+ cons,
+ match *cc {
+ CacheCase::Hit => "hit",
+ CacheCase::Miss => "miss",
+ }))
+ }
+ }
+}
+
+// First return value is text; second return value is a CSS class
+// (the class is currently always empty, and `_start` is unused)
+fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) {
+ use rustc::util::common::duration_to_secs_str;
+ (duration_to_secs_str(dur.clone()),
+ "".to_string()
+ )
+}
+
+// Render a fraction of total time as a percentage string plus a bucketed
+// CSS class ("frac-50" down to "frac-0") used to style by magnitude.
+fn html_of_fraction(frac: f64) -> (String, String) {
+ let css = {
+ if frac > 0.50 { format!("frac-50") }
+ else if frac > 0.40 { format!("frac-40") }
+ else if frac > 0.30 { format!("frac-30") }
+ else if frac > 0.20 { format!("frac-20") }
+ else if frac > 0.10 { format!("frac-10") }
+ else if frac > 0.05 { format!("frac-05") }
+ else if frac > 0.02 { format!("frac-02") }
+ else if frac > 0.01 { format!("frac-01") }
+ else if frac > 0.001 { format!("frac-001") }
+ else { format!("frac-0") }
+ };
+ let percent = frac * 100.0;
+ if percent > 0.1 { (format!("{:.1}%", percent), css) }
+ else { (format!("< 0.1%", ), css) }
+}
+
+/// Sum the `duration` fields of the given traces (this level only;
+/// extents are not recursed into).
+fn total_duration(traces: &Vec<Rec>) -> Duration {
+ let mut sum : Duration = Duration::new(0,0);
+ for t in traces.iter() {
+ sum += t.duration;
+ }
+ return sum
+}
+
+/// Ratio of two durations, computed via their nanosecond counts.
+fn duration_div(nom: Duration, den: Duration) -> f64 {
+ fn to_nanos(d: Duration) -> u64 {
+ d.as_secs() * 1_000_000_000 + d.subsec_nanos() as u64
+ }
+
+ to_nanos(nom) as f64 / to_nanos(den) as f64
+}
+
+/// Emit one nested `<div class="trace ...">` per trace node, recursing
+/// into each extent with an incremented `depth-N` class; fractions are
+/// computed relative to `total`.
+fn write_traces_rec(file: &mut File, traces: &Vec<Rec>, total: Duration, depth: usize) {
+ for t in traces {
+ let (eff_text, eff_css_classes) = html_of_effect(&t.effect);
+ let (dur_text, dur_css_classes) = html_of_duration(&t.start, &t.duration);
+ let fraction = duration_div(t.duration, total);
+ let percent = fraction * 100.0;
+ let (frc_text, frc_css_classes) = html_of_fraction(fraction);
+ write!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">\n",
+ depth,
+ t.extent.len(),
+ /* Heuristic for 'important' CSS class: */
+ if t.extent.len() > 5 || percent >= 1.0 {
+ " important" }
+ else { "" },
+ eff_css_classes,
+ dur_css_classes,
+ frc_css_classes,
+ ).unwrap();
+ write!(file, "<div class=\"eff\">{}</div>\n", eff_text).unwrap();
+ write!(file, "<div class=\"dur\">{}</div>\n", dur_text).unwrap();
+ write!(file, "<div class=\"frc\">{}</div>\n", frc_text).unwrap();
+ write_traces_rec(file, &t.extent, total, depth + 1);
+ write!(file, "</div>\n").unwrap();
+ }
+}
+
+/// Accumulate, per query constructor, the invocation count and the total
+/// duration over the whole trace tree (recursing into extents).
+fn compute_counts_rec(counts: &mut HashMap<String,QueryMetric>, traces: &Vec<Rec>) {
+ for t in traces.iter() {
+ match t.effect {
+ Effect::QueryBegin(ref qmsg, ref _cc) => {
+ let qcons = cons_of_query_msg(qmsg);
+ let qm = match counts.get(&qcons) {
+ Some(qm) =>
+ QueryMetric{
+ count: qm.count + 1,
+ duration: qm.duration + t.duration
+ },
+ None => QueryMetric{
+ count: 1,
+ duration: t.duration
+ }
+ };
+ counts.insert(qcons, qm);
+ }
+ }
+ compute_counts_rec(counts, &t.extent)
+ }
+}
+
+/// Write the aggregated counts as CSV lines `cons,count,duration`,
+/// sorted by descending duration.
+pub fn write_counts(count_file: &mut File, counts: &mut HashMap<String,QueryMetric>) {
+ use rustc::util::common::duration_to_secs_str;
+ use std::cmp::Ordering;
+
+ let mut data = vec![];
+ for (ref cons, ref qm) in counts.iter() {
+ data.push((cons.clone(), qm.count.clone(), qm.duration.clone()));
+ };
+ // NOTE(review): this comparator never returns Ordering::Equal, so it is
+ // not a total order for equal durations; `d2.cmp(&d1)` would be safer.
+ data.sort_by(|&(_,_,d1),&(_,_,d2)|
+ if d1 > d2 { Ordering::Less } else { Ordering::Greater } );
+ for (cons, count, duration) in data {
+ write!(count_file, "{},{},{}\n",
+ cons, count, duration_to_secs_str(duration)
+ ).unwrap();
+ }
+}
+
+/// Top-level entry: write the counts file, then the HTML trace tree,
+/// with per-trace fractions computed against the total duration.
+pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &Vec<Rec>) {
+ let mut counts : HashMap<String,QueryMetric> = HashMap::new();
+ compute_counts_rec(&mut counts, traces);
+ write_counts(counts_file, &mut counts);
+
+ let total : Duration = total_duration(traces);
+ write_traces_rec(html_file, traces, total, 0)
+}
+
+/// Write the static CSS used by the generated profile HTML; the string
+/// below is emitted verbatim into the `<style>` block of the dump.
+pub fn write_style(html_file: &mut File) {
+ write!(html_file,"{}", "
+body {
+ font-family: sans-serif;
+ background: black;
+}
+.trace {
+ color: black;
+ display: inline-block;
+ border-style: solid;
+ border-color: red;
+ border-width: 1px;
+ border-radius: 5px;
+ padding: 0px;
+ margin: 1px;
+ font-size: 0px;
+}
+.miss {
+ border-color: red;
+ border-width: 1px;
+}
+.extent-0 {
+ padding: 2px;
+}
+.important {
+ border-width: 3px;
+ font-size: 12px;
+ color: white;
+ border-color: #f77;
+}
+.hit {
+ padding: 0px;
+ border-color: blue;
+ border-width: 3px;
+}
+.eff {
+ color: #fff;
+ display: inline-block;
+}
+.frc {
+ color: #7f7;
+ display: inline-block;
+}
+.dur {
+ display: none
+}
+").unwrap();
+}