use rustc::dep_graph::{DepGraph, DepKind, WorkProduct, WorkProductId};
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::join;
use rustc_serialize::opaque::Encoder;
use rustc_serialize::Encodable as RustcEncodable;

use std::fs;
use std::path::PathBuf;

use super::data::*;
use super::dirty_clean;
use super::file_format;
use super::fs::*;
use super::work_product;
17 pub fn save_dep_graph(tcx: TyCtxt<'_>) {
18 debug!("save_dep_graph()");
19 tcx.dep_graph.with_ignore(|| {
21 if sess.opts.incremental.is_none() {
24 // This is going to be deleted in finalize_session_directory, so let's not create it
25 if sess.has_errors_or_delayed_span_bugs() {
29 let query_cache_path = query_cache_path(sess);
30 let dep_graph_path = dep_graph_path(sess);
34 if tcx.sess.opts.debugging_opts.incremental_queries {
35 sess.time("incr_comp_persist_result_cache", || {
36 save_in(sess, query_cache_path, |e| encode_query_cache(tcx, e));
41 sess.time("incr_comp_persist_dep_graph", || {
42 save_in(sess, dep_graph_path, |e| {
43 sess.time("incr_comp_encode_dep_graph", || encode_dep_graph(tcx, e))
49 dirty_clean::check_dirty_clean_annotations(tcx);
53 pub fn save_work_product_index(
56 new_work_products: FxHashMap<WorkProductId, WorkProduct>,
58 if sess.opts.incremental.is_none() {
61 // This is going to be deleted in finalize_session_directory, so let's not create it
62 if sess.has_errors_or_delayed_span_bugs() {
66 debug!("save_work_product_index()");
67 dep_graph.assert_ignored();
68 let path = work_products_path(sess);
69 save_in(sess, path, |e| encode_work_product_index(&new_work_products, e));
71 // We also need to clean out old work-products, as not all of them are
72 // deleted during invalidation. Some object files don't change their
73 // content, they are just not needed anymore.
74 let previous_work_products = dep_graph.previous_work_products();
75 for (id, wp) in previous_work_products.iter() {
76 if !new_work_products.contains_key(id) {
77 work_product::delete_workproduct_files(sess, wp);
79 wp.saved_files.iter().all(|&(_, ref file_name)| {
80 !in_incr_comp_dir_sess(sess, file_name).exists()
86 // Check that we did not delete one of the current work-products:
90 .flat_map(|(_, wp)| wp.saved_files.iter().map(|&(_, ref name)| name))
91 .map(|name| in_incr_comp_dir_sess(sess, name))
92 .all(|path| path.exists())
96 fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
98 F: FnOnce(&mut Encoder),
100 debug!("save: storing data in {}", path_buf.display());
102 // delete the old dep-graph, if any
103 // Note: It's important that we actually delete the old file and not just
104 // truncate and overwrite it, since it might be a shared hard-link, the
105 // underlying data of which we don't want to modify
106 if path_buf.exists() {
107 match fs::remove_file(&path_buf) {
109 debug!("save: remove old file");
113 "unable to delete old dep-graph at `{}`: {}",
122 // generate the data in a memory buffer
123 let mut encoder = Encoder::new(Vec::new());
124 file_format::write_file_header(&mut encoder);
125 encode(&mut encoder);
127 // write the data out
128 let data = encoder.into_inner();
129 match fs::write(&path_buf, data) {
131 debug!("save: data written to disk successfully");
134 sess.err(&format!("failed to write dep-graph to `{}`: {}", path_buf.display(), err));
140 fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
141 // First encode the commandline arguments hash
142 tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();
144 // Encode the graph data.
145 let serialized_graph =
146 tcx.sess.time("incr_comp_serialize_dep_graph", || tcx.dep_graph.serialize());
148 if tcx.sess.opts.debugging_opts.incremental_info {
156 let total_node_count = serialized_graph.nodes.len();
157 let total_edge_count = serialized_graph.edge_list_data.len();
159 let mut counts: FxHashMap<_, Stat> = FxHashMap::default();
161 for (i, &node) in serialized_graph.nodes.iter_enumerated() {
162 let stat = counts.entry(node.kind).or_insert(Stat {
168 stat.node_counter += 1;
169 let (edge_start, edge_end) = serialized_graph.edge_list_indices[i];
170 stat.edge_counter += (edge_end - edge_start) as u64;
173 let mut counts: Vec<_> = counts.values().cloned().collect();
174 counts.sort_by_key(|s| -(s.node_counter as i64));
176 let percentage_of_all_nodes: Vec<f64> = counts
178 .map(|s| (100.0 * (s.node_counter as f64)) / (total_node_count as f64))
181 let average_edges_per_kind: Vec<f64> =
182 counts.iter().map(|s| (s.edge_counter as f64) / (s.node_counter as f64)).collect();
184 println!("[incremental]");
185 println!("[incremental] DepGraph Statistics");
187 const SEPARATOR: &str = "[incremental] --------------------------------\
188 ----------------------------------------------\
191 println!("{}", SEPARATOR);
192 println!("[incremental]");
193 println!("[incremental] Total Node Count: {}", total_node_count);
194 println!("[incremental] Total Edge Count: {}", total_edge_count);
195 if let Some((total_edge_reads, total_duplicate_edge_reads)) =
196 tcx.dep_graph.edge_deduplication_data()
198 println!("[incremental] Total Edge Reads: {}", total_edge_reads);
199 println!("[incremental] Total Duplicate Edge Reads: {}", total_duplicate_edge_reads);
201 println!("[incremental]");
203 "[incremental] {:<36}| {:<17}| {:<12}| {:<17}|",
204 "Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count"
207 "[incremental] -------------------------------------\
210 |------------------|"
213 for (i, stat) in counts.iter().enumerate() {
215 "[incremental] {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
216 format!("{:?}", stat.kind),
217 percentage_of_all_nodes[i],
219 average_edges_per_kind[i]
223 println!("{}", SEPARATOR);
224 println!("[incremental]");
227 tcx.sess.time("incr_comp_encode_serialized_dep_graph", || {
228 serialized_graph.encode(encoder).unwrap();
232 fn encode_work_product_index(
233 work_products: &FxHashMap<WorkProductId, WorkProduct>,
234 encoder: &mut Encoder,
236 let serialized_products: Vec<_> = work_products
238 .map(|(id, work_product)| SerializedWorkProduct {
240 work_product: work_product.clone(),
244 serialized_products.encode(encoder).unwrap();
247 fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
248 tcx.sess.time("incr_comp_serialize_result_cache", || {
249 tcx.serialize_query_result_cache(encoder).unwrap();