]> git.lizzy.rs Git - rust.git/blob - src/librustc_incremental/persist/save.rs
Do not base path to append extension
[rust.git] / src / librustc_incremental / persist / save.rs
1 use rustc::dep_graph::{DepGraph, DepKind, WorkProduct, WorkProductId};
2 use rustc::session::Session;
3 use rustc::ty::TyCtxt;
4 use rustc_data_structures::fx::FxHashMap;
5 use rustc_data_structures::sync::join;
6 use rustc_serialize::opaque::Encoder;
7 use rustc_serialize::Encodable as RustcEncodable;
8 use std::fs;
9 use std::path::PathBuf;
10
11 use super::data::*;
12 use super::dirty_clean;
13 use super::file_format;
14 use super::fs::*;
15 use super::work_product;
16
17 pub fn save_dep_graph(tcx: TyCtxt<'_>) {
18     debug!("save_dep_graph()");
19     tcx.dep_graph.with_ignore(|| {
20         let sess = tcx.sess;
21         if sess.opts.incremental.is_none() {
22             return;
23         }
24         // This is going to be deleted in finalize_session_directory, so let's not create it
25         if sess.has_errors_or_delayed_span_bugs() {
26             return;
27         }
28
29         let query_cache_path = query_cache_path(sess);
30         let dep_graph_path = dep_graph_path(sess);
31
32         join(
33             move || {
34                 if tcx.sess.opts.debugging_opts.incremental_queries {
35                     sess.time("incr_comp_persist_result_cache", || {
36                         save_in(sess, query_cache_path, |e| encode_query_cache(tcx, e));
37                     });
38                 }
39             },
40             || {
41                 sess.time("incr_comp_persist_dep_graph", || {
42                     save_in(sess, dep_graph_path, |e| {
43                         sess.time("incr_comp_encode_dep_graph", || encode_dep_graph(tcx, e))
44                     });
45                 });
46             },
47         );
48
49         dirty_clean::check_dirty_clean_annotations(tcx);
50     })
51 }
52
53 pub fn save_work_product_index(
54     sess: &Session,
55     dep_graph: &DepGraph,
56     new_work_products: FxHashMap<WorkProductId, WorkProduct>,
57 ) {
58     if sess.opts.incremental.is_none() {
59         return;
60     }
61     // This is going to be deleted in finalize_session_directory, so let's not create it
62     if sess.has_errors_or_delayed_span_bugs() {
63         return;
64     }
65
66     debug!("save_work_product_index()");
67     dep_graph.assert_ignored();
68     let path = work_products_path(sess);
69     save_in(sess, path, |e| encode_work_product_index(&new_work_products, e));
70
71     // We also need to clean out old work-products, as not all of them are
72     // deleted during invalidation. Some object files don't change their
73     // content, they are just not needed anymore.
74     let previous_work_products = dep_graph.previous_work_products();
75     for (id, wp) in previous_work_products.iter() {
76         if !new_work_products.contains_key(id) {
77             work_product::delete_workproduct_files(sess, wp);
78             debug_assert!(
79                 wp.saved_files.iter().all(|&(_, ref file_name)| {
80                     !in_incr_comp_dir_sess(sess, file_name).exists()
81                 })
82             );
83         }
84     }
85
86     // Check that we did not delete one of the current work-products:
87     debug_assert!({
88         new_work_products
89             .iter()
90             .flat_map(|(_, wp)| wp.saved_files.iter().map(|&(_, ref name)| name))
91             .map(|name| in_incr_comp_dir_sess(sess, name))
92             .all(|path| path.exists())
93     });
94 }
95
96 fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
97 where
98     F: FnOnce(&mut Encoder),
99 {
100     debug!("save: storing data in {}", path_buf.display());
101
102     // delete the old dep-graph, if any
103     // Note: It's important that we actually delete the old file and not just
104     // truncate and overwrite it, since it might be a shared hard-link, the
105     // underlying data of which we don't want to modify
106     if path_buf.exists() {
107         match fs::remove_file(&path_buf) {
108             Ok(()) => {
109                 debug!("save: remove old file");
110             }
111             Err(err) => {
112                 sess.err(&format!(
113                     "unable to delete old dep-graph at `{}`: {}",
114                     path_buf.display(),
115                     err
116                 ));
117                 return;
118             }
119         }
120     }
121
122     // generate the data in a memory buffer
123     let mut encoder = Encoder::new(Vec::new());
124     file_format::write_file_header(&mut encoder);
125     encode(&mut encoder);
126
127     // write the data out
128     let data = encoder.into_inner();
129     match fs::write(&path_buf, data) {
130         Ok(_) => {
131             debug!("save: data written to disk successfully");
132         }
133         Err(err) => {
134             sess.err(&format!("failed to write dep-graph to `{}`: {}", path_buf.display(), err));
135             return;
136         }
137     }
138 }
139
/// Encodes into `encoder`: first the hash of the dep-tracked commandline
/// options, then the serialized dep-graph itself. With `-Zincremental-info`,
/// additionally prints per-`DepKind` node/edge statistics to stdout.
fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
    // First encode the commandline arguments hash
    tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();

    // Encode the graph data.
    let serialized_graph =
        tcx.sess.time("incr_comp_serialize_dep_graph", || tcx.dep_graph.serialize());

    if tcx.sess.opts.debugging_opts.incremental_info {
        // Per-DepKind tally of serialized nodes and their outgoing edges.
        #[derive(Clone)]
        struct Stat {
            kind: DepKind,
            node_counter: u64,
            edge_counter: u64,
        }

        let total_node_count = serialized_graph.nodes.len();
        let total_edge_count = serialized_graph.edge_list_data.len();

        let mut counts: FxHashMap<_, Stat> = FxHashMap::default();

        // Accumulate counts keyed by node kind.
        for (i, &node) in serialized_graph.nodes.iter_enumerated() {
            let stat = counts.entry(node.kind).or_insert(Stat {
                kind: node.kind,
                node_counter: 0,
                edge_counter: 0,
            });

            stat.node_counter += 1;
            // edge_list_indices[i] delimits node i's edge span inside
            // edge_list_data; the difference is its edge count.
            let (edge_start, edge_end) = serialized_graph.edge_list_indices[i];
            stat.edge_counter += (edge_end - edge_start) as u64;
        }

        // Report kinds in descending order of node count (negated key sorts
        // largest first).
        let mut counts: Vec<_> = counts.values().cloned().collect();
        counts.sort_by_key(|s| -(s.node_counter as i64));

        let percentage_of_all_nodes: Vec<f64> = counts
            .iter()
            .map(|s| (100.0 * (s.node_counter as f64)) / (total_node_count as f64))
            .collect();

        let average_edges_per_kind: Vec<f64> =
            counts.iter().map(|s| (s.edge_counter as f64) / (s.node_counter as f64)).collect();

        println!("[incremental]");
        println!("[incremental] DepGraph Statistics");

        const SEPARATOR: &str = "[incremental] --------------------------------\
                                 ----------------------------------------------\
                                 ------------";

        println!("{}", SEPARATOR);
        println!("[incremental]");
        println!("[incremental] Total Node Count: {}", total_node_count);
        println!("[incremental] Total Edge Count: {}", total_edge_count);
        // Deduplication data is only available for some graph configurations
        // (None otherwise), so it is printed conditionally.
        if let Some((total_edge_reads, total_duplicate_edge_reads)) =
            tcx.dep_graph.edge_deduplication_data()
        {
            println!("[incremental] Total Edge Reads: {}", total_edge_reads);
            println!("[incremental] Total Duplicate Edge Reads: {}", total_duplicate_edge_reads);
        }
        println!("[incremental]");
        println!(
            "[incremental]  {:<36}| {:<17}| {:<12}| {:<17}|",
            "Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count"
        );
        println!(
            "[incremental] -------------------------------------\
                  |------------------\
                  |-------------\
                  |------------------|"
        );

        // One table row per kind; column widths match the header above.
        for (i, stat) in counts.iter().enumerate() {
            println!(
                "[incremental]  {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
                format!("{:?}", stat.kind),
                percentage_of_all_nodes[i],
                stat.node_counter,
                average_edges_per_kind[i]
            );
        }

        println!("{}", SEPARATOR);
        println!("[incremental]");
    }

    tcx.sess.time("incr_comp_encode_serialized_dep_graph", || {
        serialized_graph.encode(encoder).unwrap();
    });
}
231
232 fn encode_work_product_index(
233     work_products: &FxHashMap<WorkProductId, WorkProduct>,
234     encoder: &mut Encoder,
235 ) {
236     let serialized_products: Vec<_> = work_products
237         .iter()
238         .map(|(id, work_product)| SerializedWorkProduct {
239             id: *id,
240             work_product: work_product.clone(),
241         })
242         .collect();
243
244     serialized_products.encode(encoder).unwrap();
245 }
246
247 fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
248     tcx.sess.time("incr_comp_serialize_result_cache", || {
249         tcx.serialize_query_result_cache(encoder).unwrap();
250     })
251 }