1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Code to save/load the dep-graph from files.
13 use rustc::dep_graph::{DepNode, WorkProductId};
14 use rustc::hir::def_id::DefId;
15 use rustc::hir::svh::Svh;
16 use rustc::session::Session;
17 use rustc::ty::TyCtxt;
18 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
19 use rustc_serialize::Decodable as RustcDecodable;
20 use rustc_serialize::opaque::Decoder;
21 use std::path::{Path};
24 use IncrementalHashesMap;
27 use super::directory::*;
28 use super::dirty_clean;
31 use super::file_format;
32 use super::work_product;
34 // The key is a dirty node. The value is **some** base-input that we
// can blame the dirtiness on -- i.e. a node whose hash changed (or which
// was removed), causing the key to become dirty. The same "blame" value
// is propagated along edges in `transitive_dirty_nodes` below.
// NOTE(review): the tail of the original comment is elided from this
// extract; the reading above is inferred from how `blame` is used.
36 pub type DirtyNodes = FxHashMap<DepNode<DefPathIndex>, DepNode<DefPathIndex>>;
38 /// If we are in incremental mode, and a previous dep-graph exists,
39 /// then load up those nodes/edges that are still valid into the
40 /// dep-graph for this session. (This is assumed to be running very
41 /// early in compilation, before we've really done any work, but
42 /// actually it doesn't matter all that much.) See `README.md` for
43 /// more general overview.
// NOTE(review): several lines of this function (early `return`, match
// arm bodies, closing braces) are elided from this extract; the inserted
// comments describe only what is visible.
44 pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
45 incremental_hashes_map: &IncrementalHashesMap) {
// Incremental compilation was not requested on the command line:
// nothing to load. (The early `return` is elided here.)
46 if tcx.sess.opts.incremental.is_none() {
// Allocate/validate the incr.-comp. session directory and decide
// whether there is prior dep-graph data worth loading.
50 match prepare_session_directory(tcx) {
52 // We successfully allocated a session directory and there is
53 // something in it to load, so continue
56 // We successfully allocated a session directory, but there is no
57 // dep-graph data in it to load (because this is the first
58 // compilation session with this incr. comp. dir.)
62 // Something went wrong while trying to allocate the session
63 // directory. Don't try to use it any further.
// Suppress dep-graph read/write tracking while we reconstruct the
// graph itself -- loading must not register as tracked work.
68 let _ignore = tcx.dep_graph.in_ignore();
69 load_dep_graph_if_exists(tcx, incremental_hashes_map);
// Loads the serialized dep-graph and work-products files from the
// current session directory (if both exist) and hands them to
// `decode_dep_graph`. A missing file simply aborts the load; a decoding
// failure is reported as a session error naming both file paths.
// NOTE(review): match arms and closing braces are elided in this extract.
72 fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
73 incremental_hashes_map: &IncrementalHashesMap) {
74 let dep_graph_path = dep_graph_path(tcx.sess);
75 let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
77 None => return // no file
80 let work_products_path = work_products_path(tcx.sess);
81 let work_products_data = match load_data(tcx.sess, &work_products_path) {
83 None => return // no file
// Decode both payloads; the `Err` arm (elided) reports the error below.
86 match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
87 Ok(dirty_nodes) => dirty_nodes,
90 &format!("decoding error in dep-graph from `{}` and `{}`: {}",
91 dep_graph_path.display(),
92 work_products_path.display(),
// Reads one file from the session directory via `file_format::read_file`.
// Returns `Some(bytes)` on success and `None` both when the file is
// absent/incompatible and (after reporting a session error) when the
// read itself failed. NOTE(review): the `Err` arm header and `return`
// statements are elided from this extract.
98 fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
99 match file_format::read_file(sess, path) {
100 Ok(Some(data)) => return Some(data),
102 // The file either didn't exist or was produced by an incompatible
103 // compiler version. Neither is an error.
// (Elided `Err(err)` arm:) report the underlying I/O error.
107 &format!("could not load dep-graph from `{}`: {}",
108 path.display(), err));
// An unreadable/incompatible file means the whole session directory's
// contents are unusable; wipe them so later saves start from scratch.
112 if let Err(err) = delete_all_session_dir_contents(sess) {
113 sess.err(&format!("could not clear incompatible incremental \
114 compilation session directory `{}`: {}",
115 path.display(), err));
121 /// Decode the dep graph and load the edges/nodes that are still clean
122 /// into `tcx.dep_graph`.
// NOTE(review): this function is heavily elided in this extract (match
// arms, iterator-chain steps, closing braces, and some arguments are
// missing); comments below describe only the visible structure.
123 pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
124 incremental_hashes_map: &IncrementalHashesMap,
125 dep_graph_data: &[u8],
126 work_products_data: &[u8])
127 -> Result<(), String>
129 // Decode the list of work_products
130 let mut work_product_decoder = Decoder::new(work_products_data, 0);
131 let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;
133 // Deserialize the directory and dep-graph.
134 let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
135 let prev_commandline_args_hash = u64::decode(&mut dep_graph_decoder)?;
// If the command-line arguments that affect dep-tracking differ from
// the previous session, none of the cache can be reused: delete all
// previous work products and bail out (the `return` is elided).
137 if prev_commandline_args_hash != tcx.sess.opts.dep_tracking_hash() {
138 if tcx.sess.opts.debugging_opts.incremental_info {
139 println!("incremental: completely ignoring cache because of \
140 differing commandline arguments");
142 // We can't reuse the cache, purge it.
143 debug!("decode_dep_graph: differing commandline arg hashes");
144 for swp in work_products {
145 delete_dirty_work_product(tcx, swp);
148 // No need to do any further work
152 let directory = DefIdDirectory::decode(&mut dep_graph_decoder)?;
153 let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;
// Build a source -> targets adjacency map from the serialized edges.
// (The intermediate iterator adaptors / `collect` are elided here.)
155 let edge_map: FxHashMap<_, _> = serialized_dep_graph.edges
157 .map(|s| (s.source, s.targets))
160 // Retrace the paths in the directory to find their current location (if any).
161 let retraced = directory.retrace(tcx);
163 // Compute the set of nodes from the old graph where some input
164 // has changed or been removed. These are "raw" source nodes,
165 // which means that they still use the original `DefPathIndex`
166 // values from the encoding, rather than having been retraced to a
167 // `DefId`. The reason for this is that this way we can include
168 // nodes that have been removed (which no longer have a `DefId` in
169 // the current compilation).
170 let dirty_raw_nodes = initial_dirty_nodes(tcx,
171 incremental_hashes_map,
172 &serialized_dep_graph.hashes,
// Propagate dirtiness forward along edges so every transitively
// affected node is marked dirty (carrying its original blame node).
174 let dirty_raw_nodes = transitive_dirty_nodes(&edge_map, dirty_raw_nodes);
176 // Recreate the edges in the graph that are still clean.
177 let mut clean_work_products = FxHashSet();
178 let mut dirty_work_products = FxHashSet(); // incomplete; just used to suppress debug output
179 for (source, targets) in &edge_map {
180 for target in targets {
181 // If the target is dirty, skip the edge. If this is an edge
182 // that targets a work-product, we can print the blame
// (presumably "...blame information" -- the comment tail is elided)
184 if let Some(blame) = dirty_raw_nodes.get(target) {
185 if let DepNode::WorkProduct(ref wp) = *target {
186 if tcx.sess.opts.debugging_opts.incremental_info {
// Only report each dirty work-product once (`insert` returns
// true on first insertion).
187 if dirty_work_products.insert(wp.clone()) {
188 // It'd be nice to pretty-print these paths better than just
189 // using the `Debug` impls, but wev.
190 println!("incremental: module {:?} is dirty because {:?} \
191 changed or was removed",
193 blame.map_def(|&index| {
194 Some(directory.def_path_string(tcx, index))
202 // If the source is dirty, the target will be dirty.
203 assert!(!dirty_raw_nodes.contains_key(source));
205 // Retrace the source -> target edges to def-ids and then
206 // create an edge in the graph. Retracing may yield none if
207 // some of the data happens to have been removed; this ought
208 // to be impossible unless it is dirty, so we can unwrap.
209 let source_node = retraced.map(source).unwrap();
210 let target_node = retraced.map(target).unwrap();
// Recording the edge: open a task for the target, then read the
// source within it, so the dep-graph registers source -> target.
211 let _task = tcx.dep_graph.in_task(target_node);
212 tcx.dep_graph.read(source_node);
// A clean edge into a work-product node means that work-product
// survived; remember it so `reconcile_work_products` can keep it.
213 if let DepNode::WorkProduct(ref wp) = *target {
214 clean_work_products.insert(wp.clone());
219 // Add in work-products that are still clean, and delete those that are
// dirty or no longer applicable (comment tail elided).
221 reconcile_work_products(tcx, work_products, &clean_work_products);
// Verify #[rustc_dirty]/#[rustc_clean] test annotations against the
// computed dirty set.
223 dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_nodes, &retraced);
225 load_prev_metadata_hashes(tcx,
227 &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
231 /// Computes which of the original set of def-ids are dirty. Stored in
232 /// a bit vector where the index is the DefPathIndex.
// NOTE(review): the doc comment above looks stale -- the visible code
// inserts into a `DirtyNodes` map (dirty node -> blame node), not a bit
// vector. The return expression itself is elided from this extract.
233 fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
234 incremental_hashes_map: &IncrementalHashesMap,
235 serialized_hashes: &[SerializedHash],
236 retraced: &RetracedDefIdDirectory)
238 let mut hcx = HashContext::new(tcx, incremental_hashes_map);
239 let mut dirty_nodes = FxHashMap();
// For each hash recorded in the previous session, recompute the current
// hash; a mismatch (or a node that no longer retraces to a DefId, i.e.
// was removed) marks the node dirty, blaming itself.
241 for hash in serialized_hashes {
242 if let Some(dep_node) = retraced.map(&hash.dep_node) {
243 let current_hash = hcx.hash(&dep_node).unwrap();
244 if current_hash == hash.hash {
245 debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
246 dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
// (Elided: `continue` to skip clean nodes, presumably.)
251 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
252 println!("node {:?} is dirty as hash is {:?} was {:?}",
253 dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
258 debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
259 dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
// (Elided `else` branch: the node could not be retraced, meaning its
// def was removed in this compilation.)
263 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
264 println!("node {:?} is dirty as it was removed",
268 debug!("initial_dirty_nodes: {:?} is dirty as it was removed",
// A node is its own blame when it is an initially-dirty input.
272 dirty_nodes.insert(hash.dep_node.clone(), hash.dep_node.clone());
// Propagates dirtiness forward along dep-graph edges using an explicit
// worklist: every target reachable from a dirty source becomes dirty,
// inheriting the *original* blame node (not the immediate predecessor).
// Returns the enlarged `DirtyNodes` map (return elided in this extract).
278 fn transitive_dirty_nodes(edge_map: &FxHashMap<DepNode<DefPathIndex>, Vec<DepNode<DefPathIndex>>>,
279 mut dirty_nodes: DirtyNodes)
// Seed the worklist with every initially-dirty (node, blame) pair.
282 let mut stack: Vec<(DepNode<DefPathIndex>, DepNode<DefPathIndex>)> = vec![];
283 stack.extend(dirty_nodes.iter().map(|(s, b)| (s.clone(), b.clone())));
284 while let Some((source, blame)) = stack.pop() {
285 // we know the source is dirty (because of the node `blame`)...
286 assert!(dirty_nodes.contains_key(&source));
288 // ...so we dirty all the targets (with the same blame)
289 if let Some(targets) = edge_map.get(&source) {
290 for target in targets {
// The contains_key guard ensures each node is enqueued at most once,
// so the traversal terminates.
291 if !dirty_nodes.contains_key(target) {
292 dirty_nodes.insert(target.clone(), blame.clone());
293 stack.push((target.clone(), blame.clone()));
301 /// Go through the list of work-products produced in the previous run.
302 /// Delete any whose nodes have been found to be dirty or which are
303 /// otherwise no longer applicable.
// NOTE(review): `else` branches and closing braces are elided here; the
// visible flow is: dirty -> delete; clean but files missing -> delete;
// clean with all files present -> re-register with the dep-graph.
304 fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
305 work_products: Vec<SerializedWorkProduct>,
306 clean_work_products: &FxHashSet<Arc<WorkProductId>>) {
307 debug!("reconcile_work_products({:?})", work_products);
308 for swp in work_products {
// Not in the clean set => some input changed; its files are stale.
309 if !clean_work_products.contains(&swp.id) {
310 debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
311 delete_dirty_work_product(tcx, swp);
// (Elided `else` arm:) the node is clean, but we can only reuse the
// work product if every saved file still exists on disk.
313 let mut all_files_exist = true;
314 for &(_, ref file_name) in swp.work_product.saved_files.iter() {
315 let path = in_incr_comp_dir_sess(tcx.sess, file_name);
// (Elided: an existence check on `path` guards this assignment.)
317 all_files_exist = false;
319 if tcx.sess.opts.debugging_opts.incremental_info {
320 println!("incremental: could not find file for up-to-date work product: {}",
// All files present: carry the work product forward into this session.
327 debug!("reconcile_work_products: all files for {:?} exist", swp);
328 tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
// Some file vanished: treat the work product as unusable and delete it.
330 debug!("reconcile_work_products: some file for {:?} does not exist", swp);
331 delete_dirty_work_product(tcx, swp);
// Deletes the on-disk files belonging to a work product that can no
// longer be reused (consumes the serialized record).
337 fn delete_dirty_work_product(tcx: TyCtxt,
338 swp: SerializedWorkProduct) {
339 debug!("delete_dirty_work_product({:?})", swp);
340 work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
// Populates `output` with the metadata hashes saved by the previous
// compilation session, keyed by their retraced `DefId`s. Only runs under
// `-Z query-dep-graph`; silently does nothing if the hashes file is
// missing or unreadable. NOTE(review): several `return`s, match arms,
// and braces are elided from this extract.
343 fn load_prev_metadata_hashes(tcx: TyCtxt,
344 retraced: &RetracedDefIdDirectory,
345 output: &mut FxHashMap<DefId, Fingerprint>) {
// Loading is only needed for dep-graph debugging/testing; bail out
// otherwise (the `return` is elided).
346 if !tcx.sess.opts.debugging_opts.query_dep_graph {
350 debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
352 let file_path = metadata_hash_export_path(tcx.sess);
354 if !file_path.exists() {
355 debug!("load_prev_metadata_hashes() - Couldn't find file containing \
356 hashes at `{}`", file_path.display());
360 debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
362 let data = match file_format::read_file(tcx.sess, &file_path) {
363 Ok(Some(data)) => data,
// (Elided `Ok(None)` arm:) file written by an incompatible compiler.
365 debug!("load_prev_metadata_hashes() - File produced by incompatible \
366 compiler version: {}", file_path.display());
// (Elided `Err(err)` arm:) read failure; log and give up.
370 debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
371 file_path.display(), err);
376 debug!("load_prev_metadata_hashes() - Decoding hashes");
377 let mut decoder = Decoder::new(&data, 0);
// The file starts with the crate Svh, which we skip over here; unwrap
// is acceptable since the format header was already validated above.
378 let _ = Svh::decode(&mut decoder).unwrap();
379 let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
381 debug!("load_prev_metadata_hashes() - Mapping DefIds");
// index_map maps each serialized def_index to a DefPathIndex; the two
// tables must describe the same set of defs.
383 assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
384 for serialized_hash in serialized_hashes.hashes {
385 let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
// Defs removed in this session fail to retrace and are skipped.
386 if let Some(def_id) = retraced.def_id(def_path_index) {
387 let old = output.insert(def_id, serialized_hash.hash);
388 assert!(old.is_none(), "already have hash for {:?}", def_id);
392 debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
393 serialized_hashes.index_map.len());