1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Code to save/load the dep-graph from files.
13 use rustc::dep_graph::{DepNode, WorkProductId, DepKind};
14 use rustc::hir::def_id::DefId;
15 use rustc::hir::svh::Svh;
16 use rustc::ich::Fingerprint;
17 use rustc::session::Session;
18 use rustc::ty::TyCtxt;
19 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
20 use rustc_data_structures::indexed_vec::IndexVec;
21 use rustc_serialize::Decodable as RustcDecodable;
22 use rustc_serialize::opaque::Decoder;
23 use std::path::{Path};
25 use IncrementalHashesMap;
27 use super::dirty_clean;
30 use super::file_format;
31 use super::work_product;
33 // The key is a dirty node. The value is **some** base-input that we
// can blame for the key being dirty — i.e. one input node whose change
// or removal caused the dirtiness. NOTE(review): the second line of
// this comment appears to have been lost in this copy; restore from
// version control.
35 pub type DirtyNodes = FxHashMap<DepNodeIndex, DepNodeIndex>;
37 /// If we are in incremental mode, and a previous dep-graph exists,
38 /// then load up those nodes/edges that are still valid into the
39 /// dep-graph for this session. (This is assumed to be running very
40 /// early in compilation, before we've really done any work, but
41 /// actually it doesn't matter all that much.) See `README.md` for
42 /// more general overview.
// NOTE(review): several lines of this function body appear to be
// missing in this copy (match-arm bodies and closing braces are
// absent) — do not edit further without restoring the full source.
43 pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
44 incremental_hashes_map: &IncrementalHashesMap) {
// Incremental compilation was not requested on the command line, so
// there is nothing to load.
45 if tcx.sess.opts.incremental.is_none() {
// Allocate/lock the incremental session directory; the comments below
// describe the three possible outcomes of `prepare_session_directory`.
49 match prepare_session_directory(tcx) {
51 // We successfully allocated a session directory and there is
52 // something in it to load, so continue
55 // We successfully allocated a session directory, but there is no
56 // dep-graph data in it to load (because this is the first
57 // compilation session with this incr. comp. dir.)
61 // Something went wrong while trying to allocate the session
62 // directory. Don't try to use it any further.
// Presumably puts the dep-graph into "ignore" mode for the duration of
// the load so that loading does not itself record new dependencies —
// the guard is held until end of scope. TODO(review): confirm.
67 let _ignore = tcx.dep_graph.in_ignore();
68 load_dep_graph_if_exists(tcx, incremental_hashes_map);
// Loads the serialized dep-graph and work-product data from the session
// directory, if both files exist, and hands them to `decode_dep_graph`.
// Returns silently when either file is absent (first session, or data
// was discarded). NOTE(review): match arms / closing braces appear to
// be missing from this copy.
71 fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
72 incremental_hashes_map: &IncrementalHashesMap) {
73 let dep_graph_path = dep_graph_path(tcx.sess);
74 let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
76 None => return // no file
// The work-products file is loaded the same way; absence is not an
// error here either.
79 let work_products_path = work_products_path(tcx.sess);
80 let work_products_data = match load_data(tcx.sess, &work_products_path) {
82 None => return // no file
// Any decoding error is reported to the user with both file paths so
// the offending cache can be located.
85 match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
86 Ok(dirty_nodes) => dirty_nodes,
89 &format!("decoding error in dep-graph from `{}` and `{}`: {}",
90 dep_graph_path.display(),
91 work_products_path.display(),
// Reads the raw bytes of an on-disk cache file via
// `file_format::read_file`. Returns `None` (without signalling an
// error) when the file does not exist or was written by an
// incompatible compiler; on a real I/O error it reports via the
// session and clears the session directory so stale data cannot be
// reused. NOTE(review): match arms appear to be missing in this copy.
97 fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
98 match file_format::read_file(sess, path) {
99 Ok(Some(data)) => return Some(data),
101 // The file either didn't exist or was produced by an incompatible
102 // compiler version. Neither is an error.
// A genuine read error: surface it to the user with the path.
106 &format!("could not load dep-graph from `{}`: {}",
107 path.display(), err));
// Best-effort cleanup: wipe the session dir so a later session does
// not pick up partially-readable data; failure to delete is reported
// but not fatal.
111 if let Err(err) = delete_all_session_dir_contents(sess) {
112 sess.err(&format!("could not clear incompatible incremental \
113 compilation session directory `{}`: {}",
114 path.display(), err));
120 /// Check if a DepNode from the previous dep-graph refers to something that
121 /// still exists in the current compilation session. Only works for DepNode
122 /// variants that represent inputs (HIR and imported Metadata).
// NOTE(review): one or more match arms (presumably the
// `DepKind::Hir | DepKind::HirBody` input variants) appear to be
// missing from this copy.
123 fn does_still_exist(tcx: TyCtxt, dep_node: &DepNode) -> bool {
124 match dep_node.kind {
// A metadata node still "exists" iff its DefId can be resolved in the
// current session.
127 DepKind::MetaData => {
128 dep_node.extract_def_id(tcx).is_some()
// Any non-input DepNode reaching this function is a caller bug.
131 bug!("unexpected Input DepNode: {:?}", dep_node)
136 /// Decode the dep graph and load the edges/nodes that are still clean
137 /// into `tcx.dep_graph`.
// NOTE(review): a number of statements (closing braces, some call
// arguments, early returns) appear to be missing from this copy of the
// function — restore the full source before modifying the logic.
138 pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
139 incremental_hashes_map: &IncrementalHashesMap,
140 dep_graph_data: &[u8],
141 work_products_data: &[u8])
142 -> Result<(), String>
144 // Decode the list of work_products
145 let mut work_product_decoder = Decoder::new(work_products_data, 0);
146 let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;
148 // Deserialize the directory and dep-graph.
149 let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
150 let prev_commandline_args_hash = u64::decode(&mut dep_graph_decoder)?;
// If the command-line arguments changed between sessions, none of the
// cached data can be trusted: delete all previous work products and
// bail out without loading anything.
152 if prev_commandline_args_hash != tcx.sess.opts.dep_tracking_hash() {
153 if tcx.sess.opts.debugging_opts.incremental_info {
154 eprintln!("incremental: completely ignoring cache because of \
155 differing commandline arguments");
157 // We can't reuse the cache, purge it.
158 debug!("decode_dep_graph: differing commandline arg hashes");
159 for swp in work_products {
160 delete_dirty_work_product(tcx, swp);
163 // No need to do any further work
167 let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;
169 // Compute the set of nodes from the old graph where some input
170 // has changed or been removed.
171 let dirty_raw_nodes = initial_dirty_nodes(tcx,
172 incremental_hashes_map,
173 &serialized_dep_graph.nodes,
174 &serialized_dep_graph.hashes);
// Propagate dirtiness along the serialized edges (shadowing the
// initial set with its transitive closure).
175 let dirty_raw_nodes = transitive_dirty_nodes(&serialized_dep_graph,
178 // Recreate the edges in the graph that are still clean.
179 let mut clean_work_products = FxHashSet();
180 let mut dirty_work_products = FxHashSet(); // incomplete; just used to suppress debug output
// Walk every (source -> targets) adjacency range; `process_edge`
// (below in this file) re-adds each still-clean edge and tracks which
// work-products remain clean.
181 for (source, targets) in serialized_dep_graph.edge_list_indices.iter_enumerated() {
182 let target_begin = targets.0 as usize;
183 let target_end = targets.1 as usize;
185 for &target in &serialized_dep_graph.edge_list_data[target_begin .. target_end] {
189 &serialized_dep_graph.nodes,
191 &mut clean_work_products,
192 &mut dirty_work_products,
197 // Recreate bootstrap outputs, which are outputs that have no incoming edges
198 // (and hence cannot be dirty).
199 for bootstrap_output in &serialized_dep_graph.bootstrap_outputs {
200 if let DepKind::WorkProduct = bootstrap_output.kind {
201 let wp_id = WorkProductId::from_fingerprint(bootstrap_output.hash);
202 clean_work_products.insert(wp_id);
205 tcx.dep_graph.add_node_directly(*bootstrap_output);
208 // Add in work-products that are still clean, and delete those that are
// dirty or whose saved files are missing (see `reconcile_work_products`).
210 reconcile_work_products(tcx, work_products, &clean_work_products);
// Testing hook: verify `#[rustc_dirty]` / `#[rustc_clean]` annotations
// against the computed dirty set. TODO(review): confirm — inferred
// from the `dirty_clean` module name only.
212 dirty_clean::check_dirty_clean_annotations(tcx,
213 &serialized_dep_graph.nodes,
216 load_prev_metadata_hashes(tcx,
217 &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
221 /// Computes which of the original set of def-ids are dirty. Stored in
222 /// a bit vector where the index is the DefPathIndex.
// NOTE(review): branch bodies and the return statement appear to be
// missing from this copy. The result maps each dirty node index to
// itself (node blamed on itself), matching the `DirtyNodes` contract.
223 fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
224 incremental_hashes_map: &IncrementalHashesMap,
225 nodes: &IndexVec<DepNodeIndex, DepNode>,
226 serialized_hashes: &[(DepNodeIndex, Fingerprint)])
228 let mut hcx = HashContext::new(tcx, incremental_hashes_map);
229 let mut dirty_nodes = FxHashMap();
// For each input node hashed in the previous session, compare against
// its hash in the current session.
231 for &(dep_node_index, prev_hash) in serialized_hashes {
232 let dep_node = nodes[dep_node_index];
233 if does_still_exist(tcx, &dep_node) {
// An input that still exists must be hashable; failing to produce a
// current hash indicates an internal invariant violation.
234 let current_hash = hcx.hash(&dep_node).unwrap_or_else(|| {
235 bug!("Cannot find current ICH for input that still exists?")
// Unchanged hash => the node is clean; skip it.
238 if current_hash == prev_hash {
239 debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
// Hash differs => dirty because the input changed.
245 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
246 println!("node {:?} is dirty as hash is {:?}, was {:?}",
252 debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
// Input no longer exists => dirty because it was removed.
257 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
258 println!("node {:?} is dirty as it was removed", dep_node);
261 debug!("initial_dirty_nodes: {:?} is dirty as it was removed", dep_node);
// Record the node as dirty, blaming it on itself.
263 dirty_nodes.insert(dep_node_index, dep_node_index);
// Expands the initial dirty set to its transitive closure along the
// serialized edges: anything reachable from a dirty node becomes dirty
// with the same blame. Iterative DFS via an explicit stack.
// NOTE(review): the trailing `dirty_nodes` return expression / closing
// braces appear to be missing from this copy.
269 fn transitive_dirty_nodes(serialized_dep_graph: &SerializedDepGraph,
270 mut dirty_nodes: DirtyNodes)
// Seed the work stack with every (dirty node, blamed input) pair.
273 let mut stack: Vec<(DepNodeIndex, DepNodeIndex)> = vec![];
274 stack.extend(dirty_nodes.iter().map(|(&s, &b)| (s, b)));
275 while let Some((source, blame)) = stack.pop() {
276 // we know the source is dirty (because of the node `blame`)...
277 debug_assert!(dirty_nodes.contains_key(&source));
279 // ...so we dirty all the targets (with the same blame)
280 for &target in serialized_dep_graph.edge_targets_from(source) {
// Only enqueue each node once; the map doubles as the visited set.
281 if !dirty_nodes.contains_key(&target) {
282 dirty_nodes.insert(target, blame);
283 stack.push((target, blame));
290 /// Go through the list of work-products produced in the previous run.
291 /// Delete any whose nodes have been found to be dirty or which are
292 /// otherwise no longer applicable.
// NOTE(review): `else` arms and several closing braces appear to be
// missing from this copy; restore the full source before editing.
293 fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
294 work_products: Vec<SerializedWorkProduct>,
295 clean_work_products: &FxHashSet<WorkProductId>) {
296 debug!("reconcile_work_products({:?})", work_products);
297 for swp in work_products {
// Case 1: the work-product's node was found dirty — delete its files.
298 if !clean_work_products.contains(&swp.id) {
299 debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
300 delete_dirty_work_product(tcx, swp);
// Case 2: node is clean, but reuse is only possible when every saved
// object file still exists on disk.
302 let mut all_files_exist = true;
303 for &(_, ref file_name) in swp.work_product.saved_files.iter() {
304 let path = in_incr_comp_dir_sess(tcx.sess, file_name);
306 all_files_exist = false;
308 if tcx.sess.opts.debugging_opts.incremental_info {
309 eprintln!("incremental: could not find file for \
310 up-to-date work product: {}", path.display());
// Clean and fully on-disk: register it for reuse in this session.
316 debug!("reconcile_work_products: all files for {:?} exist", swp);
317 tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
// Clean but a file is missing: treat like dirty and delete the rest.
319 debug!("reconcile_work_products: some file for {:?} does not exist", swp);
320 delete_dirty_work_product(tcx, swp);
// Removes the on-disk files belonging to a work-product that can no
// longer be reused (consumes the serialized record; errors, if any, are
// handled inside `delete_workproduct_files`).
326 fn delete_dirty_work_product(tcx: TyCtxt,
327 swp: SerializedWorkProduct) {
328 debug!("delete_dirty_work_product({:?})", swp);
329 work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
// Loads the metadata hashes exported by the previous session into
// `output`, keyed by the current session's DefIds. Only runs under
// `-Z query-dep-graph` (testing); silently returns when the hash file
// is absent or unreadable. NOTE(review): several early-return lines
// and closing braces appear to be missing from this copy.
332 fn load_prev_metadata_hashes(tcx: TyCtxt,
333 output: &mut FxHashMap<DefId, Fingerprint>) {
334 if !tcx.sess.opts.debugging_opts.query_dep_graph {
335 // Previous metadata hashes are only needed for testing.
339 debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
341 let file_path = metadata_hash_export_path(tcx.sess);
// Missing file is the normal first-session case, not an error.
343 if !file_path.exists() {
344 debug!("load_prev_metadata_hashes() - Couldn't find file containing \
345 hashes at `{}`", file_path.display());
349 debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
351 let data = match file_format::read_file(tcx.sess, &file_path) {
352 Ok(Some(data)) => data,
// Written by an incompatible compiler version: ignore quietly.
354 debug!("load_prev_metadata_hashes() - File produced by incompatible \
355 compiler version: {}", file_path.display());
359 debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
360 file_path.display(), err);
365 debug!("load_prev_metadata_hashes() - Decoding hashes");
366 let mut decoder = Decoder::new(&data, 0);
// The leading Svh is decoded only to advance the stream; its value is
// deliberately discarded here.
367 let _ = Svh::decode(&mut decoder).unwrap();
368 let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
370 debug!("load_prev_metadata_hashes() - Mapping DefIds");
372 assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());
// Map each previous-session DefPathHash back to a current-session
// DefId; entries whose def-path no longer exists are skipped.
373 let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();
375 for serialized_hash in serialized_hashes.entry_hashes {
376 let def_path_hash = serialized_hashes.index_map[&serialized_hash.def_index];
377 if let Some(&def_id) = def_path_hash_to_def_id.get(&def_path_hash) {
378 let old = output.insert(def_id, serialized_hash.hash);
// Each DefId must occur at most once in the serialized data.
379 assert!(old.is_none(), "already have hash for {:?}", def_id);
383 debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
384 serialized_hashes.index_map.len());
387 fn process_edge<'a, 'tcx, 'edges>(
388 tcx: TyCtxt<'a, 'tcx, 'tcx>,
389 source: DepNodeIndex,
390 target: DepNodeIndex,
391 nodes: &IndexVec<DepNodeIndex, DepNode>,
392 dirty_raw_nodes: &DirtyNodes,
393 clean_work_products: &mut FxHashSet<WorkProductId>,
394 dirty_work_products: &mut FxHashSet<WorkProductId>,
395 work_products: &[SerializedWorkProduct])
397 // If the target is dirty, skip the edge. If this is an edge
398 // that targets a work-product, we can print the blame
400 if let Some(&blame) = dirty_raw_nodes.get(&target) {
401 let target = nodes[target];
402 if let DepKind::WorkProduct = target.kind {
403 if tcx.sess.opts.debugging_opts.incremental_info {
404 let wp_id = WorkProductId::from_fingerprint(target.hash);
406 if dirty_work_products.insert(wp_id) {
407 // Try to reconstruct the human-readable version of the
408 // DepNode. This cannot be done for things that where
410 let blame = nodes[blame];
411 let blame_str = if let Some(def_id) = blame.extract_def_id(tcx) {
414 tcx.def_path(def_id).to_string(tcx))
416 format!("{:?}", blame)
419 let wp = work_products.iter().find(|swp| swp.id == wp_id).unwrap();
421 eprintln!("incremental: module {:?} is dirty because \
422 {:?} changed or was removed",
423 wp.work_product.cgu_name,
431 // At this point we have asserted that the target is clean -- otherwise, we
432 // would have hit the return above. We can do some further consistency
433 // checks based on this fact:
435 // We should never have an edge where the target is clean but the source
436 // was dirty. Otherwise something was wrong with the dirtying pass above:
437 debug_assert!(!dirty_raw_nodes.contains_key(&source));
439 // We also never should encounter an edge going from a removed input to a
440 // clean target because removing the input would have dirtied the input
441 // node and transitively dirtied the target.
442 debug_assert!(match nodes[source].kind {
443 DepKind::Hir | DepKind::HirBody | DepKind::MetaData => {
444 does_still_exist(tcx, &nodes[source])
449 if !dirty_raw_nodes.contains_key(&target) {
450 let target = nodes[target];
451 let source = nodes[source];
452 tcx.dep_graph.add_edge_directly(source, target);
454 if let DepKind::WorkProduct = target.kind {
455 let wp_id = WorkProductId::from_fingerprint(target.hash);
456 clean_work_products.insert(wp_id);