1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Code to save/load the dep-graph from files.
13 use rustc::dep_graph::{DepNode, WorkProductId, DepKind, PreviousDepGraph};
14 use rustc::hir::svh::Svh;
15 use rustc::ich::Fingerprint;
16 use rustc::session::Session;
17 use rustc::ty::TyCtxt;
18 use rustc::util::nodemap::DefIdMap;
19 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
20 use rustc_data_structures::indexed_vec::IndexVec;
21 use rustc_serialize::Decodable as RustcDecodable;
22 use rustc_serialize::opaque::Decoder;
23 use std::path::{Path};
27 use super::file_format;
28 use super::work_product;
30 // The key is a dirty node. The value is **some** base-input that we
// NOTE(review): the comment above is cut off in this view; presumably it
// continued with something like "...can blame the dirtiness on" — confirm
// against the full file. The value is used as the "blame" node when
// reporting why a work product is dirty (see `process_edge`).
32 pub type DirtyNodes = FxHashMap<DepNodeIndex, DepNodeIndex>;
34 /// If we are in incremental mode, and a previous dep-graph exists,
35 /// then load up those nodes/edges that are still valid into the
36 /// dep-graph for this session. (This is assumed to be running very
37 /// early in compilation, before we've really done any work, but
38 /// actually it doesn't matter all that much.) See `README.md` for
39 /// more general overview.
40 pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
// Set up metadata dep-nodes and in-scope-trait hashes up front, before any
// graph data is loaded. (Exact semantics live on `tcx`; only the call sites
// are visible here — confirm in rustc::ty if details matter.)
41 tcx.allocate_metadata_dep_nodes();
42 tcx.precompute_in_scope_traits_hashes();
// Only attempt to load when the session is configured to do so.
43 if tcx.sess.incr_session_load_dep_graph() {
// `in_ignore()` presumably suppresses dependency tracking while we
// populate the graph directly — TODO confirm against rustc::dep_graph.
44 let _ignore = tcx.dep_graph.in_ignore();
45 load_dep_graph_if_exists(tcx);
// NOTE(review): the closing braces of this function are elided from this view.
/// Load the serialized dep-graph and work-product lists from disk, if both
/// files are present and readable, then decode them into `tcx.dep_graph`.
/// A missing or incompatible file is treated as "nothing to load", not an
/// error; a decode failure is reported on the session.
49 fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
50 let dep_graph_path = dep_graph_path(tcx.sess);
51 let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
// No usable dep-graph file: bail out silently.
53 None => return // no file
// NOTE(review): the `Some(..)` arm and match close are elided in this view.
56 let work_products_path = work_products_path(tcx.sess);
57 let work_products_data = match load_data(tcx.sess, &work_products_path) {
59 None => return // no file
// NOTE(review): `Some(..)` arm elided here as well.
62 match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
63 Ok(dirty_nodes) => dirty_nodes,
// On decode error, report both file paths plus the error on the session.
// (The surrounding `Err(..)` arm is partially elided from this view.)
66 &format!("decoding error in dep-graph from `{}` and `{}`: {}",
67 dep_graph_path.display(),
68 work_products_path.display(),
/// Read a file in the incremental-compilation on-disk format.
///
/// Returns `Some(bytes)` on success; `None` when the file is absent or was
/// written by an incompatible compiler version (neither is an error). A hard
/// read error is reported on the session, and the whole session directory is
/// cleared so stale incompatible artifacts do not linger.
/// NOTE(review): the `Err(..)` arm and final return are partially elided here.
74 fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
75 match file_format::read_file(sess, path) {
76 Ok(Some(data)) => return Some(data),
78 // The file either didn't exist or was produced by an incompatible
79 // compiler version. Neither is an error.
83 &format!("could not load dep-graph from `{}`: {}",
84 path.display(), err));
// The on-disk data is unusable — wipe the session directory contents; if
// even that fails, report it but keep going.
88 if let Err(err) = delete_all_session_dir_contents(sess) {
89 sess.err(&format!("could not clear incompatible incremental \
90 compilation session directory `{}`: {}",
91 path.display(), err));
97 /// Check if a DepNode from the previous dep-graph refers to something that
98 /// still exists in the current compilation session. Only works for DepNode
99 /// variants that represent inputs (HIR and imported Metadata).
100 fn does_still_exist(tcx: TyCtxt, dep_node: &DepNode) -> bool {
101 match dep_node.kind {
// NOTE(review): earlier arms of this match (presumably `Hir`/`HirBody`,
// per the doc comment) are elided from this view.
// For input-like kinds, "still exists" means the node can be mapped back
// to a `DefId` in the current session.
105 DepKind::InScopeTraits |
106 DepKind::CrateMetadata => {
107 dep_node.extract_def_id(tcx).is_some()
// Any other kind is not an input; hitting this arm is a compiler bug.
110 bug!("unexpected Input DepNode: {:?}", dep_node)
115 /// Decode the dep graph and load the edges/nodes that are still clean
116 /// into `tcx.dep_graph`.
///
/// Steps, as visible here: decode the work-product list; check the cached
/// command-line-args hash against the current session (purging the cache on
/// mismatch); decode the serialized graph; compute initially-dirty nodes and
/// their transitive closure; re-add clean edges via `process_edge`; re-add
/// bootstrap outputs; finally reconcile on-disk work products.
/// NOTE(review): several interior lines (match closes, early return, some
/// `process_edge` arguments) are elided from this view.
117 pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
118 dep_graph_data: &[u8],
119 work_products_data: &[u8])
120 -> Result<(), String>
122 // Decode the list of work_products
123 let mut work_product_decoder = Decoder::new(work_products_data, 0);
124 let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;
126 // Deserialize the directory and dep-graph.
127 let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
128 let prev_commandline_args_hash = u64::decode(&mut dep_graph_decoder)?;
// A differing dep-tracking hash means the compilation options changed, so
// nothing in the cache can be trusted.
130 if prev_commandline_args_hash != tcx.sess.opts.dep_tracking_hash() {
131 if tcx.sess.opts.debugging_opts.incremental_info {
132 eprintln!("incremental: completely ignoring cache because of \
133 differing commandline arguments");
135 // We can't reuse the cache, purge it.
136 debug!("decode_dep_graph: differing commandline arg hashes");
137 for swp in work_products {
138 delete_dirty_work_product(tcx, swp);
141 // No need to do any further work
145 let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;
147 // Compute the set of nodes from the old graph where some input
148 // has changed or been removed.
149 let dirty_raw_nodes = initial_dirty_nodes(tcx,
150 &serialized_dep_graph.nodes,
151 &serialized_dep_graph.hashes);
// Shadowing: extend the initial set to everything reachable from it.
152 let dirty_raw_nodes = transitive_dirty_nodes(&serialized_dep_graph,
155 // Recreate the edges in the graph that are still clean.
156 let mut clean_work_products = FxHashSet();
157 let mut dirty_work_products = FxHashSet(); // incomplete; just used to suppress debug output
// Walk every (source -> targets) adjacency range of the serialized graph.
158 for (source, targets) in serialized_dep_graph.edge_list_indices.iter_enumerated() {
159 let target_begin = targets.0 as usize;
160 let target_end = targets.1 as usize;
162 for &target in &serialized_dep_graph.edge_list_data[target_begin .. target_end] {
// NOTE(review): the `process_edge(..)` call head and some of its
// arguments are elided from this view.
166 &serialized_dep_graph.nodes,
168 &mut clean_work_products,
169 &mut dirty_work_products,
174 // Recreate bootstrap outputs, which are outputs that have no incoming edges
175 // (and hence cannot be dirty).
176 for bootstrap_output in &serialized_dep_graph.bootstrap_outputs {
177 if let DepKind::WorkProduct = bootstrap_output.kind {
178 let wp_id = WorkProductId::from_fingerprint(bootstrap_output.hash);
179 clean_work_products.insert(wp_id);
182 tcx.dep_graph.add_node_directly(*bootstrap_output);
185 // Add in work-products that are still clean, and delete those that are
187 reconcile_work_products(tcx, work_products, &clean_work_products);
192 /// Computes which of the original set of def-ids are dirty. Stored in
193 /// a bit vector where the index is the DefPathIndex.
/// NOTE(review): the doc comment above mentions a bit vector, but the visible
/// code builds a map (`FxHashMap`, presumably the `DirtyNodes` alias) keyed
/// by `DepNodeIndex` — the comment may be stale; confirm against the full
/// file. The return type line is elided from this view.
///
/// A node is dirty when either (a) it still exists but its current
/// fingerprint differs from the serialized one, or (b) it no longer exists
/// at all. Each dirty node blames itself (`index -> index`).
194 fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
195 nodes: &IndexVec<DepNodeIndex, DepNode>,
196 serialized_hashes: &[(DepNodeIndex, Fingerprint)])
198 let mut dirty_nodes = FxHashMap();
200 for &(dep_node_index, prev_hash) in serialized_hashes {
201 let dep_node = nodes[dep_node_index];
202 if does_still_exist(tcx, &dep_node) {
// Compare the previous fingerprint with the freshly computed one.
203 let current_hash = tcx.dep_graph.fingerprint_of(&dep_node);
205 if current_hash == prev_hash {
206 debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
// NOTE(review): the clean-path `continue`/close and parts of the dirty
// path are elided between the logging statements below.
212 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
213 println!("node {:?} is dirty as hash is {:?}, was {:?}",
219 debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
// Node no longer exists in this session: dirty because it was removed.
224 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
225 println!("node {:?} is dirty as it was removed", dep_node);
228 debug!("initial_dirty_nodes: {:?} is dirty as it was removed", dep_node);
// Dirty node blames itself as the base input.
230 dirty_nodes.insert(dep_node_index, dep_node_index);
/// Propagate dirtiness forward along the serialized graph's edges: every
/// target reachable from an initially-dirty node becomes dirty too, and
/// inherits the same "blame" node. Iterative DFS via an explicit stack.
/// NOTE(review): the return type and final return expression are elided
/// from this view (presumably `-> DirtyNodes` returning `dirty_nodes`).
236 fn transitive_dirty_nodes(serialized_dep_graph: &SerializedDepGraph,
237 mut dirty_nodes: DirtyNodes)
// Seed the stack with the initially-dirty (source, blame) pairs.
240 let mut stack: Vec<(DepNodeIndex, DepNodeIndex)> = vec![];
241 stack.extend(dirty_nodes.iter().map(|(&s, &b)| (s, b)));
242 while let Some((source, blame)) = stack.pop() {
243 // we know the source is dirty (because of the node `blame`)...
244 debug_assert!(dirty_nodes.contains_key(&source));
246 // ...so we dirty all the targets (with the same blame)
247 for &target in serialized_dep_graph.edge_targets_from(source) {
// Only visit each node once; the map doubles as the visited set.
248 if !dirty_nodes.contains_key(&target) {
249 dirty_nodes.insert(target, blame);
250 stack.push((target, blame));
257 /// Go through the list of work-products produced in the previous run.
258 /// Delete any whose nodes have been found to be dirty or which are
259 /// otherwise no longer applicable.
///
/// Visible logic: a work product is kept only if every one of its saved
/// files still exists in the incr-comp directory; otherwise its files are
/// deleted. NOTE(review): the `_clean_work_products` parameter is unused in
/// the visible lines (leading underscore suggests intentionally so), and
/// the branch structure between the existence check and the keep/delete
/// paths is partially elided from this view.
260 fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
261 work_products: Vec<SerializedWorkProduct>,
262 _clean_work_products: &FxHashSet<WorkProductId>) {
263 debug!("reconcile_work_products({:?})", work_products);
264 for swp in work_products {
265 let mut all_files_exist = true;
266 for &(_, ref file_name) in swp.work_product.saved_files.iter() {
267 let path = in_incr_comp_dir_sess(tcx.sess, file_name);
// NOTE(review): the existence test (`if !path.exists()` or similar) is
// elided just above this line.
269 all_files_exist = false;
271 if tcx.sess.opts.debugging_opts.incremental_info {
272 eprintln!("incremental: could not find file for \
273 up-to-date work product: {}", path.display());
// Keep path: all saved files are present, so reuse the work product.
279 debug!("reconcile_work_products: all files for {:?} exist", swp);
280 tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
// Delete path: some file is missing, so discard the work product.
282 debug!("reconcile_work_products: some file for {:?} does not exist", swp);
283 delete_dirty_work_product(tcx, swp);
/// Delete the on-disk files belonging to a work product that can no longer
/// be reused (dirty, or its saved files went missing).
288 fn delete_dirty_work_product(tcx: TyCtxt,
289 swp: SerializedWorkProduct) {
290 debug!("delete_dirty_work_product({:?})", swp);
291 work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
/// Load the metadata hashes exported by the previous compilation session,
/// keyed by the `DefId`s they map to in the *current* session.
///
/// Returns an empty map in every early-exit case visible here: the
/// `-Z query-dep-graph` debug flag is off, the hash file does not exist,
/// or the file is unreadable/incompatible. NOTE(review): the early
/// `return`s themselves are elided from this view; only the preceding
/// checks and debug logging are visible.
294 pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
295 let mut output = DefIdMap();
297 if !tcx.sess.opts.debugging_opts.query_dep_graph {
298 // Previous metadata hashes are only needed for testing.
302 debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
304 let file_path = metadata_hash_export_path(tcx.sess);
306 if !file_path.exists() {
307 debug!("load_prev_metadata_hashes() - Couldn't find file containing \
308 hashes at `{}`", file_path.display());
312 debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
314 let data = match file_format::read_file(tcx.sess, &file_path) {
315 Ok(Some(data)) => data,
// Incompatible-version and read-error arms (partially elided) both log
// and, presumably, fall back to returning the empty map.
317 debug!("load_prev_metadata_hashes() - File produced by incompatible \
318 compiler version: {}", file_path.display());
322 debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
323 file_path.display(), err);
328 debug!("load_prev_metadata_hashes() - Decoding hashes");
329 let mut decoder = Decoder::new(&data, 0);
// The leading Svh is decoded and discarded; `unwrap` treats a malformed
// file we already version-checked as a bug, not a user error.
330 let _ = Svh::decode(&mut decoder).unwrap();
331 let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
333 debug!("load_prev_metadata_hashes() - Mapping DefIds");
// Every entry hash must have a matching index-map entry.
335 assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());
336 let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();
338 for serialized_hash in serialized_hashes.entry_hashes {
339 let def_path_hash = serialized_hashes.index_map[&serialized_hash.def_index];
// Entries whose def-path no longer maps to a current DefId are skipped.
340 if let Some(&def_id) = def_path_hash_to_def_id.get(&def_path_hash) {
341 let old = output.insert(def_id, serialized_hash.hash);
342 assert!(old.is_none(), "already have hash for {:?}", def_id);
346 debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
347 serialized_hashes.index_map.len());
/// Process one serialized edge `source -> target`.
///
/// If the target is dirty, the edge is dropped; when the dirty target is a
/// work product and `-Z incremental-info` is set, a human-readable "module X
/// is dirty because Y changed" message is printed once per work product
/// (using `dirty_work_products` to dedupe). If the target is clean, the edge
/// is re-added to `tcx.dep_graph`, and clean work-product targets are
/// recorded in `clean_work_products`. NOTE(review): several interior lines
/// (an early return after the dirty branch, parts of the blame formatting,
/// and a debug_assert arm) are elided from this view.
352 fn process_edge<'a, 'tcx, 'edges>(
353 tcx: TyCtxt<'a, 'tcx, 'tcx>,
354 source: DepNodeIndex,
355 target: DepNodeIndex,
356 nodes: &IndexVec<DepNodeIndex, DepNode>,
357 dirty_raw_nodes: &DirtyNodes,
358 clean_work_products: &mut FxHashSet<WorkProductId>,
359 dirty_work_products: &mut FxHashSet<WorkProductId>,
360 work_products: &[SerializedWorkProduct])
362 // If the target is dirty, skip the edge. If this is an edge
363 // that targets a work-product, we can print the blame
365 if let Some(&blame) = dirty_raw_nodes.get(&target) {
366 let target = nodes[target];
367 if let DepKind::WorkProduct = target.kind {
368 if tcx.sess.opts.debugging_opts.incremental_info {
369 let wp_id = WorkProductId::from_fingerprint(target.hash);
// `insert` returning true means this is the first time we see this
// dirty work product, so print the blame message exactly once.
371 if dirty_work_products.insert(wp_id) {
372 // Try to reconstruct the human-readable version of the
373 // DepNode. This cannot be done for things that where
375 let blame = nodes[blame];
376 let blame_str = if let Some(def_id) = blame.extract_def_id(tcx) {
// NOTE(review): the format! head for the def-path case is elided
// just above this line.
379 tcx.def_path(def_id).to_string(tcx))
// Fallback when no DefId can be recovered: Debug-print the node.
381 format!("{:?}", blame)
384 let wp = work_products.iter().find(|swp| swp.id == wp_id).unwrap();
386 eprintln!("incremental: module {:?} is dirty because \
387 {:?} changed or was removed",
388 wp.work_product.cgu_name,
396 // At this point we have asserted that the target is clean -- otherwise, we
397 // would have hit the return above. We can do some further consistency
398 // checks based on this fact:
400 // We should never have an edge where the target is clean but the source
401 // was dirty. Otherwise something was wrong with the dirtying pass above:
402 debug_assert!(!dirty_raw_nodes.contains_key(&source));
404 // We also never should encounter an edge going from a removed input to a
405 // clean target because removing the input would have dirtied the input
406 // node and transitively dirtied the target.
407 debug_assert!(match nodes[source].kind {
408 DepKind::Hir | DepKind::HirBody | DepKind::CrateMetadata => {
409 does_still_exist(tcx, &nodes[source])
// NOTE(review): the catch-all arm of this debug_assert match is elided.
414 if !dirty_raw_nodes.contains_key(&target) {
// Shadow the indices with the actual DepNodes before re-adding the edge.
415 let target = nodes[target];
416 let source = nodes[source];
417 tcx.dep_graph.add_edge_directly(source, target);
419 if let DepKind::WorkProduct = target.kind {
420 let wp_id = WorkProductId::from_fingerprint(target.hash);
421 clean_work_products.insert(wp_id);
/// Load the "new-style" serialized dep-graph for red/green tracking.
///
/// Returns an empty `PreviousDepGraph` when incremental compilation is
/// disabled, when no cached graph can be read, or when the cached
/// command-line-args hash differs from the current session's.
/// NOTE(review): the early `return empty` statements and the trailing
/// `else`/closing braces are elided from this view.
426 pub fn load_dep_graph_new(sess: &Session) -> PreviousDepGraph {
427 use rustc::dep_graph::SerializedDepGraph as SerializedDepGraphNew;
429 let empty = PreviousDepGraph::new(SerializedDepGraphNew::new());
// Not an incremental session: nothing to load.
431 if sess.opts.incremental.is_none() {
435 if let Some(bytes) = load_data(sess, &dep_graph_path_new(sess)) {
436 let mut decoder = Decoder::new(&bytes, 0);
// The args hash is written first; `expect` because a file that passed the
// format check but truncates here indicates a bug, not user error.
437 let prev_commandline_args_hash = u64::decode(&mut decoder)
438 .expect("Error reading commandline arg hash from cached dep-graph");
440 if prev_commandline_args_hash != sess.opts.dep_tracking_hash() {
441 if sess.opts.debugging_opts.incremental_info {
442 eprintln!("incremental: completely ignoring cache because of \
443 differing commandline arguments");
445 // We can't reuse the cache, purge it.
446 debug!("load_dep_graph_new: differing commandline arg hashes");
448 // No need to do any further work
452 let dep_graph = SerializedDepGraphNew::decode(&mut decoder)
453 .expect("Error reading cached dep-graph");
455 PreviousDepGraph::new(dep_graph)