1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Code to save/load the dep-graph from files.
13 use rustc::dep_graph::DepNode;
14 use rustc::hir::def_id::DefId;
15 use rustc::hir::svh::Svh;
16 use rustc::session::Session;
17 use rustc::ty::TyCtxt;
18 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
19 use rustc_serialize::Decodable as RustcDecodable;
20 use rustc_serialize::opaque::Decoder;
22 use std::path::{Path};
24 use IncrementalHashesMap;
27 use super::directory::*;
28 use super::dirty_clean;
31 use super::file_format;
/// "Raw" dirty-node set: dep-nodes still keyed by the serialized
/// `DefPathIndex` rather than a retraced `DefId`, so that definitions
/// which were removed (and thus have no current `DefId`) can still be
/// represented (see the comment block in `decode_dep_graph`).
33 pub type DirtyNodes = FxHashSet<DepNode<DefPathIndex>>;
// NOTE(review): the embedded original line numbers jump below (43 -> 47,
// 50 -> 53, 60 -> 65), so interior lines — the early-out's body, the
// `prepare_session_directory` match arms' code, closing braces — are
// elided from this listing. Code is preserved byte-for-byte as shown;
// only comments were added.
35 /// If we are in incremental mode, and a previous dep-graph exists,
36 /// then load up those nodes/edges that are still valid into the
37 /// dep-graph for this session. (This is assumed to be running very
38 /// early in compilation, before we've really done any work, but
39 /// actually it doesn't matter all that much.) See `README.md` for
40 /// more general overview.
41 pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
42 incremental_hashes_map: &IncrementalHashesMap) {
// Nothing to do when incremental compilation is disabled; the body of
// this early-out (presumably just `return`) is elided here.
43 if tcx.sess.opts.incremental.is_none() {
47 match prepare_session_directory(tcx) {
49 // We successfully allocated a session directory and there is
50 // something in it to load, so continue
53 // We successfully allocated a session directory, but there is no
54 // dep-graph data in it to load (because this is the first
55 // compilation session with this incr. comp. dir.)
59 // Something went wrong while trying to allocate the session
60 // directory. Don't try to use it any further.
// Run the actual loading with dep-tracking disabled, so that reads done
// while reconstructing the graph are not themselves recorded as edges.
65 let _ignore = tcx.dep_graph.in_ignore();
66 load_dep_graph_if_exists(tcx, incremental_hashes_map);
// Load and decode the serialized dep-graph and work-product list for the
// current session directory, if both files are present.
// NOTE(review): interior lines are elided (embedded numbering jumps
// 72 -> 74, 74 -> 77, 84 -> 87, and the function tail after 89 is
// missing) — the `Some(...)` match arms and closing braces are not
// visible. Code preserved byte-for-byte as shown.
69 fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
70 incremental_hashes_map: &IncrementalHashesMap) {
71 let dep_graph_path = dep_graph_path(tcx.sess);
// A missing file is not an error: it simply means there is nothing to
// reuse, so we return silently.
72 let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
74 None => return // no file
77 let work_products_path = work_products_path(tcx.sess);
78 let work_products_data = match load_data(tcx.sess, &work_products_path) {
80 None => return // no file
// A decode error, by contrast, is reported loudly — the elided `Err` arm
// presumably passes the message below to a session error/bug method.
83 match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
84 Ok(dirty_nodes) => dirty_nodes,
87 &format!("decoding error in dep-graph from `{}` and `{}`: {}",
88 dep_graph_path.display(),
89 work_products_path.display(),
// Read the file at `path` in the incremental-data on-disk format,
// returning `Some(bytes)` on success. A missing/incompatible file or a
// read error falls through to wiping the session directory contents, and
// the (elided) function tail presumably returns `None` — TODO confirm
// against the un-elided source.
// NOTE(review): embedded numbering jumps (97 -> 99, 100 -> 104, 105 ->
// 109) — the `Ok(None)` arm's braces and the `Err(err)` arm's head are
// elided. Code preserved byte-for-byte as shown.
95 fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
96 match file_format::read_file(sess, path) {
97 Ok(Some(data)) => return Some(data),
99 // The file either didn't exist or was produced by an incompatible
100 // compiler version. Neither is an error.
// (Elided `Err` arm) read errors are reported to the user via the
// session with the message below:
104 &format!("could not load dep-graph from `{}`: {}",
105 path.display(), err));
// An unusable file means the whole incr.-comp. session directory is
// stale; clear it so later sessions start from a clean slate.
109 if let Err(err) = delete_all_session_dir_contents(sess) {
110 sess.err(&format!("could not clear incompatible incremental \
111 compilation session directory `{}`: {}",
112 path.display(), err));
118 /// Decode the dep graph and load the edges/nodes that are still clean
119 /// into `tcx.dep_graph`.
// NOTE(review): the embedded original numbering jumps repeatedly below
// (e.g. 128 -> 130, 142 -> 145, 145 -> 149, 184 -> 188, 203 -> 210,
// 215 -> 219, 235 -> end) — closing braces, `return`/`continue`
// statements, and some expression arguments are elided from this
// listing. Code is preserved byte-for-byte as shown; only comments were
// added or corrected (typo fixes).
120 pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
121 incremental_hashes_map: &IncrementalHashesMap,
122 dep_graph_data: &[u8],
123 work_products_data: &[u8])
124 -> Result<(), String>
126 // Decode the list of work_products
127 let mut work_product_decoder = Decoder::new(work_products_data, 0);
128 let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;
130 // Deserialize the directory and dep-graph.
131 let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
132 let prev_commandline_args_hash = u64::decode(&mut dep_graph_decoder)?;
// The cache is only valid for the exact dep-tracked command-line
// configuration it was produced under.
134 if prev_commandline_args_hash != tcx.sess.opts.dep_tracking_hash() {
135 if tcx.sess.opts.debugging_opts.incremental_info {
136 println!("incremental: completely ignoring cache because of \
137 differing commandline arguments");
139 // We can't reuse the cache, purge it.
140 debug!("decode_dep_graph: differing commandline arg hashes");
141 for swp in work_products {
142 delete_dirty_work_product(tcx, swp);
145 // No need to do any further work
149 let directory = DefIdDirectory::decode(&mut dep_graph_decoder)?;
150 let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;
152 // Retrace the paths in the directory to find their current location (if any).
153 let retraced = directory.retrace(tcx);
155 // Compute the set of Hir nodes whose data has changed or which
156 // have been removed. These are "raw" source nodes, which means
157 // that they still use the original `DefPathIndex` values from the
158 // encoding, rather than having been retraced to a `DefId`. The
159 // reason for this is that this way we can include nodes that have
160 // been removed (which no longer have a `DefId` in the current
// compilation session).
162 let dirty_raw_source_nodes = dirty_nodes(tcx,
163 incremental_hashes_map,
164 &serialized_dep_graph.hashes,
167 // Create a list of (raw-source-node ->
168 // retraced-target-node) edges. In the process of retracing the
169 // target nodes, we may discover some of their def-paths no longer exist,
170 // in which case there is no need to mark the corresponding nodes as dirty
171 // (they are just not present). So this list may be smaller than the original.
173 // Note though that in the common case the target nodes are
174 // `DepNode::WorkProduct` instances, and those don't have a
175 // def-id, so they will never be considered to not exist. Instead,
176 // we do a secondary hashing step (later, in trans) when we know
177 // the set of symbols that go into a work-product: if any symbols
178 // have been removed (or added) the hash will be different and
179 // we'll ignore the work-product then.
180 let retraced_edges: Vec<_> =
181 serialized_dep_graph.edges.iter()
182 .filter_map(|&(ref raw_source_node, ref raw_target_node)| {
183 retraced.map(raw_target_node)
184 .map(|target_node| (raw_source_node, target_node))
188 // Compute which work-products have an input that has changed or
189 // been removed. Put the dirty ones into a set.
190 let mut dirty_target_nodes = FxHashSet();
191 for &(raw_source_node, ref target_node) in &retraced_edges {
192 if dirty_raw_source_nodes.contains(raw_source_node) {
193 if !dirty_target_nodes.contains(target_node) {
194 dirty_target_nodes.insert(target_node.clone());
196 if tcx.sess.opts.debugging_opts.incremental_info {
197 // It'd be nice to pretty-print these paths better than just
198 // using the `Debug` impls, but wev.
199 println!("incremental: module {:?} is dirty because {:?} \
200 changed or was removed",
202 raw_source_node.map_def(|&index| {
203 Some(directory.def_path_string(tcx, index))
210 // For work-products that are still clean, add their deps into the
211 // graph. This is needed because later we will have to save this
// graph again, including the edges reused from this session.
213 let dep_graph = tcx.dep_graph.clone();
214 for (raw_source_node, target_node) in retraced_edges {
215 if dirty_target_nodes.contains(&target_node) {
// (Elided) dirty targets are skipped here — presumably `continue`.
// Clean edge: this retrace must succeed — the target retraced
// successfully above and dirty sources were filtered out — hence the
// `unwrap`.
219 let source_node = retraced.map(raw_source_node).unwrap();
221 debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source_node, target_node);
// Replay the edge: open a task for the target, then record a read of
// the source while it is active.
223 let _task = dep_graph.in_task(target_node);
224 dep_graph.read(source_node);
227 // Add in work-products that are still clean, and delete those that are
// dirty or otherwise no longer applicable.
229 reconcile_work_products(tcx, work_products, &dirty_target_nodes);
// Validate any `#[rustc_dirty]`/`#[rustc_clean]`-style test annotations
// against the computed dirty set — presumably test-only; TODO confirm.
231 dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_source_nodes, &retraced);
233 load_prev_metadata_hashes(tcx,
235 &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
239 /// Computes which of the original set of def-ids are dirty. Returned as
240 /// a set of raw `DepNode<DefPathIndex>`s (the `DirtyNodes` alias).
// NOTE(review): embedded numbering jumps below (244 -> 246, 254 -> 259,
// 261 -> 266, 267 -> 271, 272 -> 276, and the tail after 280) — the
// return type (presumably `-> DirtyNodes`), `continue` for clean nodes,
// trailing macro arguments, and closing braces are elided. Code is
// preserved byte-for-byte as shown.
241 fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
242 incremental_hashes_map: &IncrementalHashesMap,
243 serialized_hashes: &[SerializedHash],
244 retraced: &RetracedDefIdDirectory)
246 let mut hcx = HashContext::new(tcx, incremental_hashes_map);
247 let mut dirty_nodes = FxHashSet();
249 for hash in serialized_hashes {
// A node that still retraces to a current DefId: compare stored hash
// against the freshly computed one.
250 if let Some(dep_node) = retraced.map(&hash.dep_node) {
// `unwrap` assumes hashing a successfully retraced node cannot fail —
// TODO confirm invariant in `HashContext::hash`.
251 let current_hash = hcx.hash(&dep_node).unwrap();
252 if current_hash == hash.hash {
253 debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
254 dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
259 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
260 println!("node {:?} is dirty as hash is {:?} was {:?}",
261 dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
266 debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
267 dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
// (Presumably the `else` branch:) nodes whose def-path no longer
// retraces — i.e. removed definitions — are dirty as well.
271 if tcx.sess.opts.debugging_opts.incremental_dump_hash {
272 println!("node {:?} is dirty as it was removed",
276 debug!("initial_dirty_nodes: {:?} is dirty as it was removed",
// Either way (changed hash or removed), record the raw node as dirty.
280 dirty_nodes.insert(hash.dep_node.clone());
286 /// Go through the list of work-products produced in the previous run.
287 /// Delete any whose nodes have been found to be dirty or which are
288 /// otherwise no longer applicable.
// NOTE(review): embedded numbering jumps below (296 -> 298, 300 -> 302,
// 302 -> 304, 305 -> 312, 313 -> 315, and the tail after 316) — `else`
// headers, the file-existence check, and closing braces are elided.
// Code is preserved byte-for-byte as shown.
289 fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
290 work_products: Vec<SerializedWorkProduct>,
291 dirty_target_nodes: &FxHashSet<DepNode<DefId>>) {
292 debug!("reconcile_work_products({:?})", work_products);
293 for swp in work_products {
// A work product is dirty if its WorkProduct dep-node is in the set
// computed by `decode_dep_graph`.
294 if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) {
295 debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
296 delete_dirty_work_product(tcx, swp);
// (Presumably the `else` branch starts here:) even a clean node is only
// reusable if all of its saved files are still present on disk.
298 let mut all_files_exist = true;
299 for &(_, ref file_name) in swp.work_product.saved_files.iter() {
300 let path = in_incr_comp_dir_sess(tcx.sess, file_name);
// (Elided: the existence check guarding this assignment — presumably
// `if !path.exists() { ... }`.)
302 all_files_exist = false;
304 if tcx.sess.opts.debugging_opts.incremental_info {
305 println!("incremental: could not find file for up-to-date work product: {}",
// Clean and complete on disk: re-register the work product for reuse.
312 debug!("reconcile_work_products: all files for {:?} exist", swp);
313 tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
// Otherwise some file vanished; treat the work product as dirty.
315 debug!("reconcile_work_products: some file for {:?} does not exist", swp);
316 delete_dirty_work_product(tcx, swp);
// Remove the on-disk files saved for a dirty/stale work product.
// Deletion failures are reported rather than aborting — the elided
// `Err` match arm presumably passes the message below to a session
// error method.
// NOTE(review): embedded numbering jumps (327 -> 331, tail after 332) —
// the `Ok(())` arm, `Err` arm head, and closing braces are elided.
// `fs` is also not among the visible imports; the elided import lines
// presumably include `use std::fs;`. Code preserved byte-for-byte.
322 fn delete_dirty_work_product(tcx: TyCtxt,
323 swp: SerializedWorkProduct) {
324 debug!("delete_dirty_work_product({:?})", swp);
325 for &(_, ref file_name) in &swp.work_product.saved_files {
326 let path = in_incr_comp_dir_sess(tcx.sess, file_name);
327 match fs::remove_file(&path) {
331 &format!("file-system error deleting outdated file `{}`: {}",
332 path.display(), err));
// Load the metadata hashes written by the previous compilation session
// into `output`, keyed by retraced `DefId`. Gated on the
// `-Z query-dep-graph` debugging option (early-out below); missing or
// incompatible files are tolerated with debug logging only, but the
// final decode `unwrap`s (a corrupt-but-compatible file would panic —
// TODO confirm that is intended).
// NOTE(review): embedded numbering jumps below (341 -> 345, 351 -> 355,
// 358 -> 360, 366 -> 371, 383 -> 387) — early-`return` bodies, `Err`
// arm heads, and closing braces are elided. Code preserved
// byte-for-byte as shown.
338 fn load_prev_metadata_hashes(tcx: TyCtxt,
339 retraced: &RetracedDefIdDirectory,
340 output: &mut FxHashMap<DefId, Fingerprint>) {
341 if !tcx.sess.opts.debugging_opts.query_dep_graph {
345 debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
347 let file_path = metadata_hash_export_path(tcx.sess);
// No previous hashes file: nothing to load (elided body presumably
// returns after logging).
349 if !file_path.exists() {
350 debug!("load_prev_metadata_hashes() - Couldn't find file containing \
351 hashes at `{}`", file_path.display());
355 debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
357 let data = match file_format::read_file(tcx.sess, &file_path) {
358 Ok(Some(data)) => data,
// Incompatible-version and I/O-error cases are soft failures, logged at
// debug level only:
360 debug!("load_prev_metadata_hashes() - File produced by incompatible \
361 compiler version: {}", file_path.display());
365 debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
366 file_path.display(), err);
371 debug!("load_prev_metadata_hashes() - Decoding hashes");
372 let mut decoder = Decoder::new(&data, 0);
// A crate SVH is stored first in the stream; it is decoded and
// discarded here to advance the decoder.
373 let _ = Svh::decode(&mut decoder).unwrap();
374 let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
376 debug!("load_prev_metadata_hashes() - Mapping DefIds");
// The index map and hash list must describe the same set of entries.
378 assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
379 for serialized_hash in serialized_hashes.hashes {
380 let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
// Only definitions that still exist (retrace succeeds) are recorded.
381 if let Some(def_id) = retraced.def_id(def_path_index) {
// Each DefId may carry at most one previous hash.
382 let old = output.insert(def_id, serialized_hash.hash);
383 assert!(old.is_none(), "already have hash for {:?}", def_id);
387 debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
388 serialized_hashes.index_map.len());