/// All NodeIds that are numerically greater or equal to this value come
/// from inlined items.
local_node_id_watermark: NodeId,
+
+ /// All def-indices that are numerically greater or equal to this value come
+ /// from inlined items.
+ local_def_id_watermark: usize,
}
impl<'ast> Map<'ast> {
+ /// Returns true if `id` is the def-id of the local copy of an item
+ /// that was inlined from another crate: all def-indices at or above
+ /// `local_def_id_watermark` were allocated for inlined items.
+ pub fn is_inlined_def_id(&self, id: DefId) -> bool {
+ id.is_local() && id.index.as_usize() >= self.local_def_id_watermark
+ }
+
+ /// Returns true if `id` is the NodeId of an inlined item: all node-ids
+ /// at or above `local_node_id_watermark` were allocated for inlined HIR.
+ pub fn is_inlined_node_id(&self, id: NodeId) -> bool {
+ id >= self.local_node_id_watermark
+ }
+
/// Registers a read in the dependency graph of the AST node with
/// the given `id`. This needs to be called each time a public
/// function returns the HIR for a node -- in other words, when it
pub fn node_to_user_string(&self, id: NodeId) -> String {
node_id_to_string(self, id, false)
}
-
- pub fn is_inlined(&self, id: NodeId) -> bool {
- id >= self.local_node_id_watermark
- }
}
pub struct NodesMatchingSuffix<'a, 'ast:'a> {
}
let local_node_id_watermark = map.len() as NodeId;
+ let local_def_id_watermark = definitions.len();
Map {
forest: forest,
dep_graph: forest.dep_graph.clone(),
map: RefCell::new(map),
definitions: RefCell::new(definitions),
- local_node_id_watermark: local_node_id_watermark
+ local_node_id_watermark: local_node_id_watermark,
+ local_def_id_watermark: local_def_id_watermark,
}
}
}
pub fn retrace_path(self, path: &DefPath) -> Option<DefId> {
- debug!("retrace_path(path={:?})", path);
+ debug!("retrace_path(path={:?}, krate={:?})", path, self.crate_name(path.krate));
let root_key = DefKey {
parent: None,
match *dep_node {
// HIR nodes (which always come from our crate) are an input:
DepNode::Hir(def_id) => {
- assert!(def_id.is_local());
Some(self.hir_hash(def_id))
}
}
fn hir_hash(&mut self, def_id: DefId) -> u64 {
- assert!(def_id.is_local());
+ // HIR only exists for local items, so asking for the HIR hash of a
+ // non-local def-id is a bug (e.g. an inlined def-id that retraces to
+ // another crate); include the item path in the panic message so the
+ // offending item is easy to identify.
+ assert!(def_id.is_local(),
+ "cannot hash HIR for non-local def-id {:?} => {:?}",
+ def_id,
+ self.tcx.item_path_str(def_id));
+
// FIXME(#32753) -- should we use a distinct hash here
self.tcx.calculate_item_hash(def_id)
}
None
}
}
-
}
/// Decode the dep graph and load the edges/nodes that are still clean
let directory = try!(DefIdDirectory::decode(&mut dep_graph_decoder));
let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut dep_graph_decoder));
- debug!("decode_dep_graph: directory = {:#?}", directory);
- debug!("decode_dep_graph: serialized_dep_graph = {:#?}", serialized_dep_graph);
-
// Retrace the paths in the directory to find their current location (if any).
let retraced = directory.retrace(tcx);
- debug!("decode_dep_graph: retraced = {:#?}", retraced);
-
// Compute the set of Hir nodes whose data has changed.
let mut dirty_nodes =
initial_dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
let mut items_removed = false;
let mut dirty_nodes = FnvHashSet();
for hash in hashes {
+ debug!("initial_dirty_nodes: retracing {:?}", hash);
match hash.node.map_def(|&i| retraced.def_id(i)) {
Some(dep_node) => {
let current_hash = hcx.hash(&dep_node).unwrap();
// except according to those terms.
use rbml::opaque::Encoder;
-use rustc::dep_graph::DepNode;
+use rustc::dep_graph::{DepGraphQuery, DepNode};
+use rustc::hir::def_id::DefId;
use rustc::middle::cstore::LOCAL_CRATE;
use rustc::session::Session;
use rustc::ty::TyCtxt;
+use rustc_data_structures::fnv::FnvHashMap;
use rustc_serialize::{Encodable as RustcEncodable};
use std::hash::{Hasher, SipHasher};
use std::io::{self, Cursor, Write};
{
let tcx = hcx.tcx;
let query = tcx.dep_graph.query();
+ let (nodes, edges) = post_process_graph(hcx, query);
let mut builder = DefIdDirectoryBuilder::new(tcx);
// Create hashes for inputs.
let hashes =
- query.nodes()
- .into_iter()
+ nodes.iter()
.filter_map(|dep_node| {
- hcx.hash(&dep_node)
+ hcx.hash(dep_node)
.map(|hash| {
let node = builder.map(dep_node);
SerializedHash { node: node, hash: hash }
// Create the serialized dep-graph.
let graph = SerializedDepGraph {
- nodes: query.nodes().into_iter()
- .map(|node| builder.map(node))
- .collect(),
- edges: query.edges().into_iter()
- .map(|(source_node, target_node)| {
- let source = builder.map(source_node);
- let target = builder.map(target_node);
- (source, target)
- })
- .collect(),
+ nodes: nodes.iter().map(|node| builder.map(node)).collect(),
+ edges: edges.iter()
+ .map(|&(ref source_node, ref target_node)| {
+ let source = builder.map(source_node);
+ let target = builder.map(target_node);
+ (source, target)
+ })
+ .collect(),
hashes: hashes,
};
Ok(())
}
+/// Rewrites the dep-graph prior to serialization so that it never refers
+/// to the HIR of an inlined item: each `DepNode::Hir(X)` where `X` is the
+/// local def-id of an inlined item is replaced by `DepNode::MetaData(Y)`,
+/// where `Y` is the def-id recovered by retracing the item's def-path
+/// back to its crate of origin. Returns the transformed node and edge
+/// lists; all other dep-nodes pass through unchanged.
+pub fn post_process_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
+ query: DepGraphQuery<DefId>)
+ -> (Vec<DepNode<DefId>>, Vec<(DepNode<DefId>, DepNode<DefId>)>)
+{
+ let tcx = hcx.tcx;
+ let mut cache = FnvHashMap();
+
+ // Maps the def-id of an inlined item back to the def-id it was inlined
+ // from (`None` for non-inlined def-ids). Memoized in `cache` so each
+ // def-path is retraced at most once.
+ let mut uninline_def_id = |def_id: DefId| -> Option<DefId> {
+ if tcx.map.is_inlined_def_id(def_id) {
+ Some(
+ cache.entry(def_id)
+ .or_insert_with(|| {
+ let def_path = tcx.def_path(def_id);
+ debug!("post_process_graph: uninlining def-id {:?} to yield {:?}",
+ def_id, def_path);
+ // NOTE(review): unwrap assumes every inlined item's
+ // def-path retraces to a def-id in its source crate.
+ let retraced_def_id = tcx.retrace_path(&def_path).unwrap();
+ debug!("post_process_graph: retraced to {:?}", retraced_def_id);
+ retraced_def_id
+ })
+ .clone())
+ } else {
+ None
+ }
+ };
+
+ // Applies the rewrite to a single dep-node: only `Hir` nodes of inlined
+ // def-ids change; everything else is cloned as-is.
+ let mut uninline_metadata = |node: &DepNode<DefId>| -> DepNode<DefId> {
+ match *node {
+ DepNode::Hir(def_id) => {
+ match uninline_def_id(def_id) {
+ Some(uninlined_def_id) => DepNode::MetaData(uninlined_def_id),
+ None => DepNode::Hir(def_id)
+ }
+ }
+ _ => node.clone()
+ }
+ };
+
+ let nodes = query.nodes()
+ .into_iter()
+ .map(|node| uninline_metadata(node))
+ .collect();
+
+ let edges = query.edges()
+ .into_iter()
+ .map(|(from, to)| (uninline_metadata(from), uninline_metadata(to)))
+ .collect();
+
+ (nodes, edges)
+}
+
+
pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
encoder: &mut Encoder)
-> io::Result<()>
}), ..}) => ty,
_ => ctor_ty
}.ty_adt_def().unwrap();
- let variant_def_id = if ccx.tcx().map.is_inlined(inlined_vid) {
+ let variant_def_id = if ccx.tcx().map.is_inlined_node_id(inlined_vid) {
ccx.defid_for_inlined_node(inlined_vid).unwrap()
} else {
ccx.tcx().map.local_def_id(inlined_vid)
.get(TransItem::Static(id))
.expect("Local statics should always be in the SymbolMap");
// Make sure that this is never executed for something inlined.
- assert!(!ccx.tcx().map.is_inlined(id));
+ assert!(!ccx.tcx().map.is_inlined_node_id(id));
let defined_in_current_codegen_unit = ccx.codegen_unit()
.items()
// First, find out the 'real' def_id of the type. Items inlined from
// other crates have to be mapped back to their source.
let def_id = if let Some(node_id) = cx.tcx().map.as_local_node_id(def_id) {
- if cx.tcx().map.is_inlined(node_id) {
+ if cx.tcx().map.is_inlined_node_id(node_id) {
// The given def_id identifies the inlined copy of a
// type definition, let's take the source of the copy.
cx.defid_for_inlined_node(node_id).unwrap()
// crate should already contain debuginfo for it. More importantly, the
// global might not even exist in un-inlined form anywhere which would lead
// to a linker errors.
- if cx.tcx().map.is_inlined(node_id) {
+ if cx.tcx().map.is_inlined_node_id(node_id) {
return;
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #34991: an ICE occurred here because we inline
+// some of the vector routines and give them a local def-id `X`. This
+// got hashed after trans (`Hir(X)`). When we load back up, we get an
+// error because the `X` is remapped to the original def-id (in
+// libstd), and we can't hash a HIR node from std.
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+use std::vec::Vec;
+
+// `vec![]` pulls inlined copies of libstd's vector routines into this
+// crate under fresh local def-ids -- the trigger for the original ICE,
+// where a `Hir(X)` dep-node was recorded for a def-id that retraces to
+// libstd.
+pub fn foo() -> Vec<i32> {
+ vec![1, 2, 3]
+}
+
+pub fn bar() {
+ foo();
+}
+
+pub fn main() {
+ bar();
+}