TOOLS := compiletest rustdoc rustc rustbook error-index-generator
DEPS_core :=
+DEPS_alloc := core libc alloc_system
+DEPS_alloc_system := core libc
+DEPS_collections := core alloc rustc_unicode
DEPS_libc := core
+DEPS_rand := core
+DEPS_rustc_bitflags := core
DEPS_rustc_unicode := core
-DEPS_alloc := core libc alloc_system
+
DEPS_std := core libc rand alloc collections rustc_unicode \
native:rust_builtin native:backtrace \
alloc_system
+DEPS_arena := std
+DEPS_glob := std
+DEPS_flate := std native:miniz
+DEPS_fmt_macros := std
+DEPS_getopts := std
DEPS_graphviz := std
+DEPS_log := std
+DEPS_num := std
+DEPS_rbml := std log serialize
+DEPS_serialize := std log
+DEPS_term := std log
+DEPS_test := std getopts serialize rbml term native:rust_test_helpers
+
DEPS_syntax := std term serialize log fmt_macros arena libc rustc_bitflags
+
+DEPS_rustc := syntax flate arena serialize getopts rbml rustc_front \
+ log graphviz rustc_llvm rustc_back rustc_data_structures
+DEPS_rustc_back := std syntax rustc_llvm rustc_front flate log libc
+DEPS_rustc_borrowck := rustc rustc_front log graphviz syntax
+DEPS_rustc_data_structures := std log serialize
DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \
rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \
rustc_trans rustc_privacy rustc_lint rustc_front
-DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
- log syntax serialize rustc_llvm rustc_front rustc_platform_intrinsics
-DEPS_rustc_mir := rustc rustc_front syntax
-DEPS_rustc_typeck := rustc syntax rustc_front rustc_platform_intrinsics
-DEPS_rustc_borrowck := rustc rustc_front log graphviz syntax
-DEPS_rustc_resolve := rustc rustc_front log syntax
-DEPS_rustc_privacy := rustc rustc_front log syntax
DEPS_rustc_front := std syntax log serialize
DEPS_rustc_lint := rustc log syntax
-DEPS_rustc := syntax flate arena serialize getopts rbml rustc_front\
- log graphviz rustc_llvm rustc_back rustc_data_structures
DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags
+DEPS_rustc_mir := rustc rustc_front syntax
+DEPS_rustc_resolve := rustc rustc_front log syntax
DEPS_rustc_platform_intrinsics := rustc rustc_llvm
-DEPS_rustc_back := std syntax rustc_llvm rustc_front flate log libc
-DEPS_rustc_data_structures := std log serialize
+DEPS_rustc_privacy := rustc rustc_front log syntax
+DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
+ log syntax serialize rustc_llvm rustc_front rustc_platform_intrinsics
+DEPS_rustc_typeck := rustc syntax rustc_front rustc_platform_intrinsics
+
DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts \
test rustc_lint rustc_front
-DEPS_rustc_bitflags := core
-DEPS_flate := std native:miniz
-DEPS_arena := std
-DEPS_graphviz := std
-DEPS_glob := std
-DEPS_serialize := std log
-DEPS_rbml := std log serialize
-DEPS_term := std log
-DEPS_getopts := std
-DEPS_collections := core alloc rustc_unicode
-DEPS_num := std
-DEPS_test := std getopts serialize rbml term native:rust_test_helpers
-DEPS_rand := core
-DEPS_log := std
-DEPS_fmt_macros = std
-DEPS_alloc_system := core libc
+
TOOL_DEPS_compiletest := test getopts
TOOL_DEPS_rustdoc := rustdoc
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn;
}
-struct NoAnn<'ast, 'tcx> {
- sess: &'tcx Session,
+struct NoAnn<'ast> {
+ sess: &'ast Session,
ast_map: Option<hir_map::Map<'ast>>
}
-impl<'ast, 'tcx> PrinterSupport<'ast> for NoAnn<'ast, 'tcx> {
+impl<'ast> PrinterSupport<'ast> for NoAnn<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self }
}
-impl<'ast, 'tcx> HirPrinterSupport<'ast> for NoAnn<'ast, 'tcx> {
+impl<'ast> HirPrinterSupport<'ast> for NoAnn<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn { self }
}
-impl<'ast, 'tcx> pprust::PpAnn for NoAnn<'ast, 'tcx> {}
-impl<'ast, 'tcx> pprust_hir::PpAnn for NoAnn<'ast, 'tcx> {}
+impl<'ast> pprust::PpAnn for NoAnn<'ast> {}
+impl<'ast> pprust_hir::PpAnn for NoAnn<'ast> {}
-struct IdentifiedAnnotation<'ast, 'tcx> {
- sess: &'tcx Session,
+struct IdentifiedAnnotation<'ast> {
+ sess: &'ast Session,
ast_map: Option<hir_map::Map<'ast>>,
}
-impl<'ast, 'tcx> PrinterSupport<'ast> for IdentifiedAnnotation<'ast, 'tcx> {
+impl<'ast> PrinterSupport<'ast> for IdentifiedAnnotation<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self }
}
-impl<'ast, 'tcx> pprust::PpAnn for IdentifiedAnnotation<'ast, 'tcx> {
+impl<'ast> pprust::PpAnn for IdentifiedAnnotation<'ast> {
fn pre(&self,
s: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> {
}
}
-impl<'ast, 'tcx> HirPrinterSupport<'ast> for IdentifiedAnnotation<'ast, 'tcx> {
+impl<'ast> HirPrinterSupport<'ast> for IdentifiedAnnotation<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn { self }
}
-impl<'ast, 'tcx> pprust_hir::PpAnn for IdentifiedAnnotation<'ast, 'tcx> {
+impl<'ast> pprust_hir::PpAnn for IdentifiedAnnotation<'ast> {
fn pre(&self,
s: &mut pprust_hir::State,
node: pprust_hir::AnnNode) -> io::Result<()> {
}
}
-struct HygieneAnnotation<'ast, 'tcx> {
- sess: &'tcx Session,
+struct HygieneAnnotation<'ast> {
+ sess: &'ast Session,
ast_map: Option<hir_map::Map<'ast>>,
}
-impl<'ast, 'tcx> PrinterSupport<'ast> for HygieneAnnotation<'ast, 'tcx> {
+impl<'ast> PrinterSupport<'ast> for HygieneAnnotation<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self }
}
-impl<'ast, 'tcx> pprust::PpAnn for HygieneAnnotation<'ast, 'tcx> {
+impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
fn post(&self,
s: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> {
// 'folding' an existing one), then you create a new id using `next_id()`.
//
// You must ensure that ids are unique. That means that you should only use the
-// is from an AST node in a single HIR node (you can assume that AST node ids
+// id from an AST node in a single HIR node (you can assume that AST node ids
// are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
-// If you do, you must then set one of the node's id to a fresh one.
+// If you do, you must then set the new node's id to a fresh one.
//
// Lowering must be reproducible (the compiler only lowers once, but tools and
// custom lints may lower an AST node to a HIR node to interact with the
-// compiler). The only interesting bit of this is ids - if you lower an AST node
+// compiler). The most interesting bit of this is ids - if you lower an AST node
// and create new HIR nodes with fresh ids, when re-lowering the same node, you
// must ensure you get the same ids! To do this, we keep track of the next id
// when we translate a node which requires new ids. By checking this cache and
// all increments being for lowering. This means that you should not call any
// non-lowering function which will use new node ids.
//
+// We must also cache gensym'ed Idents to ensure that we get the same Ident
+// every time we lower a node with gensym'ed names. One consequence of this is
+// that you can only gensym a name once in a lowering (you don't need to worry
+// about nested lowering though). That's because we cache based on the name and
+// the currently cached node id, which is unique per lowered node.
+//
// Spans are used for error messages and for tools to map semantics back to
// source code. It is therefore not as important with spans as ids to be strict
// about use (you can't break the compiler by screwing up a span). Obviously, a
// 0 == no cached id. Must be incremented to align with previous id
// incrementing.
cached_id: Cell<u32>,
+ // Keep track of gensym'ed idents.
+ gensym_cache: RefCell<HashMap<(NodeId, &'static str), Ident>>,
+ // A copy of cached_id, but is also set to an id while it is being cached.
+ gensym_key: Cell<u32>,
}
impl<'a, 'hir> LoweringContext<'a> {
id_cache: RefCell::new(HashMap::new()),
id_assigner: id_assigner,
cached_id: Cell::new(0),
+ gensym_cache: RefCell::new(HashMap::new()),
+ gensym_key: Cell::new(0),
}
}
self.cached_id.set(cached + 1);
cached
}
+
+ fn str_to_ident(&self, s: &'static str) -> Ident {
+ let cached_id = self.gensym_key.get();
+ if cached_id == 0 {
+ return token::gensym_ident(s);
+ }
+
+ let cached = self.gensym_cache.borrow().contains_key(&(cached_id, s));
+ if cached {
+ self.gensym_cache.borrow()[&(cached_id, s)]
+ } else {
+ let result = token::gensym_ident(s);
+ self.gensym_cache.borrow_mut().insert((cached_id, s), result);
+ result
+ }
+ }
}
pub fn lower_view_path(_lctx: &LoweringContext, view_path: &ViewPath) -> P<hir::ViewPath> {
impl<'a> CachedIdSetter<'a> {
fn new(lctx: &'a LoweringContext, expr_id: NodeId) -> CachedIdSetter<'a> {
+ // Only reset the id if it was previously 0, i.e., was not cached.
+ // If it was cached, we are in a nested node, but our id count will
+ // still count towards the parent's count.
+ let reset_cached_id = lctx.cached_id.get() == 0;
+
let id_cache: &mut HashMap<_, _> = &mut lctx.id_cache.borrow_mut();
if id_cache.contains_key(&expr_id) {
// We're entering a node where we need to track ids, but are not
// yet tracking.
lctx.cached_id.set(id_cache[&expr_id]);
+ lctx.gensym_key.set(id_cache[&expr_id]);
} else {
// We're already tracking - check that the tracked id is the same
// as the expected id.
assert!(cached_id == id_cache[&expr_id], "id mismatch");
}
} else {
- id_cache.insert(expr_id, lctx.id_assigner.peek_node_id());
+ let next_id = lctx.id_assigner.peek_node_id();
+ id_cache.insert(expr_id, next_id);
+ lctx.gensym_key.set(next_id);
}
CachedIdSetter {
- // Only reset the id if it was previously 0, i.e., was not cached.
- // If it was cached, we are in a nested node, but our id count will
- // still count towards the parent's count.
- reset: lctx.cached_id.get() == 0,
+ reset: reset_cached_id,
lctx: lctx,
}
}
fn drop(&mut self) {
if self.reset {
self.lctx.cached_id.set(0);
+ self.lctx.gensym_key.set(0);
}
}
}
let placer_expr = lower_expr(lctx, placer);
let value_expr = lower_expr(lctx, value_expr);
- let placer_ident = token::gensym_ident("placer");
- let agent_ident = token::gensym_ident("place");
- let p_ptr_ident = token::gensym_ident("p_ptr");
+ let placer_ident = lctx.str_to_ident("placer");
+ let agent_ident = lctx.str_to_ident("place");
+ let p_ptr_ident = lctx.str_to_ident("p_ptr");
let make_place = ["ops", "Placer", "make_place"];
let place_pointer = ["ops", "Place", "pointer"];
let mk_stmt_let_mut = |lctx, bind, expr| stmt_let(lctx, e.span, true, bind, expr);
// let placer = <placer_expr> ;
- let s1 = mk_stmt_let(lctx, placer_ident, placer_expr);
+ let s1 = mk_stmt_let(lctx,
+ placer_ident,
+ signal_block_expr(lctx,
+ vec![],
+ placer_expr,
+ e.span,
+ hir::PopUnstableBlock));
// let mut place = Placer::make_place(placer);
let s2 = {
// expand <head>
let head = lower_expr(lctx, head);
- let iter = token::gensym_ident("iter");
+ let iter = lctx.str_to_ident("iter");
// `::std::option::Option::Some(<pat>) => <body>`
let pat_arm = {
let match_expr = expr_match(lctx, e.span, into_iter_expr, vec![iter_arm]);
// `{ let result = ...; result }`
- let result_ident = token::gensym_ident("result");
+ let result_ident = lctx.str_to_ident("result");
return expr_block(lctx,
block_all(lctx,
e.span,
expr: Some(expr),
}))
}
+
+
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use syntax::ast::{self, NodeId, NodeIdAssigner};
+ use syntax::{parse, codemap};
+ use syntax::fold::Folder;
+ use std::cell::Cell;
+
+ struct MockAssigner {
+ next_id: Cell<NodeId>,
+ }
+
+ impl MockAssigner {
+ fn new() -> MockAssigner {
+ MockAssigner {
+ next_id: Cell::new(0),
+ }
+ }
+ }
+
+ trait FakeExtCtxt {
+ fn call_site(&self) -> codemap::Span;
+ fn cfg(&self) -> ast::CrateConfig;
+ fn ident_of(&self, st: &str) -> ast::Ident;
+ fn name_of(&self, st: &str) -> ast::Name;
+ fn parse_sess(&self) -> &parse::ParseSess;
+ }
+
+ impl FakeExtCtxt for parse::ParseSess {
+ fn call_site(&self) -> codemap::Span {
+ codemap::Span {
+ lo: codemap::BytePos(0),
+ hi: codemap::BytePos(0),
+ expn_id: codemap::NO_EXPANSION,
+ }
+ }
+ fn cfg(&self) -> ast::CrateConfig { Vec::new() }
+ fn ident_of(&self, st: &str) -> ast::Ident {
+ parse::token::str_to_ident(st)
+ }
+ fn name_of(&self, st: &str) -> ast::Name {
+ parse::token::intern(st)
+ }
+ fn parse_sess(&self) -> &parse::ParseSess { self }
+ }
+
+ impl NodeIdAssigner for MockAssigner {
+ fn next_node_id(&self) -> NodeId {
+ let result = self.next_id.get();
+ self.next_id.set(result + 1);
+ result
+ }
+
+ fn peek_node_id(&self) -> NodeId {
+ self.next_id.get()
+ }
+ }
+
+ impl Folder for MockAssigner {
+ fn new_id(&mut self, old_id: NodeId) -> NodeId {
+ assert_eq!(old_id, ast::DUMMY_NODE_ID);
+ self.next_node_id()
+ }
+ }
+
+ #[test]
+ fn test_preserves_ids() {
+ let cx = parse::ParseSess::new();
+ let mut assigner = MockAssigner::new();
+
+ let ast_if_let = quote_expr!(&cx, if let Some(foo) = baz { bar(foo); });
+ let ast_if_let = assigner.fold_expr(ast_if_let);
+ let ast_while_let = quote_expr!(&cx, while let Some(foo) = baz { bar(foo); });
+ let ast_while_let = assigner.fold_expr(ast_while_let);
+ let ast_for = quote_expr!(&cx, for i in 0..10 { foo(i); });
+ let ast_for = assigner.fold_expr(ast_for);
+ let ast_in = quote_expr!(&cx, in HEAP { foo() });
+ let ast_in = assigner.fold_expr(ast_in);
+
+ let lctx = LoweringContext::new(&assigner, None);
+ let hir1 = lower_expr(&lctx, &ast_if_let);
+ let hir2 = lower_expr(&lctx, &ast_if_let);
+ assert!(hir1 == hir2);
+
+ let hir1 = lower_expr(&lctx, &ast_while_let);
+ let hir2 = lower_expr(&lctx, &ast_while_let);
+ assert!(hir1 == hir2);
+
+ let hir1 = lower_expr(&lctx, &ast_for);
+ let hir2 = lower_expr(&lctx, &ast_for);
+ assert!(hir1 == hir2);
+
+ let hir1 = lower_expr(&lctx, &ast_in);
+ let hir2 = lower_expr(&lctx, &ast_in);
+ assert!(hir1 == hir2);
+ }
+}