1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Lowers the AST to the HIR.
13 //! Since the AST and HIR are fairly similar, this is mostly a simple procedure,
14 //! much like a fold. Where lowering involves a bit more work things get more
15 //! interesting and there are some invariants you should know about. These mostly
16 //! concern spans and ids.
18 //! Spans are assigned to AST nodes during parsing and then are modified during
19 //! expansion to indicate the origin of a node and the process it went through
20 //! being expanded. Ids are assigned to AST nodes just before lowering.
22 //! For the simpler lowering steps, ids and spans should be preserved. Unlike
23 //! expansion we do not preserve the process of lowering in the spans, so spans
24 //! should not be modified here. When creating a new node (as opposed to
25 //! 'folding' an existing one), then you create a new id using `next_id()`.
27 //! You must ensure that ids are unique. That means that you should only use the
28 //! id from an AST node in a single HIR node (you can assume that AST node ids
29 //! are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
30 //! If you do, you must then set the new node's id to a fresh one.
32 //! Spans are used for error messages and for tools to map semantics back to
33 //! source code. Spans therefore need not be treated as strictly as ids (you
34 //! can't break the compiler by getting a span wrong). Obviously, a
35 //! HIR node can only have a single span. But multiple nodes can have the same
36 //! span and spans don't need to be kept in order, etc. Where code is preserved
37 //! by lowering, it should have the same span as in the AST. Where HIR nodes are
38 //! new it is probably best to give a span for the whole AST node being lowered.
39 //! All nodes should have real spans, don't use dummy spans. Tools are likely to
40 //! get confused if the spans from leaf AST nodes occur in multiple places
41 //! in the HIR, especially for multiple identifiers.
44 use hir::map::{Definitions, DefKey};
45 use hir::def_id::{DefIndex, DefId, CRATE_DEF_INDEX};
46 use hir::def::{Def, PathResolution};
47 use lint::builtin::PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES;
48 use rustc_data_structures::indexed_vec::IndexVec;
50 use util::common::FN_OUTPUT_NAME;
51 use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
53 use std::collections::BTreeMap;
60 use syntax::ext::hygiene::{Mark, SyntaxContext};
62 use syntax::codemap::{self, respan, Spanned, CompilerDesugaringKind};
63 use syntax::std_inject;
64 use syntax::symbol::{Symbol, keywords};
65 use syntax::util::small_vector::SmallVector;
66 use syntax::visit::{self, Visitor};
// Sentinel stored in `item_local_id_counters` while an owner's counter is
// "checked out" onto the `current_hir_id_owner` stack; see `with_hir_id_owner`,
// which asserts it when writing the counter back.
69 const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF;

// Carries all state accumulated while lowering one crate from AST to HIR.
// NOTE(review): this listing elides lines; some fields may be missing here.
71 pub struct LoweringContext<'a> {
72 crate_root: Option<&'static str>,
74 // Use to assign ids to hir nodes that do not directly correspond to an ast node
77 // As we walk the AST we must keep track of the current 'parent' def id (in
78 // the form of a DefIndex) so that if we create a new node which introduces
79 // a definition, then we can properly create the def id.
80 parent_def: Option<DefIndex>,
81 resolver: &'a mut Resolver,
// Memoizes the `Symbol::from_ident` conversions done in `lower_ident`.
82 name_map: FxHashMap<Ident, Name>,
84 /// The items being lowered are collected here.
85 items: BTreeMap<NodeId, hir::Item>,
87 trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
88 impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
89 bodies: BTreeMap<hir::BodyId, hir::Body>,
90 exported_macros: Vec<hir::MacroDef>,
92 trait_impls: BTreeMap<DefId, Vec<NodeId>>,
93 trait_default_impl: BTreeMap<DefId, NodeId>,
// Stacks of enclosing `catch` / loop scopes, maintained by `with_catch_scope`
// and `with_loop_scope`, used to resolve break/continue-style targets.
97 catch_scopes: Vec<NodeId>,
98 loop_scopes: Vec<NodeId>,
99 is_in_loop_condition: bool,
// Lifetime-parameter counts per type/trait def; populated eagerly for local
// defs by `MiscCollector` and lazily for external defs in `lower_qpath`.
101 type_def_lifetime_params: DefIdMap<usize>,
// Stack of (owner DefIndex, next local id) pairs used to mint `HirId`s; the
// bottom entry is the crate root.
103 current_hir_id_owner: Vec<(DefIndex, u32)>,
104 item_local_id_counters: NodeMap<u32>,
105 node_id_to_hir_id: IndexVec<NodeId, hir::HirId>,

// NOTE(review): the `trait Resolver` declaration line is elided from this
// listing; the three signatures below are its methods.
109 /// Resolve a hir path generated by the lowerer when expanding `for`, `if let`, etc.
110 fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool);
112 /// Obtain the resolution for a node id
113 fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>;
115 /// We must keep the set of definitions up to date as we add nodes that weren't in the AST.
116 /// This should only return `None` during testing.
117 fn definitions(&mut self) -> &mut Definitions;
// Free-function entry point that lowers a whole AST crate to HIR.
// NOTE(review): parts of the signature and body are elided in this listing
// (e.g. the `krate` parameter and the return expression are not visible).
120 pub fn lower_crate(sess: &Session,
122 resolver: &mut Resolver)
124 // We're constructing the HIR here; we don't care what we will
125 // read, since we haven't even constructed the *input* to
127 let _ignore = sess.dep_graph.in_ignore();
// Build the initial `LoweringContext`: all collection maps start empty and
// the HirId-owner stack starts at the crate root with a zeroed counter.
130 crate_root: std_inject::injected_crate_name(krate),
134 name_map: FxHashMap(),
135 items: BTreeMap::new(),
136 trait_items: BTreeMap::new(),
137 impl_items: BTreeMap::new(),
138 bodies: BTreeMap::new(),
139 trait_impls: BTreeMap::new(),
140 trait_default_impl: BTreeMap::new(),
141 exported_macros: Vec::new(),
142 catch_scopes: Vec::new(),
143 loop_scopes: Vec::new(),
144 is_in_loop_condition: false,
145 type_def_lifetime_params: DefIdMap(),
146 current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)],
147 item_local_id_counters: NodeMap(),
148 node_id_to_hir_id: IndexVec::new(),

// Auxiliary types (variants/fields partially elided in this listing).
153 #[derive(Copy, Clone, PartialEq, Eq)]
155 /// Any path in a type context.
157 /// The `module::Type` in `module::Type::method` in an expression.
// Pairs a NodeId with the HirId allocated for it by the lowering machinery.
161 struct LoweredNodeId {
// Whether parenthesized generic args (`Trait(Args)`) are allowed, warned
// about, or rejected at a given path position; consumed by
// `lower_path_segment`.
166 enum ParenthesizedGenericArgs {
172 impl<'a> LoweringContext<'a> {
// Main driver: runs two full-crate visitor passes (collect metadata, then
// lower items), then assembles and returns the `hir::Crate`.
173 fn lower_crate(mut self, c: &Crate) -> hir::Crate {
174 /// Full-crate AST visitor that inserts into a fresh
175 /// `LoweringContext` any information that may be
176 /// needed from arbitrary locations in the crate.
177 /// E.g. The number of lifetime generic parameters
178 /// declared for every type and trait definition.
179 struct MiscCollector<'lcx, 'interner: 'lcx> {
180 lctx: &'lcx mut LoweringContext<'interner>,
183 impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> {
184 fn visit_item(&mut self, item: &'lcx Item) {
// Every item is a HirId owner, so reserve its local-id counter up front.
185 self.lctx.allocate_hir_id_counter(item.id, item);
188 ItemKind::Struct(_, ref generics) |
189 ItemKind::Union(_, ref generics) |
190 ItemKind::Enum(_, ref generics) |
191 ItemKind::Ty(_, ref generics) |
192 ItemKind::Trait(_, ref generics, ..) => {
// Record how many lifetime params each type-like item declares; used later
// for lifetime elision in `lower_qpath`.
193 let def_id = self.lctx.resolver.definitions().local_def_id(item.id);
194 let count = generics.lifetimes.len();
195 self.lctx.type_def_lifetime_params.insert(def_id, count);
199 visit::walk_item(self, item);
202 fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
203 self.lctx.allocate_hir_id_counter(item.id, item);
204 visit::walk_trait_item(self, item);
207 fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
208 self.lctx.allocate_hir_id_counter(item.id, item);
209 visit::walk_impl_item(self, item);
// Second pass: actually lowers every item (and trait/impl items) into the
// context's collection maps, with HirIds minted under the right owner.
213 struct ItemLowerer<'lcx, 'interner: 'lcx> {
214 lctx: &'lcx mut LoweringContext<'interner>,
217 impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
218 fn visit_item(&mut self, item: &'lcx Item) {
219 let mut item_lowered = true;
220 self.lctx.with_hir_id_owner(item.id, |lctx| {
// `lower_item` may return `None` (item dropped during lowering).
221 if let Some(hir_item) = lctx.lower_item(item) {
222 lctx.items.insert(item.id, hir_item);
224 item_lowered = false;
229 visit::walk_item(self, item);
233 fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
234 self.lctx.with_hir_id_owner(item.id, |lctx| {
235 let id = hir::TraitItemId { node_id: item.id };
236 let hir_item = lctx.lower_trait_item(item);
237 lctx.trait_items.insert(id, hir_item);
240 visit::walk_trait_item(self, item);
243 fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
244 self.lctx.with_hir_id_owner(item.id, |lctx| {
245 let id = hir::ImplItemId { node_id: item.id };
246 let hir_item = lctx.lower_impl_item(item);
247 lctx.impl_items.insert(id, hir_item);
249 visit::walk_impl_item(self, item);
// Lower the crate node itself first so it gets the well-known CRATE_HIR_ID.
253 self.lower_node_id(CRATE_NODE_ID);
254 debug_assert!(self.node_id_to_hir_id[CRATE_NODE_ID] == hir::CRATE_HIR_ID);
256 visit::walk_crate(&mut MiscCollector { lctx: &mut self }, c);
257 visit::walk_crate(&mut ItemLowerer { lctx: &mut self }, c);
// Lower the top-level module/attrs and hand the NodeId->HirId table to the
// definitions table before assembling the final `hir::Crate`.
259 let module = self.lower_mod(&c.module);
260 let attrs = self.lower_attrs(&c.attrs);
261 let body_ids = body_ids(&self.bodies);
265 .init_node_id_to_hir_id_mapping(self.node_id_to_hir_id);
271 exported_macros: hir::HirVec::from(self.exported_macros),
273 trait_items: self.trait_items,
274 impl_items: self.impl_items,
277 trait_impls: self.trait_impls,
278 trait_default_impl: self.trait_default_impl,
// Registers `owner` as a HirId owner by creating its local-id counter.
// Panics (via `bug!`) if called twice for the same owner.
282 fn allocate_hir_id_counter<T: Debug>(&mut self,
285 if self.item_local_id_counters.insert(owner, 0).is_some() {
286 bug!("Tried to allocate item_local_id_counter for {:?} twice", debug);
288 // Always allocate the first HirId for the owner itself
289 self.lower_node_id_with_owner(owner, owner);
// Shared core of NodeId -> HirId mapping: returns the existing mapping if
// one was already made, otherwise calls `alloc_hir_id` to mint a new one.
// DUMMY_NODE_ID maps to DUMMY_HIR_ID without touching the table.
292 fn lower_node_id_generic<F>(&mut self,
296 where F: FnOnce(&mut Self) -> hir::HirId
298 if ast_node_id == DUMMY_NODE_ID {
299 return LoweredNodeId {
300 node_id: DUMMY_NODE_ID,
301 hir_id: hir::DUMMY_HIR_ID,
// Grow the side table on demand; unmapped slots hold DUMMY_HIR_ID.
305 let min_size = ast_node_id.as_usize() + 1;
307 if min_size > self.node_id_to_hir_id.len() {
308 self.node_id_to_hir_id.resize(min_size, hir::DUMMY_HIR_ID);
311 let existing_hir_id = self.node_id_to_hir_id[ast_node_id];
313 if existing_hir_id == hir::DUMMY_HIR_ID {
314 // Generate a new HirId
315 let hir_id = alloc_hir_id(self);
316 self.node_id_to_hir_id[ast_node_id] = hir_id;
318 node_id: ast_node_id,
323 node_id: ast_node_id,
324 hir_id: existing_hir_id,
// Runs `f` with `owner` pushed as the current HirId owner. The owner's
// counter is moved from the map onto the stack (replaced by the LOCKED
// sentinel) so concurrent use of map and stack can't lose updates, then
// written back afterwards; the debug_asserts check stack discipline.
329 fn with_hir_id_owner<F>(&mut self, owner: NodeId, f: F)
330 where F: FnOnce(&mut Self)
332 let counter = self.item_local_id_counters
333 .insert(owner, HIR_ID_COUNTER_LOCKED)
335 let def_index = self.resolver.definitions().opt_def_index(owner).unwrap();
336 self.current_hir_id_owner.push((def_index, counter));
338 let (new_def_index, new_counter) = self.current_hir_id_owner.pop().unwrap();
340 debug_assert!(def_index == new_def_index);
341 debug_assert!(new_counter >= counter);
343 let prev = self.item_local_id_counters.insert(owner, new_counter).unwrap();
344 debug_assert!(prev == HIR_ID_COUNTER_LOCKED);
347 /// This method allocates a new HirId for the given NodeId and stores it in
348 /// the LoweringContext's NodeId => HirId map.
349 /// Take care not to call this method if the resulting HirId is then not
350 /// actually used in the HIR, as that would trigger an assertion in the
351 /// HirIdValidator later on, which makes sure that all NodeIds got mapped
352 /// properly. Calling the method twice with the same NodeId is fine though.
353 fn lower_node_id(&mut self, ast_node_id: NodeId) -> LoweredNodeId {
354 self.lower_node_id_generic(ast_node_id, |this| {
// Mint the id under the owner currently on top of the stack, bumping its
// local-id counter.
355 let &mut (def_index, ref mut local_id_counter) = this.current_hir_id_owner
358 let local_id = *local_id_counter;
359 *local_id_counter += 1;
362 local_id: hir::ItemLocalId(local_id),
// Like `lower_node_id`, but mints the id under an explicit `owner` whose
// counter lives in `item_local_id_counters` (not on the stack).
367 fn lower_node_id_with_owner(&mut self,
371 self.lower_node_id_generic(ast_node_id, |this| {
372 let local_id_counter = this.item_local_id_counters
375 let local_id = *local_id_counter;
377 // We want to be sure not to modify the counter in the map while it
378 // is also on the stack. Otherwise we'll get lost updates when writing
379 // back from the stack to the map.
380 debug_assert!(local_id != HIR_ID_COUNTER_LOCKED);
382 *local_id_counter += 1;
383 let def_index = this.resolver.definitions().opt_def_index(owner).unwrap();
387 local_id: hir::ItemLocalId(local_id),
// Wraps an expression (plus optional fn arguments from `decl`) into a
// `hir::Body`, registers it in `self.bodies`, and returns its id.
// NOTE(review): the id-allocation line is elided in this listing.
392 fn record_body(&mut self, value: hir::Expr, decl: Option<&FnDecl>)
394 let body = hir::Body {
395 arguments: decl.map_or(hir_vec![], |decl| {
396 decl.inputs.iter().map(|x| self.lower_arg(x)).collect()
398 is_generator: self.is_generator,
402 self.bodies.insert(id, body);
// Allocates a fresh NodeId from the session and lowers it, for HIR nodes
// that have no AST counterpart.
406 fn next_id(&mut self) -> LoweredNodeId {
407 self.lower_node_id(self.sess.next_node_id())
// Returns the fully-resolved `Def` for `id`; `Def::Err` if unresolved, and
// ICEs if the resolution still has unresolved trailing segments.
410 fn expect_full_def(&mut self, id: NodeId) -> Def {
411 self.resolver.get_resolution(id).map_or(Def::Err, |pr| {
412 if pr.unresolved_segments() != 0 {
413 bug!("path not fully resolved: {:?}", pr);
419 fn diagnostic(&self) -> &errors::Handler {
420 self.sess.diagnostic()
// Interns a static string as a gensym'd name (body elided in this listing).
423 fn str_to_ident(&self, s: &'static str) -> Name {
// Returns `span` marked with a fresh expansion context that allows
// internal-unstable features, attributing it to the given compiler
// desugaring. Used so desugared code may call unstable library items.
427 fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span
429 let mark = Mark::fresh(Mark::root());
430 mark.set_expn_info(codemap::ExpnInfo {
432 callee: codemap::NameAndSpan {
433 format: codemap::CompilerDesugaring(reason),
435 allow_internal_unstable: true,
436 allow_internal_unsafe: false,
439 span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
// Runs `f` with `catch_id` pushed as the innermost `catch` scope, asserting
// stack discipline on exit.
442 fn with_catch_scope<T, F>(&mut self, catch_id: NodeId, f: F) -> T
443 where F: FnOnce(&mut LoweringContext) -> T
445 let len = self.catch_scopes.len();
446 self.catch_scopes.push(catch_id);
448 let result = f(self);
449 assert_eq!(len + 1, self.catch_scopes.len(),
450 "catch scopes should be added and removed in stack order");
452 self.catch_scopes.pop().unwrap();
// Lowers a body expression via `f` and records it; `is_generator` is reset
// for the duration so nested bodies don't inherit the outer flag.
457 fn lower_body<F>(&mut self, decl: Option<&FnDecl>, f: F) -> hir::BodyId
458 where F: FnOnce(&mut LoweringContext) -> hir::Expr
460 let prev = mem::replace(&mut self.is_generator, false);
461 let result = f(self);
462 let r = self.record_body(result, decl);
463 self.is_generator = prev;
// Runs `f` with `loop_id` pushed as the innermost loop scope; also clears
// `is_in_loop_condition`, since entering a new loop body leaves the old
// loop's condition.
467 fn with_loop_scope<T, F>(&mut self, loop_id: NodeId, f: F) -> T
468 where F: FnOnce(&mut LoweringContext) -> T
470 // We're no longer in the base loop's condition; we're in another loop.
471 let was_in_loop_condition = self.is_in_loop_condition;
472 self.is_in_loop_condition = false;
474 let len = self.loop_scopes.len();
475 self.loop_scopes.push(loop_id);
477 let result = f(self);
478 assert_eq!(len + 1, self.loop_scopes.len(),
479 "Loop scopes should be added and removed in stack order");
481 self.loop_scopes.pop().unwrap();
483 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with `is_in_loop_condition` set, restoring the old value after.
488 fn with_loop_condition_scope<T, F>(&mut self, f: F) -> T
489 where F: FnOnce(&mut LoweringContext) -> T
491 let was_in_loop_condition = self.is_in_loop_condition;
492 self.is_in_loop_condition = true;
494 let result = f(self);
496 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with completely fresh catch/loop scope stacks (e.g. for a nested
// function or closure body), restoring the outer stacks afterwards.
501 fn with_new_scopes<T, F>(&mut self, f: F) -> T
502 where F: FnOnce(&mut LoweringContext) -> T
504 let was_in_loop_condition = self.is_in_loop_condition;
505 self.is_in_loop_condition = false;
507 let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new());
508 let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
509 let result = f(self);
510 self.catch_scopes = catch_scopes;
511 self.loop_scopes = loop_scopes;
513 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with `parent_def` set to the def of `parent_id`, restoring the
// previous parent afterwards.
518 fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
519 where F: FnOnce(&mut LoweringContext) -> T
521 let old_def = self.parent_def;
523 let defs = self.resolver.definitions();
524 Some(defs.opt_def_index(parent_id).unwrap())
527 let result = f(self);
529 self.parent_def = old_def;
// Looks up the `DefKey` for `id`, from the local definitions table for local
// ids and from the crate store otherwise.
// NOTE(review): the local/extern branch condition is elided in this listing.
533 fn def_key(&mut self, id: DefId) -> DefKey {
535 self.resolver.definitions().def_key(id.index)
537 self.sess.cstore.def_key(id)
// Converts an AST identifier to an interned HIR `Name`, going through
// `modern()` hygiene; non-empty syntax contexts are canonicalized via the
// memoizing `name_map`.
541 fn lower_ident(&mut self, ident: Ident) -> Name {
542 let ident = ident.modern();
543 if ident.ctxt == SyntaxContext::empty() {
546 *self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident))
549 fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> {
550 o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
// Lowers the target of a `break`/`continue`: a labelled destination resolves
// through the label's `Def::Label`; an unlabelled one targets the innermost
// enclosing loop scope (an error value if there is none).
553 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
557 Some((id, label_ident)) => {
558 let target = if let Def::Label(loop_id) = self.expect_full_def(id) {
559 hir::LoopIdResult::Ok(self.lower_node_id(loop_id).node_id)
561 hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
564 ident: Some(label_ident),
565 target_id: hir::ScopeTarget::Loop(target),
569 let loop_id = self.loop_scopes
571 .map(|innermost_loop_id| *innermost_loop_id);
575 target_id: hir::ScopeTarget::Loop(
576 loop_id.map(|id| Ok(self.lower_node_id(id).node_id))
577 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
// Attributes pass through to HIR unchanged (body elided in this listing).
584 fn lower_attrs(&mut self, attrs: &Vec<Attribute>) -> hir::HirVec<Attribute> {
// Lowers one `match` arm: attrs, patterns, optional guard, and body.
588 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
590 attrs: self.lower_attrs(&arm.attrs),
591 pats: arm.pats.iter().map(|x| self.lower_pat(x)).collect(),
592 guard: arm.guard.as_ref().map(|ref x| P(self.lower_expr(x))),
593 body: P(self.lower_expr(&arm.body)),
// Lowers an associated-type binding (`Item = Ty` in generics).
597 fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding {
599 id: self.lower_node_id(b.id).node_id,
600 name: self.lower_ident(b.ident),
601 ty: self.lower_ty(&b.ty),
// Lowers an AST type to a HIR type, dispatching on `TyKind`. Parenthesized
// and path types return early; all other arms build a `kind` that is wrapped
// into a `hir::Ty` at the end (wrapping lines partially elided here).
606 fn lower_ty(&mut self, t: &Ty) -> P<hir::Ty> {
607 let kind = match t.node {
608 TyKind::Infer => hir::TyInfer,
609 TyKind::Err => hir::TyErr,
610 TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)),
611 TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)),
612 TyKind::Rptr(ref region, ref mt) => {
// A zero-length span at the start of the reference, used for the elided
// lifetime so diagnostics point at the `&`.
613 let span = t.span.with_hi(t.span.lo());
614 let lifetime = match *region {
615 Some(ref lt) => self.lower_lifetime(lt),
616 None => self.elided_lifetime(span)
618 hir::TyRptr(lifetime, self.lower_mt(mt))
620 TyKind::BareFn(ref f) => {
621 hir::TyBareFn(P(hir::BareFnTy {
622 lifetimes: self.lower_lifetime_defs(&f.lifetimes),
623 unsafety: self.lower_unsafety(f.unsafety),
625 decl: self.lower_fn_decl(&f.decl),
628 TyKind::Never => hir::TyNever,
629 TyKind::Tup(ref tys) => {
630 hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect())
// Parens are purely syntactic: lower the inner type directly.
632 TyKind::Paren(ref ty) => {
633 return self.lower_ty(ty);
635 TyKind::Path(ref qself, ref path) => {
636 let id = self.lower_node_id(t.id).node_id;
637 let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit);
638 return self.ty_path(id, t.span, qpath);
// The implicit `Self` of a method receiver becomes an explicit `Self` path.
640 TyKind::ImplicitSelf => {
641 hir::TyPath(hir::QPath::Resolved(None, P(hir::Path {
642 def: self.expect_full_def(t.id),
643 segments: hir_vec![hir::PathSegment {
644 name: keywords::SelfType.name(),
645 parameters: hir::PathParameters::none()
// Array lengths and `typeof` expressions become separate HIR bodies.
650 TyKind::Array(ref ty, ref length) => {
651 let length = self.lower_body(None, |this| this.lower_expr(length));
652 hir::TyArray(self.lower_ty(ty), length)
654 TyKind::Typeof(ref expr) => {
655 let expr = self.lower_body(None, |this| this.lower_expr(expr));
// Trait objects: collect trait bounds, keep at most the first lifetime
// bound, drop `?Trait` bounds, and elide a lifetime if none was written.
658 TyKind::TraitObject(ref bounds) => {
659 let mut lifetime_bound = None;
660 let bounds = bounds.iter().filter_map(|bound| {
662 TraitTyParamBound(ref ty, TraitBoundModifier::None) => {
663 Some(self.lower_poly_trait_ref(ty))
665 TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
666 RegionTyParamBound(ref lifetime) => {
667 if lifetime_bound.is_none() {
668 lifetime_bound = Some(self.lower_lifetime(lifetime));
674 let lifetime_bound = lifetime_bound.unwrap_or_else(|| {
675 self.elided_lifetime(t.span)
677 hir::TyTraitObject(bounds, lifetime_bound)
679 TyKind::ImplTrait(ref bounds) => {
680 hir::TyImplTrait(self.lower_bounds(bounds))
682 TyKind::Mac(_) => panic!("TyMac should have been expanded by now."),
686 id: self.lower_node_id(t.id).node_id,
// Lowers a `foreign mod` (extern block) by lowering each foreign item.
692 fn lower_foreign_mod(&mut self, fm: &ForeignMod) -> hir::ForeignMod {
695 items: fm.items.iter().map(|x| self.lower_foreign_item(x)).collect(),
// Lowers `global_asm!` (body elided in this listing).
699 fn lower_global_asm(&mut self, ga: &GlobalAsm) -> P<hir::GlobalAsm> {
// Lowers one enum variant: name, attrs, data, and an optional discriminant
// expression which becomes its own HIR body.
706 fn lower_variant(&mut self, v: &Variant) -> hir::Variant {
708 node: hir::Variant_ {
709 name: v.node.name.name,
710 attrs: self.lower_attrs(&v.node.attrs),
711 data: self.lower_variant_data(&v.node.data),
712 disr_expr: v.node.disr_expr.as_ref().map(|e| {
713 self.lower_body(None, |this| this.lower_expr(e))
// Lowers a (possibly qualified) AST path to a `hir::QPath`. The resolved
// prefix becomes a `QPath::Resolved`; any trailing unresolved segments are
// turned into a chain of nested `QPath::TypeRelative` projections.
720 fn lower_qpath(&mut self,
722 qself: &Option<QSelf>,
724 param_mode: ParamMode)
726 let qself_position = qself.as_ref().map(|q| q.position);
727 let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty));
729 let resolution = self.resolver.get_resolution(id)
730 .unwrap_or(PathResolution::new(Def::Err));
// `proj_start` marks where the resolved prefix ends and the type-relative
// "extension" segments begin.
732 let proj_start = p.segments.len() - resolution.unresolved_segments();
733 let path = P(hir::Path {
734 def: resolution.base_def(),
735 segments: p.segments[..proj_start].iter().enumerate().map(|(i, segment)| {
736 let param_mode = match (qself_position, param_mode) {
737 (Some(j), ParamMode::Optional) if i < j => {
738 // This segment is part of the trait path in a
739 // qualified path - one of `a`, `b` or `Trait`
740 // in `<X as a::b::Trait>::T::U::method`.
746 // Figure out if this is a type/trait segment,
747 // which may need lifetime elision performed.
748 let parent_def_id = |this: &mut Self, def_id: DefId| {
751 index: this.def_key(def_id).parent.expect("missing parent")
// Identify which segment (if any) names a type whose declared lifetime
// count we need for elision.
754 let type_def_id = match resolution.base_def() {
755 Def::AssociatedTy(def_id) if i + 2 == proj_start => {
756 Some(parent_def_id(self, def_id))
758 Def::Variant(def_id) if i + 1 == proj_start => {
759 Some(parent_def_id(self, def_id))
761 Def::Struct(def_id) |
764 Def::TyAlias(def_id) |
765 Def::Trait(def_id) if i + 1 == proj_start => Some(def_id),
// Decide per-segment whether `Seg(Args)` sugar is ok / warn / error.
768 let parenthesized_generic_args = match resolution.base_def() {
769 // `a::b::Trait(Args)`
770 Def::Trait(..) if i + 1 == proj_start => ParenthesizedGenericArgs::Ok,
771 // `a::b::Trait(Args)::TraitItem`
773 Def::AssociatedConst(..) |
774 Def::AssociatedTy(..) if i + 2 == proj_start => ParenthesizedGenericArgs::Ok,
775 // Avoid duplicated errors
776 Def::Err => ParenthesizedGenericArgs::Ok,
778 Def::Struct(..) | Def::Enum(..) | Def::Union(..) | Def::TyAlias(..) |
779 Def::Variant(..) if i + 1 == proj_start => ParenthesizedGenericArgs::Err,
780 // A warning for now, for compatibility reasons
781 _ => ParenthesizedGenericArgs::Warn,
// Lifetime count for elision: cached for local defs, fetched from the
// crate store (and then cached) for external defs.
784 let num_lifetimes = type_def_id.map_or(0, |def_id| {
785 if let Some(&n) = self.type_def_lifetime_params.get(&def_id) {
788 assert!(!def_id.is_local());
789 let n = self.sess.cstore.item_generics_cloned_untracked(def_id).regions.len();
790 self.type_def_lifetime_params.insert(def_id, n);
793 self.lower_path_segment(p.span, segment, param_mode, num_lifetimes,
794 parenthesized_generic_args)
799 // Simple case, either no projections, or only fully-qualified.
800 // E.g. `std::mem::size_of` or `<I as Iterator>::Item`.
801 if resolution.unresolved_segments() == 0 {
802 return hir::QPath::Resolved(qself, path);
805 // Create the innermost type that we're projecting from.
806 let mut ty = if path.segments.is_empty() {
807 // If the base path is empty that means there exists a
808 // syntactical `Self`, e.g. `&i32` in `<&i32>::clone`.
809 qself.expect("missing QSelf for <T>::...")
811 // Otherwise, the base path is an implicit `Self` type path,
812 // e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
813 // `<I as Iterator>::Item::default`.
814 let new_id = self.next_id().node_id;
815 self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
818 // Anything after the base path are associated "extensions",
819 // out of which all but the last one are associated types,
820 // e.g. for `std::vec::Vec::<T>::IntoIter::Item::clone`:
821 // * base path is `std::vec::Vec<T>`
822 // * "extensions" are `IntoIter`, `Item` and `clone`
824 // 1. `std::vec::Vec<T>` (created above)
825 // 2. `<std::vec::Vec<T>>::IntoIter`
826 // 3. `<<std::vec::Vec<T>>::IntoIter>::Item`
827 // * final path is `<<<std::vec::Vec<T>>::IntoIter>::Item>::clone`
828 for (i, segment) in p.segments.iter().enumerate().skip(proj_start) {
829 let segment = P(self.lower_path_segment(p.span, segment, param_mode, 0,
830 ParenthesizedGenericArgs::Warn));
831 let qpath = hir::QPath::TypeRelative(ty, segment);
833 // It's finished, return the extension of the right node type.
834 if i == p.segments.len() - 1 {
838 // Wrap the associated extension in another type node.
839 let new_id = self.next_id().node_id;
840 ty = self.ty_path(new_id, p.span, qpath);
843 // Should've returned in the for loop above.
844 span_bug!(p.span, "lower_qpath: no final extension segment in {}..{}",
845 proj_start, p.segments.len())
// Lowers a fully-resolved path, optionally appending an extra `name`
// segment; `defaults_to_global` controls handling of leading `::`.
848 fn lower_path_extra(&mut self,
852 param_mode: ParamMode,
853 defaults_to_global: bool)
855 let mut segments = p.segments.iter();
856 if defaults_to_global && p.is_global() {
861 def: self.expect_full_def(id),
862 segments: segments.map(|segment| {
863 self.lower_path_segment(p.span, segment, param_mode, 0,
864 ParenthesizedGenericArgs::Err)
865 }).chain(name.map(|name| {
868 parameters: hir::PathParameters::none()
// Convenience wrapper around `lower_path_extra` with no extra segment.
875 fn lower_path(&mut self,
878 param_mode: ParamMode,
879 defaults_to_global: bool)
881 self.lower_path_extra(id, p, None, param_mode, defaults_to_global)
// Lowers one path segment; `parenthesized_generic_args` decides whether
// `Seg(Args)` sugar is accepted, linted (back-compat), or a hard E0214.
884 fn lower_path_segment(&mut self,
886 segment: &PathSegment,
887 param_mode: ParamMode,
888 expected_lifetimes: usize,
889 parenthesized_generic_args: ParenthesizedGenericArgs)
890 -> hir::PathSegment {
891 let mut parameters = if let Some(ref parameters) = segment.parameters {
892 let msg = "parenthesized parameters may only be used with a trait";
894 PathParameters::AngleBracketed(ref data) => {
895 self.lower_angle_bracketed_parameter_data(data, param_mode)
897 PathParameters::Parenthesized(ref data) => match parenthesized_generic_args {
898 ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
899 ParenthesizedGenericArgs::Warn => {
900 self.sess.buffer_lint(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
901 CRATE_NODE_ID, data.span, msg.into());
902 hir::PathParameters::none()
904 ParenthesizedGenericArgs::Err => {
905 struct_span_err!(self.sess, data.span, E0214, "{}", msg)
906 .span_label(data.span, "only traits may use parentheses").emit();
907 hir::PathParameters::none()
// No explicit parameters: lower an empty angle-bracketed set.
912 self.lower_angle_bracketed_parameter_data(&Default::default(), param_mode)
// Insert elided lifetimes to match the count the type declares.
915 if !parameters.parenthesized && parameters.lifetimes.is_empty() {
916 parameters.lifetimes = (0..expected_lifetimes).map(|_| {
917 self.elided_lifetime(path_span)
922 name: self.lower_ident(segment.identifier),
// Lowers `<'a, T, Item = U>`-style generic args. `infer_types` is set when
// no types were written and the context allows inference.
927 fn lower_angle_bracketed_parameter_data(&mut self,
928 data: &AngleBracketedParameterData,
929 param_mode: ParamMode)
930 -> hir::PathParameters {
931 let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings, .. } = data;
932 hir::PathParameters {
933 lifetimes: self.lower_lifetimes(lifetimes),
934 types: types.iter().map(|ty| self.lower_ty(ty)).collect(),
935 infer_types: types.is_empty() && param_mode == ParamMode::Optional,
936 bindings: bindings.iter().map(|b| self.lower_ty_binding(b)).collect(),
937 parenthesized: false,
// Desugars `Fn(A, B) -> C` sugar: the inputs become a single tuple type
// argument and the output becomes an `Output = C` binding (unit if absent).
941 fn lower_parenthesized_parameter_data(&mut self,
942 data: &ParenthesizedParameterData)
943 -> hir::PathParameters {
944 let &ParenthesizedParameterData { ref inputs, ref output, span } = data;
945 let inputs = inputs.iter().map(|ty| self.lower_ty(ty)).collect();
946 let mk_tup = |this: &mut Self, tys, span| {
947 P(hir::Ty { node: hir::TyTup(tys), id: this.next_id().node_id, span })
950 hir::PathParameters {
951 lifetimes: hir::HirVec::new(),
952 types: hir_vec![mk_tup(self, inputs, span)],
954 bindings: hir_vec![hir::TypeBinding {
955 id: self.next_id().node_id,
956 name: Symbol::intern(FN_OUTPUT_NAME),
957 ty: output.as_ref().map(|ty| self.lower_ty(&ty))
958 .unwrap_or_else(|| mk_tup(self, hir::HirVec::new(), span)),
959 span: output.as_ref().map_or(span, |ty| ty.span),
// Lowers a `let` statement: optional type ascription, pattern, optional
// initializer; attributes are carried over unchanged.
965 fn lower_local(&mut self, l: &Local) -> P<hir::Local> {
966 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(l.id);
970 ty: l.ty.as_ref().map(|t| self.lower_ty(t)),
971 pat: self.lower_pat(&l.pat),
972 init: l.init.as_ref().map(|e| P(self.lower_expr(e))),
974 attrs: l.attrs.clone(),
975 source: hir::LocalSource::Normal,
// Direct AST->HIR mutability mapping.
979 fn lower_mutability(&mut self, m: Mutability) -> hir::Mutability {
981 Mutability::Mutable => hir::MutMutable,
982 Mutability::Immutable => hir::MutImmutable,
// Lowers one function argument (its pattern; types live on the FnDecl).
986 fn lower_arg(&mut self, arg: &Arg) -> hir::Arg {
987 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(arg.id);
991 pat: self.lower_pat(&arg.pat),
// Extracts argument names for a function declaration: a plain identifier
// pattern yields its name; anything else yields the `Invalid` keyword name.
995 fn lower_fn_args_to_names(&mut self, decl: &FnDecl)
996 -> hir::HirVec<Spanned<Name>> {
997 decl.inputs.iter().map(|arg| {
999 PatKind::Ident(_, ident, None) => {
1000 respan(ident.span, ident.node.name)
1002 _ => respan(arg.pat.span, keywords::Invalid.name()),
// Lowers a function signature: input types, return type (explicit or
// default `()`), variadic flag, and whether the first arg is a `self`
// receiver (directly or behind a reference).
1007 fn lower_fn_decl(&mut self, decl: &FnDecl) -> P<hir::FnDecl> {
1009 inputs: decl.inputs.iter().map(|arg| self.lower_ty(&arg.ty)).collect(),
1010 output: match decl.output {
1011 FunctionRetTy::Ty(ref ty) => hir::Return(self.lower_ty(ty)),
1012 FunctionRetTy::Default(span) => hir::DefaultReturn(span),
1014 variadic: decl.variadic,
1015 has_implicit_self: decl.inputs.get(0).map_or(false, |arg| {
1017 TyKind::ImplicitSelf => true,
1018 TyKind::Rptr(_, ref mt) => mt.ty.node == TyKind::ImplicitSelf,
// Lowers a single bound (`Trait`, `?Trait`, or a lifetime) on a type param.
1025 fn lower_ty_param_bound(&mut self, tpb: &TyParamBound) -> hir::TyParamBound {
1027 TraitTyParamBound(ref ty, modifier) => {
1028 hir::TraitTyParamBound(self.lower_poly_trait_ref(ty),
1029 self.lower_trait_bound_modifier(modifier))
1031 RegionTyParamBound(ref lifetime) => {
1032 hir::RegionTyParamBound(self.lower_lifetime(lifetime))
// Lowers one type parameter, merging in extra `?Trait` bounds moved over
// from the where clause (`add_bounds`, computed by `lower_generics`).
1037 fn lower_ty_param(&mut self, tp: &TyParam, add_bounds: &[TyParamBound]) -> hir::TyParam {
1038 let mut name = self.lower_ident(tp.ident);
1040 // Don't expose `Self` (recovered "keyword used as ident" parse error).
1041 // `rustc::ty` expects `Self` to be only used for a trait's `Self`.
1042 // Instead, use gensym("Self") to create a distinct name that looks the same.
1043 if name == keywords::SelfType.name() {
1044 name = Symbol::gensym("Self");
1047 let mut bounds = self.lower_bounds(&tp.bounds);
1048 if !add_bounds.is_empty() {
1049 bounds = bounds.into_iter().chain(self.lower_bounds(add_bounds).into_iter()).collect();
1053 id: self.lower_node_id(tp.id).node_id,
1056 default: tp.default.as_ref().map(|x| self.lower_ty(x)),
// `#[may_dangle]` opts the param out of the drop-check requirement.
1058 pure_wrt_drop: tp.attrs.iter().any(|attr| attr.check_name("may_dangle")),
1062 fn lower_ty_params(&mut self, tps: &Vec<TyParam>, add_bounds: &NodeMap<Vec<TyParamBound>>)
1063 -> hir::HirVec<hir::TyParam> {
1064 tps.iter().map(|tp| {
1065 self.lower_ty_param(tp, add_bounds.get(&tp.id).map_or(&[][..], |x| &x))
// Lowers a lifetime use (id + name; remaining fields elided in listing).
1069 fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
1071 id: self.lower_node_id(l.id).node_id,
1072 name: self.lower_ident(l.ident),
// Lowers a lifetime declaration together with its outlives-bounds.
1077 fn lower_lifetime_def(&mut self, l: &LifetimeDef) -> hir::LifetimeDef {
1079 lifetime: self.lower_lifetime(&l.lifetime),
1080 bounds: self.lower_lifetimes(&l.bounds),
1081 pure_wrt_drop: l.attrs.iter().any(|attr| attr.check_name("may_dangle")),
1085 fn lower_lifetimes(&mut self, lts: &Vec<Lifetime>) -> hir::HirVec<hir::Lifetime> {
1086 lts.iter().map(|l| self.lower_lifetime(l)).collect()
1089 fn lower_lifetime_defs(&mut self, lts: &Vec<LifetimeDef>) -> hir::HirVec<hir::LifetimeDef> {
1090 lts.iter().map(|l| self.lower_lifetime_def(l)).collect()
// Lowers a `Generics` node. The interesting part: `?Trait` (maybe) bounds
// are only legal where a type parameter is declared, so any such bound
// found in the where clause is validated and, when it applies to a plain
// locally-declared type parameter, moved into `add_bounds` keyed by that
// parameter's id; `lower_ty_params` later attaches them to the parameter.
// `lower_where_predicate` correspondingly drops `?Trait` bounds from the
// lowered where clause (see its filter_map).
1093 fn lower_generics(&mut self, g: &Generics) -> hir::Generics {
1094 // Collect `?Trait` bounds in where clause and move them to parameter definitions.
1095 let mut add_bounds = NodeMap();
1096 for pred in &g.where_clause.predicates {
1097 if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
1098 'next_bound: for bound in &bound_pred.bounds {
1099 if let TraitTyParamBound(_, TraitBoundModifier::Maybe) = *bound {
// Deferred error closure: reused by every rejection path below.
1100 let report_error = |this: &mut Self| {
1101 this.diagnostic().span_err(bound_pred.bounded_ty.span,
1102 "`?Trait` bounds are only permitted at the \
1103 point where a type parameter is declared");
1105 // Check if the where clause type is a plain type parameter.
1106 match bound_pred.bounded_ty.node {
// A single-segment path with no bound lifetimes is the only shape
// that can name a bare type parameter.
1107 TyKind::Path(None, ref path)
1108 if path.segments.len() == 1 &&
1109 bound_pred.bound_lifetimes.is_empty() => {
1110 if let Some(Def::TyParam(def_id)) =
1111 self.resolver.get_resolution(bound_pred.bounded_ty.id)
1112 .map(|d| d.base_def()) {
1113 if let Some(node_id) =
1114 self.resolver.definitions().as_local_node_id(def_id) {
// Confirm the resolved parameter is declared in *these* generics.
1115 for ty_param in &g.ty_params {
1116 if node_id == ty_param.id {
1117 add_bounds.entry(ty_param.id).or_insert(Vec::new())
1118 .push(bound.clone());
1119 continue 'next_bound;
// Any other bounded type cannot legally carry `?Trait`.
// NOTE(review): intermediate closing braces / arms between the loop
// above and this arm are elided in this excerpt.
1126 _ => report_error(self)
// NOTE(review): lines between the collection pass and the final
// struct construction are elided here.
1134 ty_params: self.lower_ty_params(&g.ty_params, &add_bounds),
1135 lifetimes: self.lower_lifetime_defs(&g.lifetimes),
1136 where_clause: self.lower_where_clause(&g.where_clause),
// Lowers a where clause: preserves its node id and lowers each predicate
// via `lower_where_predicate`.
1141 fn lower_where_clause(&mut self, wc: &WhereClause) -> hir::WhereClause {
1143 id: self.lower_node_id(wc.id).node_id,
1144 predicates: wc.predicates
1146 .map(|predicate| self.lower_where_predicate(predicate))
// Lowers one where-clause predicate. Three cases mirror the AST enum:
// bound predicates (type: bounds), region predicates (lifetime: bounds),
// and equality predicates. `?Trait` bounds are dropped here because
// `lower_generics` has already copied them onto the type parameter.
1151 fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate {
1153 WherePredicate::BoundPredicate(WhereBoundPredicate{ ref bound_lifetimes,
1157 hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
1158 bound_lifetimes: self.lower_lifetime_defs(bound_lifetimes),
1159 bounded_ty: self.lower_ty(bounded_ty),
1160 bounds: bounds.iter().filter_map(|bound| match *bound {
1161 // Ignore `?Trait` bounds, they were copied into type parameters already.
1162 TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
1163 _ => Some(self.lower_ty_param_bound(bound))
1168 WherePredicate::RegionPredicate(WhereRegionPredicate{ ref lifetime,
1171 hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
1173 lifetime: self.lower_lifetime(lifetime),
1174 bounds: bounds.iter().map(|bound| self.lower_lifetime(bound)).collect(),
1177 WherePredicate::EqPredicate(WhereEqPredicate{ id,
1181 hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
1182 id: self.lower_node_id(id).node_id,
1183 lhs_ty: self.lower_ty(lhs_ty),
1184 rhs_ty: self.lower_ty(rhs_ty),
// Lowers the body of a struct/enum variant: named-field (Struct),
// tuple, or unit, lowering each field and the variant's node id.
1191 fn lower_variant_data(&mut self, vdata: &VariantData) -> hir::VariantData {
1193 VariantData::Struct(ref fields, id) => {
1194 hir::VariantData::Struct(fields.iter()
1196 .map(|f| self.lower_struct_field(f))
1198 self.lower_node_id(id).node_id)
1200 VariantData::Tuple(ref fields, id) => {
1201 hir::VariantData::Tuple(fields.iter()
1203 .map(|f| self.lower_struct_field(f))
1205 self.lower_node_id(id).node_id)
1207 VariantData::Unit(id) => hir::VariantData::Unit(self.lower_node_id(id).node_id),
// Lowers a trait reference. A trait ref's path must lower to an
// unqualified resolved path; any other QPath shape is a lowering bug,
// hence the `bug!`.
1211 fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef {
1212 let path = match self.lower_qpath(p.ref_id, &None, &p.path, ParamMode::Explicit) {
1213 hir::QPath::Resolved(None, path) => path.and_then(|path| path),
1214 qpath => bug!("lower_trait_ref: unexpected QPath `{:?}`", qpath)
// NOTE(review): the struct construction between the match and this
// field is elided in this excerpt.
1218 ref_id: self.lower_node_id(p.ref_id).node_id,
// Lowers a polytrait reference (`for<'a> Trait<'a>`): the bound
// lifetimes plus the inner trait ref.
1222 fn lower_poly_trait_ref(&mut self, p: &PolyTraitRef) -> hir::PolyTraitRef {
1224 bound_lifetimes: self.lower_lifetime_defs(&p.bound_lifetimes),
1225 trait_ref: self.lower_trait_ref(&p.trait_ref),
// Lowers a struct field. Takes an (index, field) pair because tuple-struct
// fields are unnamed: those get a synthetic name equal to their position
// ("0", "1", ...), built with the field's span context.
1230 fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField {
1233 id: self.lower_node_id(f.id).node_id,
1234 name: self.lower_ident(match f.ident {
1235 Some(ident) => ident,
1236 // FIXME(jseyfried) positional field hygiene
1237 None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt() },
1239 vis: self.lower_visibility(&f.vis, None),
1240 ty: self.lower_ty(&f.ty),
1241 attrs: self.lower_attrs(&f.attrs),
// Lowers an expression-struct field (`Foo { name: expr }`), keeping the
// name's span and the `is_shorthand` flag (`Foo { name }`).
1245 fn lower_field(&mut self, f: &Field) -> hir::Field {
1247 name: respan(f.ident.span, self.lower_ident(f.ident.node)),
1248 expr: P(self.lower_expr(&f.expr)),
1250 is_shorthand: f.is_shorthand,
// Lowers a type-with-mutability pair (as in `&mut T` / `*const T`).
1254 fn lower_mt(&mut self, mt: &MutTy) -> hir::MutTy {
1256 ty: self.lower_ty(&mt.ty),
1257 mutbl: self.lower_mutability(mt.mutbl),
// Element-wise lowering of a type-parameter bound list.
1261 fn lower_bounds(&mut self, bounds: &[TyParamBound]) -> hir::TyParamBounds {
1262 bounds.iter().map(|bound| self.lower_ty_param_bound(bound)).collect()
// Lowers a block. In the HIR, a trailing expression statement becomes the
// block's dedicated `expr` slot rather than a statement, so the last
// statement is special-cased. `targeted_by_break` marks blocks that a
// `break` can target (used for `catch` blocks — see `ExprKind::Catch`).
1265 fn lower_block(&mut self, b: &Block, targeted_by_break: bool) -> P<hir::Block> {
1266 let mut expr = None;
1268 let mut stmts = vec![];
1270 for (index, stmt) in b.stmts.iter().enumerate() {
1271 if index == b.stmts.len() - 1 {
// Last statement and it's an expression: promote it to the block's
// tail expression instead of keeping it as a statement.
1272 if let StmtKind::Expr(ref e) = stmt.node {
1273 expr = Some(P(self.lower_expr(e)));
1275 stmts.extend(self.lower_stmt(stmt));
1278 stmts.extend(self.lower_stmt(stmt));
1282 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(b.id);
// NOTE(review): the hir::Block construction is partially elided here.
1287 stmts: stmts.into(),
1289 rules: self.lower_block_check_mode(&b.rules),
// Lowers the kind payload of an item. Most arms are straightforward
// delegations; the notable exceptions:
// - `Use` with a view-path list is flattened: `use a::{self as x, b as y}`
//   becomes multiple single HIR `use` items, each inserted directly into
//   `self.items` under its own (freshly lowered) id.
// - Function-like items lower their bodies through `lower_body` inside
//   `with_new_scopes`.
// - `DefaultImpl` / trait `Impl` record the trait in side tables
//   (`trait_default_impl` / `trait_impls`) for later queries.
// `vis` is `&mut` because the degenerate `use a::{};` import privatizes
// itself in place (see the ViewPathList arm near the end).
// NOTE(review): many interior lines (match headers, parameters such as
// `id`/`name`, several closing braces) are elided in this excerpt.
1295 fn lower_item_kind(&mut self,
1298 attrs: &hir::HirVec<Attribute>,
1299 vis: &mut hir::Visibility,
1303 ItemKind::ExternCrate(string) => hir::ItemExternCrate(string),
1304 ItemKind::Use(ref view_path) => {
1305 let path = match view_path.node {
1306 ViewPathSimple(_, ref path) => path,
1307 ViewPathGlob(ref path) => path,
1308 ViewPathList(ref path, ref path_list_idents) => {
1309 for &Spanned { node: ref import, span } in path_list_idents {
1310 // `use a::{self as x, b as y};` lowers to
1311 // `use a as x; use a::b as y;`
1312 let mut ident = import.name;
// `self` in an import list re-exports the path's final segment.
1313 let suffix = if ident.name == keywords::SelfValue.name() {
1314 if let Some(last) = path.segments.last() {
1315 ident = last.identifier;
// NOTE(review): lines between the suffix computation and the
// path construction below are elided.
1322 let mut path = self.lower_path_extra(import.id, path, suffix,
1323 ParamMode::Explicit, true);
// Each synthesized import is its own HIR-id owner.
1326 self.allocate_hir_id_counter(import.id, import);
1328 node_id: import_node_id,
1329 hir_id: import_hir_id,
1330 } = self.lower_node_id(import.id);
1332 self.with_hir_id_owner(import_node_id, |this| {
// The parent item's visibility is duplicated per synthesized import;
// Restricted visibility needs a fresh NodeId for its copy.
1333 let vis = match *vis {
1334 hir::Visibility::Public => hir::Visibility::Public,
1335 hir::Visibility::Crate => hir::Visibility::Crate,
1336 hir::Visibility::Inherited => hir::Visibility::Inherited,
1337 hir::Visibility::Restricted { ref path, id: _ } => {
1338 hir::Visibility::Restricted {
1340 // We are allocating a new NodeId here
1341 id: this.next_id().node_id,
// Synthesized single-import item, registered directly.
1346 this.items.insert(import_node_id, hir::Item {
1348 hir_id: import_hir_id,
1349 name: import.rename.unwrap_or(ident).name,
1350 attrs: attrs.clone(),
1351 node: hir::ItemUse(P(path), hir::UseKind::Single),
// NOTE(review): closing of the list loop and arm is elided.
1360 let path = P(self.lower_path(id, path, ParamMode::Explicit, true));
1361 let kind = match view_path.node {
1362 ViewPathSimple(ident, _) => {
1364 hir::UseKind::Single
1366 ViewPathGlob(_) => {
1369 ViewPathList(..) => {
1370 // Privatize the degenerate import base, used only to check
1371 // the stability of `use a::{};`, to avoid it showing up as
1372 // a reexport by accident when `pub`, e.g. in documentation.
1373 *vis = hir::Inherited;
1374 hir::UseKind::ListStem
1377 hir::ItemUse(path, kind)
1379 ItemKind::Static(ref t, m, ref e) => {
1380 let value = self.lower_body(None, |this| this.lower_expr(e));
1381 hir::ItemStatic(self.lower_ty(t),
1382 self.lower_mutability(m),
1385 ItemKind::Const(ref t, ref e) => {
1386 let value = self.lower_body(None, |this| this.lower_expr(e));
1387 hir::ItemConst(self.lower_ty(t), value)
1389 ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
1390 self.with_new_scopes(|this| {
// The fn body becomes a separate HIR body wrapping the lowered block.
1391 let body_id = this.lower_body(Some(decl), |this| {
1392 let body = this.lower_block(body, false);
1393 this.expr_block(body, ThinVec::new())
1395 hir::ItemFn(this.lower_fn_decl(decl),
1396 this.lower_unsafety(unsafety),
1397 this.lower_constness(constness),
1399 this.lower_generics(generics),
1403 ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)),
1404 ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)),
1405 ItemKind::GlobalAsm(ref ga) => hir::ItemGlobalAsm(self.lower_global_asm(ga)),
1406 ItemKind::Ty(ref t, ref generics) => {
1407 hir::ItemTy(self.lower_ty(t), self.lower_generics(generics))
1409 ItemKind::Enum(ref enum_definition, ref generics) => {
1410 hir::ItemEnum(hir::EnumDef {
1411 variants: enum_definition.variants
1413 .map(|x| self.lower_variant(x))
1416 self.lower_generics(generics))
1418 ItemKind::Struct(ref struct_def, ref generics) => {
1419 let struct_def = self.lower_variant_data(struct_def);
1420 hir::ItemStruct(struct_def, self.lower_generics(generics))
1422 ItemKind::Union(ref vdata, ref generics) => {
1423 let vdata = self.lower_variant_data(vdata);
1424 hir::ItemUnion(vdata, self.lower_generics(generics))
1426 ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
1427 let trait_ref = self.lower_trait_ref(trait_ref);
// Record the default impl so trait queries can find it later.
1429 if let Def::Trait(def_id) = trait_ref.path.def {
1430 self.trait_default_impl.insert(def_id, id);
1433 hir::ItemDefaultImpl(self.lower_unsafety(unsafety),
1436 ItemKind::Impl(unsafety,
1442 ref impl_items) => {
1443 let new_impl_items = impl_items.iter()
1444 .map(|item| self.lower_impl_item_ref(item))
1446 let ifce = ifce.as_ref().map(|trait_ref| self.lower_trait_ref(trait_ref));
// Record trait impls in the side table keyed by the trait's DefId.
1448 if let Some(ref trait_ref) = ifce {
1449 if let Def::Trait(def_id) = trait_ref.path.def {
1450 self.trait_impls.entry(def_id).or_insert(vec![]).push(id);
1454 hir::ItemImpl(self.lower_unsafety(unsafety),
1455 self.lower_impl_polarity(polarity),
1456 self.lower_defaultness(defaultness, true /* [1] */),
1457 self.lower_generics(generics),
1462 ItemKind::Trait(unsafety, ref generics, ref bounds, ref items) => {
1463 let bounds = self.lower_bounds(bounds);
1464 let items = items.iter().map(|item| self.lower_trait_item_ref(item)).collect();
1465 hir::ItemTrait(self.lower_unsafety(unsafety),
1466 self.lower_generics(generics),
1470 ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
1473 // [1] `defaultness.has_value()` is never called for an `impl`, always `true` in order to
1474 // not cause an assertion failure inside the `lower_defaultness` function
// Lowers a trait item (associated const, required/provided method, or
// associated type). Runs under `with_parent_def` so nested defs resolve
// against this item. Required methods carry only argument names;
// provided methods lower their default body through `lower_body`.
1477 fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem {
1478 self.with_parent_def(i.id, |this| {
1479 let LoweredNodeId { node_id, hir_id } = this.lower_node_id(i.id);
1484 name: this.lower_ident(i.ident),
1485 attrs: this.lower_attrs(&i.attrs),
1486 node: match i.node {
1487 TraitItemKind::Const(ref ty, ref default) => {
1488 hir::TraitItemKind::Const(this.lower_ty(ty),
// A const default value, if present, becomes its own HIR body.
1489 default.as_ref().map(|x| {
1490 this.lower_body(None, |this| this.lower_expr(x))
1493 TraitItemKind::Method(ref sig, None) => {
1494 let names = this.lower_fn_args_to_names(&sig.decl);
1495 hir::TraitItemKind::Method(this.lower_method_sig(sig),
1496 hir::TraitMethod::Required(names))
1498 TraitItemKind::Method(ref sig, Some(ref body)) => {
1499 let body_id = this.lower_body(Some(&sig.decl), |this| {
1500 let body = this.lower_block(body, false);
1501 this.expr_block(body, ThinVec::new())
1503 hir::TraitItemKind::Method(this.lower_method_sig(sig),
1504 hir::TraitMethod::Provided(body_id))
1506 TraitItemKind::Type(ref bounds, ref default) => {
1507 hir::TraitItemKind::Type(this.lower_bounds(bounds),
1508 default.as_ref().map(|x| this.lower_ty(x)))
1510 TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"),
// Builds the lightweight `TraitItemRef` (kind + whether a default is
// provided) used in the owning trait's item list; the full item is
// lowered separately by `lower_trait_item`.
1517 fn lower_trait_item_ref(&mut self, i: &TraitItem) -> hir::TraitItemRef {
1518 let (kind, has_default) = match i.node {
1519 TraitItemKind::Const(_, ref default) => {
1520 (hir::AssociatedItemKind::Const, default.is_some())
1522 TraitItemKind::Type(_, ref default) => {
1523 (hir::AssociatedItemKind::Type, default.is_some())
1525 TraitItemKind::Method(ref sig, ref default) => {
1526 (hir::AssociatedItemKind::Method {
1527 has_self: sig.decl.has_self(),
1528 }, default.is_some())
1530 TraitItemKind::Macro(..) => unimplemented!(),
1533 id: hir::TraitItemId { node_id: i.id },
1534 name: self.lower_ident(i.ident),
1536 defaultness: self.lower_defaultness(Defaultness::Default, has_default),
// Lowers an impl item (associated const, method, or associated type),
// again under `with_parent_def`. Const and method bodies each become
// separate HIR bodies via `lower_body`.
1541 fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem {
1542 self.with_parent_def(i.id, |this| {
1543 let LoweredNodeId { node_id, hir_id } = this.lower_node_id(i.id);
1548 name: this.lower_ident(i.ident),
1549 attrs: this.lower_attrs(&i.attrs),
1550 vis: this.lower_visibility(&i.vis, None),
1551 defaultness: this.lower_defaultness(i.defaultness, true /* [1] */),
1552 node: match i.node {
1553 ImplItemKind::Const(ref ty, ref expr) => {
1554 let body_id = this.lower_body(None, |this| this.lower_expr(expr));
1555 hir::ImplItemKind::Const(this.lower_ty(ty), body_id)
1557 ImplItemKind::Method(ref sig, ref body) => {
1558 let body_id = this.lower_body(Some(&sig.decl), |this| {
1559 let body = this.lower_block(body, false);
1560 this.expr_block(body, ThinVec::new())
1562 hir::ImplItemKind::Method(this.lower_method_sig(sig), body_id)
1564 ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(this.lower_ty(ty)),
1565 ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"),
1571 // [1] since `default impl` is not yet implemented, this is always true in impls
// Builds the lightweight `ImplItemRef` for the owning impl's item list;
// parallels `lower_trait_item_ref`.
1574 fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
1576 id: hir::ImplItemId { node_id: i.id },
1577 name: self.lower_ident(i.ident),
1579 vis: self.lower_visibility(&i.vis, Some(i.id)),
1580 defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
1581 kind: match i.node {
1582 ImplItemKind::Const(..) => hir::AssociatedItemKind::Const,
1583 ImplItemKind::Type(..) => hir::AssociatedItemKind::Type,
1584 ImplItemKind::Method(ref sig, _) => hir::AssociatedItemKind::Method {
1585 has_self: sig.decl.has_self(),
1587 ImplItemKind::Macro(..) => unimplemented!(),
1591 // [1] since `default impl` is not yet implemented, this is always true in impls
// Lowers a module: collects item ids (`flat_map` because one AST item may
// expand to several HIR items — see `lower_item_id` — or to none).
1594 fn lower_mod(&mut self, m: &Mod) -> hir::Mod {
1597 item_ids: m.items.iter().flat_map(|x| self.lower_item_id(x)).collect(),
// Maps one AST item to the ids of the HIR items it will produce:
// a list import yields one id per listed name (plus the base item's id),
// a macro definition yields none, and everything else yields exactly one.
1601 fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
1603 ItemKind::Use(ref view_path) => {
1604 if let ViewPathList(_, ref imports) = view_path.node {
1605 return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
1606 .map(|id| hir::ItemId { id: id }).collect();
1609 ItemKind::MacroDef(..) => return SmallVector::new(),
1612 SmallVector::one(hir::ItemId { id: i.id })
// Entry point for lowering one item. Macro definitions are diverted into
// `exported_macros` when exported (`macro` 2.0 or `#[macro_export]`);
// other kinds flow through `lower_item_kind` under `with_parent_def`.
// `name`/`vis` are mutable because `lower_item_kind` may adjust them
// (e.g. privatizing a degenerate `use a::{};`).
// NOTE(review): the returned hir::Item construction and several interior
// lines are elided in this excerpt.
1615 pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
1616 let mut name = i.ident.name;
1617 let mut vis = self.lower_visibility(&i.vis, None);
1618 let attrs = self.lower_attrs(&i.attrs);
1619 if let ItemKind::MacroDef(ref def) = i.node {
1620 if !def.legacy || i.attrs.iter().any(|attr| attr.path == "macro_export") {
1621 self.exported_macros.push(hir::MacroDef {
1634 let node = self.with_parent_def(i.id, |this| {
1635 this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node)
1638 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(i.id);
// Lowers an `extern` block item: a foreign fn (declaration only, so args
// are lowered to names rather than a body) or a foreign static.
1651 fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
1652 self.with_parent_def(i.id, |this| {
1654 id: this.lower_node_id(i.id).node_id,
1656 attrs: this.lower_attrs(&i.attrs),
1657 node: match i.node {
1658 ForeignItemKind::Fn(ref fdec, ref generics) => {
1659 hir::ForeignItemFn(this.lower_fn_decl(fdec),
1660 this.lower_fn_args_to_names(fdec),
1661 this.lower_generics(generics))
1663 ForeignItemKind::Static(ref t, m) => {
1664 hir::ForeignItemStatic(this.lower_ty(t), m)
1667 vis: this.lower_visibility(&i.vis, None),
// Lowers a method signature: generics, safety, constness, and decl.
1673 fn lower_method_sig(&mut self, sig: &MethodSig) -> hir::MethodSig {
1675 generics: self.lower_generics(&sig.generics),
1677 unsafety: self.lower_unsafety(sig.unsafety),
1678 constness: self.lower_constness(sig.constness),
1679 decl: self.lower_fn_decl(&sig.decl),
// Direct 1:1 mapping of the AST unsafety marker to its HIR equivalent.
1683 fn lower_unsafety(&mut self, u: Unsafety) -> hir::Unsafety {
1685 Unsafety::Unsafe => hir::Unsafety::Unsafe,
1686 Unsafety::Normal => hir::Unsafety::Normal,
// Direct 1:1 mapping of the (spanned) AST constness to HIR constness;
// the span is dropped in the process.
1690 fn lower_constness(&mut self, c: Spanned<Constness>) -> hir::Constness {
1692 Constness::Const => hir::Constness::Const,
1693 Constness::NotConst => hir::Constness::NotConst,
// Direct 1:1 mapping of unary operators (deref, not, neg) to HIR.
1697 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
1699 UnOp::Deref => hir::UnDeref,
1700 UnOp::Not => hir::UnNot,
1701 UnOp::Neg => hir::UnNeg,
// Direct 1:1 mapping of binary operators to HIR, preserving the
// operator's span via the surrounding (elided) Spanned construction.
1705 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
1707 node: match b.node {
1708 BinOpKind::Add => hir::BiAdd,
1709 BinOpKind::Sub => hir::BiSub,
1710 BinOpKind::Mul => hir::BiMul,
1711 BinOpKind::Div => hir::BiDiv,
1712 BinOpKind::Rem => hir::BiRem,
1713 BinOpKind::And => hir::BiAnd,
1714 BinOpKind::Or => hir::BiOr,
1715 BinOpKind::BitXor => hir::BiBitXor,
1716 BinOpKind::BitAnd => hir::BiBitAnd,
1717 BinOpKind::BitOr => hir::BiBitOr,
1718 BinOpKind::Shl => hir::BiShl,
1719 BinOpKind::Shr => hir::BiShr,
1720 BinOpKind::Eq => hir::BiEq,
1721 BinOpKind::Lt => hir::BiLt,
1722 BinOpKind::Le => hir::BiLe,
1723 BinOpKind::Ne => hir::BiNe,
1724 BinOpKind::Ge => hir::BiGe,
1725 BinOpKind::Gt => hir::BiGt,
// Lowers a pattern. The subtle arm is `PatKind::Ident`: depending on
// resolution it is either a true variable binding (resolved to a local,
// or unresolved — which can happen in body-less fn signatures) or a path
// pattern naming a unit struct/variant/const, which must lower to
// `PatKind::Path` instead.
1731 fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
1732 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(p.id);
1737 node: match p.node {
1738 PatKind::Wild => hir::PatKind::Wild,
1739 PatKind::Ident(ref binding_mode, pth1, ref sub) => {
1740 match self.resolver.get_resolution(p.id).map(|d| d.base_def()) {
1741 // `None` can occur in body-less function signatures
1742 def @ None | def @ Some(Def::Local(_)) => {
// The canonical id unifies multiple bindings of the same name
// (e.g. across or-patterns) to one representative local.
1743 let canonical_id = match def {
1744 Some(Def::Local(id)) => id,
1747 hir::PatKind::Binding(self.lower_binding_mode(binding_mode),
1749 respan(pth1.span, pth1.node.name),
1750 sub.as_ref().map(|x| self.lower_pat(x)))
// Resolved to a non-local: this "binding" is really a path pattern.
1753 hir::PatKind::Path(hir::QPath::Resolved(None, P(hir::Path {
1757 hir::PathSegment::from_name(pth1.node.name)
1763 PatKind::Lit(ref e) => hir::PatKind::Lit(P(self.lower_expr(e))),
1764 PatKind::TupleStruct(ref path, ref pats, ddpos) => {
1765 let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional);
1766 hir::PatKind::TupleStruct(qpath,
1767 pats.iter().map(|x| self.lower_pat(x)).collect(),
1770 PatKind::Path(ref qself, ref path) => {
1771 hir::PatKind::Path(self.lower_qpath(p.id, qself, path, ParamMode::Optional))
1773 PatKind::Struct(ref path, ref fields, etc) => {
1774 let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional);
1776 let fs = fields.iter()
1780 node: hir::FieldPat {
1781 name: self.lower_ident(f.node.ident),
1782 pat: self.lower_pat(&f.node.pat),
1783 is_shorthand: f.node.is_shorthand,
1788 hir::PatKind::Struct(qpath, fs, etc)
1790 PatKind::Tuple(ref elts, ddpos) => {
1791 hir::PatKind::Tuple(elts.iter().map(|x| self.lower_pat(x)).collect(), ddpos)
1793 PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
1794 PatKind::Ref(ref inner, mutbl) => {
1795 hir::PatKind::Ref(self.lower_pat(inner), self.lower_mutability(mutbl))
1797 PatKind::Range(ref e1, ref e2, ref end) => {
1798 hir::PatKind::Range(P(self.lower_expr(e1)),
1799 P(self.lower_expr(e2)),
1800 self.lower_range_end(end))
1802 PatKind::Slice(ref before, ref slice, ref after) => {
1803 hir::PatKind::Slice(before.iter().map(|x| self.lower_pat(x)).collect(),
1804 slice.as_ref().map(|x| self.lower_pat(x)),
1805 after.iter().map(|x| self.lower_pat(x)).collect())
1807 PatKind::Mac(_) => panic!("Shouldn't exist here"),
// Direct 1:1 mapping of range-pattern end kinds (`..=` vs `..`) to HIR.
1813 fn lower_range_end(&mut self, e: &RangeEnd) -> hir::RangeEnd {
1815 RangeEnd::Included => hir::RangeEnd::Included,
1816 RangeEnd::Excluded => hir::RangeEnd::Excluded,
1820 fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
1821 let kind = match e.node {
1823 // Eventually a desugaring for `box EXPR`
1824 // (similar to the desugaring above for `in PLACE BLOCK`)
1825 // should go here, desugaring
1829 // let mut place = BoxPlace::make_place();
1830 // let raw_place = Place::pointer(&mut place);
1831 // let value = $value;
1833 // ::std::ptr::write(raw_place, value);
1834 // Boxed::finalize(place)
1837 // But for now there are type-inference issues doing that.
1838 ExprKind::Box(ref inner) => {
1839 hir::ExprBox(P(self.lower_expr(inner)))
1842 // Desugar ExprBox: `in (PLACE) EXPR`
1843 ExprKind::InPlace(ref placer, ref value_expr) => {
1847 // let mut place = Placer::make_place(p);
1848 // let raw_place = Place::pointer(&mut place);
1850 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1851 // InPlace::finalize(place)
1853 let placer_expr = P(self.lower_expr(placer));
1854 let value_expr = P(self.lower_expr(value_expr));
1856 let placer_ident = self.str_to_ident("placer");
1857 let place_ident = self.str_to_ident("place");
1858 let p_ptr_ident = self.str_to_ident("p_ptr");
1860 let make_place = ["ops", "Placer", "make_place"];
1861 let place_pointer = ["ops", "Place", "pointer"];
1862 let move_val_init = ["intrinsics", "move_val_init"];
1863 let inplace_finalize = ["ops", "InPlace", "finalize"];
1866 self.allow_internal_unstable(CompilerDesugaringKind::BackArrow, e.span);
1867 let make_call = |this: &mut LoweringContext, p, args| {
1868 let path = P(this.expr_std_path(unstable_span, p, ThinVec::new()));
1869 P(this.expr_call(e.span, path, args))
1872 let mk_stmt_let = |this: &mut LoweringContext, bind, expr| {
1873 this.stmt_let(e.span, false, bind, expr)
1876 let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| {
1877 this.stmt_let(e.span, true, bind, expr)
1880 // let placer = <placer_expr> ;
1881 let (s1, placer_binding) = {
1882 mk_stmt_let(self, placer_ident, placer_expr)
1885 // let mut place = Placer::make_place(placer);
1886 let (s2, place_binding) = {
1887 let placer = self.expr_ident(e.span, placer_ident, placer_binding);
1888 let call = make_call(self, &make_place, hir_vec![placer]);
1889 mk_stmt_let_mut(self, place_ident, call)
1892 // let p_ptr = Place::pointer(&mut place);
1893 let (s3, p_ptr_binding) = {
1894 let agent = P(self.expr_ident(e.span, place_ident, place_binding));
1895 let args = hir_vec![self.expr_mut_addr_of(e.span, agent)];
1896 let call = make_call(self, &place_pointer, args);
1897 mk_stmt_let(self, p_ptr_ident, call)
1900 // pop_unsafe!(EXPR));
1901 let pop_unsafe_expr = {
1902 self.signal_block_expr(hir_vec![],
1905 hir::PopUnsafeBlock(hir::CompilerGenerated),
1910 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1911 // InPlace::finalize(place)
1914 let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding);
1915 let call_move_val_init =
1917 make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]),
1918 self.next_id().node_id);
1919 let call_move_val_init = respan(e.span, call_move_val_init);
1921 let place = self.expr_ident(e.span, place_ident, place_binding);
1922 let call = make_call(self, &inplace_finalize, hir_vec![place]);
1923 P(self.signal_block_expr(hir_vec![call_move_val_init],
1926 hir::PushUnsafeBlock(hir::CompilerGenerated),
1930 let block = self.block_all(e.span, hir_vec![s1, s2, s3], Some(expr));
1931 hir::ExprBlock(P(block))
1934 ExprKind::Array(ref exprs) => {
1935 hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect())
1937 ExprKind::Repeat(ref expr, ref count) => {
1938 let expr = P(self.lower_expr(expr));
1939 let count = self.lower_body(None, |this| this.lower_expr(count));
1940 hir::ExprRepeat(expr, count)
1942 ExprKind::Tup(ref elts) => {
1943 hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect())
1945 ExprKind::Call(ref f, ref args) => {
1946 let f = P(self.lower_expr(f));
1947 hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect())
1949 ExprKind::MethodCall(ref seg, ref args) => {
1950 let hir_seg = self.lower_path_segment(e.span, seg, ParamMode::Optional, 0,
1951 ParenthesizedGenericArgs::Err);
1952 let args = args.iter().map(|x| self.lower_expr(x)).collect();
1953 hir::ExprMethodCall(hir_seg, seg.span, args)
1955 ExprKind::Binary(binop, ref lhs, ref rhs) => {
1956 let binop = self.lower_binop(binop);
1957 let lhs = P(self.lower_expr(lhs));
1958 let rhs = P(self.lower_expr(rhs));
1959 hir::ExprBinary(binop, lhs, rhs)
1961 ExprKind::Unary(op, ref ohs) => {
1962 let op = self.lower_unop(op);
1963 let ohs = P(self.lower_expr(ohs));
1964 hir::ExprUnary(op, ohs)
1966 ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())),
1967 ExprKind::Cast(ref expr, ref ty) => {
1968 let expr = P(self.lower_expr(expr));
1969 hir::ExprCast(expr, self.lower_ty(ty))
1971 ExprKind::Type(ref expr, ref ty) => {
1972 let expr = P(self.lower_expr(expr));
1973 hir::ExprType(expr, self.lower_ty(ty))
1975 ExprKind::AddrOf(m, ref ohs) => {
1976 let m = self.lower_mutability(m);
1977 let ohs = P(self.lower_expr(ohs));
1978 hir::ExprAddrOf(m, ohs)
1980 // More complicated than you might expect because the else branch
1981 // might be `if let`.
1982 ExprKind::If(ref cond, ref blk, ref else_opt) => {
1983 let else_opt = else_opt.as_ref().map(|els| {
1985 ExprKind::IfLet(..) => {
1986 // wrap the if-let expr in a block
1987 let span = els.span;
1988 let els = P(self.lower_expr(els));
1993 let blk = P(hir::Block {
1998 rules: hir::DefaultBlock,
2000 targeted_by_break: false,
2002 P(self.expr_block(blk, ThinVec::new()))
2004 _ => P(self.lower_expr(els)),
2008 let then_blk = self.lower_block(blk, false);
2009 let then_expr = self.expr_block(then_blk, ThinVec::new());
2011 hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
2013 ExprKind::While(ref cond, ref body, opt_ident) => {
2014 self.with_loop_scope(e.id, |this|
2016 this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
2017 this.lower_block(body, false),
2018 this.lower_opt_sp_ident(opt_ident)))
2020 ExprKind::Loop(ref body, opt_ident) => {
2021 self.with_loop_scope(e.id, |this|
2022 hir::ExprLoop(this.lower_block(body, false),
2023 this.lower_opt_sp_ident(opt_ident),
2024 hir::LoopSource::Loop))
2026 ExprKind::Catch(ref body) => {
2027 self.with_catch_scope(body.id, |this|
2028 hir::ExprBlock(this.lower_block(body, true)))
2030 ExprKind::Match(ref expr, ref arms) => {
2031 hir::ExprMatch(P(self.lower_expr(expr)),
2032 arms.iter().map(|x| self.lower_arm(x)).collect(),
2033 hir::MatchSource::Normal)
2035 ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
2036 self.with_new_scopes(|this| {
2037 this.with_parent_def(e.id, |this| {
2038 let mut is_generator = false;
2039 let body_id = this.lower_body(Some(decl), |this| {
2040 let e = this.lower_expr(body);
2041 is_generator = this.is_generator;
2044 if is_generator && !decl.inputs.is_empty() {
2045 span_err!(this.sess, fn_decl_span, E0628,
2046 "generators cannot have explicit arguments");
2047 this.sess.abort_if_errors();
2049 hir::ExprClosure(this.lower_capture_clause(capture_clause),
2050 this.lower_fn_decl(decl),
2057 ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk, false)),
2058 ExprKind::Assign(ref el, ref er) => {
2059 hir::ExprAssign(P(self.lower_expr(el)), P(self.lower_expr(er)))
2061 ExprKind::AssignOp(op, ref el, ref er) => {
2062 hir::ExprAssignOp(self.lower_binop(op),
2063 P(self.lower_expr(el)),
2064 P(self.lower_expr(er)))
2066 ExprKind::Field(ref el, ident) => {
2067 hir::ExprField(P(self.lower_expr(el)),
2068 respan(ident.span, self.lower_ident(ident.node)))
2070 ExprKind::TupField(ref el, ident) => {
2071 hir::ExprTupField(P(self.lower_expr(el)), ident)
2073 ExprKind::Index(ref el, ref er) => {
2074 hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
2076 ExprKind::Range(ref e1, ref e2, lims) => {
2077 use syntax::ast::RangeLimits::*;
2079 let path = match (e1, e2, lims) {
2080 (&None, &None, HalfOpen) => "RangeFull",
2081 (&Some(..), &None, HalfOpen) => "RangeFrom",
2082 (&None, &Some(..), HalfOpen) => "RangeTo",
2083 (&Some(..), &Some(..), HalfOpen) => "Range",
2084 (&None, &Some(..), Closed) => "RangeToInclusive",
2085 (&Some(..), &Some(..), Closed) => "RangeInclusive",
2086 (_, &None, Closed) =>
2087 panic!(self.diagnostic().span_fatal(
2088 e.span, "inclusive range with no end")),
2092 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e)))
2094 let expr = P(self.lower_expr(&e));
2096 self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
2097 self.field(Symbol::intern(s), expr, unstable_span)
2098 }).collect::<P<[hir::Field]>>();
2100 let is_unit = fields.is_empty();
2102 self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
2104 iter::once("ops").chain(iter::once(path))
2105 .collect::<Vec<_>>();
2106 let struct_path = self.std_path(unstable_span, &struct_path, is_unit);
2107 let struct_path = hir::QPath::Resolved(None, P(struct_path));
2109 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
2115 hir::ExprPath(struct_path)
2117 hir::ExprStruct(struct_path, fields, None)
2119 span: unstable_span,
2120 attrs: e.attrs.clone(),
2123 ExprKind::Path(ref qself, ref path) => {
2124 hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional))
2126 ExprKind::Break(opt_ident, ref opt_expr) => {
2127 let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
2130 target_id: hir::ScopeTarget::Loop(
2131 Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
2134 self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident)))
2138 opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
2140 ExprKind::Continue(opt_ident) =>
2142 if self.is_in_loop_condition && opt_ident.is_none() {
2145 target_id: hir::ScopeTarget::Loop(Err(
2146 hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
2149 self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident)))
2151 ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
2152 ExprKind::InlineAsm(ref asm) => {
2153 let hir_asm = hir::InlineAsm {
2154 inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
2155 outputs: asm.outputs.iter().map(|out| {
2156 hir::InlineAsmOutput {
2157 constraint: out.constraint.clone(),
2159 is_indirect: out.is_indirect,
2162 asm: asm.asm.clone(),
2163 asm_str_style: asm.asm_str_style,
2164 clobbers: asm.clobbers.clone().into(),
2165 volatile: asm.volatile,
2166 alignstack: asm.alignstack,
2167 dialect: asm.dialect,
2171 asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect();
2173 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect();
2174 hir::ExprInlineAsm(P(hir_asm), outputs, inputs)
2176 ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
2177 hir::ExprStruct(self.lower_qpath(e.id, &None, path, ParamMode::Optional),
2178 fields.iter().map(|x| self.lower_field(x)).collect(),
2179 maybe_expr.as_ref().map(|x| P(self.lower_expr(x))))
2181 ExprKind::Paren(ref ex) => {
2182 let mut ex = self.lower_expr(ex);
2183 // include parens in span, but only if it is a super-span.
2184 if e.span.contains(ex.span) {
2187 // merge attributes into the inner expression.
2188 let mut attrs = e.attrs.clone();
2189 attrs.extend::<Vec<_>>(ex.attrs.into());
2194 ExprKind::Yield(ref opt_expr) => {
2195 self.is_generator = true;
2196 let expr = opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| {
2197 self.expr(e.span, hir::ExprTup(hir_vec![]), ThinVec::new())
2199 hir::ExprYield(P(expr))
2202 // Desugar ExprIfLet
2203 // From: `if let <pat> = <sub_expr> <body> [<else_opt>]`
2204 ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => {
2207 // match <sub_expr> {
2209 // _ => [<else_opt> | ()]
2212 let mut arms = vec![];
2214 // `<pat> => <body>`
2216 let body = self.lower_block(body, false);
2217 let body_expr = P(self.expr_block(body, ThinVec::new()));
2218 let pat = self.lower_pat(pat);
2219 arms.push(self.arm(hir_vec![pat], body_expr));
2222 // _ => [<else_opt>|()]
2224 let wildcard_arm: Option<&Expr> = else_opt.as_ref().map(|p| &**p);
2225 let wildcard_pattern = self.pat_wild(e.span);
2226 let body = if let Some(else_expr) = wildcard_arm {
2227 P(self.lower_expr(else_expr))
2229 self.expr_tuple(e.span, hir_vec![])
2231 arms.push(self.arm(hir_vec![wildcard_pattern], body));
2234 let contains_else_clause = else_opt.is_some();
2236 let sub_expr = P(self.lower_expr(sub_expr));
2241 hir::MatchSource::IfLetDesugar {
2242 contains_else_clause,
2246 // Desugar ExprWhileLet
2247 // From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
2248 ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
2251 // [opt_ident]: loop {
2252 // match <sub_expr> {
2258 // Note that the block AND the condition are evaluated in the loop scope.
2259 // This is done to allow `break` from inside the condition of the loop.
2260 let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| (
2261 this.lower_block(body, false),
2262 this.expr_break(e.span, ThinVec::new()),
2263 this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))),
2266 // `<pat> => <body>`
2268 let body_expr = P(self.expr_block(body, ThinVec::new()));
2269 let pat = self.lower_pat(pat);
2270 self.arm(hir_vec![pat], body_expr)
2275 let pat_under = self.pat_wild(e.span);
2276 self.arm(hir_vec![pat_under], break_expr)
2279 // `match <sub_expr> { ... }`
2280 let arms = hir_vec![pat_arm, break_arm];
2281 let match_expr = self.expr(e.span,
2282 hir::ExprMatch(sub_expr,
2284 hir::MatchSource::WhileLetDesugar),
2287 // `[opt_ident]: loop { ... }`
2288 let loop_block = P(self.block_expr(P(match_expr)));
2289 let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
2290 hir::LoopSource::WhileLet);
2291 // add attributes to the outer returned expr node
2295 // Desugar ExprForLoop
2296 // From: `[opt_ident]: for <pat> in <head> <body>`
2297 ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
2301 // let result = match ::std::iter::IntoIterator::into_iter(<head>) {
2303 // [opt_ident]: loop {
2305 // match ::std::iter::Iterator::next(&mut iter) {
2306 // ::std::option::Option::Some(val) => __next = val,
2307 // ::std::option::Option::None => break
2309 // let <pat> = __next;
2310 // StmtExpr(<body>);
2318 let head = self.lower_expr(head);
2320 let iter = self.str_to_ident("iter");
2322 let next_ident = self.str_to_ident("__next");
2323 let next_pat = self.pat_ident_binding_mode(e.span,
2325 hir::BindingAnnotation::Mutable);
2327 // `::std::option::Option::Some(val) => next = val`
2329 let val_ident = self.str_to_ident("val");
2330 let val_pat = self.pat_ident(e.span, val_ident);
2331 let val_expr = P(self.expr_ident(e.span, val_ident, val_pat.id));
2332 let next_expr = P(self.expr_ident(e.span, next_ident, next_pat.id));
2333 let assign = P(self.expr(e.span,
2334 hir::ExprAssign(next_expr, val_expr),
2336 let some_pat = self.pat_some(e.span, val_pat);
2337 self.arm(hir_vec![some_pat], assign)
2340 // `::std::option::Option::None => break`
2342 let break_expr = self.with_loop_scope(e.id, |this|
2343 this.expr_break(e.span, ThinVec::new()));
2344 let pat = self.pat_none(e.span);
2345 self.arm(hir_vec![pat], break_expr)
2349 let iter_pat = self.pat_ident_binding_mode(e.span,
2351 hir::BindingAnnotation::Mutable);
2353 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
2355 let iter = P(self.expr_ident(e.span, iter, iter_pat.id));
2356 let ref_mut_iter = self.expr_mut_addr_of(e.span, iter);
2357 let next_path = &["iter", "Iterator", "next"];
2358 let next_path = P(self.expr_std_path(e.span, next_path, ThinVec::new()));
2359 let next_expr = P(self.expr_call(e.span, next_path,
2360 hir_vec![ref_mut_iter]));
2361 let arms = hir_vec![pat_arm, break_arm];
2364 hir::ExprMatch(next_expr, arms,
2365 hir::MatchSource::ForLoopDesugar),
2368 let match_stmt = respan(e.span, hir::StmtExpr(match_expr, self.next_id().node_id));
2370 let next_expr = P(self.expr_ident(e.span, next_ident, next_pat.id));
2373 let next_let = self.stmt_let_pat(e.span,
2376 hir::LocalSource::ForLoopDesugar);
2378 // `let <pat> = __next`
2379 let pat = self.lower_pat(pat);
2380 let pat_let = self.stmt_let_pat(e.span,
2383 hir::LocalSource::ForLoopDesugar);
2385 let body_block = self.with_loop_scope(e.id,
2386 |this| this.lower_block(body, false));
2387 let body_expr = P(self.expr_block(body_block, ThinVec::new()));
2388 let body_stmt = respan(e.span, hir::StmtExpr(body_expr, self.next_id().node_id));
2390 let loop_block = P(self.block_all(e.span,
2397 // `[opt_ident]: loop { ... }`
2398 let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
2399 hir::LoopSource::ForLoop);
2400 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
2401 let loop_expr = P(hir::Expr {
2406 attrs: ThinVec::new(),
2409 // `mut iter => { ... }`
2410 let iter_arm = self.arm(hir_vec![iter_pat], loop_expr);
2412 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
2413 let into_iter_expr = {
2414 let into_iter_path = &["iter", "IntoIterator", "into_iter"];
2415 let into_iter = P(self.expr_std_path(e.span, into_iter_path,
2417 P(self.expr_call(e.span, into_iter, hir_vec![head]))
2420 let match_expr = P(self.expr_match(e.span,
2423 hir::MatchSource::ForLoopDesugar));
2425 // `{ let _result = ...; _result }`
2426 // underscore prevents an unused_variables lint if the head diverges
2427 let result_ident = self.str_to_ident("_result");
2428 let (let_stmt, let_stmt_binding) =
2429 self.stmt_let(e.span, false, result_ident, match_expr);
2431 let result = P(self.expr_ident(e.span, result_ident, let_stmt_binding));
2432 let block = P(self.block_all(e.span, hir_vec![let_stmt], Some(result)));
2433 // add the attributes to the outer returned expr node
2434 return self.expr_block(block, e.attrs.clone());
2437 // Desugar ExprKind::Try
2439 ExprKind::Try(ref sub_expr) => {
2442 // match Try::into_result(<expr>) {
2443 // Ok(val) => #[allow(unreachable_code)] val,
2444 // Err(err) => #[allow(unreachable_code)]
2445 // // If there is an enclosing `catch {...}`
2446 // break 'catch_target Try::from_error(From::from(err)),
2448 // return Try::from_error(From::from(err)),
2452 self.allow_internal_unstable(CompilerDesugaringKind::QuestionMark, e.span);
2454 // Try::into_result(<expr>)
2457 let sub_expr = self.lower_expr(sub_expr);
2459 let path = &["ops", "Try", "into_result"];
2460 let path = P(self.expr_std_path(unstable_span, path, ThinVec::new()));
2461 P(self.expr_call(e.span, path, hir_vec![sub_expr]))
2464 // #[allow(unreachable_code)]
2466 // allow(unreachable_code)
2468 let allow_ident = self.str_to_ident("allow");
2469 let uc_ident = self.str_to_ident("unreachable_code");
2470 let uc_meta_item = attr::mk_spanned_word_item(e.span, uc_ident);
2471 let uc_nested = NestedMetaItemKind::MetaItem(uc_meta_item);
2472 let uc_spanned = respan(e.span, uc_nested);
2473 attr::mk_spanned_list_item(e.span, allow_ident, vec![uc_spanned])
2475 attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow)
2477 let attrs = vec![attr];
2479 // Ok(val) => #[allow(unreachable_code)] val,
2481 let val_ident = self.str_to_ident("val");
2482 let val_pat = self.pat_ident(e.span, val_ident);
2483 let val_expr = P(self.expr_ident_with_attrs(e.span,
2486 ThinVec::from(attrs.clone())));
2487 let ok_pat = self.pat_ok(e.span, val_pat);
2489 self.arm(hir_vec![ok_pat], val_expr)
2492 // Err(err) => #[allow(unreachable_code)]
2493 // return Carrier::from_error(From::from(err)),
2495 let err_ident = self.str_to_ident("err");
2496 let err_local = self.pat_ident(e.span, err_ident);
2498 let path = &["convert", "From", "from"];
2499 let from = P(self.expr_std_path(e.span, path, ThinVec::new()));
2500 let err_expr = self.expr_ident(e.span, err_ident, err_local.id);
2502 self.expr_call(e.span, from, hir_vec![err_expr])
2504 let from_err_expr = {
2505 let path = &["ops", "Try", "from_error"];
2506 let from_err = P(self.expr_std_path(unstable_span, path,
2508 P(self.expr_call(e.span, from_err, hir_vec![from_expr]))
2511 let thin_attrs = ThinVec::from(attrs);
2512 let catch_scope = self.catch_scopes.last().map(|x| *x);
2513 let ret_expr = if let Some(catch_node) = catch_scope {
2519 target_id: hir::ScopeTarget::Block(catch_node),
2526 hir::Expr_::ExprRet(Some(from_err_expr)),
2531 let err_pat = self.pat_err(e.span, err_local);
2532 self.arm(hir_vec![err_pat], ret_expr)
2535 hir::ExprMatch(discr,
2536 hir_vec![err_arm, ok_arm],
2537 hir::MatchSource::TryDesugar)
2540 ExprKind::Mac(_) => panic!("Shouldn't exist here"),
2543 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
2550 attrs: e.attrs.clone(),
/// Lowers an AST statement into one or more HIR statements.
///
/// Most statement kinds lower 1:1, but an item statement can expand into
/// several HIR `DeclItem` statements (one per lowered item id), hence the
/// `SmallVector` return type.
///
/// NOTE(review): several lines (closing delimiters and span fields) are
/// elided in this extract; comments below describe only what is visible.
2554 fn lower_stmt(&mut self, s: &Stmt) -> SmallVector<hir::Stmt> {
2555 SmallVector::one(match s.node {
2556 StmtKind::Local(ref l) => Spanned {
2557 node: hir::StmtDecl(P(Spanned {
2558 node: hir::DeclLocal(self.lower_local(l)),
2560 }), self.lower_node_id(s.id).node_id),
2563 StmtKind::Item(ref it) => {
2564 // Can only use the ID once.
2565 let mut id = Some(s.id);
2566 return self.lower_item_id(it).into_iter().map(|item_id| Spanned {
2567 node: hir::StmtDecl(P(Spanned {
2568 node: hir::DeclItem(item_id),
// The first emitted statement consumes the AST statement's id (taken
// out of the `Option` above); any further statements get fresh ids,
// upholding the "ids are used once" invariant from the module docs.
2571 .map(|id| self.lower_node_id(id).node_id)
2572 .unwrap_or_else(|| self.next_id().node_id)),
2576 StmtKind::Expr(ref e) => {
2578 node: hir::StmtExpr(P(self.lower_expr(e)),
2579 self.lower_node_id(s.id).node_id),
2583 StmtKind::Semi(ref e) => {
2585 node: hir::StmtSemi(P(self.lower_expr(e)),
2586 self.lower_node_id(s.id).node_id),
// Macros must have been fully expanded before lowering runs.
2590 StmtKind::Mac(..) => panic!("Shouldn't exist here"),
2594 fn lower_capture_clause(&mut self, c: CaptureBy) -> hir::CaptureClause {
2596 CaptureBy::Value => hir::CaptureByValue,
2597 CaptureBy::Ref => hir::CaptureByRef,
2601 /// If an `explicit_owner` is given, this method allocates the `HirId` in
2602 /// the address space of that item instead of the item currently being
2603 /// lowered. This can happen during `lower_impl_item_ref()` where we need to
2604 /// lower a `Visibility` value although we haven't lowered the owning
2605 /// `ImplItem` in question yet.
2606 fn lower_visibility(&mut self,
2608 explicit_owner: Option<NodeId>)
2609 -> hir::Visibility {
2611 Visibility::Public => hir::Public,
2612 Visibility::Crate(_) => hir::Visibility::Crate,
2613 Visibility::Restricted { ref path, id } => {
2614 hir::Visibility::Restricted {
2615 path: P(self.lower_path(id, path, ParamMode::Explicit, true)),
// Allocate the restriction path's id in the explicit owner's id space
// when one was supplied (see the doc comment above), otherwise in the
// item currently being lowered.
2616 id: if let Some(owner) = explicit_owner {
2617 self.lower_node_id_with_owner(id, owner).node_id
2619 self.lower_node_id(id).node_id
2623 Visibility::Inherited => hir::Inherited,
/// Lowers an impl-item defaultness marker. `has_value` records whether
/// the item actually provides a body/value.
2627 fn lower_defaultness(&mut self, d: Defaultness, has_value: bool) -> hir::Defaultness {
2629 Defaultness::Default => hir::Defaultness::Default { has_value: has_value },
2630 Defaultness::Final => {
// NOTE(review): a line is elided here in this extract — presumably an
// assertion that a `Final` item always has a value; confirm against the
// full source.
2632 hir::Defaultness::Final
2637 fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode {
2639 BlockCheckMode::Default => hir::DefaultBlock,
2640 BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(self.lower_unsafe_source(u)),
2644 fn lower_binding_mode(&mut self, b: &BindingMode) -> hir::BindingAnnotation {
2646 BindingMode::ByValue(Mutability::Immutable) =>
2647 hir::BindingAnnotation::Unannotated,
2648 BindingMode::ByRef(Mutability::Immutable) => hir::BindingAnnotation::Ref,
2649 BindingMode::ByValue(Mutability::Mutable) => hir::BindingAnnotation::Mutable,
2650 BindingMode::ByRef(Mutability::Mutable) => hir::BindingAnnotation::RefMut,
2654 fn lower_unsafe_source(&mut self, u: UnsafeSource) -> hir::UnsafeSource {
2656 CompilerGenerated => hir::CompilerGenerated,
2657 UserProvided => hir::UserProvided,
2661 fn lower_impl_polarity(&mut self, i: ImplPolarity) -> hir::ImplPolarity {
2663 ImplPolarity::Positive => hir::ImplPolarity::Positive,
2664 ImplPolarity::Negative => hir::ImplPolarity::Negative,
2668 fn lower_trait_bound_modifier(&mut self, f: TraitBoundModifier) -> hir::TraitBoundModifier {
2670 TraitBoundModifier::None => hir::TraitBoundModifier::None,
2671 TraitBoundModifier::Maybe => hir::TraitBoundModifier::Maybe,
2675 // Helper methods for building HIR.
2677 fn arm(&mut self, pats: hir::HirVec<P<hir::Pat>>, expr: P<hir::Expr>) -> hir::Arm {
// Builds a struct-literal field with the given name, value and span.
// NOTE(review): most of the constructor body is elided in this extract.
2686 fn field(&mut self, name: Name, expr: P<hir::Expr>, span: Span) -> hir::Field {
// Desugared fields are always written out in full, never shorthand.
2694 is_shorthand: false,
2698 fn expr_break(&mut self, span: Span, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
2699 let expr_break = hir::ExprBreak(self.lower_loop_destination(None), None);
2700 P(self.expr(span, expr_break, attrs))
2703 fn expr_call(&mut self, span: Span, e: P<hir::Expr>, args: hir::HirVec<hir::Expr>)
2705 self.expr(span, hir::ExprCall(e, args), ThinVec::new())
2708 fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
2709 self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
2712 fn expr_ident_with_attrs(&mut self, span: Span,
2715 attrs: ThinVec<Attribute>) -> hir::Expr {
2716 let expr_path = hir::ExprPath(hir::QPath::Resolved(None, P(hir::Path {
2718 def: Def::Local(binding),
2719 segments: hir_vec![hir::PathSegment::from_name(id)],
2722 self.expr(span, expr_path, attrs)
2725 fn expr_mut_addr_of(&mut self, span: Span, e: P<hir::Expr>) -> hir::Expr {
2726 self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new())
2729 fn expr_std_path(&mut self,
2731 components: &[&str],
2732 attrs: ThinVec<Attribute>)
2734 let path = self.std_path(span, components, true);
2735 self.expr(span, hir::ExprPath(hir::QPath::Resolved(None, P(path))), attrs)
2738 fn expr_match(&mut self,
2741 arms: hir::HirVec<hir::Arm>,
2742 source: hir::MatchSource)
2744 self.expr(span, hir::ExprMatch(arg, arms, source), ThinVec::new())
2747 fn expr_block(&mut self, b: P<hir::Block>, attrs: ThinVec<Attribute>) -> hir::Expr {
2748 self.expr(b.span, hir::ExprBlock(b), attrs)
2751 fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<hir::Expr>) -> P<hir::Expr> {
2752 P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new()))
/// Central constructor for new (desugared) HIR expressions: allocates a
/// fresh id pair via `next_id()`, per the "every new node gets a new id"
/// invariant described in the module docs.
2755 fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> hir::Expr {
2756 let LoweredNodeId { node_id, hir_id } = self.next_id();
// NOTE(review): the `hir::Expr` construction itself is elided in this
// extract; presumably it packs `node_id`/`hir_id` together with `node`,
// `span` and `attrs` — confirm against the full source.
/// Builds a `let <pat> [= <ex>];` statement with the given `source`
/// recorded for later diagnostics (e.g. the for-loop desugaring).
/// NOTE(review): several struct fields and the parameter list are
/// partially elided in this extract.
2766 fn stmt_let_pat(&mut self,
2768 ex: Option<P<hir::Expr>>,
2770 source: hir::LocalSource)
// Freshly-created node: allocate a new id pair.
2772 let LoweredNodeId { node_id, hir_id } = self.next_id();
2774 let local = P(hir::Local {
2781 attrs: ThinVec::new(),
// Wrap the local in a declaration statement; the statement itself also
// needs its own fresh id.
2784 let decl = respan(sp, hir::DeclLocal(local));
2785 respan(sp, hir::StmtDecl(P(decl), self.next_id().node_id))
2788 fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P<hir::Expr>)
2789 -> (hir::Stmt, NodeId) {
2790 let pat = if mutbl {
2791 self.pat_ident_binding_mode(sp, ident, hir::BindingAnnotation::Mutable)
2793 self.pat_ident(sp, ident)
2795 let pat_id = pat.id;
2796 (self.stmt_let_pat(sp, Some(ex), pat, hir::LocalSource::Normal), pat_id)
2799 fn block_expr(&mut self, expr: P<hir::Expr>) -> hir::Block {
2800 self.block_all(expr.span, hir::HirVec::new(), Some(expr))
/// Builds a HIR block from the given statements and optional tail
/// expression, with default (safe) rules and a fresh id pair.
2803 fn block_all(&mut self, span: Span, stmts: hir::HirVec<hir::Stmt>, expr: Option<P<hir::Expr>>)
2805 let LoweredNodeId { node_id, hir_id } = self.next_id();
2812 rules: hir::DefaultBlock,
// Desugared blocks are never targets of a `break`-with-value.
2814 targeted_by_break: false,
2818 fn pat_ok(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2819 self.pat_std_enum(span, &["result", "Result", "Ok"], hir_vec![pat])
2822 fn pat_err(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2823 self.pat_std_enum(span, &["result", "Result", "Err"], hir_vec![pat])
2826 fn pat_some(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2827 self.pat_std_enum(span, &["option", "Option", "Some"], hir_vec![pat])
2830 fn pat_none(&mut self, span: Span) -> P<hir::Pat> {
2831 self.pat_std_enum(span, &["option", "Option", "None"], hir_vec![])
2834 fn pat_std_enum(&mut self,
2836 components: &[&str],
2837 subpats: hir::HirVec<P<hir::Pat>>)
2839 let path = self.std_path(span, components, true);
2840 let qpath = hir::QPath::Resolved(None, P(path));
2841 let pt = if subpats.is_empty() {
2842 hir::PatKind::Path(qpath)
2844 hir::PatKind::TupleStruct(qpath, subpats, None)
2849 fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
2850 self.pat_ident_binding_mode(span, name, hir::BindingAnnotation::Unannotated)
/// Builds a fresh binding pattern for `name` with binding mode `bm`
/// (e.g. `mut iter` in the for-loop desugaring).
/// NOTE(review): the remainder of the `hir::Pat` construction is elided
/// in this extract.
2853 fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingAnnotation)
2855 let LoweredNodeId { node_id, hir_id } = self.next_id();
2860 node: hir::PatKind::Binding(bm,
2871 fn pat_wild(&mut self, span: Span) -> P<hir::Pat> {
2872 self.pat(span, hir::PatKind::Wild)
/// Central constructor for new (desugared) HIR patterns; allocates a
/// fresh id pair for the node, per the module-level id invariant.
2875 fn pat(&mut self, span: Span, pat: hir::PatKind) -> P<hir::Pat> {
2876 let LoweredNodeId { node_id, hir_id } = self.next_id();
// NOTE(review): the `hir::Pat` construction itself is elided in this
// extract — confirm against the full source.
2885 /// Given suffix ["b","c","d"], returns path `::std::b::c::d` when
2886 /// `fld.cx.use_std`, and `::core::b::c::d` otherwise.
2887 /// The path is also resolved according to `is_value`.
2888 fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path {
2889 let mut path = hir::Path {
2892 segments: iter::once(keywords::CrateRoot.name()).chain({
2893 self.crate_root.into_iter().chain(components.iter().cloned()).map(Symbol::intern)
2894 }).map(hir::PathSegment::from_name).collect(),
2897 self.resolver.resolve_hir_path(&mut path, is_value);
/// Builds a block expression from the given parts with an explicit
/// safety `rule` (used for compiler-generated `unsafe` blocks).
/// NOTE(review): several parameters and `hir::Block` fields are elided
/// in this extract.
2901 fn signal_block_expr(&mut self,
2902 stmts: hir::HirVec<hir::Stmt>,
2905 rule: hir::BlockCheckMode,
2906 attrs: ThinVec<Attribute>)
// Freshly-created node: allocate a new id pair.
2908 let LoweredNodeId { node_id, hir_id } = self.next_id();
2910 let block = P(hir::Block {
// Desugared blocks are never targets of a `break`-with-value.
2917 targeted_by_break: false,
2919 self.expr_block(block, attrs)
/// Builds a `hir::Ty` for the given (already lowered) `qpath`. A
/// resolved path that refers to a trait is rewritten into a bare
/// trait-object type (`TyTraitObject`) with an elided lifetime bound.
2922 fn ty_path(&mut self, id: NodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
2924 let node = match qpath {
2925 hir::QPath::Resolved(None, path) => {
2926 // Turn trait object paths into `TyTraitObject` instead.
2927 if let Def::Trait(_) = path.def {
2928 let principal = hir::PolyTraitRef {
2929 bound_lifetimes: hir_vec![],
2930 trait_ref: hir::TraitRef {
// Unwrap the owned path out of its `P` pointer.
2931 path: path.and_then(|path| path),
2937 // The original ID is taken by the `PolyTraitRef`,
2938 // so the `Ty` itself needs a different one.
2939 id = self.next_id().node_id;
2941 hir::TyTraitObject(hir_vec![principal], self.elided_lifetime(span))
2943 hir::TyPath(hir::QPath::Resolved(None, path))
// Self-qualified and type-relative paths lower as plain path types.
2946 _ => hir::TyPath(qpath)
2948 P(hir::Ty { id, node, span })
2951 fn elided_lifetime(&mut self, span: Span) -> hir::Lifetime {
2953 id: self.next_id().node_id,
2955 name: keywords::Invalid.name()
2960 fn body_ids(bodies: &BTreeMap<hir::BodyId, hir::Body>) -> Vec<hir::BodyId> {
2961 // Sorting by span ensures that we get things in order within a
2962 // file, and also puts the files in a sensible order.
2963 let mut body_ids: Vec<_> = bodies.keys().cloned().collect();
2964 body_ids.sort_by_key(|b| bodies[b].value.span);