1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Lowers the AST to the HIR.
13 //! Since the AST and HIR are fairly similar, this is mostly a simple procedure,
14 //! much like a fold. Where lowering involves a bit more work things get more
15 //! interesting and there are some invariants you should know about. These mostly
16 //! concern spans and ids.
18 //! Spans are assigned to AST nodes during parsing and then are modified during
19 //! expansion to indicate the origin of a node and the process it went through
20 //! being expanded. Ids are assigned to AST nodes just before lowering.
22 //! For the simpler lowering steps, ids and spans should be preserved. Unlike
23 //! expansion we do not preserve the process of lowering in the spans, so spans
24 //! should not be modified here. When creating a new node (as opposed to
25 //! 'folding' an existing one), then you create a new id using `next_id()`.
27 //! You must ensure that ids are unique. That means that you should only use the
28 //! id from an AST node in a single HIR node (you can assume that AST node ids
29 //! are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
30 //! If you do, you must then set the new node's id to a fresh one.
32 //! Spans are used for error messages and for tools to map semantics back to
33 //! source code. It is therefore not as important with spans as ids to be strict
34 //! about use (you can't break the compiler by screwing up a span). Obviously, a
35 //! HIR node can only have a single span. But multiple nodes can have the same
36 //! span and spans don't need to be kept in order, etc. Where code is preserved
37 //! by lowering, it should have the same span as in the AST. Where HIR nodes are
38 //! new it is probably best to give a span for the whole AST node being lowered.
39 //! All nodes should have real spans, don't use dummy spans. Tools are likely to
40 //! get confused if the spans from leaf AST nodes occur in multiple places
41 //! in the HIR, especially for multiple identifiers.
44 use hir::map::{Definitions, DefKey};
45 use hir::def_id::{DefIndex, DefId, CRATE_DEF_INDEX};
46 use hir::def::{Def, PathResolution};
47 use lint::builtin::PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES;
48 use middle::cstore::CrateStore;
49 use rustc_data_structures::indexed_vec::IndexVec;
51 use util::common::FN_OUTPUT_NAME;
52 use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
54 use std::collections::BTreeMap;
61 use syntax::ext::hygiene::{Mark, SyntaxContext};
63 use syntax::codemap::{self, respan, Spanned, CompilerDesugaringKind};
64 use syntax::std_inject;
65 use syntax::symbol::{Symbol, keywords};
66 use syntax::util::small_vector::SmallVector;
67 use syntax::visit::{self, Visitor};
70 const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF;
72 pub struct LoweringContext<'a> {
73 crate_root: Option<&'static str>,
75 // Use to assign ids to hir nodes that do not directly correspond to an ast node
78 cstore: &'a CrateStore,
80 // As we walk the AST we must keep track of the current 'parent' def id (in
81 // the form of a DefIndex) so that if we create a new node which introduces
82 // a definition, then we can properly create the def id.
83 parent_def: Option<DefIndex>,
84 resolver: &'a mut Resolver,
85 name_map: FxHashMap<Ident, Name>,
87 /// The items being lowered are collected here.
88 items: BTreeMap<NodeId, hir::Item>,
90 trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
91 impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
92 bodies: BTreeMap<hir::BodyId, hir::Body>,
93 exported_macros: Vec<hir::MacroDef>,
95 trait_impls: BTreeMap<DefId, Vec<NodeId>>,
96 trait_default_impl: BTreeMap<DefId, NodeId>,
100 catch_scopes: Vec<NodeId>,
101 loop_scopes: Vec<NodeId>,
102 is_in_loop_condition: bool,
104 type_def_lifetime_params: DefIdMap<usize>,
106 current_hir_id_owner: Vec<(DefIndex, u32)>,
107 item_local_id_counters: NodeMap<u32>,
108 node_id_to_hir_id: IndexVec<NodeId, hir::HirId>,
112 /// Resolve a hir path generated by the lowerer when expanding `for`, `if let`, etc.
113 fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool);
115 /// Obtain the resolution for a node id
116 fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>;
118 /// We must keep the set of definitions up to date as we add nodes that weren't in the AST.
119 /// This should only return `None` during testing.
120 fn definitions(&mut self) -> &mut Definitions;
123 pub fn lower_crate(sess: &Session,
126 resolver: &mut Resolver)
128 // We're constructing the HIR here; we don't care what we will
129 // read, since we haven't even constructed the *input* to
131 let _ignore = sess.dep_graph.in_ignore();
134 crate_root: std_inject::injected_crate_name(krate),
139 name_map: FxHashMap(),
140 items: BTreeMap::new(),
141 trait_items: BTreeMap::new(),
142 impl_items: BTreeMap::new(),
143 bodies: BTreeMap::new(),
144 trait_impls: BTreeMap::new(),
145 trait_default_impl: BTreeMap::new(),
146 exported_macros: Vec::new(),
147 catch_scopes: Vec::new(),
148 loop_scopes: Vec::new(),
149 is_in_loop_condition: false,
150 type_def_lifetime_params: DefIdMap(),
151 current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)],
152 item_local_id_counters: NodeMap(),
153 node_id_to_hir_id: IndexVec::new(),
158 #[derive(Copy, Clone, PartialEq, Eq)]
160 /// Any path in a type context.
162 /// The `module::Type` in `module::Type::method` in an expression.
166 struct LoweredNodeId {
171 enum ParenthesizedGenericArgs {
177 impl<'a> LoweringContext<'a> {
178 fn lower_crate(mut self, c: &Crate) -> hir::Crate {
179 /// Full-crate AST visitor that inserts into a fresh
180 /// `LoweringContext` any information that may be
181 /// needed from arbitrary locations in the crate.
182 /// E.g. The number of lifetime generic parameters
183 /// declared for every type and trait definition.
184 struct MiscCollector<'lcx, 'interner: 'lcx> {
185 lctx: &'lcx mut LoweringContext<'interner>,
188 impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> {
189 fn visit_item(&mut self, item: &'lcx Item) {
190 self.lctx.allocate_hir_id_counter(item.id, item);
193 ItemKind::Struct(_, ref generics) |
194 ItemKind::Union(_, ref generics) |
195 ItemKind::Enum(_, ref generics) |
196 ItemKind::Ty(_, ref generics) |
197 ItemKind::Trait(_, ref generics, ..) => {
198 let def_id = self.lctx.resolver.definitions().local_def_id(item.id);
199 let count = generics.lifetimes.len();
200 self.lctx.type_def_lifetime_params.insert(def_id, count);
204 visit::walk_item(self, item);
207 fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
208 self.lctx.allocate_hir_id_counter(item.id, item);
209 visit::walk_trait_item(self, item);
212 fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
213 self.lctx.allocate_hir_id_counter(item.id, item);
214 visit::walk_impl_item(self, item);
218 struct ItemLowerer<'lcx, 'interner: 'lcx> {
219 lctx: &'lcx mut LoweringContext<'interner>,
222 impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
223 fn visit_item(&mut self, item: &'lcx Item) {
224 let mut item_lowered = true;
225 self.lctx.with_hir_id_owner(item.id, |lctx| {
226 if let Some(hir_item) = lctx.lower_item(item) {
227 lctx.items.insert(item.id, hir_item);
229 item_lowered = false;
234 visit::walk_item(self, item);
238 fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
239 self.lctx.with_hir_id_owner(item.id, |lctx| {
240 let id = hir::TraitItemId { node_id: item.id };
241 let hir_item = lctx.lower_trait_item(item);
242 lctx.trait_items.insert(id, hir_item);
245 visit::walk_trait_item(self, item);
248 fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
249 self.lctx.with_hir_id_owner(item.id, |lctx| {
250 let id = hir::ImplItemId { node_id: item.id };
251 let hir_item = lctx.lower_impl_item(item);
252 lctx.impl_items.insert(id, hir_item);
254 visit::walk_impl_item(self, item);
258 self.lower_node_id(CRATE_NODE_ID);
259 debug_assert!(self.node_id_to_hir_id[CRATE_NODE_ID] == hir::CRATE_HIR_ID);
261 visit::walk_crate(&mut MiscCollector { lctx: &mut self }, c);
262 visit::walk_crate(&mut ItemLowerer { lctx: &mut self }, c);
264 let module = self.lower_mod(&c.module);
265 let attrs = self.lower_attrs(&c.attrs);
266 let body_ids = body_ids(&self.bodies);
270 .init_node_id_to_hir_id_mapping(self.node_id_to_hir_id);
276 exported_macros: hir::HirVec::from(self.exported_macros),
278 trait_items: self.trait_items,
279 impl_items: self.impl_items,
282 trait_impls: self.trait_impls,
283 trait_default_impl: self.trait_default_impl,
287 fn allocate_hir_id_counter<T: Debug>(&mut self,
290 if self.item_local_id_counters.insert(owner, 0).is_some() {
291 bug!("Tried to allocate item_local_id_counter for {:?} twice", debug);
293 // Always allocate the first HirId for the owner itself
294 self.lower_node_id_with_owner(owner, owner);
297 fn lower_node_id_generic<F>(&mut self,
301 where F: FnOnce(&mut Self) -> hir::HirId
303 if ast_node_id == DUMMY_NODE_ID {
304 return LoweredNodeId {
305 node_id: DUMMY_NODE_ID,
306 hir_id: hir::DUMMY_HIR_ID,
310 let min_size = ast_node_id.as_usize() + 1;
312 if min_size > self.node_id_to_hir_id.len() {
313 self.node_id_to_hir_id.resize(min_size, hir::DUMMY_HIR_ID);
316 let existing_hir_id = self.node_id_to_hir_id[ast_node_id];
318 if existing_hir_id == hir::DUMMY_HIR_ID {
319 // Generate a new HirId
320 let hir_id = alloc_hir_id(self);
321 self.node_id_to_hir_id[ast_node_id] = hir_id;
323 node_id: ast_node_id,
328 node_id: ast_node_id,
329 hir_id: existing_hir_id,
334 fn with_hir_id_owner<F>(&mut self, owner: NodeId, f: F)
335 where F: FnOnce(&mut Self)
337 let counter = self.item_local_id_counters
338 .insert(owner, HIR_ID_COUNTER_LOCKED)
340 let def_index = self.resolver.definitions().opt_def_index(owner).unwrap();
341 self.current_hir_id_owner.push((def_index, counter));
343 let (new_def_index, new_counter) = self.current_hir_id_owner.pop().unwrap();
345 debug_assert!(def_index == new_def_index);
346 debug_assert!(new_counter >= counter);
348 let prev = self.item_local_id_counters.insert(owner, new_counter).unwrap();
349 debug_assert!(prev == HIR_ID_COUNTER_LOCKED);
352 /// This method allocates a new HirId for the given NodeId and stores it in
353 /// the LoweringContext's NodeId => HirId map.
354 /// Take care not to call this method if the resulting HirId is then not
355 /// actually used in the HIR, as that would trigger an assertion in the
356 /// HirIdValidator later on, which makes sure that all NodeIds got mapped
357 /// properly. Calling the method twice with the same NodeId is fine though.
358 fn lower_node_id(&mut self, ast_node_id: NodeId) -> LoweredNodeId {
359 self.lower_node_id_generic(ast_node_id, |this| {
360 let &mut (def_index, ref mut local_id_counter) = this.current_hir_id_owner
363 let local_id = *local_id_counter;
364 *local_id_counter += 1;
367 local_id: hir::ItemLocalId(local_id),
372 fn lower_node_id_with_owner(&mut self,
376 self.lower_node_id_generic(ast_node_id, |this| {
377 let local_id_counter = this.item_local_id_counters
380 let local_id = *local_id_counter;
382 // We want to be sure not to modify the counter in the map while it
383 // is also on the stack. Otherwise we'll get lost updates when writing
384 // back from the stack to the map.
385 debug_assert!(local_id != HIR_ID_COUNTER_LOCKED);
387 *local_id_counter += 1;
388 let def_index = this.resolver.definitions().opt_def_index(owner).unwrap();
392 local_id: hir::ItemLocalId(local_id),
397 fn record_body(&mut self, value: hir::Expr, decl: Option<&FnDecl>)
399 let body = hir::Body {
400 arguments: decl.map_or(hir_vec![], |decl| {
401 decl.inputs.iter().map(|x| self.lower_arg(x)).collect()
403 is_generator: self.is_generator,
407 self.bodies.insert(id, body);
411 fn next_id(&mut self) -> LoweredNodeId {
412 self.lower_node_id(self.sess.next_node_id())
415 fn expect_full_def(&mut self, id: NodeId) -> Def {
416 self.resolver.get_resolution(id).map_or(Def::Err, |pr| {
417 if pr.unresolved_segments() != 0 {
418 bug!("path not fully resolved: {:?}", pr);
424 fn diagnostic(&self) -> &errors::Handler {
425 self.sess.diagnostic()
428 fn str_to_ident(&self, s: &'static str) -> Name {
432 fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span
434 let mark = Mark::fresh(Mark::root());
435 mark.set_expn_info(codemap::ExpnInfo {
437 callee: codemap::NameAndSpan {
438 format: codemap::CompilerDesugaring(reason),
440 allow_internal_unstable: true,
441 allow_internal_unsafe: false,
444 span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
447 fn with_catch_scope<T, F>(&mut self, catch_id: NodeId, f: F) -> T
448 where F: FnOnce(&mut LoweringContext) -> T
450 let len = self.catch_scopes.len();
451 self.catch_scopes.push(catch_id);
453 let result = f(self);
454 assert_eq!(len + 1, self.catch_scopes.len(),
455 "catch scopes should be added and removed in stack order");
457 self.catch_scopes.pop().unwrap();
462 fn lower_body<F>(&mut self, decl: Option<&FnDecl>, f: F) -> hir::BodyId
463 where F: FnOnce(&mut LoweringContext) -> hir::Expr
465 let prev = mem::replace(&mut self.is_generator, false);
466 let result = f(self);
467 let r = self.record_body(result, decl);
468 self.is_generator = prev;
472 fn with_loop_scope<T, F>(&mut self, loop_id: NodeId, f: F) -> T
473 where F: FnOnce(&mut LoweringContext) -> T
475 // We're no longer in the base loop's condition; we're in another loop.
476 let was_in_loop_condition = self.is_in_loop_condition;
477 self.is_in_loop_condition = false;
479 let len = self.loop_scopes.len();
480 self.loop_scopes.push(loop_id);
482 let result = f(self);
483 assert_eq!(len + 1, self.loop_scopes.len(),
484 "Loop scopes should be added and removed in stack order");
486 self.loop_scopes.pop().unwrap();
488 self.is_in_loop_condition = was_in_loop_condition;
493 fn with_loop_condition_scope<T, F>(&mut self, f: F) -> T
494 where F: FnOnce(&mut LoweringContext) -> T
496 let was_in_loop_condition = self.is_in_loop_condition;
497 self.is_in_loop_condition = true;
499 let result = f(self);
501 self.is_in_loop_condition = was_in_loop_condition;
506 fn with_new_scopes<T, F>(&mut self, f: F) -> T
507 where F: FnOnce(&mut LoweringContext) -> T
509 let was_in_loop_condition = self.is_in_loop_condition;
510 self.is_in_loop_condition = false;
512 let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new());
513 let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
514 let result = f(self);
515 self.catch_scopes = catch_scopes;
516 self.loop_scopes = loop_scopes;
518 self.is_in_loop_condition = was_in_loop_condition;
523 fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
524 where F: FnOnce(&mut LoweringContext) -> T
526 let old_def = self.parent_def;
528 let defs = self.resolver.definitions();
529 Some(defs.opt_def_index(parent_id).unwrap())
532 let result = f(self);
534 self.parent_def = old_def;
538 fn def_key(&mut self, id: DefId) -> DefKey {
540 self.resolver.definitions().def_key(id.index)
542 self.cstore.def_key(id)
546 fn lower_ident(&mut self, ident: Ident) -> Name {
547 let ident = ident.modern();
548 if ident.ctxt == SyntaxContext::empty() {
551 *self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident))
554 fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> {
555 o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
558 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
562 Some((id, label_ident)) => {
563 let target = if let Def::Label(loop_id) = self.expect_full_def(id) {
564 hir::LoopIdResult::Ok(self.lower_node_id(loop_id).node_id)
566 hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
569 ident: Some(label_ident),
570 target_id: hir::ScopeTarget::Loop(target),
574 let loop_id = self.loop_scopes
576 .map(|innermost_loop_id| *innermost_loop_id);
580 target_id: hir::ScopeTarget::Loop(
581 loop_id.map(|id| Ok(self.lower_node_id(id).node_id))
582 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
589 fn lower_attrs(&mut self, attrs: &Vec<Attribute>) -> hir::HirVec<Attribute> {
593 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
595 attrs: self.lower_attrs(&arm.attrs),
596 pats: arm.pats.iter().map(|x| self.lower_pat(x)).collect(),
597 guard: arm.guard.as_ref().map(|ref x| P(self.lower_expr(x))),
598 body: P(self.lower_expr(&arm.body)),
602 fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding {
604 id: self.lower_node_id(b.id).node_id,
605 name: self.lower_ident(b.ident),
606 ty: self.lower_ty(&b.ty),
611 fn lower_ty(&mut self, t: &Ty) -> P<hir::Ty> {
612 let kind = match t.node {
613 TyKind::Infer => hir::TyInfer,
614 TyKind::Err => hir::TyErr,
615 TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)),
616 TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)),
617 TyKind::Rptr(ref region, ref mt) => {
618 let span = t.span.with_hi(t.span.lo());
619 let lifetime = match *region {
620 Some(ref lt) => self.lower_lifetime(lt),
621 None => self.elided_lifetime(span)
623 hir::TyRptr(lifetime, self.lower_mt(mt))
625 TyKind::BareFn(ref f) => {
626 hir::TyBareFn(P(hir::BareFnTy {
627 lifetimes: self.lower_lifetime_defs(&f.lifetimes),
628 unsafety: self.lower_unsafety(f.unsafety),
630 decl: self.lower_fn_decl(&f.decl),
633 TyKind::Never => hir::TyNever,
634 TyKind::Tup(ref tys) => {
635 hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect())
637 TyKind::Paren(ref ty) => {
638 return self.lower_ty(ty);
640 TyKind::Path(ref qself, ref path) => {
641 let id = self.lower_node_id(t.id).node_id;
642 let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit);
643 return self.ty_path(id, t.span, qpath);
645 TyKind::ImplicitSelf => {
646 hir::TyPath(hir::QPath::Resolved(None, P(hir::Path {
647 def: self.expect_full_def(t.id),
648 segments: hir_vec![hir::PathSegment {
649 name: keywords::SelfType.name(),
650 parameters: hir::PathParameters::none()
655 TyKind::Array(ref ty, ref length) => {
656 let length = self.lower_body(None, |this| this.lower_expr(length));
657 hir::TyArray(self.lower_ty(ty), length)
659 TyKind::Typeof(ref expr) => {
660 let expr = self.lower_body(None, |this| this.lower_expr(expr));
663 TyKind::TraitObject(ref bounds) => {
664 let mut lifetime_bound = None;
665 let bounds = bounds.iter().filter_map(|bound| {
667 TraitTyParamBound(ref ty, TraitBoundModifier::None) => {
668 Some(self.lower_poly_trait_ref(ty))
670 TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
671 RegionTyParamBound(ref lifetime) => {
672 if lifetime_bound.is_none() {
673 lifetime_bound = Some(self.lower_lifetime(lifetime));
679 let lifetime_bound = lifetime_bound.unwrap_or_else(|| {
680 self.elided_lifetime(t.span)
682 hir::TyTraitObject(bounds, lifetime_bound)
684 TyKind::ImplTrait(ref bounds) => {
685 hir::TyImplTrait(self.lower_bounds(bounds))
687 TyKind::Mac(_) => panic!("TyMac should have been expanded by now."),
691 id: self.lower_node_id(t.id).node_id,
697 fn lower_foreign_mod(&mut self, fm: &ForeignMod) -> hir::ForeignMod {
700 items: fm.items.iter().map(|x| self.lower_foreign_item(x)).collect(),
704 fn lower_global_asm(&mut self, ga: &GlobalAsm) -> P<hir::GlobalAsm> {
711 fn lower_variant(&mut self, v: &Variant) -> hir::Variant {
713 node: hir::Variant_ {
714 name: v.node.name.name,
715 attrs: self.lower_attrs(&v.node.attrs),
716 data: self.lower_variant_data(&v.node.data),
717 disr_expr: v.node.disr_expr.as_ref().map(|e| {
718 self.lower_body(None, |this| this.lower_expr(e))
725 fn lower_qpath(&mut self,
727 qself: &Option<QSelf>,
729 param_mode: ParamMode)
731 let qself_position = qself.as_ref().map(|q| q.position);
732 let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty));
734 let resolution = self.resolver.get_resolution(id)
735 .unwrap_or(PathResolution::new(Def::Err));
737 let proj_start = p.segments.len() - resolution.unresolved_segments();
738 let path = P(hir::Path {
739 def: resolution.base_def(),
740 segments: p.segments[..proj_start].iter().enumerate().map(|(i, segment)| {
741 let param_mode = match (qself_position, param_mode) {
742 (Some(j), ParamMode::Optional) if i < j => {
743 // This segment is part of the trait path in a
744 // qualified path - one of `a`, `b` or `Trait`
745 // in `<X as a::b::Trait>::T::U::method`.
751 // Figure out if this is a type/trait segment,
752 // which may need lifetime elision performed.
753 let parent_def_id = |this: &mut Self, def_id: DefId| {
756 index: this.def_key(def_id).parent.expect("missing parent")
759 let type_def_id = match resolution.base_def() {
760 Def::AssociatedTy(def_id) if i + 2 == proj_start => {
761 Some(parent_def_id(self, def_id))
763 Def::Variant(def_id) if i + 1 == proj_start => {
764 Some(parent_def_id(self, def_id))
766 Def::Struct(def_id) |
769 Def::TyAlias(def_id) |
770 Def::Trait(def_id) if i + 1 == proj_start => Some(def_id),
773 let parenthesized_generic_args = match resolution.base_def() {
774 // `a::b::Trait(Args)`
775 Def::Trait(..) if i + 1 == proj_start => ParenthesizedGenericArgs::Ok,
776 // `a::b::Trait(Args)::TraitItem`
778 Def::AssociatedConst(..) |
779 Def::AssociatedTy(..) if i + 2 == proj_start => ParenthesizedGenericArgs::Ok,
780 // Avoid duplicated errors
781 Def::Err => ParenthesizedGenericArgs::Ok,
783 Def::Struct(..) | Def::Enum(..) | Def::Union(..) | Def::TyAlias(..) |
784 Def::Variant(..) if i + 1 == proj_start => ParenthesizedGenericArgs::Err,
785 // A warning for now, for compatibility reasons
786 _ => ParenthesizedGenericArgs::Warn,
789 let num_lifetimes = type_def_id.map_or(0, |def_id| {
790 if let Some(&n) = self.type_def_lifetime_params.get(&def_id) {
793 assert!(!def_id.is_local());
794 let n = self.cstore.item_generics_cloned_untracked(def_id).regions.len();
795 self.type_def_lifetime_params.insert(def_id, n);
798 self.lower_path_segment(p.span, segment, param_mode, num_lifetimes,
799 parenthesized_generic_args)
804 // Simple case, either no projections, or only fully-qualified.
805 // E.g. `std::mem::size_of` or `<I as Iterator>::Item`.
806 if resolution.unresolved_segments() == 0 {
807 return hir::QPath::Resolved(qself, path);
810 // Create the innermost type that we're projecting from.
811 let mut ty = if path.segments.is_empty() {
812 // If the base path is empty that means there exists a
813 // syntactical `Self`, e.g. `&i32` in `<&i32>::clone`.
814 qself.expect("missing QSelf for <T>::...")
816 // Otherwise, the base path is an implicit `Self` type path,
817 // e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
818 // `<I as Iterator>::Item::default`.
819 let new_id = self.next_id().node_id;
820 self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
823 // Anything after the base path are associated "extensions",
824 // out of which all but the last one are associated types,
825 // e.g. for `std::vec::Vec::<T>::IntoIter::Item::clone`:
826 // * base path is `std::vec::Vec<T>`
827 // * "extensions" are `IntoIter`, `Item` and `clone`
829 // 1. `std::vec::Vec<T>` (created above)
830 // 2. `<std::vec::Vec<T>>::IntoIter`
831 // 3. `<<std::vec::Vec<T>>::IntoIter>::Item`
832 // * final path is `<<<std::vec::Vec<T>>::IntoIter>::Item>::clone`
833 for (i, segment) in p.segments.iter().enumerate().skip(proj_start) {
834 let segment = P(self.lower_path_segment(p.span, segment, param_mode, 0,
835 ParenthesizedGenericArgs::Warn));
836 let qpath = hir::QPath::TypeRelative(ty, segment);
838 // It's finished, return the extension of the right node type.
839 if i == p.segments.len() - 1 {
843 // Wrap the associated extension in another type node.
844 let new_id = self.next_id().node_id;
845 ty = self.ty_path(new_id, p.span, qpath);
848 // Should've returned in the for loop above.
849 span_bug!(p.span, "lower_qpath: no final extension segment in {}..{}",
850 proj_start, p.segments.len())
853 fn lower_path_extra(&mut self,
857 param_mode: ParamMode,
858 defaults_to_global: bool)
860 let mut segments = p.segments.iter();
861 if defaults_to_global && p.is_global() {
866 def: self.expect_full_def(id),
867 segments: segments.map(|segment| {
868 self.lower_path_segment(p.span, segment, param_mode, 0,
869 ParenthesizedGenericArgs::Err)
870 }).chain(name.map(|name| {
873 parameters: hir::PathParameters::none()
880 fn lower_path(&mut self,
883 param_mode: ParamMode,
884 defaults_to_global: bool)
886 self.lower_path_extra(id, p, None, param_mode, defaults_to_global)
889 fn lower_path_segment(&mut self,
891 segment: &PathSegment,
892 param_mode: ParamMode,
893 expected_lifetimes: usize,
894 parenthesized_generic_args: ParenthesizedGenericArgs)
895 -> hir::PathSegment {
896 let mut parameters = if let Some(ref parameters) = segment.parameters {
897 let msg = "parenthesized parameters may only be used with a trait";
899 PathParameters::AngleBracketed(ref data) => {
900 self.lower_angle_bracketed_parameter_data(data, param_mode)
902 PathParameters::Parenthesized(ref data) => match parenthesized_generic_args {
903 ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
904 ParenthesizedGenericArgs::Warn => {
905 self.sess.buffer_lint(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
906 CRATE_NODE_ID, data.span, msg.into());
907 hir::PathParameters::none()
909 ParenthesizedGenericArgs::Err => {
910 struct_span_err!(self.sess, data.span, E0214, "{}", msg)
911 .span_label(data.span, "only traits may use parentheses").emit();
912 hir::PathParameters::none()
917 self.lower_angle_bracketed_parameter_data(&Default::default(), param_mode)
920 if !parameters.parenthesized && parameters.lifetimes.is_empty() {
921 parameters.lifetimes = (0..expected_lifetimes).map(|_| {
922 self.elided_lifetime(path_span)
927 name: self.lower_ident(segment.identifier),
932 fn lower_angle_bracketed_parameter_data(&mut self,
933 data: &AngleBracketedParameterData,
934 param_mode: ParamMode)
935 -> hir::PathParameters {
936 let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings, .. } = data;
937 hir::PathParameters {
938 lifetimes: self.lower_lifetimes(lifetimes),
939 types: types.iter().map(|ty| self.lower_ty(ty)).collect(),
940 infer_types: types.is_empty() && param_mode == ParamMode::Optional,
941 bindings: bindings.iter().map(|b| self.lower_ty_binding(b)).collect(),
942 parenthesized: false,
946 fn lower_parenthesized_parameter_data(&mut self,
947 data: &ParenthesizedParameterData)
948 -> hir::PathParameters {
949 let &ParenthesizedParameterData { ref inputs, ref output, span } = data;
950 let inputs = inputs.iter().map(|ty| self.lower_ty(ty)).collect();
951 let mk_tup = |this: &mut Self, tys, span| {
952 P(hir::Ty { node: hir::TyTup(tys), id: this.next_id().node_id, span })
955 hir::PathParameters {
956 lifetimes: hir::HirVec::new(),
957 types: hir_vec![mk_tup(self, inputs, span)],
959 bindings: hir_vec![hir::TypeBinding {
960 id: self.next_id().node_id,
961 name: Symbol::intern(FN_OUTPUT_NAME),
962 ty: output.as_ref().map(|ty| self.lower_ty(&ty))
963 .unwrap_or_else(|| mk_tup(self, hir::HirVec::new(), span)),
964 span: output.as_ref().map_or(span, |ty| ty.span),
970 fn lower_local(&mut self, l: &Local) -> P<hir::Local> {
971 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(l.id);
975 ty: l.ty.as_ref().map(|t| self.lower_ty(t)),
976 pat: self.lower_pat(&l.pat),
977 init: l.init.as_ref().map(|e| P(self.lower_expr(e))),
979 attrs: l.attrs.clone(),
980 source: hir::LocalSource::Normal,
984 fn lower_mutability(&mut self, m: Mutability) -> hir::Mutability {
986 Mutability::Mutable => hir::MutMutable,
987 Mutability::Immutable => hir::MutImmutable,
991 fn lower_arg(&mut self, arg: &Arg) -> hir::Arg {
992 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(arg.id);
996 pat: self.lower_pat(&arg.pat),
1000 fn lower_fn_args_to_names(&mut self, decl: &FnDecl)
1001 -> hir::HirVec<Spanned<Name>> {
1002 decl.inputs.iter().map(|arg| {
1003 match arg.pat.node {
1004 PatKind::Ident(_, ident, None) => {
1005 respan(ident.span, ident.node.name)
1007 _ => respan(arg.pat.span, keywords::Invalid.name()),
1012 fn lower_fn_decl(&mut self, decl: &FnDecl) -> P<hir::FnDecl> {
1014 inputs: decl.inputs.iter().map(|arg| self.lower_ty(&arg.ty)).collect(),
1015 output: match decl.output {
1016 FunctionRetTy::Ty(ref ty) => hir::Return(self.lower_ty(ty)),
1017 FunctionRetTy::Default(span) => hir::DefaultReturn(span),
1019 variadic: decl.variadic,
1020 has_implicit_self: decl.inputs.get(0).map_or(false, |arg| {
1022 TyKind::ImplicitSelf => true,
1023 TyKind::Rptr(_, ref mt) => mt.ty.node == TyKind::ImplicitSelf,
1030 fn lower_ty_param_bound(&mut self, tpb: &TyParamBound) -> hir::TyParamBound {
1032 TraitTyParamBound(ref ty, modifier) => {
1033 hir::TraitTyParamBound(self.lower_poly_trait_ref(ty),
1034 self.lower_trait_bound_modifier(modifier))
1036 RegionTyParamBound(ref lifetime) => {
1037 hir::RegionTyParamBound(self.lower_lifetime(lifetime))
1042 fn lower_ty_param(&mut self, tp: &TyParam, add_bounds: &[TyParamBound]) -> hir::TyParam {
1043 let mut name = self.lower_ident(tp.ident);
1045 // Don't expose `Self` (recovered "keyword used as ident" parse error).
1046 // `rustc::ty` expects `Self` to be only used for a trait's `Self`.
1047 // Instead, use gensym("Self") to create a distinct name that looks the same.
1048 if name == keywords::SelfType.name() {
1049 name = Symbol::gensym("Self");
1052 let mut bounds = self.lower_bounds(&tp.bounds);
1053 if !add_bounds.is_empty() {
1054 bounds = bounds.into_iter().chain(self.lower_bounds(add_bounds).into_iter()).collect();
1058 id: self.lower_node_id(tp.id).node_id,
1061 default: tp.default.as_ref().map(|x| self.lower_ty(x)),
1063 pure_wrt_drop: tp.attrs.iter().any(|attr| attr.check_name("may_dangle")),
1067 fn lower_ty_params(&mut self, tps: &Vec<TyParam>, add_bounds: &NodeMap<Vec<TyParamBound>>)
1068 -> hir::HirVec<hir::TyParam> {
1069 tps.iter().map(|tp| {
1070 self.lower_ty_param(tp, add_bounds.get(&tp.id).map_or(&[][..], |x| &x))
1074 fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
1076 id: self.lower_node_id(l.id).node_id,
1077 name: self.lower_ident(l.ident),
1082 fn lower_lifetime_def(&mut self, l: &LifetimeDef) -> hir::LifetimeDef {
1084 lifetime: self.lower_lifetime(&l.lifetime),
1085 bounds: self.lower_lifetimes(&l.bounds),
1086 pure_wrt_drop: l.attrs.iter().any(|attr| attr.check_name("may_dangle")),
1090 fn lower_lifetimes(&mut self, lts: &Vec<Lifetime>) -> hir::HirVec<hir::Lifetime> {
1091 lts.iter().map(|l| self.lower_lifetime(l)).collect()
1094 fn lower_lifetime_defs(&mut self, lts: &Vec<LifetimeDef>) -> hir::HirVec<hir::LifetimeDef> {
1095 lts.iter().map(|l| self.lower_lifetime_def(l)).collect()
// Lowers a `Generics` node (type params, lifetimes, where clause).
// Before lowering, any `?Trait` (Maybe-modifier) bounds found in the where
// clause are moved onto the matching type-parameter definition via the
// `add_bounds` map; `lower_where_predicate` later drops them from the
// lowered where clause itself.
1098 fn lower_generics(&mut self, g: &Generics) -> hir::Generics {
1099 // Collect `?Trait` bounds in where clause and move them to parameter definitions.
1100 let mut add_bounds = NodeMap();
1101 for pred in &g.where_clause.predicates {
1102 if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
1103 'next_bound: for bound in &bound_pred.bounds {
1104 if let TraitTyParamBound(_, TraitBoundModifier::Maybe) = *bound {
// Error helper, invoked when the `?Trait` bound is not attached
// to a plain type parameter declared on these generics.
1105 let report_error = |this: &mut Self| {
1106 this.diagnostic().span_err(bound_pred.bounded_ty.span,
1107 "`?Trait` bounds are only permitted at the \
1108 point where a type parameter is declared");
1110 // Check if the where clause type is a plain type parameter.
// Only a bare single-segment path with no `for<'a>` binder can
// name a type parameter directly.
1111 match bound_pred.bounded_ty.node {
1112 TyKind::Path(None, ref path)
1113 if path.segments.len() == 1 &&
1114 bound_pred.bound_lifetimes.is_empty() => {
// Resolve the path; it must resolve to a TyParam that is
// local and declared in this very generics list.
1115 if let Some(Def::TyParam(def_id)) =
1116 self.resolver.get_resolution(bound_pred.bounded_ty.id)
1117 .map(|d| d.base_def()) {
1118 if let Some(node_id) =
1119 self.resolver.definitions().as_local_node_id(def_id) {
1120 for ty_param in &g.ty_params {
1121 if node_id == ty_param.id {
// Record the `?Trait` bound against this param;
// it will be applied in `lower_ty_params`.
1122 add_bounds.entry(ty_param.id).or_insert(Vec::new())
1123 .push(bound.clone());
1124 continue 'next_bound;
1131 _ => report_error(self)
// Lower the pieces, threading the collected extra bounds through.
1139 ty_params: self.lower_ty_params(&g.ty_params, &add_bounds),
1140 lifetimes: self.lower_lifetime_defs(&g.lifetimes),
1141 where_clause: self.lower_where_clause(&g.where_clause),
// Lowers a where clause by lowering each predicate in turn; the clause's
// AST id is reused for the HIR node.
1146 fn lower_where_clause(&mut self, wc: &WhereClause) -> hir::WhereClause {
1148 id: self.lower_node_id(wc.id).node_id,
1149 predicates: wc.predicates
1151 .map(|predicate| self.lower_where_predicate(predicate))
// Lowers a single where-clause predicate: a type bound, a region (lifetime)
// bound, or a type-equality predicate. `?Trait` bounds are filtered out here
// because `lower_generics` already moved them onto the parameter definitions.
1156 fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate {
1158 WherePredicate::BoundPredicate(WhereBoundPredicate{ ref bound_lifetimes,
1162 hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
1163 bound_lifetimes: self.lower_lifetime_defs(bound_lifetimes),
1164 bounded_ty: self.lower_ty(bounded_ty),
1165 bounds: bounds.iter().filter_map(|bound| match *bound {
1166 // Ignore `?Trait` bounds, they were copied into type parameters already.
1167 TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
1168 _ => Some(self.lower_ty_param_bound(bound))
1173 WherePredicate::RegionPredicate(WhereRegionPredicate{ ref lifetime,
1176 hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
1178 lifetime: self.lower_lifetime(lifetime),
1179 bounds: bounds.iter().map(|bound| self.lower_lifetime(bound)).collect(),
1182 WherePredicate::EqPredicate(WhereEqPredicate{ id,
1186 hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
1187 id: self.lower_node_id(id).node_id,
1188 lhs_ty: self.lower_ty(lhs_ty),
1189 rhs_ty: self.lower_ty(rhs_ty),
// Lowers the data of a struct/enum variant: brace (Struct), tuple, or unit
// form. Fields are lowered with their index via `lower_struct_field`; each
// form reuses the AST node id.
1196 fn lower_variant_data(&mut self, vdata: &VariantData) -> hir::VariantData {
1198 VariantData::Struct(ref fields, id) => {
1199 hir::VariantData::Struct(fields.iter()
1201 .map(|f| self.lower_struct_field(f))
1203 self.lower_node_id(id).node_id)
1205 VariantData::Tuple(ref fields, id) => {
1206 hir::VariantData::Tuple(fields.iter()
1208 .map(|f| self.lower_struct_field(f))
1210 self.lower_node_id(id).node_id)
1212 VariantData::Unit(id) => hir::VariantData::Unit(self.lower_node_id(id).node_id),
// Lowers a trait reference. A trait ref must lower to a fully resolved path
// with no self type; any other QPath shape is a compiler bug (`bug!`).
1216 fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef {
1217 let path = match self.lower_qpath(p.ref_id, &None, &p.path, ParamMode::Explicit) {
1218 hir::QPath::Resolved(None, path) => path.and_then(|path| path),
1219 qpath => bug!("lower_trait_ref: unexpected QPath `{:?}`", qpath)
1223 ref_id: self.lower_node_id(p.ref_id).node_id,
// Lowers a poly trait ref (`for<'a> Trait<...>`): its bound lifetimes and
// the underlying trait reference.
1227 fn lower_poly_trait_ref(&mut self, p: &PolyTraitRef) -> hir::PolyTraitRef {
1229 bound_lifetimes: self.lower_lifetime_defs(&p.bound_lifetimes),
1230 trait_ref: self.lower_trait_ref(&p.trait_ref),
// Lowers a struct field given its positional index. Named fields keep their
// ident; positional (tuple) fields are named by their index, interned as a
// symbol with the field's span context.
1235 fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField {
1238 id: self.lower_node_id(f.id).node_id,
1239 name: self.lower_ident(match f.ident {
1240 Some(ident) => ident,
1241 // FIXME(jseyfried) positional field hygiene
1242 None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt() },
1244 vis: self.lower_visibility(&f.vis, None),
1245 ty: self.lower_ty(&f.ty),
1246 attrs: self.lower_attrs(&f.attrs),
// Lowers a struct-expression field (`name: expr`), preserving the
// shorthand flag (`Foo { x }` vs `Foo { x: x }`).
1250 fn lower_field(&mut self, f: &Field) -> hir::Field {
1252 name: respan(f.ident.span, self.lower_ident(f.ident.node)),
1253 expr: P(self.lower_expr(&f.expr)),
1255 is_shorthand: f.is_shorthand,
// Lowers a type-with-mutability pair (as in `&mut T` / `*const T`).
1259 fn lower_mt(&mut self, mt: &MutTy) -> hir::MutTy {
1261 ty: self.lower_ty(&mt.ty),
1262 mutbl: self.lower_mutability(mt.mutbl),
// Lowers a slice of type-parameter bounds element-wise.
1266 fn lower_bounds(&mut self, bounds: &[TyParamBound]) -> hir::TyParamBounds {
1267 bounds.iter().map(|bound| self.lower_ty_param_bound(bound)).collect()
// Lowers an AST block to a HIR block. If the block's last statement is a
// bare expression, it is peeled off into the block's trailing `expr` slot
// rather than being lowered as a statement; all other statements go through
// `lower_stmt` (which may expand one AST stmt into several HIR stmts).
// `targeted_by_break` marks blocks that a `break` may target (e.g. `catch`).
1270 fn lower_block(&mut self, b: &Block, targeted_by_break: bool) -> P<hir::Block> {
1271 let mut expr = None;
1273 let mut stmts = vec![];
1275 for (index, stmt) in b.stmts.iter().enumerate() {
1276 if index == b.stmts.len() - 1 {
// Last statement: a trailing expression becomes the block's value.
1277 if let StmtKind::Expr(ref e) = stmt.node {
1278 expr = Some(P(self.lower_expr(e)));
1280 stmts.extend(self.lower_stmt(stmt));
1283 stmts.extend(self.lower_stmt(stmt));
1287 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(b.id);
1292 stmts: stmts.into(),
1294 rules: self.lower_block_check_mode(&b.rules),
// Lowers the kind-specific payload of an item. Most arms are direct
// translations; the interesting case is `Use`, where a single AST
// `use a::{self as x, b as y};` list import is expanded into multiple HIR
// `ItemUse` items (one per list entry), registered directly in `self.items`.
// `vis` is `&mut` because the `ViewPathList` stem is privatized in place.
1300 fn lower_item_kind(&mut self,
1303 attrs: &hir::HirVec<Attribute>,
1304 vis: &mut hir::Visibility,
1308 ItemKind::ExternCrate(string) => hir::ItemExternCrate(string),
1309 ItemKind::Use(ref view_path) => {
1310 let path = match view_path.node {
1311 ViewPathSimple(_, ref path) => path,
1312 ViewPathGlob(ref path) => path,
1313 ViewPathList(ref path, ref path_list_idents) => {
// Each list entry becomes its own `ItemUse` item below.
1314 for &Spanned { node: ref import, span } in path_list_idents {
1315 // `use a::{self as x, b as y};` lowers to
1316 // `use a as x; use a::b as y;`
1317 let mut ident = import.name;
// `self` imports re-use the last segment of the base path
// as the imported name.
1318 let suffix = if ident.name == keywords::SelfValue.name() {
1319 if let Some(last) = path.segments.last() {
1320 ident = last.identifier;
1327 let mut path = self.lower_path_extra(import.id, path, suffix,
1328 ParamMode::Explicit, true);
// Each synthesized item owns its own hir-id counter space.
1331 self.allocate_hir_id_counter(import.id, import);
1333 node_id: import_node_id,
1334 hir_id: import_hir_id,
1335 } = self.lower_node_id(import.id);
1337 self.with_hir_id_owner(import_node_id, |this| {
// Clone the parent's visibility for the synthesized item;
// a Restricted visibility needs a fresh NodeId.
1338 let vis = match *vis {
1339 hir::Visibility::Public => hir::Visibility::Public,
1340 hir::Visibility::Crate => hir::Visibility::Crate,
1341 hir::Visibility::Inherited => hir::Visibility::Inherited,
1342 hir::Visibility::Restricted { ref path, id: _ } => {
1343 hir::Visibility::Restricted {
1345 // We are allocating a new NodeId here
1346 id: this.next_id().node_id,
// Register the synthesized single-use item directly.
1351 this.items.insert(import_node_id, hir::Item {
1353 hir_id: import_hir_id,
1354 name: import.rename.unwrap_or(ident).name,
1355 attrs: attrs.clone(),
1356 node: hir::ItemUse(P(path), hir::UseKind::Single),
// The original item itself becomes the appropriate UseKind.
1365 let path = P(self.lower_path(id, path, ParamMode::Explicit, true));
1366 let kind = match view_path.node {
1367 ViewPathSimple(ident, _) => {
1369 hir::UseKind::Single
1371 ViewPathGlob(_) => {
1374 ViewPathList(..) => {
1375 // Privatize the degenerate import base, used only to check
1376 // the stability of `use a::{};`, to avoid it showing up as
1377 // a reexport by accident when `pub`, e.g. in documentation.
1378 *vis = hir::Inherited;
1379 hir::UseKind::ListStem
1382 hir::ItemUse(path, kind)
// Static/const initializers are lowered as separate bodies.
1384 ItemKind::Static(ref t, m, ref e) => {
1385 let value = self.lower_body(None, |this| this.lower_expr(e));
1386 hir::ItemStatic(self.lower_ty(t),
1387 self.lower_mutability(m),
1390 ItemKind::Const(ref t, ref e) => {
1391 let value = self.lower_body(None, |this| this.lower_expr(e));
1392 hir::ItemConst(self.lower_ty(t), value)
// Function bodies get fresh loop/catch scopes (`with_new_scopes`).
1394 ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
1395 self.with_new_scopes(|this| {
1396 let body_id = this.lower_body(Some(decl), |this| {
1397 let body = this.lower_block(body, false);
1398 this.expr_block(body, ThinVec::new())
1400 hir::ItemFn(this.lower_fn_decl(decl),
1401 this.lower_unsafety(unsafety),
1402 this.lower_constness(constness),
1404 this.lower_generics(generics),
1408 ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)),
1409 ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)),
1410 ItemKind::GlobalAsm(ref ga) => hir::ItemGlobalAsm(self.lower_global_asm(ga)),
1411 ItemKind::Ty(ref t, ref generics) => {
1412 hir::ItemTy(self.lower_ty(t), self.lower_generics(generics))
1414 ItemKind::Enum(ref enum_definition, ref generics) => {
1415 hir::ItemEnum(hir::EnumDef {
1416 variants: enum_definition.variants
1418 .map(|x| self.lower_variant(x))
1421 self.lower_generics(generics))
1423 ItemKind::Struct(ref struct_def, ref generics) => {
1424 let struct_def = self.lower_variant_data(struct_def);
1425 hir::ItemStruct(struct_def, self.lower_generics(generics))
1427 ItemKind::Union(ref vdata, ref generics) => {
1428 let vdata = self.lower_variant_data(vdata);
1429 hir::ItemUnion(vdata, self.lower_generics(generics))
// `impl Trait for ..` — record the default impl for the trait.
1431 ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
1432 let trait_ref = self.lower_trait_ref(trait_ref);
1434 if let Def::Trait(def_id) = trait_ref.path.def {
1435 self.trait_default_impl.insert(def_id, id);
1438 hir::ItemDefaultImpl(self.lower_unsafety(unsafety),
1441 ItemKind::Impl(unsafety,
1447 ref impl_items) => {
1448 let new_impl_items = impl_items.iter()
1449 .map(|item| self.lower_impl_item_ref(item))
1451 let ifce = ifce.as_ref().map(|trait_ref| self.lower_trait_ref(trait_ref));
// Track trait impls so they can be enumerated per trait later.
1453 if let Some(ref trait_ref) = ifce {
1454 if let Def::Trait(def_id) = trait_ref.path.def {
1455 self.trait_impls.entry(def_id).or_insert(vec![]).push(id);
1459 hir::ItemImpl(self.lower_unsafety(unsafety),
1460 self.lower_impl_polarity(polarity),
1461 self.lower_defaultness(defaultness, true /* [1] */),
1462 self.lower_generics(generics),
1467 ItemKind::Trait(unsafety, ref generics, ref bounds, ref items) => {
1468 let bounds = self.lower_bounds(bounds);
1469 let items = items.iter().map(|item| self.lower_trait_item_ref(item)).collect();
1470 hir::ItemTrait(self.lower_unsafety(unsafety),
1471 self.lower_generics(generics),
// Macros are expanded away before lowering; reaching here is a bug.
1475 ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
1478 // [1] `defaultness.has_value()` is never called for an `impl`, always `true` in order to
1479 // not cause an assertion failure inside the `lower_defaultness` function
// Lowers a trait item (associated const, required/provided method, or
// associated type) inside the item's own def-parent scope. Provided method
// bodies are lowered as separate bodies; required methods only keep their
// argument names.
1482 fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem {
1483 self.with_parent_def(i.id, |this| {
1484 let LoweredNodeId { node_id, hir_id } = this.lower_node_id(i.id);
1489 name: this.lower_ident(i.ident),
1490 attrs: this.lower_attrs(&i.attrs),
1491 node: match i.node {
1492 TraitItemKind::Const(ref ty, ref default) => {
1493 hir::TraitItemKind::Const(this.lower_ty(ty),
// A default value, if present, becomes its own body.
1494 default.as_ref().map(|x| {
1495 this.lower_body(None, |this| this.lower_expr(x))
// Method without a body: record only the argument names.
1498 TraitItemKind::Method(ref sig, None) => {
1499 let names = this.lower_fn_args_to_names(&sig.decl);
1500 hir::TraitItemKind::Method(this.lower_method_sig(sig),
1501 hir::TraitMethod::Required(names))
// Method with a default body: lower the block into a body id.
1503 TraitItemKind::Method(ref sig, Some(ref body)) => {
1504 let body_id = this.lower_body(Some(&sig.decl), |this| {
1505 let body = this.lower_block(body, false);
1506 this.expr_block(body, ThinVec::new())
1508 hir::TraitItemKind::Method(this.lower_method_sig(sig),
1509 hir::TraitMethod::Provided(body_id))
1511 TraitItemKind::Type(ref bounds, ref default) => {
1512 hir::TraitItemKind::Type(this.lower_bounds(bounds),
1513 default.as_ref().map(|x| this.lower_ty(x)))
// Macros in trait position are expanded before lowering.
1515 TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"),
// Builds the lightweight `TraitItemRef` (id, name, kind, defaultness) for a
// trait item without lowering its body. `has_default` reflects whether the
// const/type/method provides a default; defaultness is always `Default`
// paired with that flag.
1522 fn lower_trait_item_ref(&mut self, i: &TraitItem) -> hir::TraitItemRef {
1523 let (kind, has_default) = match i.node {
1524 TraitItemKind::Const(_, ref default) => {
1525 (hir::AssociatedItemKind::Const, default.is_some())
1527 TraitItemKind::Type(_, ref default) => {
1528 (hir::AssociatedItemKind::Type, default.is_some())
1530 TraitItemKind::Method(ref sig, ref default) => {
1531 (hir::AssociatedItemKind::Method {
1532 has_self: sig.decl.has_self(),
1533 }, default.is_some())
1535 TraitItemKind::Macro(..) => unimplemented!(),
// Note: the ref reuses the AST id directly, without `lower_node_id`.
1538 id: hir::TraitItemId { node_id: i.id },
1539 name: self.lower_ident(i.ident),
1541 defaultness: self.lower_defaultness(Defaultness::Default, has_default),
// Lowers an impl item (associated const, method, or associated type) inside
// its own def-parent scope. Const and method bodies are lowered into
// separate bodies referenced by id.
1546 fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem {
1547 self.with_parent_def(i.id, |this| {
1548 let LoweredNodeId { node_id, hir_id } = this.lower_node_id(i.id);
1553 name: this.lower_ident(i.ident),
1554 attrs: this.lower_attrs(&i.attrs),
1555 vis: this.lower_visibility(&i.vis, None),
1556 defaultness: this.lower_defaultness(i.defaultness, true /* [1] */),
1557 node: match i.node {
1558 ImplItemKind::Const(ref ty, ref expr) => {
1559 let body_id = this.lower_body(None, |this| this.lower_expr(expr));
1560 hir::ImplItemKind::Const(this.lower_ty(ty), body_id)
1562 ImplItemKind::Method(ref sig, ref body) => {
1563 let body_id = this.lower_body(Some(&sig.decl), |this| {
1564 let body = this.lower_block(body, false);
1565 this.expr_block(body, ThinVec::new())
1567 hir::ImplItemKind::Method(this.lower_method_sig(sig), body_id)
1569 ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(this.lower_ty(ty)),
// Macros in impl position are expanded before lowering.
1570 ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"),
1576 // [1] since `default impl` is not yet implemented, this is always true in impls
// Builds the lightweight `ImplItemRef` (id, name, visibility, defaultness,
// kind) for an impl item without lowering its body; reuses the AST id.
1579 fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
1581 id: hir::ImplItemId { node_id: i.id },
1582 name: self.lower_ident(i.ident),
1584 vis: self.lower_visibility(&i.vis, Some(i.id)),
1585 defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
1586 kind: match i.node {
1587 ImplItemKind::Const(..) => hir::AssociatedItemKind::Const,
1588 ImplItemKind::Type(..) => hir::AssociatedItemKind::Type,
1589 ImplItemKind::Method(ref sig, _) => hir::AssociatedItemKind::Method {
1590 has_self: sig.decl.has_self(),
1592 ImplItemKind::Macro(..) => unimplemented!(),
1596 // [1] since `default impl` is not yet implemented, this is always true in impls
// Lowers a module to its list of item ids; `lower_item_id` may yield zero
// or several ids per AST item, hence the `flat_map`.
1599 fn lower_mod(&mut self, m: &Mod) -> hir::Mod {
1602 item_ids: m.items.iter().flat_map(|x| self.lower_item_id(x)).collect(),
// Computes the HIR ItemIds an AST item expands to: a `use` list yields one
// id per list entry plus the stem's own id, a macro definition yields none
// (macros don't become HIR items), and everything else yields exactly one.
1606 fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
1608 ItemKind::Use(ref view_path) => {
1609 if let ViewPathList(_, ref imports) = view_path.node {
1610 return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
1611 .map(|id| hir::ItemId { id: id }).collect();
1614 ItemKind::MacroDef(..) => return SmallVector::new(),
1617 SmallVector::one(hir::ItemId { id: i.id })
1620 pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
1621 let mut name = i.ident.name;
1622 let mut vis = self.lower_visibility(&i.vis, None);
1623 let attrs = self.lower_attrs(&i.attrs);
1624 if let ItemKind::MacroDef(ref def) = i.node {
1625 if !def.legacy || i.attrs.iter().any(|attr| attr.path == "macro_export") {
1626 self.exported_macros.push(hir::MacroDef {
1639 let node = self.with_parent_def(i.id, |this| {
1640 this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node)
1643 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(i.id);
// Lowers an item inside an `extern` block: a foreign function (signature
// only — argument names are kept separately since there is no body) or a
// foreign static.
1656 fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
1657 self.with_parent_def(i.id, |this| {
1659 id: this.lower_node_id(i.id).node_id,
1661 attrs: this.lower_attrs(&i.attrs),
1662 node: match i.node {
1663 ForeignItemKind::Fn(ref fdec, ref generics) => {
1664 hir::ForeignItemFn(this.lower_fn_decl(fdec),
1665 this.lower_fn_args_to_names(fdec),
1666 this.lower_generics(generics))
1668 ForeignItemKind::Static(ref t, m) => {
// Foreign statics keep their AST mutability flag as-is.
1669 hir::ForeignItemStatic(this.lower_ty(t), m)
1672 vis: this.lower_visibility(&i.vis, None),
// Lowers a method signature: generics, unsafety, constness, and the
// function declaration.
1678 fn lower_method_sig(&mut self, sig: &MethodSig) -> hir::MethodSig {
1680 generics: self.lower_generics(&sig.generics),
1682 unsafety: self.lower_unsafety(sig.unsafety),
1683 constness: self.lower_constness(sig.constness),
1684 decl: self.lower_fn_decl(&sig.decl),
// One-to-one translation of the AST `Unsafety` enum into its HIR twin.
1688 fn lower_unsafety(&mut self, u: Unsafety) -> hir::Unsafety {
1690 Unsafety::Unsafe => hir::Unsafety::Unsafe,
1691 Unsafety::Normal => hir::Unsafety::Normal,
// Translates spanned AST constness into HIR constness (span dropped).
1695 fn lower_constness(&mut self, c: Spanned<Constness>) -> hir::Constness {
1697 Constness::Const => hir::Constness::Const,
1698 Constness::NotConst => hir::Constness::NotConst,
// One-to-one translation of AST unary operators into HIR unary operators.
1702 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
1704 UnOp::Deref => hir::UnDeref,
1705 UnOp::Not => hir::UnNot,
1706 UnOp::Neg => hir::UnNeg,
// One-to-one translation of AST binary operators into HIR binary operators,
// preserving the operator's span via the surrounding Spanned node.
1710 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
1712 node: match b.node {
1713 BinOpKind::Add => hir::BiAdd,
1714 BinOpKind::Sub => hir::BiSub,
1715 BinOpKind::Mul => hir::BiMul,
1716 BinOpKind::Div => hir::BiDiv,
1717 BinOpKind::Rem => hir::BiRem,
1718 BinOpKind::And => hir::BiAnd,
1719 BinOpKind::Or => hir::BiOr,
1720 BinOpKind::BitXor => hir::BiBitXor,
1721 BinOpKind::BitAnd => hir::BiBitAnd,
1722 BinOpKind::BitOr => hir::BiBitOr,
1723 BinOpKind::Shl => hir::BiShl,
1724 BinOpKind::Shr => hir::BiShr,
1725 BinOpKind::Eq => hir::BiEq,
1726 BinOpKind::Lt => hir::BiLt,
1727 BinOpKind::Le => hir::BiLe,
1728 BinOpKind::Ne => hir::BiNe,
1729 BinOpKind::Ge => hir::BiGe,
1730 BinOpKind::Gt => hir::BiGt,
// Lowers an AST pattern to a HIR pattern. The subtle case is
// `PatKind::Ident`: depending on name resolution it is either a real
// binding (a fresh local, or `None` for body-less fn signatures) or a path
// pattern naming a unit struct / const, which lowers to `PatKind::Path`.
1736 fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
1737 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(p.id);
1742 node: match p.node {
1743 PatKind::Wild => hir::PatKind::Wild,
1744 PatKind::Ident(ref binding_mode, pth1, ref sub) => {
1745 match self.resolver.get_resolution(p.id).map(|d| d.base_def()) {
1746 // `None` can occur in body-less function signatures
1747 def @ None | def @ Some(Def::Local(_)) => {
// Bindings in or-patterns share a canonical id; prefer the
// resolved local's id when available.
1748 let canonical_id = match def {
1749 Some(Def::Local(id)) => id,
1752 hir::PatKind::Binding(self.lower_binding_mode(binding_mode),
1754 respan(pth1.span, pth1.node.name),
1755 sub.as_ref().map(|x| self.lower_pat(x)))
// Resolved to a non-local def: treat as a path pattern.
1758 hir::PatKind::Path(hir::QPath::Resolved(None, P(hir::Path {
1762 hir::PathSegment::from_name(pth1.node.name)
1768 PatKind::Lit(ref e) => hir::PatKind::Lit(P(self.lower_expr(e))),
1769 PatKind::TupleStruct(ref path, ref pats, ddpos) => {
1770 let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional);
1771 hir::PatKind::TupleStruct(qpath,
1772 pats.iter().map(|x| self.lower_pat(x)).collect(),
1775 PatKind::Path(ref qself, ref path) => {
1776 hir::PatKind::Path(self.lower_qpath(p.id, qself, path, ParamMode::Optional))
1778 PatKind::Struct(ref path, ref fields, etc) => {
1779 let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional);
// Lower each field pattern, keeping the shorthand flag.
1781 let fs = fields.iter()
1785 node: hir::FieldPat {
1786 name: self.lower_ident(f.node.ident),
1787 pat: self.lower_pat(&f.node.pat),
1788 is_shorthand: f.node.is_shorthand,
1793 hir::PatKind::Struct(qpath, fs, etc)
1795 PatKind::Tuple(ref elts, ddpos) => {
1796 hir::PatKind::Tuple(elts.iter().map(|x| self.lower_pat(x)).collect(), ddpos)
1798 PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
1799 PatKind::Ref(ref inner, mutbl) => {
1800 hir::PatKind::Ref(self.lower_pat(inner), self.lower_mutability(mutbl))
1802 PatKind::Range(ref e1, ref e2, ref end) => {
1803 hir::PatKind::Range(P(self.lower_expr(e1)),
1804 P(self.lower_expr(e2)),
1805 self.lower_range_end(end))
1807 PatKind::Slice(ref before, ref slice, ref after) => {
1808 hir::PatKind::Slice(before.iter().map(|x| self.lower_pat(x)).collect(),
1809 slice.as_ref().map(|x| self.lower_pat(x)),
1810 after.iter().map(|x| self.lower_pat(x)).collect())
// Macro patterns are expanded away before lowering.
1812 PatKind::Mac(_) => panic!("Shouldn't exist here"),
// One-to-one translation of a range pattern's end kind (`..=` vs `..`).
1818 fn lower_range_end(&mut self, e: &RangeEnd) -> hir::RangeEnd {
1820 RangeEnd::Included => hir::RangeEnd::Included,
1821 RangeEnd::Excluded => hir::RangeEnd::Excluded,
1825 fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
1826 let kind = match e.node {
1828 // Eventually a desugaring for `box EXPR`
1829 // (similar to the desugaring above for `in PLACE BLOCK`)
1830 // should go here, desugaring
1834 // let mut place = BoxPlace::make_place();
1835 // let raw_place = Place::pointer(&mut place);
1836 // let value = $value;
1838 // ::std::ptr::write(raw_place, value);
1839 // Boxed::finalize(place)
1842 // But for now there are type-inference issues doing that.
1843 ExprKind::Box(ref inner) => {
1844 hir::ExprBox(P(self.lower_expr(inner)))
1847 // Desugar ExprBox: `in (PLACE) EXPR`
1848 ExprKind::InPlace(ref placer, ref value_expr) => {
1852 // let mut place = Placer::make_place(p);
1853 // let raw_place = Place::pointer(&mut place);
1855 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1856 // InPlace::finalize(place)
1858 let placer_expr = P(self.lower_expr(placer));
1859 let value_expr = P(self.lower_expr(value_expr));
1861 let placer_ident = self.str_to_ident("placer");
1862 let place_ident = self.str_to_ident("place");
1863 let p_ptr_ident = self.str_to_ident("p_ptr");
1865 let make_place = ["ops", "Placer", "make_place"];
1866 let place_pointer = ["ops", "Place", "pointer"];
1867 let move_val_init = ["intrinsics", "move_val_init"];
1868 let inplace_finalize = ["ops", "InPlace", "finalize"];
1871 self.allow_internal_unstable(CompilerDesugaringKind::BackArrow, e.span);
1872 let make_call = |this: &mut LoweringContext, p, args| {
1873 let path = P(this.expr_std_path(unstable_span, p, ThinVec::new()));
1874 P(this.expr_call(e.span, path, args))
1877 let mk_stmt_let = |this: &mut LoweringContext, bind, expr| {
1878 this.stmt_let(e.span, false, bind, expr)
1881 let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| {
1882 this.stmt_let(e.span, true, bind, expr)
1885 // let placer = <placer_expr> ;
1886 let (s1, placer_binding) = {
1887 mk_stmt_let(self, placer_ident, placer_expr)
1890 // let mut place = Placer::make_place(placer);
1891 let (s2, place_binding) = {
1892 let placer = self.expr_ident(e.span, placer_ident, placer_binding);
1893 let call = make_call(self, &make_place, hir_vec![placer]);
1894 mk_stmt_let_mut(self, place_ident, call)
1897 // let p_ptr = Place::pointer(&mut place);
1898 let (s3, p_ptr_binding) = {
1899 let agent = P(self.expr_ident(e.span, place_ident, place_binding));
1900 let args = hir_vec![self.expr_mut_addr_of(e.span, agent)];
1901 let call = make_call(self, &place_pointer, args);
1902 mk_stmt_let(self, p_ptr_ident, call)
1905 // pop_unsafe!(EXPR));
1906 let pop_unsafe_expr = {
1907 self.signal_block_expr(hir_vec![],
1910 hir::PopUnsafeBlock(hir::CompilerGenerated),
1915 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1916 // InPlace::finalize(place)
1919 let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding);
1920 let call_move_val_init =
1922 make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]),
1923 self.next_id().node_id);
1924 let call_move_val_init = respan(e.span, call_move_val_init);
1926 let place = self.expr_ident(e.span, place_ident, place_binding);
1927 let call = make_call(self, &inplace_finalize, hir_vec![place]);
1928 P(self.signal_block_expr(hir_vec![call_move_val_init],
1931 hir::PushUnsafeBlock(hir::CompilerGenerated),
1935 let block = self.block_all(e.span, hir_vec![s1, s2, s3], Some(expr));
1936 hir::ExprBlock(P(block))
1939 ExprKind::Array(ref exprs) => {
1940 hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect())
1942 ExprKind::Repeat(ref expr, ref count) => {
1943 let expr = P(self.lower_expr(expr));
1944 let count = self.lower_body(None, |this| this.lower_expr(count));
1945 hir::ExprRepeat(expr, count)
1947 ExprKind::Tup(ref elts) => {
1948 hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect())
1950 ExprKind::Call(ref f, ref args) => {
1951 let f = P(self.lower_expr(f));
1952 hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect())
1954 ExprKind::MethodCall(ref seg, ref args) => {
1955 let hir_seg = self.lower_path_segment(e.span, seg, ParamMode::Optional, 0,
1956 ParenthesizedGenericArgs::Err);
1957 let args = args.iter().map(|x| self.lower_expr(x)).collect();
1958 hir::ExprMethodCall(hir_seg, seg.span, args)
1960 ExprKind::Binary(binop, ref lhs, ref rhs) => {
1961 let binop = self.lower_binop(binop);
1962 let lhs = P(self.lower_expr(lhs));
1963 let rhs = P(self.lower_expr(rhs));
1964 hir::ExprBinary(binop, lhs, rhs)
1966 ExprKind::Unary(op, ref ohs) => {
1967 let op = self.lower_unop(op);
1968 let ohs = P(self.lower_expr(ohs));
1969 hir::ExprUnary(op, ohs)
1971 ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())),
1972 ExprKind::Cast(ref expr, ref ty) => {
1973 let expr = P(self.lower_expr(expr));
1974 hir::ExprCast(expr, self.lower_ty(ty))
1976 ExprKind::Type(ref expr, ref ty) => {
1977 let expr = P(self.lower_expr(expr));
1978 hir::ExprType(expr, self.lower_ty(ty))
1980 ExprKind::AddrOf(m, ref ohs) => {
1981 let m = self.lower_mutability(m);
1982 let ohs = P(self.lower_expr(ohs));
1983 hir::ExprAddrOf(m, ohs)
1985 // More complicated than you might expect because the else branch
1986 // might be `if let`.
1987 ExprKind::If(ref cond, ref blk, ref else_opt) => {
1988 let else_opt = else_opt.as_ref().map(|els| {
1990 ExprKind::IfLet(..) => {
1991 // wrap the if-let expr in a block
1992 let span = els.span;
1993 let els = P(self.lower_expr(els));
1998 let blk = P(hir::Block {
2003 rules: hir::DefaultBlock,
2005 targeted_by_break: false,
2007 P(self.expr_block(blk, ThinVec::new()))
2009 _ => P(self.lower_expr(els)),
2013 let then_blk = self.lower_block(blk, false);
2014 let then_expr = self.expr_block(then_blk, ThinVec::new());
2016 hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
2018 ExprKind::While(ref cond, ref body, opt_ident) => {
2019 self.with_loop_scope(e.id, |this|
2021 this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
2022 this.lower_block(body, false),
2023 this.lower_opt_sp_ident(opt_ident)))
2025 ExprKind::Loop(ref body, opt_ident) => {
2026 self.with_loop_scope(e.id, |this|
2027 hir::ExprLoop(this.lower_block(body, false),
2028 this.lower_opt_sp_ident(opt_ident),
2029 hir::LoopSource::Loop))
2031 ExprKind::Catch(ref body) => {
2032 self.with_catch_scope(body.id, |this|
2033 hir::ExprBlock(this.lower_block(body, true)))
2035 ExprKind::Match(ref expr, ref arms) => {
2036 hir::ExprMatch(P(self.lower_expr(expr)),
2037 arms.iter().map(|x| self.lower_arm(x)).collect(),
2038 hir::MatchSource::Normal)
2040 ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
2041 self.with_new_scopes(|this| {
2042 this.with_parent_def(e.id, |this| {
2043 let mut is_generator = false;
2044 let body_id = this.lower_body(Some(decl), |this| {
2045 let e = this.lower_expr(body);
2046 is_generator = this.is_generator;
2049 if is_generator && !decl.inputs.is_empty() {
2050 span_err!(this.sess, fn_decl_span, E0628,
2051 "generators cannot have explicit arguments");
2052 this.sess.abort_if_errors();
2054 hir::ExprClosure(this.lower_capture_clause(capture_clause),
2055 this.lower_fn_decl(decl),
2062 ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk, false)),
2063 ExprKind::Assign(ref el, ref er) => {
2064 hir::ExprAssign(P(self.lower_expr(el)), P(self.lower_expr(er)))
2066 ExprKind::AssignOp(op, ref el, ref er) => {
2067 hir::ExprAssignOp(self.lower_binop(op),
2068 P(self.lower_expr(el)),
2069 P(self.lower_expr(er)))
2071 ExprKind::Field(ref el, ident) => {
2072 hir::ExprField(P(self.lower_expr(el)),
2073 respan(ident.span, self.lower_ident(ident.node)))
2075 ExprKind::TupField(ref el, ident) => {
2076 hir::ExprTupField(P(self.lower_expr(el)), ident)
2078 ExprKind::Index(ref el, ref er) => {
2079 hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
2081 ExprKind::Range(ref e1, ref e2, lims) => {
2082 use syntax::ast::RangeLimits::*;
2084 let path = match (e1, e2, lims) {
2085 (&None, &None, HalfOpen) => "RangeFull",
2086 (&Some(..), &None, HalfOpen) => "RangeFrom",
2087 (&None, &Some(..), HalfOpen) => "RangeTo",
2088 (&Some(..), &Some(..), HalfOpen) => "Range",
2089 (&None, &Some(..), Closed) => "RangeToInclusive",
2090 (&Some(..), &Some(..), Closed) => "RangeInclusive",
2091 (_, &None, Closed) =>
2092 panic!(self.diagnostic().span_fatal(
2093 e.span, "inclusive range with no end")),
2097 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e)))
2099 let expr = P(self.lower_expr(&e));
2101 self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
2102 self.field(Symbol::intern(s), expr, unstable_span)
2103 }).collect::<P<[hir::Field]>>();
2105 let is_unit = fields.is_empty();
2107 self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
2109 iter::once("ops").chain(iter::once(path))
2110 .collect::<Vec<_>>();
2111 let struct_path = self.std_path(unstable_span, &struct_path, is_unit);
2112 let struct_path = hir::QPath::Resolved(None, P(struct_path));
2114 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
2120 hir::ExprPath(struct_path)
2122 hir::ExprStruct(struct_path, fields, None)
2124 span: unstable_span,
2125 attrs: e.attrs.clone(),
2128 ExprKind::Path(ref qself, ref path) => {
2129 hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional))
2131 ExprKind::Break(opt_ident, ref opt_expr) => {
2132 let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
2135 target_id: hir::ScopeTarget::Loop(
2136 Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
2139 self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident)))
2143 opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
2145 ExprKind::Continue(opt_ident) =>
2147 if self.is_in_loop_condition && opt_ident.is_none() {
2150 target_id: hir::ScopeTarget::Loop(Err(
2151 hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
2154 self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident)))
2156 ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
2157 ExprKind::InlineAsm(ref asm) => {
2158 let hir_asm = hir::InlineAsm {
2159 inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
2160 outputs: asm.outputs.iter().map(|out| {
2161 hir::InlineAsmOutput {
2162 constraint: out.constraint.clone(),
2164 is_indirect: out.is_indirect,
2167 asm: asm.asm.clone(),
2168 asm_str_style: asm.asm_str_style,
2169 clobbers: asm.clobbers.clone().into(),
2170 volatile: asm.volatile,
2171 alignstack: asm.alignstack,
2172 dialect: asm.dialect,
2176 asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect();
2178 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect();
2179 hir::ExprInlineAsm(P(hir_asm), outputs, inputs)
2181 ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
2182 hir::ExprStruct(self.lower_qpath(e.id, &None, path, ParamMode::Optional),
2183 fields.iter().map(|x| self.lower_field(x)).collect(),
2184 maybe_expr.as_ref().map(|x| P(self.lower_expr(x))))
2186 ExprKind::Paren(ref ex) => {
2187 let mut ex = self.lower_expr(ex);
2188 // include parens in span, but only if it is a super-span.
2189 if e.span.contains(ex.span) {
2192 // merge attributes into the inner expression.
2193 let mut attrs = e.attrs.clone();
2194 attrs.extend::<Vec<_>>(ex.attrs.into());
2199 ExprKind::Yield(ref opt_expr) => {
2200 self.is_generator = true;
2201 let expr = opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| {
2202 self.expr(e.span, hir::ExprTup(hir_vec![]), ThinVec::new())
2204 hir::ExprYield(P(expr))
2207 // Desugar ExprIfLet
2208 // From: `if let <pat> = <sub_expr> <body> [<else_opt>]`
2209 ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => {
2212 // match <sub_expr> {
2214 // _ => [<else_opt> | ()]
2217 let mut arms = vec![];
2219 // `<pat> => <body>`
2221 let body = self.lower_block(body, false);
2222 let body_expr = P(self.expr_block(body, ThinVec::new()));
2223 let pat = self.lower_pat(pat);
2224 arms.push(self.arm(hir_vec![pat], body_expr));
2227 // _ => [<else_opt>|()]
2229 let wildcard_arm: Option<&Expr> = else_opt.as_ref().map(|p| &**p);
2230 let wildcard_pattern = self.pat_wild(e.span);
2231 let body = if let Some(else_expr) = wildcard_arm {
2232 P(self.lower_expr(else_expr))
2234 self.expr_tuple(e.span, hir_vec![])
2236 arms.push(self.arm(hir_vec![wildcard_pattern], body));
2239 let contains_else_clause = else_opt.is_some();
2241 let sub_expr = P(self.lower_expr(sub_expr));
2246 hir::MatchSource::IfLetDesugar {
2247 contains_else_clause,
2251 // Desugar ExprWhileLet
2252 // From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
2253 ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
2256 // [opt_ident]: loop {
2257 // match <sub_expr> {
2263 // Note that the block AND the condition are evaluated in the loop scope.
2264 // This is done to allow `break` from inside the condition of the loop.
2265 let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| (
2266 this.lower_block(body, false),
2267 this.expr_break(e.span, ThinVec::new()),
2268 this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))),
2271 // `<pat> => <body>`
2273 let body_expr = P(self.expr_block(body, ThinVec::new()));
2274 let pat = self.lower_pat(pat);
2275 self.arm(hir_vec![pat], body_expr)
2280 let pat_under = self.pat_wild(e.span);
2281 self.arm(hir_vec![pat_under], break_expr)
2284 // `match <sub_expr> { ... }`
2285 let arms = hir_vec![pat_arm, break_arm];
2286 let match_expr = self.expr(e.span,
2287 hir::ExprMatch(sub_expr,
2289 hir::MatchSource::WhileLetDesugar),
2292 // `[opt_ident]: loop { ... }`
2293 let loop_block = P(self.block_expr(P(match_expr)));
2294 let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
2295 hir::LoopSource::WhileLet);
2296 // add attributes to the outer returned expr node
2300 // Desugar ExprForLoop
2301 // From: `[opt_ident]: for <pat> in <head> <body>`
2302 ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
2306 // let result = match ::std::iter::IntoIterator::into_iter(<head>) {
2308 // [opt_ident]: loop {
2310 // match ::std::iter::Iterator::next(&mut iter) {
2311 // ::std::option::Option::Some(val) => __next = val,
2312 // ::std::option::Option::None => break
2314 // let <pat> = __next;
2315 // StmtExpr(<body>);
2323 let head = self.lower_expr(head);
2325 let iter = self.str_to_ident("iter");
2327 let next_ident = self.str_to_ident("__next");
2328 let next_pat = self.pat_ident_binding_mode(e.span,
2330 hir::BindingAnnotation::Mutable);
2332 // `::std::option::Option::Some(val) => next = val`
2334 let val_ident = self.str_to_ident("val");
2335 let val_pat = self.pat_ident(e.span, val_ident);
2336 let val_expr = P(self.expr_ident(e.span, val_ident, val_pat.id));
2337 let next_expr = P(self.expr_ident(e.span, next_ident, next_pat.id));
2338 let assign = P(self.expr(e.span,
2339 hir::ExprAssign(next_expr, val_expr),
2341 let some_pat = self.pat_some(e.span, val_pat);
2342 self.arm(hir_vec![some_pat], assign)
2345 // `::std::option::Option::None => break`
2347 let break_expr = self.with_loop_scope(e.id, |this|
2348 this.expr_break(e.span, ThinVec::new()));
2349 let pat = self.pat_none(e.span);
2350 self.arm(hir_vec![pat], break_expr)
2354 let iter_pat = self.pat_ident_binding_mode(e.span,
2356 hir::BindingAnnotation::Mutable);
2358 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
2360 let iter = P(self.expr_ident(e.span, iter, iter_pat.id));
2361 let ref_mut_iter = self.expr_mut_addr_of(e.span, iter);
2362 let next_path = &["iter", "Iterator", "next"];
2363 let next_path = P(self.expr_std_path(e.span, next_path, ThinVec::new()));
2364 let next_expr = P(self.expr_call(e.span, next_path,
2365 hir_vec![ref_mut_iter]));
2366 let arms = hir_vec![pat_arm, break_arm];
2369 hir::ExprMatch(next_expr, arms,
2370 hir::MatchSource::ForLoopDesugar),
2373 let match_stmt = respan(e.span, hir::StmtExpr(match_expr, self.next_id().node_id));
2375 let next_expr = P(self.expr_ident(e.span, next_ident, next_pat.id));
2378 let next_let = self.stmt_let_pat(e.span,
2381 hir::LocalSource::ForLoopDesugar);
2383 // `let <pat> = __next`
2384 let pat = self.lower_pat(pat);
2385 let pat_let = self.stmt_let_pat(e.span,
2388 hir::LocalSource::ForLoopDesugar);
2390 let body_block = self.with_loop_scope(e.id,
2391 |this| this.lower_block(body, false));
2392 let body_expr = P(self.expr_block(body_block, ThinVec::new()));
2393 let body_stmt = respan(e.span, hir::StmtExpr(body_expr, self.next_id().node_id));
2395 let loop_block = P(self.block_all(e.span,
2402 // `[opt_ident]: loop { ... }`
2403 let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
2404 hir::LoopSource::ForLoop);
2405 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
2406 let loop_expr = P(hir::Expr {
2411 attrs: ThinVec::new(),
2414 // `mut iter => { ... }`
2415 let iter_arm = self.arm(hir_vec![iter_pat], loop_expr);
2417 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
2418 let into_iter_expr = {
2419 let into_iter_path = &["iter", "IntoIterator", "into_iter"];
2420 let into_iter = P(self.expr_std_path(e.span, into_iter_path,
2422 P(self.expr_call(e.span, into_iter, hir_vec![head]))
2425 let match_expr = P(self.expr_match(e.span,
2428 hir::MatchSource::ForLoopDesugar));
2430 // `{ let _result = ...; _result }`
2431 // underscore prevents an unused_variables lint if the head diverges
2432 let result_ident = self.str_to_ident("_result");
2433 let (let_stmt, let_stmt_binding) =
2434 self.stmt_let(e.span, false, result_ident, match_expr);
2436 let result = P(self.expr_ident(e.span, result_ident, let_stmt_binding));
2437 let block = P(self.block_all(e.span, hir_vec![let_stmt], Some(result)));
2438 // add the attributes to the outer returned expr node
2439 return self.expr_block(block, e.attrs.clone());
2442 // Desugar ExprKind::Try
2444 ExprKind::Try(ref sub_expr) => {
2447 // match Try::into_result(<expr>) {
2448 // Ok(val) => #[allow(unreachable_code)] val,
2449 // Err(err) => #[allow(unreachable_code)]
2450 // // If there is an enclosing `catch {...}`
2451 // break 'catch_target Try::from_error(From::from(err)),
2453 // return Try::from_error(From::from(err)),
2457 self.allow_internal_unstable(CompilerDesugaringKind::QuestionMark, e.span);
2459 // Try::into_result(<expr>)
2462 let sub_expr = self.lower_expr(sub_expr);
2464 let path = &["ops", "Try", "into_result"];
2465 let path = P(self.expr_std_path(unstable_span, path, ThinVec::new()));
2466 P(self.expr_call(e.span, path, hir_vec![sub_expr]))
2469 // #[allow(unreachable_code)]
2471 // allow(unreachable_code)
2473 let allow_ident = self.str_to_ident("allow");
2474 let uc_ident = self.str_to_ident("unreachable_code");
2475 let uc_meta_item = attr::mk_spanned_word_item(e.span, uc_ident);
2476 let uc_nested = NestedMetaItemKind::MetaItem(uc_meta_item);
2477 let uc_spanned = respan(e.span, uc_nested);
2478 attr::mk_spanned_list_item(e.span, allow_ident, vec![uc_spanned])
2480 attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow)
2482 let attrs = vec![attr];
2484 // Ok(val) => #[allow(unreachable_code)] val,
2486 let val_ident = self.str_to_ident("val");
2487 let val_pat = self.pat_ident(e.span, val_ident);
2488 let val_expr = P(self.expr_ident_with_attrs(e.span,
2491 ThinVec::from(attrs.clone())));
2492 let ok_pat = self.pat_ok(e.span, val_pat);
2494 self.arm(hir_vec![ok_pat], val_expr)
2497 // Err(err) => #[allow(unreachable_code)]
2498 // return Carrier::from_error(From::from(err)),
2500 let err_ident = self.str_to_ident("err");
2501 let err_local = self.pat_ident(e.span, err_ident);
2503 let path = &["convert", "From", "from"];
2504 let from = P(self.expr_std_path(e.span, path, ThinVec::new()));
2505 let err_expr = self.expr_ident(e.span, err_ident, err_local.id);
2507 self.expr_call(e.span, from, hir_vec![err_expr])
2509 let from_err_expr = {
2510 let path = &["ops", "Try", "from_error"];
2511 let from_err = P(self.expr_std_path(unstable_span, path,
2513 P(self.expr_call(e.span, from_err, hir_vec![from_expr]))
2516 let thin_attrs = ThinVec::from(attrs);
2517 let catch_scope = self.catch_scopes.last().map(|x| *x);
2518 let ret_expr = if let Some(catch_node) = catch_scope {
2524 target_id: hir::ScopeTarget::Block(catch_node),
2531 hir::Expr_::ExprRet(Some(from_err_expr)),
2536 let err_pat = self.pat_err(e.span, err_local);
2537 self.arm(hir_vec![err_pat], ret_expr)
2540 hir::ExprMatch(discr,
2541 hir_vec![err_arm, ok_arm],
2542 hir::MatchSource::TryDesugar)
2545 ExprKind::Mac(_) => panic!("Shouldn't exist here"),
2548 let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
2555 attrs: e.attrs.clone(),
/// Lowers an AST statement into (usually one) HIR statement.
/// `StmtKind::Item` is the exception: a single AST item statement can
/// expand to several HIR item declarations, hence the `SmallVector`
/// return type and the early `return` in that arm.
2559 fn lower_stmt(&mut self, s: &Stmt) -> SmallVector<hir::Stmt> {
2560 SmallVector::one(match s.node {
2561 StmtKind::Local(ref l) => Spanned {
2562 node: hir::StmtDecl(P(Spanned {
2563 node: hir::DeclLocal(self.lower_local(l)),
2565 }), self.lower_node_id(s.id).node_id),
2568 StmtKind::Item(ref it) => {
2569 // Can only use the ID once.
2570 let mut id = Some(s.id);
2571 return self.lower_item_id(it).into_iter().map(|item_id| Spanned {
2572 node: hir::StmtDecl(P(Spanned {
2573 node: hir::DeclItem(item_id),
// The first lowered item consumes the statement's own id (taken out
// of the `Option`); any further items get fresh ids via `next_id()`,
// per the "ids must be unique" invariant described at the file top.
2576 .map(|id| self.lower_node_id(id).node_id)
2577 .unwrap_or_else(|| self.next_id().node_id)),
2581 StmtKind::Expr(ref e) => {
2583 node: hir::StmtExpr(P(self.lower_expr(e)),
2584 self.lower_node_id(s.id).node_id),
2588 StmtKind::Semi(ref e) => {
2590 node: hir::StmtSemi(P(self.lower_expr(e)),
2591 self.lower_node_id(s.id).node_id),
// Macros are expanded before lowering, so none should remain here.
2595 StmtKind::Mac(..) => panic!("Shouldn't exist here"),
/// Maps an AST closure capture mode (by-value `move` vs. by-reference)
/// to its HIR counterpart; a direct 1:1 translation.
2599 fn lower_capture_clause(&mut self, c: CaptureBy) -> hir::CaptureClause {
2601 CaptureBy::Value => hir::CaptureByValue,
2602 CaptureBy::Ref => hir::CaptureByRef,
2606 /// If an `explicit_owner` is given, this method allocates the `HirId` in
2607 /// the address space of that item instead of the item currently being
2608 /// lowered. This can happen during `lower_impl_item_ref()` where we need to
2609 /// lower a `Visibility` value although we haven't lowered the owning
2610 /// `ImplItem` in question yet.
2611 fn lower_visibility(&mut self,
2613 explicit_owner: Option<NodeId>)
2614 -> hir::Visibility {
2616 Visibility::Public => hir::Public,
2617 Visibility::Crate(_) => hir::Visibility::Crate,
2618 Visibility::Restricted { ref path, id } => {
2619 hir::Visibility::Restricted {
2620 path: P(self.lower_path(id, path, ParamMode::Explicit, true)),
// Allocate the id in the explicit owner's address space when one
// was supplied (see doc comment above); otherwise lower normally.
2621 id: if let Some(owner) = explicit_owner {
2622 self.lower_node_id_with_owner(id, owner).node_id
2624 self.lower_node_id(id).node_id
2628 Visibility::Inherited => hir::Inherited,
/// Lowers the `default`-ness of an impl item, recording whether the
/// item actually provides a value (`has_value`).
2632 fn lower_defaultness(&mut self, d: Defaultness, has_value: bool) -> hir::Defaultness {
2634 Defaultness::Default => hir::Defaultness::Default { has_value: has_value },
2635 Defaultness::Final => {
2637 hir::Defaultness::Final
/// Maps a block's safety mode (normal block vs. `unsafe { .. }`,
/// including its unsafety source) to HIR.
2642 fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode {
2644 BlockCheckMode::Default => hir::DefaultBlock,
2645 BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(self.lower_unsafe_source(u)),
/// Maps an AST pattern binding mode (`x`, `ref x`, `mut x`, `ref mut x`)
/// to the corresponding HIR `BindingAnnotation`; a 1:1 translation.
2649 fn lower_binding_mode(&mut self, b: &BindingMode) -> hir::BindingAnnotation {
2651 BindingMode::ByValue(Mutability::Immutable) =>
2652 hir::BindingAnnotation::Unannotated,
2653 BindingMode::ByRef(Mutability::Immutable) => hir::BindingAnnotation::Ref,
2654 BindingMode::ByValue(Mutability::Mutable) => hir::BindingAnnotation::Mutable,
2655 BindingMode::ByRef(Mutability::Mutable) => hir::BindingAnnotation::RefMut,
/// Maps the origin of an unsafe block (compiler-generated vs. written
/// by the user) to HIR; a 1:1 translation.
2659 fn lower_unsafe_source(&mut self, u: UnsafeSource) -> hir::UnsafeSource {
2661 CompilerGenerated => hir::CompilerGenerated,
2662 UserProvided => hir::UserProvided,
/// Maps impl polarity (`impl Trait` vs. negative `impl !Trait`) to HIR.
2666 fn lower_impl_polarity(&mut self, i: ImplPolarity) -> hir::ImplPolarity {
2668 ImplPolarity::Positive => hir::ImplPolarity::Positive,
2669 ImplPolarity::Negative => hir::ImplPolarity::Negative,
/// Maps a trait bound modifier (plain bound vs. `?Trait`-style "maybe"
/// bound) to HIR; a 1:1 translation.
2673 fn lower_trait_bound_modifier(&mut self, f: TraitBoundModifier) -> hir::TraitBoundModifier {
2675 TraitBoundModifier::None => hir::TraitBoundModifier::None,
2676 TraitBoundModifier::Maybe => hir::TraitBoundModifier::Maybe,
2680 // Helper methods for building HIR.
/// HIR-building helper: constructs a match arm from a pattern list and
/// a body expression.
2682 fn arm(&mut self, pats: hir::HirVec<P<hir::Pat>>, expr: P<hir::Expr>) -> hir::Arm {
/// HIR-building helper: constructs a struct field `name: expr`
/// (never shorthand form).
2691 fn field(&mut self, name: Name, expr: P<hir::Expr>, span: Span) -> hir::Field {
2699 is_shorthand: false,
/// HIR-building helper: a `break` expression with no explicit label
/// and no value; the destination is resolved by
/// `lower_loop_destination(None)` (i.e. the enclosing loop scope).
2703 fn expr_break(&mut self, span: Span, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
2704 let expr_break = hir::ExprBreak(self.lower_loop_destination(None), None);
2705 P(self.expr(span, expr_break, attrs))
/// HIR-building helper: a call expression `e(args...)` with no attributes.
2708 fn expr_call(&mut self, span: Span, e: P<hir::Expr>, args: hir::HirVec<hir::Expr>)
2710 self.expr(span, hir::ExprCall(e, args), ThinVec::new())
/// HIR-building helper: a reference to the local binding `id` declared
/// at node `binding`; convenience wrapper with empty attributes.
2713 fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
2714 self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
/// HIR-building helper: a path expression that resolves directly to a
/// local binding (`Def::Local(binding)`), bypassing name resolution.
2717 fn expr_ident_with_attrs(&mut self, span: Span,
2720 attrs: ThinVec<Attribute>) -> hir::Expr {
2721 let expr_path = hir::ExprPath(hir::QPath::Resolved(None, P(hir::Path {
// The path has a single segment (just the identifier) and is
// pre-resolved to the local it names.
2723 def: Def::Local(binding),
2724 segments: hir_vec![hir::PathSegment::from_name(id)],
2727 self.expr(span, expr_path, attrs)
/// HIR-building helper: a mutable borrow `&mut e`.
2730 fn expr_mut_addr_of(&mut self, span: Span, e: P<hir::Expr>) -> hir::Expr {
2731 self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new())
/// HIR-building helper: a path expression into the standard library
/// (e.g. `::std::iter::Iterator::next`), resolved in value namespace
/// via `std_path`.
2734 fn expr_std_path(&mut self,
2736 components: &[&str],
2737 attrs: ThinVec<Attribute>)
2739 let path = self.std_path(span, components, true);
2740 self.expr(span, hir::ExprPath(hir::QPath::Resolved(None, P(path))), attrs)
/// HIR-building helper: a `match` expression; `source` records which
/// desugaring (if any) produced it, for later diagnostics.
2743 fn expr_match(&mut self,
2746 arms: hir::HirVec<hir::Arm>,
2747 source: hir::MatchSource)
2749 self.expr(span, hir::ExprMatch(arg, arms, source), ThinVec::new())
/// HIR-building helper: wraps a block in a block expression, reusing
/// the block's span.
2752 fn expr_block(&mut self, b: P<hir::Block>, attrs: ThinVec<Attribute>) -> hir::Expr {
2753 self.expr(b.span, hir::ExprBlock(b), attrs)
/// HIR-building helper: a tuple expression `(e1, e2, ...)`; with an
/// empty list this is the unit value `()`.
2756 fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<hir::Expr>) -> P<hir::Expr> {
2757 P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new()))
/// Fundamental expression constructor: allocates a fresh id via
/// `next_id()` (new nodes must get new ids — see file-top invariants)
/// and assembles the `hir::Expr`.
2760 fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> hir::Expr {
2761 let LoweredNodeId { node_id, hir_id } = self.next_id();
/// HIR-building helper: a `let <pat> = <ex>;` statement (initializer
/// optional). `source` records which desugaring introduced the local.
2771 fn stmt_let_pat(&mut self,
2773 ex: Option<P<hir::Expr>>,
2775 source: hir::LocalSource)
// Fresh id for the `Local` node itself...
2777 let LoweredNodeId { node_id, hir_id } = self.next_id();
2779 let local = P(hir::Local {
2786 attrs: ThinVec::new(),
// ...and a second fresh id for the enclosing statement.
2789 let decl = respan(sp, hir::DeclLocal(local));
2790 respan(sp, hir::StmtDecl(P(decl), self.next_id().node_id))
/// HIR-building helper: `let <ident> = <ex>;` (or `let mut <ident>`
/// when `mutbl`). Also returns the `NodeId` of the binding pattern so
/// callers can build expressions referring to the new local.
2793 fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P<hir::Expr>)
2794 -> (hir::Stmt, NodeId) {
2795 let pat = if mutbl {
2796 self.pat_ident_binding_mode(sp, ident, hir::BindingAnnotation::Mutable)
2798 self.pat_ident(sp, ident)
// Capture the pattern's id before `pat` is moved into the statement.
2800 let pat_id = pat.id;
2801 (self.stmt_let_pat(sp, Some(ex), pat, hir::LocalSource::Normal), pat_id)
/// HIR-building helper: a block containing no statements whose tail
/// expression is `expr`, i.e. `{ expr }`.
2804 fn block_expr(&mut self, expr: P<hir::Expr>) -> hir::Block {
2805 self.block_all(expr.span, hir::HirVec::new(), Some(expr))
/// Fundamental block constructor: fresh id, default (safe) rules, not
/// a `catch` target.
2808 fn block_all(&mut self, span: Span, stmts: hir::HirVec<hir::Stmt>, expr: Option<P<hir::Expr>>)
2810 let LoweredNodeId { node_id, hir_id } = self.next_id();
2817 rules: hir::DefaultBlock,
2819 targeted_by_break: false,
/// HIR-building helper: the pattern `Ok(pat)` (std `Result`).
2823 fn pat_ok(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2824 self.pat_std_enum(span, &["result", "Result", "Ok"], hir_vec![pat])
/// HIR-building helper: the pattern `Err(pat)` (std `Result`).
2827 fn pat_err(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2828 self.pat_std_enum(span, &["result", "Result", "Err"], hir_vec![pat])
/// HIR-building helper: the pattern `Some(pat)` (std `Option`).
2831 fn pat_some(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2832 self.pat_std_enum(span, &["option", "Option", "Some"], hir_vec![pat])
/// HIR-building helper: the pattern `None` (std `Option`).
2835 fn pat_none(&mut self, span: Span) -> P<hir::Pat> {
2836 self.pat_std_enum(span, &["option", "Option", "None"], hir_vec![])
/// HIR-building helper: a pattern naming a standard-library enum
/// variant. With no subpatterns it is a plain path pattern (unit
/// variant, e.g. `None`); otherwise a tuple-struct pattern
/// (e.g. `Some(x)`).
2839 fn pat_std_enum(&mut self,
2841 components: &[&str],
2842 subpats: hir::HirVec<P<hir::Pat>>)
2844 let path = self.std_path(span, components, true);
2845 let qpath = hir::QPath::Resolved(None, P(path));
2846 let pt = if subpats.is_empty() {
2847 hir::PatKind::Path(qpath)
2849 hir::PatKind::TupleStruct(qpath, subpats, None)
/// HIR-building helper: a plain (unannotated) binding pattern `name`.
2854 fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
2855 self.pat_ident_binding_mode(span, name, hir::BindingAnnotation::Unannotated)
/// HIR-building helper: a binding pattern `name` with an explicit
/// binding mode (e.g. `mut name`, `ref name`); gets a fresh id.
2858 fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingAnnotation)
2860 let LoweredNodeId { node_id, hir_id } = self.next_id();
2865 node: hir::PatKind::Binding(bm,
/// HIR-building helper: the wildcard pattern `_`.
2876 fn pat_wild(&mut self, span: Span) -> P<hir::Pat> {
2877 self.pat(span, hir::PatKind::Wild)
/// Fundamental pattern constructor: allocates a fresh id and wraps the
/// given `PatKind`.
2880 fn pat(&mut self, span: Span, pat: hir::PatKind) -> P<hir::Pat> {
2881 let LoweredNodeId { node_id, hir_id } = self.next_id();
2890 /// Given suffix ["b","c","d"], returns path `::std::b::c::d` when
2891 /// `fld.cx.use_std`, and `::core::b::c::d` otherwise.
2892 /// The path is also resolved according to `is_value`.
2893 fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path {
2894 let mut path = hir::Path {
// Segments: crate root, then the configured crate root name
// (`self.crate_root`, if any), then the given components.
2897 segments: iter::once(keywords::CrateRoot.name()).chain({
2898 self.crate_root.into_iter().chain(components.iter().cloned()).map(Symbol::intern)
2899 }).map(hir::PathSegment::from_name).collect(),
// Resolution happens eagerly here, in the value or type namespace
// depending on `is_value`.
2902 self.resolver.resolve_hir_path(&mut path, is_value);
/// HIR-building helper: builds a block with an explicit check-mode
/// `rule` and wraps it in a block expression carrying `attrs`.
/// NOTE(review): unlike `block_all`, the caller chooses the
/// `BlockCheckMode` — presumably used by desugarings that need a
/// compiler-generated unsafe block; confirm at the call sites.
2906 fn signal_block_expr(&mut self,
2907 stmts: hir::HirVec<hir::Stmt>,
2910 rule: hir::BlockCheckMode,
2911 attrs: ThinVec<Attribute>)
2913 let LoweredNodeId { node_id, hir_id } = self.next_id();
2915 let block = P(hir::Block {
2922 targeted_by_break: false,
2924 self.expr_block(block, attrs)
/// HIR-building helper: wraps a qualified path into a `hir::Ty`.
/// A resolved path that refers to a trait is turned into a trait
/// object type (`TyTraitObject`) with an elided lifetime instead of a
/// plain path type.
2927 fn ty_path(&mut self, id: NodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
2929 let node = match qpath {
2930 hir::QPath::Resolved(None, path) => {
2931 // Turn trait object paths into `TyTraitObject` instead.
2932 if let Def::Trait(_) = path.def {
2933 let principal = hir::PolyTraitRef {
2934 bound_lifetimes: hir_vec![],
2935 trait_ref: hir::TraitRef {
2936 path: path.and_then(|path| path),
2942 // The original ID is taken by the `PolyTraitRef`,
2943 // so the `Ty` itself needs a different one.
2944 id = self.next_id().node_id;
2946 hir::TyTraitObject(hir_vec![principal], self.elided_lifetime(span))
2948 hir::TyPath(hir::QPath::Resolved(None, path))
2951 _ => hir::TyPath(qpath)
2953 P(hir::Ty { id, node, span })
/// HIR-building helper: an elided-lifetime node (fresh id, the
/// "invalid" keyword as a placeholder name) for positions where the
/// user wrote no lifetime.
2956 fn elided_lifetime(&mut self, span: Span) -> hir::Lifetime {
2958 id: self.next_id().node_id,
2960 name: keywords::Invalid.name()
2965 fn body_ids(bodies: &BTreeMap<hir::BodyId, hir::Body>) -> Vec<hir::BodyId> {
2966 // Sorting by span ensures that we get things in order within a
2967 // file, and also puts the files in a sensible order.
2968 let mut body_ids: Vec<_> = bodies.keys().cloned().collect();
2969 body_ids.sort_by_key(|b| bodies[b].value.span);