1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // Lowers the AST to the HIR.
13 // Since the AST and HIR are fairly similar, this is mostly a simple procedure,
14 // much like a fold. Where lowering involves a bit more work things get more
15 // interesting and there are some invariants you should know about. These mostly
16 // concern spans and ids.
18 // Spans are assigned to AST nodes during parsing and then are modified during
19 // expansion to indicate the origin of a node and the process it went through
20 // being expanded. Ids are assigned to AST nodes just before lowering.
22 // For the simpler lowering steps, ids and spans should be preserved. Unlike
23 // expansion we do not preserve the process of lowering in the spans, so spans
24 // should not be modified here. When creating a new node (as opposed to
25 // 'folding' an existing one), then you create a new id using `next_id()`.
27 // You must ensure that ids are unique. That means that you should only use the
28 // id from an AST node in a single HIR node (you can assume that AST node ids
29 // are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
30 // If you do, you must then set the new node's id to a fresh one.
32 // Spans are used for error messages and for tools to map semantics back to
33 // source code. It is therefore not as important with spans as ids to be strict
34 // about use (you can't break the compiler by screwing up a span). Obviously, a
35 // HIR node can only have a single span. But multiple nodes can have the same
36 // span and spans don't need to be kept in order, etc. Where code is preserved
37 // by lowering, it should have the same span as in the AST. Where HIR nodes are
38 // new it is probably best to give a span for the whole AST node being lowered.
39 // All nodes should have real spans, don't use dummy spans. Tools are likely to
40 // get confused if the spans from leaf AST nodes occur in multiple places
41 // in the HIR, especially for multiple identifiers.
44 use hir::map::{Definitions, DefKey, REGULAR_SPACE};
45 use hir::map::definitions::DefPathData;
46 use hir::def_id::{DefIndex, DefId, CRATE_DEF_INDEX};
47 use hir::def::{Def, PathResolution};
48 use rustc_data_structures::indexed_vec::IndexVec;
50 use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
52 use std::collections::BTreeMap;
60 use syntax::ext::hygiene::{Mark, SyntaxContext};
62 use syntax::codemap::{self, respan, Spanned};
63 use syntax::std_inject;
64 use syntax::symbol::{Symbol, keywords};
65 use syntax::util::small_vector::SmallVector;
66 use syntax::visit::{self, Visitor};
69 const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF;
// NOTE(review): the numeric prefix on every line of this excerpt is an
// embedded original line number; gaps in the numbering indicate lines that
// were elided from the listing (e.g. this struct's closing brace).
// Carries all the state needed while lowering one crate's AST to HIR.
71 pub struct LoweringContext<'a> {
// Name of the injected standard-library crate, if any (set from
// `std_inject::injected_crate_name` in `lower_crate`).
72 crate_root: Option<&'static str>,
73 // Use to assign ids to hir nodes that do not directly correspond to an ast node
75 // As we walk the AST we must keep track of the current 'parent' def id (in
76 // the form of a DefIndex) so that if we create a new node which introduces
77 // a definition, then we can properly create the def id.
78 parent_def: Option<DefIndex>,
79 resolver: &'a mut Resolver,
// Cache of Ident -> Name conversions performed by `lower_ident`.
80 name_map: FxHashMap<Ident, Name>,
82 /// The items being lowered are collected here.
83 items: BTreeMap<NodeId, hir::Item>,
85 trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
86 impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
87 bodies: BTreeMap<hir::BodyId, hir::Body>,
88 exported_macros: Vec<hir::MacroDef>,
90 trait_impls: BTreeMap<DefId, Vec<NodeId>>,
91 trait_default_impl: BTreeMap<DefId, NodeId>,
// Stacks of enclosing `catch` and loop scopes, maintained by
// `with_catch_scope` / `with_loop_scope` while lowering bodies.
93 catch_scopes: Vec<NodeId>,
94 loop_scopes: Vec<NodeId>,
95 is_in_loop_condition: bool,
// Per type/trait definition: the number of declared lifetime parameters.
// Populated by `MiscCollector` and consulted in `lower_qpath` for elision.
97 type_def_lifetime_params: DefIdMap<usize>,
// Stack of (owner DefIndex, next local id counter) used to mint `HirId`s;
// see `with_hir_id_owner`, `lower_node_id` and `lower_node_id_with_owner`.
99 current_hir_id_owner: Vec<(DefIndex, u32)>,
100 item_local_id_counters: NodeMap<u32>,
101 node_id_to_hir_id: IndexVec<NodeId, hir::HirId>,
// NOTE(review): these are required-method signatures of a trait whose
// declaration line is elided from this excerpt — presumably the `Resolver`
// trait that `LoweringContext` holds a `&mut` reference to; confirm against
// the full source.
105 // Resolve a hir path generated by the lowerer when expanding `for`, `if let`, etc.
106 fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool);
108 // Obtain the resolution for a node id
109 fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>;
111 // We must keep the set of definitions up to date as we add nodes that weren't in the AST.
112 // This should only return `None` during testing.
113 fn definitions(&mut self) -> &mut Definitions;
// Entry point for lowering: builds a fresh `LoweringContext` with empty
// collections and runs the lowering over the crate.
// NOTE(review): several lines are elided here (remaining parameters, the
// return type, and the struct-literal header / closing of the context) —
// the visible lines only show the initializers.
116 pub fn lower_crate(sess: &Session,
118 resolver: &mut Resolver)
120 // We're constructing the HIR here; we don't care what we will
121 // read, since we haven't even constructed the *input* to
// Lowering is excluded from dependency tracking for the duration of this fn.
123 let _ignore = sess.dep_graph.in_ignore();
126 crate_root: std_inject::injected_crate_name(krate),
130 name_map: FxHashMap(),
131 items: BTreeMap::new(),
132 trait_items: BTreeMap::new(),
133 impl_items: BTreeMap::new(),
134 bodies: BTreeMap::new(),
135 trait_impls: BTreeMap::new(),
136 trait_default_impl: BTreeMap::new(),
137 exported_macros: Vec::new(),
138 catch_scopes: Vec::new(),
139 loop_scopes: Vec::new(),
140 is_in_loop_condition: false,
141 type_def_lifetime_params: DefIdMap(),
// The crate root itself is the initial HirId owner, starting at counter 0.
142 current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)],
143 item_local_id_counters: NodeMap(),
144 node_id_to_hir_id: IndexVec::new(),
// NOTE(review): the enum name and its variant identifiers are elided from
// this excerpt; only the derive and the per-variant doc comments remain.
// From the usage at call sites below (`ParamMode::Explicit`,
// `ParamMode::Optional`) this appears to be the `ParamMode` enum — confirm
// against the full source.
148 #[derive(Copy, Clone, PartialEq, Eq)]
150 /// Any path in a type context.
152 /// The `module::Type` in `module::Type::method` in an expression.
156 impl<'a> LoweringContext<'a> {
// Lowers the whole crate: first a pre-pass (`MiscCollector`) allocates
// HirId counters and records lifetime-parameter counts, then `ItemLowerer`
// lowers every item/trait-item/impl-item, and finally the crate's module,
// attributes and bodies are assembled into a `hir::Crate`.
// NOTE(review): closing braces, `match` headers and the final `hir::Crate`
// literal header are elided from this excerpt (see numbering gaps).
157 fn lower_crate(mut self, c: &Crate) -> hir::Crate {
158 /// Full-crate AST visitor that inserts into a fresh
159 /// `LoweringContext` any information that may be
160 /// needed from arbitrary locations in the crate.
161 /// E.g. The number of lifetime generic parameters
162 /// declared for every type and trait definition.
163 struct MiscCollector<'lcx, 'interner: 'lcx> {
164 lctx: &'lcx mut LoweringContext<'interner>,
167 impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> {
168 fn visit_item(&mut self, item: &'lcx Item) {
169 self.lctx.allocate_hir_id_counter(item.id, item);
// For type-like items, remember how many lifetime params they declare so
// `lower_qpath` can later synthesize elided lifetimes for their paths.
172 ItemKind::Struct(_, ref generics) |
173 ItemKind::Union(_, ref generics) |
174 ItemKind::Enum(_, ref generics) |
175 ItemKind::Ty(_, ref generics) |
176 ItemKind::Trait(_, ref generics, ..) => {
177 let def_id = self.lctx.resolver.definitions().local_def_id(item.id);
178 let count = generics.lifetimes.len();
179 self.lctx.type_def_lifetime_params.insert(def_id, count);
183 visit::walk_item(self, item);
186 fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
187 self.lctx.allocate_hir_id_counter(item.id, item);
188 visit::walk_trait_item(self, item);
191 fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
192 self.lctx.allocate_hir_id_counter(item.id, item);
193 visit::walk_impl_item(self, item);
// Second visitor: performs the actual item lowering into the context maps.
197 struct ItemLowerer<'lcx, 'interner: 'lcx> {
198 lctx: &'lcx mut LoweringContext<'interner>,
201 impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
202 fn visit_item(&mut self, item: &'lcx Item) {
203 let mut item_lowered = true;
204 self.lctx.with_hir_id_owner(item.id, |lctx| {
205 if let Some(hir_item) = lctx.lower_item(item) {
206 lctx.items.insert(item.id, hir_item);
208 item_lowered = false;
213 visit::walk_item(self, item);
217 fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
218 self.lctx.with_hir_id_owner(item.id, |lctx| {
219 let id = hir::TraitItemId { node_id: item.id };
220 let hir_item = lctx.lower_trait_item(item);
221 lctx.trait_items.insert(id, hir_item);
224 visit::walk_trait_item(self, item);
227 fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
228 self.lctx.with_hir_id_owner(item.id, |lctx| {
229 let id = hir::ImplItemId { node_id: item.id };
230 let hir_item = lctx.lower_impl_item(item);
231 lctx.impl_items.insert(id, hir_item);
233 visit::walk_impl_item(self, item);
// The crate root must map to the well-known CRATE_HIR_ID before anything else.
237 self.lower_node_id(CRATE_NODE_ID);
238 debug_assert!(self.node_id_to_hir_id[CRATE_NODE_ID] == hir::CRATE_HIR_ID);
240 visit::walk_crate(&mut MiscCollector { lctx: &mut self }, c);
241 visit::walk_crate(&mut ItemLowerer { lctx: &mut self }, c);
243 let module = self.lower_mod(&c.module);
244 let attrs = self.lower_attrs(&c.attrs);
245 let body_ids = body_ids(&self.bodies);
// Hand the completed NodeId -> HirId table over to the definitions map.
249 .init_node_id_to_hir_id_mapping(self.node_id_to_hir_id);
255 exported_macros: hir::HirVec::from(self.exported_macros),
257 trait_items: self.trait_items,
258 impl_items: self.impl_items,
261 trait_impls: self.trait_impls,
262 trait_default_impl: self.trait_default_impl,
// Registers `owner` as a HirId owner with a fresh local-id counter.
// Panics (via `bug!`) if called twice for the same owner.
// NOTE(review): the remaining parameters (including `owner` and `debug`)
// are on lines elided from this excerpt.
266 fn allocate_hir_id_counter<T: Debug>(&mut self,
269 if self.item_local_id_counters.insert(owner, 0).is_some() {
270 bug!("Tried to allocate item_local_id_counter for {:?} twice", debug);
272 // Always allocate the first HirId for the owner itself
273 self.lower_node_id_with_owner(owner, owner);
// Shared implementation for NodeId -> HirId mapping: grows the side table
// on demand and only invokes `alloc_hir_id` the first time a NodeId is seen.
276 fn lower_node_id_generic<F>(&mut self,
280 where F: FnOnce(&mut Self) -> hir::HirId
// Dummy node ids are never mapped (early return on an elided line, presumably).
282 if ast_node_id == DUMMY_NODE_ID {
286 let min_size = ast_node_id.as_usize() + 1;
288 if min_size > self.node_id_to_hir_id.len() {
289 self.node_id_to_hir_id.resize(min_size, hir::DUMMY_HIR_ID);
292 if self.node_id_to_hir_id[ast_node_id] == hir::DUMMY_HIR_ID {
293 // Generate a new HirId
294 self.node_id_to_hir_id[ast_node_id] = alloc_hir_id(self);
// Runs `f` with `owner` installed as the current HirId owner. While the
// owner is on the stack its map entry is set to HIR_ID_COUNTER_LOCKED so
// that concurrent use via `lower_node_id_with_owner` is caught by the
// debug assertion there; the (possibly advanced) counter is written back
// afterwards.
300 fn with_hir_id_owner<F>(&mut self, owner: NodeId, f: F)
301 where F: FnOnce(&mut Self)
303 let counter = self.item_local_id_counters
304 .insert(owner, HIR_ID_COUNTER_LOCKED)
306 let def_index = self.resolver.definitions().opt_def_index(owner).unwrap();
307 self.current_hir_id_owner.push((def_index, counter));
// `f(self)` runs on an elided line between the push and the pop.
309 let (new_def_index, new_counter) = self.current_hir_id_owner.pop().unwrap();
311 debug_assert!(def_index == new_def_index);
312 debug_assert!(new_counter >= counter);
314 let prev = self.item_local_id_counters.insert(owner, new_counter).unwrap();
315 debug_assert!(prev == HIR_ID_COUNTER_LOCKED);
318 /// This method allocates a new HirId for the given NodeId and stores it in
319 /// the LoweringContext's NodeId => HirId map.
320 /// Take care not to call this method if the resulting HirId is then not
321 /// actually used in the HIR, as that would trigger an assertion in the
322 /// HirIdValidator later on, which makes sure that all NodeIds got mapped
323 /// properly. Calling the method twice with the same NodeId is fine though.
324 fn lower_node_id(&mut self, ast_node_id: NodeId) -> NodeId {
325 self.lower_node_id_generic(ast_node_id, |this| {
// Mint the next ItemLocalId from the owner currently on top of the stack.
326 let &mut (def_index, ref mut local_id_counter) = this.current_hir_id_owner
329 let local_id = *local_id_counter;
330 *local_id_counter += 1;
333 local_id: hir::ItemLocalId(local_id),
// Like `lower_node_id`, but mints the HirId under an explicitly given
// `owner` (looked up in `item_local_id_counters`) instead of the stack top.
338 fn lower_node_id_with_owner(&mut self,
342 self.lower_node_id_generic(ast_node_id, |this| {
343 let local_id_counter = this.item_local_id_counters
346 let local_id = *local_id_counter;
348 // We want to be sure not to modify the counter in the map while it
349 // is also on the stack. Otherwise we'll get lost updates when writing
350 // back from the stack to the map.
351 debug_assert!(local_id != HIR_ID_COUNTER_LOCKED);
353 *local_id_counter += 1;
354 let def_index = this.resolver.definitions().opt_def_index(owner).unwrap();
358 local_id: hir::ItemLocalId(local_id),
// Wraps `value` (plus lowered arguments from `decl`, when given) into a
// `hir::Body`, stores it in `self.bodies`, and — on elided lines —
// presumably returns its `BodyId`.
363 fn record_body(&mut self, value: hir::Expr, decl: Option<&FnDecl>)
365 let body = hir::Body {
366 arguments: decl.map_or(hir_vec![], |decl| {
367 decl.inputs.iter().map(|x| self.lower_arg(x)).collect()
372 self.bodies.insert(id, body);
// Allocates a brand-new NodeId from the session and immediately lowers it,
// so the fresh node also gets a HirId under the current owner.
376 fn next_id(&mut self) -> NodeId {
377 self.lower_node_id(self.sess.next_node_id())
// Returns the fully-resolved `Def` for `id`; `Def::Err` when unresolved,
// and a `bug!` if any path segments were left unresolved.
380 fn expect_full_def(&mut self, id: NodeId) -> Def {
381 self.resolver.get_resolution(id).map_or(Def::Err, |pr| {
382 if pr.unresolved_segments() != 0 {
383 bug!("path not fully resolved: {:?}", pr);
389 fn diagnostic(&self) -> &errors::Handler {
390 self.sess.diagnostic()
// Interns a static string as a Name (body elided in this excerpt).
393 fn str_to_ident(&self, s: &'static str) -> Name {
// Returns `span` re-marked with a fresh expansion mark whose ExpnInfo is a
// compiler desugaring (named by `reason`) with `allow_internal_unstable`
// set, so code the lowerer synthesizes may use unstable internals.
397 fn allow_internal_unstable(&self, reason: &'static str, mut span: Span) -> Span {
398 let mark = Mark::fresh(Mark::root());
399 mark.set_expn_info(codemap::ExpnInfo {
401 callee: codemap::NameAndSpan {
402 format: codemap::CompilerDesugaring(Symbol::intern(reason)),
404 allow_internal_unstable: true,
// Attach the new mark to the span's hygiene context before returning it.
407 span.ctxt = SyntaxContext::empty().apply_mark(mark);
// Runs `f` with `catch_id` pushed on the catch-scope stack; asserts the
// stack is restored in LIFO order before popping.
411 fn with_catch_scope<T, F>(&mut self, catch_id: NodeId, f: F) -> T
412 where F: FnOnce(&mut LoweringContext) -> T
414 let len = self.catch_scopes.len();
415 self.catch_scopes.push(catch_id);
417 let result = f(self);
418 assert_eq!(len + 1, self.catch_scopes.len(),
419 "catch scopes should be added and removed in stack order");
421 self.catch_scopes.pop().unwrap();
// Same pattern for loop scopes; additionally clears the "in loop
// condition" flag for the duration, since a nested loop body is no longer
// inside the outer loop's condition.
426 fn with_loop_scope<T, F>(&mut self, loop_id: NodeId, f: F) -> T
427 where F: FnOnce(&mut LoweringContext) -> T
429 // We're no longer in the base loop's condition; we're in another loop.
430 let was_in_loop_condition = self.is_in_loop_condition;
431 self.is_in_loop_condition = false;
433 let len = self.loop_scopes.len();
434 self.loop_scopes.push(loop_id);
436 let result = f(self);
437 assert_eq!(len + 1, self.loop_scopes.len(),
438 "Loop scopes should be added and removed in stack order");
440 self.loop_scopes.pop().unwrap();
442 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with the "in loop condition" flag set, restoring the previous
// value afterwards.
447 fn with_loop_condition_scope<T, F>(&mut self, f: F) -> T
448 where F: FnOnce(&mut LoweringContext) -> T
450 let was_in_loop_condition = self.is_in_loop_condition;
451 self.is_in_loop_condition = true;
453 let result = f(self);
455 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with completely fresh catch/loop scope stacks (used when
// entering a new body, where outer scopes must not be visible), then
// restores the saved stacks.
460 fn with_new_scopes<T, F>(&mut self, f: F) -> T
461 where F: FnOnce(&mut LoweringContext) -> T
463 let was_in_loop_condition = self.is_in_loop_condition;
464 self.is_in_loop_condition = false;
466 let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new());
467 let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
468 let result = f(self);
469 self.catch_scopes = catch_scopes;
470 self.loop_scopes = loop_scopes;
472 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with `parent_id`'s DefIndex installed as the current parent
// definition, restoring the old parent afterwards.
477 fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
478 where F: FnOnce(&mut LoweringContext) -> T
480 let old_def = self.parent_def;
482 let defs = self.resolver.definitions();
483 Some(defs.opt_def_index(parent_id).unwrap())
486 let result = f(self);
488 self.parent_def = old_def;
// Looks up the DefKey for `id`: from the local definitions table or, for
// non-local ids, from the crate store (the branch condition is on an
// elided line).
492 fn def_key(&mut self, id: DefId) -> DefKey {
494 self.resolver.definitions().def_key(id.index)
496 self.sess.cstore.def_key(id)
// Converts an AST Ident to a HIR Name. Idents with empty (non-macro)
// hygiene context pass through on an elided fast path; others are
// canonicalized via `modern()` and cached in `name_map`.
500 fn lower_ident(&mut self, ident: Ident) -> Name {
501 let ident = ident.modern();
502 if ident.ctxt == SyntaxContext::empty() {
505 *self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident))
// Maps an optional spanned Ident (e.g. a loop label) to a spanned Name,
// preserving the span.
508 fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> {
509 o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
// Lowers a `break`/`continue` target: a labelled destination resolves the
// label to its loop's id (or an UnresolvedLabel error); an unlabelled one
// targets the innermost enclosing loop scope (or OutsideLoopScope).
512 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
516 Some((id, label_ident)) => {
517 let target = if let Def::Label(loop_id) = self.expect_full_def(id) {
518 hir::LoopIdResult::Ok(self.lower_node_id(loop_id))
520 hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
523 ident: Some(label_ident),
524 target_id: hir::ScopeTarget::Loop(target),
528 let loop_id = self.loop_scopes
530 .map(|innermost_loop_id| *innermost_loop_id);
534 target_id: hir::ScopeTarget::Loop(
535 loop_id.map(|id| Ok(self.lower_node_id(id)))
536 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
// Lowers a list of attributes (body elided in this excerpt).
543 fn lower_attrs(&mut self, attrs: &Vec<Attribute>) -> hir::HirVec<Attribute> {
// Lowers a match arm: attributes, all of its patterns, the optional guard
// expression, and the arm body.
547 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
549 attrs: self.lower_attrs(&arm.attrs),
550 pats: arm.pats.iter().map(|x| self.lower_pat(x)).collect(),
551 guard: arm.guard.as_ref().map(|ref x| P(self.lower_expr(x))),
552 body: P(self.lower_expr(&arm.body)),
// Lowers an associated-type binding (`Item = T` inside angle brackets).
556 fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding {
558 id: self.lower_node_id(b.id),
559 name: self.lower_ident(b.ident),
560 ty: self.lower_ty(&b.ty),
// Lowers an AST type to a HIR type, dispatching on the TyKind. Most arms
// are structural recursion; the notable cases are commented below.
// NOTE(review): several closing braces and a few arms' surrounding lines
// are elided from this excerpt (see numbering gaps).
565 fn lower_ty(&mut self, t: &Ty) -> P<hir::Ty> {
566 let kind = match t.node {
567 TyKind::Infer => hir::TyInfer,
568 TyKind::Err => hir::TyErr,
569 TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)),
570 TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)),
571 TyKind::Rptr(ref region, ref mt) => {
// Zero-length span at the `&` for a synthesized elided lifetime.
572 let span = Span { hi: t.span.lo, ..t.span };
573 let lifetime = match *region {
574 Some(ref lt) => self.lower_lifetime(lt),
575 None => self.elided_lifetime(span)
577 hir::TyRptr(lifetime, self.lower_mt(mt))
579 TyKind::BareFn(ref f) => {
580 hir::TyBareFn(P(hir::BareFnTy {
581 lifetimes: self.lower_lifetime_defs(&f.lifetimes),
582 unsafety: self.lower_unsafety(f.unsafety),
584 decl: self.lower_fn_decl(&f.decl),
587 TyKind::Never => hir::TyNever,
588 TyKind::Tup(ref tys) => {
589 hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect())
// Parens are not represented in HIR: lower the inner type directly.
591 TyKind::Paren(ref ty) => {
592 return self.lower_ty(ty);
594 TyKind::Path(ref qself, ref path) => {
595 let id = self.lower_node_id(t.id);
596 let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit);
597 return self.ty_path(id, t.span, qpath);
// `self` in a method signature: becomes a resolved `Self` type path.
599 TyKind::ImplicitSelf => {
600 hir::TyPath(hir::QPath::Resolved(None, P(hir::Path {
601 def: self.expect_full_def(t.id),
602 segments: hir_vec![hir::PathSegment {
603 name: keywords::SelfType.name(),
604 parameters: hir::PathParameters::none()
// Array lengths and typeof expressions become stored bodies.
609 TyKind::Array(ref ty, ref length) => {
610 let length = self.lower_expr(length);
611 hir::TyArray(self.lower_ty(ty),
612 self.record_body(length, None))
614 TyKind::Typeof(ref expr) => {
615 let expr = self.lower_expr(expr);
616 hir::TyTypeof(self.record_body(expr, None))
618 TyKind::TraitObject(ref bounds) => {
// Split the bounds: trait bounds are kept, the first region bound becomes
// the object lifetime; `?Trait` bounds are dropped here.
619 let mut lifetime_bound = None;
620 let bounds = bounds.iter().filter_map(|bound| {
622 TraitTyParamBound(ref ty, TraitBoundModifier::None) => {
623 Some(self.lower_poly_trait_ref(ty))
625 TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
626 RegionTyParamBound(ref lifetime) => {
627 if lifetime_bound.is_none() {
628 lifetime_bound = Some(self.lower_lifetime(lifetime));
// No explicit region bound: synthesize an elided object lifetime.
634 let lifetime_bound = lifetime_bound.unwrap_or_else(|| {
635 self.elided_lifetime(t.span)
637 hir::TyTraitObject(bounds, lifetime_bound)
639 TyKind::ImplTrait(ref bounds) => {
640 hir::TyImplTrait(self.lower_bounds(bounds))
642 TyKind::Mac(_) => panic!("TyMac should have been expanded by now."),
646 id: self.lower_node_id(t.id),
// Lowers an `extern { ... }` block by lowering each foreign item.
652 fn lower_foreign_mod(&mut self, fm: &ForeignMod) -> hir::ForeignMod {
655 items: fm.items.iter().map(|x| self.lower_foreign_item(x)).collect(),
// Lowers `global_asm!` (body elided in this excerpt).
659 fn lower_global_asm(&mut self, ga: &GlobalAsm) -> P<hir::GlobalAsm> {
// Lowers an enum variant: name, attributes, field data, and the optional
// explicit discriminant expression (stored as a body).
666 fn lower_variant(&mut self, v: &Variant) -> hir::Variant {
668 node: hir::Variant_ {
669 name: v.node.name.name,
670 attrs: self.lower_attrs(&v.node.attrs),
671 data: self.lower_variant_data(&v.node.data),
672 disr_expr: v.node.disr_expr.as_ref().map(|e| {
673 let e = self.lower_expr(e);
674 self.record_body(e, None)
// Lowers a possibly-qualified path (`<T as Trait>::assoc::...`) into a
// `hir::QPath`. The resolved prefix of the path becomes a `hir::Path`;
// any unresolved trailing segments become nested type-relative
// projections. Also performs lifetime-elision bookkeeping per segment.
// NOTE(review): the return type, `p: &Path` parameter and several closing
// braces are on elided lines.
681 fn lower_qpath(&mut self,
683 qself: &Option<QSelf>,
685 param_mode: ParamMode)
687 let qself_position = qself.as_ref().map(|q| q.position);
688 let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty));
690 let resolution = self.resolver.get_resolution(id)
691 .unwrap_or(PathResolution::new(Def::Err));
// Segments before `proj_start` are resolved; the rest are projections.
693 let proj_start = p.segments.len() - resolution.unresolved_segments();
694 let path = P(hir::Path {
695 def: resolution.base_def(),
696 segments: p.segments[..proj_start].iter().enumerate().map(|(i, segment)| {
697 let param_mode = match (qself_position, param_mode) {
698 (Some(j), ParamMode::Optional) if i < j => {
699 // This segment is part of the trait path in a
700 // qualified path - one of `a`, `b` or `Trait`
701 // in `<X as a::b::Trait>::T::U::method`.
707 // Figure out if this is a type/trait segment,
708 // which may need lifetime elision performed.
709 let parent_def_id = |this: &mut Self, def_id: DefId| {
712 index: this.def_key(def_id).parent.expect("missing parent")
715 let type_def_id = match resolution.base_def() {
716 Def::AssociatedTy(def_id) if i + 2 == proj_start => {
717 Some(parent_def_id(self, def_id))
719 Def::Variant(def_id) if i + 1 == proj_start => {
720 Some(parent_def_id(self, def_id))
722 Def::Struct(def_id) |
725 Def::TyAlias(def_id) |
726 Def::Trait(def_id) if i + 1 == proj_start => Some(def_id),
// How many lifetimes the segment's type expects: cached locally, or
// fetched from the crate store (and cached) for non-local definitions.
730 let num_lifetimes = type_def_id.map_or(0, |def_id| {
731 if let Some(&n) = self.type_def_lifetime_params.get(&def_id) {
734 assert!(!def_id.is_local());
735 let n = self.sess.cstore.item_generics_cloned(def_id).regions.len();
736 self.type_def_lifetime_params.insert(def_id, n);
739 self.lower_path_segment(p.span, segment, param_mode, num_lifetimes)
744 // Simple case, either no projections, or only fully-qualified.
745 // E.g. `std::mem::size_of` or `<I as Iterator>::Item`.
746 if resolution.unresolved_segments() == 0 {
747 return hir::QPath::Resolved(qself, path);
750 // Create the innermost type that we're projecting from.
751 let mut ty = if path.segments.is_empty() {
752 // If the base path is empty that means there exists a
753 // syntactical `Self`, e.g. `&i32` in `<&i32>::clone`.
754 qself.expect("missing QSelf for <T>::...")
756 // Otherwise, the base path is an implicit `Self` type path,
757 // e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
758 // `<I as Iterator>::Item::default`.
759 let new_id = self.next_id();
760 self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
763 // Anything after the base path are associated "extensions",
764 // out of which all but the last one are associated types,
765 // e.g. for `std::vec::Vec::<T>::IntoIter::Item::clone`:
766 // * base path is `std::vec::Vec<T>`
767 // * "extensions" are `IntoIter`, `Item` and `clone`
769 // 1. `std::vec::Vec<T>` (created above)
770 // 2. `<std::vec::Vec<T>>::IntoIter`
771 // 3. `<<std::vec::Vec<T>>::IntoIter>::Item`
772 // * final path is `<<<std::vec::Vec<T>>::IntoIter>::Item>::clone`
773 for (i, segment) in p.segments.iter().enumerate().skip(proj_start) {
774 let segment = P(self.lower_path_segment(p.span, segment, param_mode, 0));
775 let qpath = hir::QPath::TypeRelative(ty, segment);
777 // It's finished, return the extension of the right node type.
778 if i == p.segments.len() - 1 {
782 // Wrap the associated extension in another type node.
783 let new_id = self.next_id();
784 ty = self.ty_path(new_id, p.span, qpath);
787 // Should've returned in the for loop above.
788 span_bug!(p.span, "lower_qpath: no final extension segment in {}..{}",
789 proj_start, p.segments.len())
// Lowers a plain (non-qualified) path, optionally appending an extra
// `name` segment, with an option to force a leading global root.
// NOTE(review): the `id`/`p`/`name` parameters and return type are on
// elided lines.
792 fn lower_path_extra(&mut self,
796 param_mode: ParamMode,
797 defaults_to_global: bool)
799 let mut segments = p.segments.iter();
800 if defaults_to_global && p.is_global() {
805 def: self.expect_full_def(id),
806 segments: segments.map(|segment| {
807 self.lower_path_segment(p.span, segment, param_mode, 0)
// Optionally tack a synthesized final segment (no parameters) onto the end.
808 }).chain(name.map(|name| {
811 parameters: hir::PathParameters::none()
// Convenience wrapper over `lower_path_extra` with no extra segment.
818 fn lower_path(&mut self,
821 param_mode: ParamMode,
822 defaults_to_global: bool)
824 self.lower_path_extra(id, p, None, param_mode, defaults_to_global)
// Lowers one path segment: its parameters (angle-bracketed or
// parenthesized, defaulting to empty angle brackets), then fills in
// `expected_lifetimes` elided lifetimes if none were written.
827 fn lower_path_segment(&mut self,
829 segment: &PathSegment,
830 param_mode: ParamMode,
831 expected_lifetimes: usize)
832 -> hir::PathSegment {
833 let mut parameters = if let Some(ref parameters) = segment.parameters {
835 PathParameters::AngleBracketed(ref data) => {
836 let data = self.lower_angle_bracketed_parameter_data(data, param_mode);
837 hir::AngleBracketedParameters(data)
839 PathParameters::Parenthesized(ref data) => {
840 hir::ParenthesizedParameters(self.lower_parenthesized_parameter_data(data))
844 let data = self.lower_angle_bracketed_parameter_data(&Default::default(), param_mode);
845 hir::AngleBracketedParameters(data)
848 if let hir::AngleBracketedParameters(ref mut data) = parameters {
849 if data.lifetimes.is_empty() {
850 data.lifetimes = (0..expected_lifetimes).map(|_| {
851 self.elided_lifetime(path_span)
857 name: self.lower_ident(segment.identifier),
858 parameters: parameters,
// Lowers `<'a, T, Item = U>`-style parameter data. `infer_types` is true
// when no types were written and the segment allows optional parameters
// (an expression-position path), so type parameters are inferred.
862 fn lower_angle_bracketed_parameter_data(&mut self,
863 data: &AngleBracketedParameterData,
864 param_mode: ParamMode)
865 -> hir::AngleBracketedParameterData {
866 let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings } = data;
867 hir::AngleBracketedParameterData {
868 lifetimes: self.lower_lifetimes(lifetimes),
869 types: types.iter().map(|ty| self.lower_ty(ty)).collect(),
870 infer_types: types.is_empty() && param_mode == ParamMode::Optional,
871 bindings: bindings.iter().map(|b| self.lower_ty_binding(b)).collect(),
// Lowers `Fn(A, B) -> C`-style parenthesized parameter data.
875 fn lower_parenthesized_parameter_data(&mut self,
876 data: &ParenthesizedParameterData)
877 -> hir::ParenthesizedParameterData {
878 let &ParenthesizedParameterData { ref inputs, ref output, span } = data;
879 hir::ParenthesizedParameterData {
880 inputs: inputs.iter().map(|ty| self.lower_ty(ty)).collect(),
881 output: output.as_ref().map(|ty| self.lower_ty(ty)),
// Lowers a `let` statement: id, optional type ascription, pattern and
// optional initializer; attributes are carried over unchanged.
886 fn lower_local(&mut self, l: &Local) -> P<hir::Local> {
888 id: self.lower_node_id(l.id),
889 ty: l.ty.as_ref().map(|t| self.lower_ty(t)),
890 pat: self.lower_pat(&l.pat),
891 init: l.init.as_ref().map(|e| P(self.lower_expr(e))),
893 attrs: l.attrs.clone(),
894 source: hir::LocalSource::Normal,
// Direct AST -> HIR mutability mapping.
898 fn lower_mutability(&mut self, m: Mutability) -> hir::Mutability {
900 Mutability::Mutable => hir::MutMutable,
901 Mutability::Immutable => hir::MutImmutable,
// Lowers a function argument (its pattern; the type lives in the FnDecl).
905 fn lower_arg(&mut self, arg: &Arg) -> hir::Arg {
907 id: self.lower_node_id(arg.id),
908 pat: self.lower_pat(&arg.pat),
// Extracts a display name for each argument: the binding name for simple
// `ident` patterns, otherwise a placeholder `Invalid` name at the
// pattern's span.
912 fn lower_fn_args_to_names(&mut self, decl: &FnDecl)
913 -> hir::HirVec<Spanned<Name>> {
914 decl.inputs.iter().map(|arg| {
916 PatKind::Ident(_, ident, None) => {
917 respan(ident.span, ident.node.name)
919 _ => respan(arg.pat.span, keywords::Invalid.name()),
// Lowers a function signature: input types, return type (explicit or
// default/unit), variadic flag, and whether the first argument is a
// `self` receiver (directly or behind a reference).
924 fn lower_fn_decl(&mut self, decl: &FnDecl) -> P<hir::FnDecl> {
926 inputs: decl.inputs.iter().map(|arg| self.lower_ty(&arg.ty)).collect(),
927 output: match decl.output {
928 FunctionRetTy::Ty(ref ty) => hir::Return(self.lower_ty(ty)),
929 FunctionRetTy::Default(span) => hir::DefaultReturn(span),
931 variadic: decl.variadic,
932 has_implicit_self: decl.inputs.get(0).map_or(false, |arg| {
934 TyKind::ImplicitSelf => true,
935 TyKind::Rptr(_, ref mt) => mt.ty.node == TyKind::ImplicitSelf,
// Lowers a single type-parameter bound: a (possibly `?`-modified) trait
// bound or a region bound.
942 fn lower_ty_param_bound(&mut self, tpb: &TyParamBound) -> hir::TyParamBound {
944 TraitTyParamBound(ref ty, modifier) => {
945 hir::TraitTyParamBound(self.lower_poly_trait_ref(ty),
946 self.lower_trait_bound_modifier(modifier))
948 RegionTyParamBound(ref lifetime) => {
949 hir::RegionTyParamBound(self.lower_lifetime(lifetime))
// Lowers a type parameter; `add_bounds` carries extra bounds moved here
// from the where clause by `lower_generics` (the `?Trait` handling).
954 fn lower_ty_param(&mut self, tp: &TyParam, add_bounds: &[TyParamBound]) -> hir::TyParam {
955 let mut name = self.lower_ident(tp.ident);
957 // Don't expose `Self` (recovered "keyword used as ident" parse error).
958 // `rustc::ty` expects `Self` to be only used for a trait's `Self`.
959 // Instead, use gensym("Self") to create a distinct name that looks the same.
960 if name == keywords::SelfType.name() {
961 name = Symbol::gensym("Self");
964 let mut bounds = self.lower_bounds(&tp.bounds);
965 if !add_bounds.is_empty() {
966 bounds = bounds.into_iter().chain(self.lower_bounds(add_bounds).into_iter()).collect();
970 id: self.lower_node_id(tp.id),
973 default: tp.default.as_ref().map(|x| self.lower_ty(x)),
// `#[may_dangle]` marks the parameter as pure with respect to drop.
975 pure_wrt_drop: tp.attrs.iter().any(|attr| attr.check_name("may_dangle")),
// Lowers all type parameters, looking up any moved-in bounds by node id.
979 fn lower_ty_params(&mut self, tps: &Vec<TyParam>, add_bounds: &NodeMap<Vec<TyParamBound>>)
980 -> hir::HirVec<hir::TyParam> {
981 tps.iter().map(|tp| {
982 self.lower_ty_param(tp, add_bounds.get(&tp.id).map_or(&[][..], |x| &x))
// Lowers a lifetime reference (id + name; remaining fields elided here).
986 fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
988 id: self.lower_node_id(l.id),
989 name: self.lower_ident(l.ident),
// Lowers a lifetime declaration together with its bounds.
994 fn lower_lifetime_def(&mut self, l: &LifetimeDef) -> hir::LifetimeDef {
996 lifetime: self.lower_lifetime(&l.lifetime),
997 bounds: self.lower_lifetimes(&l.bounds),
998 pure_wrt_drop: l.attrs.iter().any(|attr| attr.check_name("may_dangle")),
// Plural helpers over the two lifetime lowerers above.
1002 fn lower_lifetimes(&mut self, lts: &Vec<Lifetime>) -> hir::HirVec<hir::Lifetime> {
1003 lts.iter().map(|l| self.lower_lifetime(l)).collect()
1006 fn lower_lifetime_defs(&mut self, lts: &Vec<LifetimeDef>) -> hir::HirVec<hir::LifetimeDef> {
1007 lts.iter().map(|l| self.lower_lifetime_def(l)).collect()
// Lowers a generics list. `?Trait` bounds written in the where clause are
// only legal on a plain type parameter of these same generics; such
// bounds are collected into `add_bounds` (keyed by the parameter's node
// id) and handed to `lower_ty_params`, while anything else is an error.
1010 fn lower_generics(&mut self, g: &Generics) -> hir::Generics {
1011 // Collect `?Trait` bounds in where clause and move them to parameter definitions.
1012 let mut add_bounds = NodeMap();
1013 for pred in &g.where_clause.predicates {
1014 if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
1015 'next_bound: for bound in &bound_pred.bounds {
1016 if let TraitTyParamBound(_, TraitBoundModifier::Maybe) = *bound {
1017 let report_error = |this: &mut Self| {
1018 this.diagnostic().span_err(bound_pred.bounded_ty.span,
1019 "`?Trait` bounds are only permitted at the \
1020 point where a type parameter is declared");
1022 // Check if the where clause type is a plain type parameter.
1023 match bound_pred.bounded_ty.node {
1024 TyKind::Path(None, ref path)
1025 if path.segments.len() == 1 &&
1026 bound_pred.bound_lifetimes.is_empty() => {
1027 if let Some(Def::TyParam(def_id)) =
1028 self.resolver.get_resolution(bound_pred.bounded_ty.id)
1029 .map(|d| d.base_def()) {
1030 if let Some(node_id) =
1031 self.resolver.definitions().as_local_node_id(def_id) {
// The `?Trait` bound must name a parameter of *these* generics.
1032 for ty_param in &g.ty_params {
1033 if node_id == ty_param.id {
1034 add_bounds.entry(ty_param.id).or_insert(Vec::new())
1035 .push(bound.clone());
1036 continue 'next_bound;
1043 _ => report_error(self)
1051 ty_params: self.lower_ty_params(&g.ty_params, &add_bounds),
1052 lifetimes: self.lower_lifetime_defs(&g.lifetimes),
1053 where_clause: self.lower_where_clause(&g.where_clause),
// Lowers the where clause by lowering each predicate.
1058 fn lower_where_clause(&mut self, wc: &WhereClause) -> hir::WhereClause {
1060 id: self.lower_node_id(wc.id),
1061 predicates: wc.predicates
1063 .map(|predicate| self.lower_where_predicate(predicate))
// Lowers one where-clause predicate: type bound, region bound, or
// equality predicate.
1068 fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate {
1070 WherePredicate::BoundPredicate(WhereBoundPredicate{ ref bound_lifetimes,
1074 hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
1075 bound_lifetimes: self.lower_lifetime_defs(bound_lifetimes),
1076 bounded_ty: self.lower_ty(bounded_ty),
1077 bounds: bounds.iter().filter_map(|bound| match *bound {
1078 // Ignore `?Trait` bounds, they were copied into type parameters already.
1079 TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
1080 _ => Some(self.lower_ty_param_bound(bound))
1085 WherePredicate::RegionPredicate(WhereRegionPredicate{ ref lifetime,
1088 hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
1090 lifetime: self.lower_lifetime(lifetime),
1091 bounds: bounds.iter().map(|bound| self.lower_lifetime(bound)).collect(),
1094 WherePredicate::EqPredicate(WhereEqPredicate{ id,
1098 hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
1099 id: self.lower_node_id(id),
1100 lhs_ty: self.lower_ty(lhs_ty),
1101 rhs_ty: self.lower_ty(rhs_ty),
// Lowers struct/tuple/unit variant data, lowering each field in order.
1108 fn lower_variant_data(&mut self, vdata: &VariantData) -> hir::VariantData {
1110 VariantData::Struct(ref fields, id) => {
1111 hir::VariantData::Struct(fields.iter()
1113 .map(|f| self.lower_struct_field(f))
1115 self.lower_node_id(id))
1117 VariantData::Tuple(ref fields, id) => {
1118 hir::VariantData::Tuple(fields.iter()
1120 .map(|f| self.lower_struct_field(f))
1122 self.lower_node_id(id))
1124 VariantData::Unit(id) => hir::VariantData::Unit(self.lower_node_id(id)),
// Lowers a trait reference. Trait paths must always lower to a simple
// resolved QPath; anything else is a compiler bug.
1128 fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef {
1129 let path = match self.lower_qpath(p.ref_id, &None, &p.path, ParamMode::Explicit) {
1130 hir::QPath::Resolved(None, path) => path.and_then(|path| path),
1131 qpath => bug!("lower_trait_ref: unexpected QPath `{:?}`", qpath)
1135 ref_id: self.lower_node_id(p.ref_id),
// Lowers `for<'a> Trait<...>`.
1139 fn lower_poly_trait_ref(&mut self, p: &PolyTraitRef) -> hir::PolyTraitRef {
1141 bound_lifetimes: self.lower_lifetime_defs(&p.bound_lifetimes),
1142 trait_ref: self.lower_trait_ref(&p.trait_ref),
// Lowers a struct field; tuple-struct fields are unnamed in the AST and
// get their positional index as a name.
1147 fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField {
1150 id: self.lower_node_id(f.id),
1151 name: self.lower_ident(match f.ident {
1152 Some(ident) => ident,
1153 // FIXME(jseyfried) positional field hygiene
1154 None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt },
1156 vis: self.lower_visibility(&f.vis, None),
1157 ty: self.lower_ty(&f.ty),
1158 attrs: self.lower_attrs(&f.attrs),
// Lowers a field of a struct-literal expression.
1162 fn lower_field(&mut self, f: &Field) -> hir::Field {
1164 name: respan(f.ident.span, self.lower_ident(f.ident.node)),
1165 expr: P(self.lower_expr(&f.expr)),
1167 is_shorthand: f.is_shorthand,
// Lowers a type-plus-mutability pair (pointer/reference pointee).
1171 fn lower_mt(&mut self, mt: &MutTy) -> hir::MutTy {
1173 ty: self.lower_ty(&mt.ty),
1174 mutbl: self.lower_mutability(mt.mutbl),
// Lowers a bound list by lowering each bound.
1178 fn lower_bounds(&mut self, bounds: &[TyParamBound]) -> hir::TyParamBounds {
1179 bounds.iter().map(|bound| self.lower_ty_param_bound(bound)).collect()
1182 fn lower_block(&mut self, b: &Block, targeted_by_break: bool) -> P<hir::Block> {
1183 let mut expr = None;
1185 let mut stmts = vec![];
1187 for (index, stmt) in b.stmts.iter().enumerate() {
1188 if index == b.stmts.len() - 1 {
1189 if let StmtKind::Expr(ref e) = stmt.node {
1190 expr = Some(P(self.lower_expr(e)));
1192 stmts.extend(self.lower_stmt(stmt));
1195 stmts.extend(self.lower_stmt(stmt));
1200 id: self.lower_node_id(b.id),
1201 stmts: stmts.into(),
1203 rules: self.lower_block_check_mode(&b.rules),
1205 targeted_by_break: targeted_by_break,
// Lowers an item body (the `ItemKind`). Most arms are straightforward
// 1:1 lowerings; the interesting case is `Use`, where a single AST
// `use a::{self as x, b as y}` list is exploded into several single-path
// HIR `use` items, each registered under the list entry's own node id.
// NOTE(review): extraction gaps throughout (embedded line numbers jump) —
// parameter lines, `match` headers, several struct fields and closing
// braces are missing; restore from upstream before compiling.
1209 fn lower_item_kind(&mut self,
1212 attrs: &hir::HirVec<Attribute>,
1213 vis: &mut hir::Visibility,
1217 ItemKind::ExternCrate(string) => hir::ItemExternCrate(string),
1218 ItemKind::Use(ref view_path) => {
1219 let path = match view_path.node {
1220 ViewPathSimple(_, ref path) => path,
1221 ViewPathGlob(ref path) => path,
1222 ViewPathList(ref path, ref path_list_idents) => {
1223 for &Spanned { node: ref import, span } in path_list_idents {
1224 // `use a::{self as x, b as y};` lowers to
1225 // `use a as x; use a::b as y;`
1226 let mut ident = import.name;
1227 let suffix = if ident.name == keywords::SelfValue.name() {
// `self` imports take their name from the last segment of the base path.
1228 if let Some(last) = path.segments.last() {
1229 ident = last.identifier;
1236 let mut path = self.lower_path_extra(import.id, path, suffix,
1237 ParamMode::Explicit, true);
// Each synthesized single import owns its own HIR id space.
1240 self.allocate_hir_id_counter(import.id, import);
1241 self.with_hir_id_owner(import.id, |this| {
// The parent's visibility is duplicated onto every synthesized item.
1242 let vis = match *vis {
1243 hir::Visibility::Public => hir::Visibility::Public,
1244 hir::Visibility::Crate => hir::Visibility::Crate,
1245 hir::Visibility::Inherited => hir::Visibility::Inherited,
1246 hir::Visibility::Restricted { ref path, id: _ } => {
1247 hir::Visibility::Restricted {
1249 // We are allocating a new NodeId here
1255 this.items.insert(import.id, hir::Item {
1257 name: import.rename.unwrap_or(ident).name,
1258 attrs: attrs.clone(),
1259 node: hir::ItemUse(P(path), hir::UseKind::Single),
// The original `use` item itself is lowered to the base path plus a kind.
1268 let path = P(self.lower_path(id, path, ParamMode::Explicit, true));
1269 let kind = match view_path.node {
1270 ViewPathSimple(ident, _) => {
1272 hir::UseKind::Single
1274 ViewPathGlob(_) => {
1277 ViewPathList(..) => {
1278 // Privatize the degenerate import base, used only to check
1279 // the stability of `use a::{};`, to avoid it showing up as
1280 // a reexport by accident when `pub`, e.g. in documentation.
1281 *vis = hir::Inherited;
1282 hir::UseKind::ListStem
1285 hir::ItemUse(path, kind)
// Statics and consts lower their initializer into a separate body table.
1287 ItemKind::Static(ref t, m, ref e) => {
1288 let value = self.lower_expr(e);
1289 hir::ItemStatic(self.lower_ty(t),
1290 self.lower_mutability(m),
1291 self.record_body(value, None))
1293 ItemKind::Const(ref t, ref e) => {
1294 let value = self.lower_expr(e);
1295 hir::ItemConst(self.lower_ty(t),
1296 self.record_body(value, None))
// Functions get fresh loop/catch scopes; the block body becomes a
// block expression recorded with its declaration for arg lowering.
1298 ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
1299 self.with_new_scopes(|this| {
1300 let body = this.lower_block(body, false);
1301 let body = this.expr_block(body, ThinVec::new());
1302 let body_id = this.record_body(body, Some(decl));
1303 hir::ItemFn(this.lower_fn_decl(decl),
1304 this.lower_unsafety(unsafety),
1305 this.lower_constness(constness),
1307 this.lower_generics(generics),
1311 ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)),
1312 ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)),
1313 ItemKind::GlobalAsm(ref ga) => hir::ItemGlobalAsm(self.lower_global_asm(ga)),
1314 ItemKind::Ty(ref t, ref generics) => {
1315 hir::ItemTy(self.lower_ty(t), self.lower_generics(generics))
1317 ItemKind::Enum(ref enum_definition, ref generics) => {
1318 hir::ItemEnum(hir::EnumDef {
1319 variants: enum_definition.variants
1321 .map(|x| self.lower_variant(x))
1324 self.lower_generics(generics))
1326 ItemKind::Struct(ref struct_def, ref generics) => {
1327 let struct_def = self.lower_variant_data(struct_def);
1328 hir::ItemStruct(struct_def, self.lower_generics(generics))
1330 ItemKind::Union(ref vdata, ref generics) => {
1331 let vdata = self.lower_variant_data(vdata);
1332 hir::ItemUnion(vdata, self.lower_generics(generics))
// Default (auto) impls are additionally indexed by trait DefId so later
// phases can find them without walking all items.
1334 ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
1335 let trait_ref = self.lower_trait_ref(trait_ref);
1337 if let Def::Trait(def_id) = trait_ref.path.def {
1338 self.trait_default_impl.insert(def_id, id);
1341 hir::ItemDefaultImpl(self.lower_unsafety(unsafety),
1344 ItemKind::Impl(unsafety,
1350 ref impl_items) => {
1351 let new_impl_items = impl_items.iter()
1352 .map(|item| self.lower_impl_item_ref(item))
1354 let ifce = ifce.as_ref().map(|trait_ref| self.lower_trait_ref(trait_ref));
// Trait impls are registered in the per-trait impl index.
1356 if let Some(ref trait_ref) = ifce {
1357 if let Def::Trait(def_id) = trait_ref.path.def {
1358 self.trait_impls.entry(def_id).or_insert(vec![]).push(id);
1362 hir::ItemImpl(self.lower_unsafety(unsafety),
1363 self.lower_impl_polarity(polarity),
1364 self.lower_defaultness(defaultness, true /* [1] */),
1365 self.lower_generics(generics),
1370 ItemKind::Trait(unsafety, ref generics, ref bounds, ref items) => {
1371 let bounds = self.lower_bounds(bounds);
1372 let items = items.iter().map(|item| self.lower_trait_item_ref(item)).collect();
1373 hir::ItemTrait(self.lower_unsafety(unsafety),
1374 self.lower_generics(generics),
// Macros are expanded (or exported) before lowering, so none survive here.
1378 ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
1381 // [1] `defaultness.has_value()` is never called for an `impl`, always `true` in order to
1382 // not cause an assertion failure inside the `lower_defaultness` function
// Lowers a trait item (associated const / method / associated type).
// The whole lowering runs under `with_parent_def` so nested definitions
// resolve against this item. Methods split on whether a default body is
// present: required methods only record argument names, provided methods
// lower the body into the body table.
// NOTE(review): extraction gaps — the `hir::TraitItem {` wrapper, `span`
// field, arm closers and closing braces are missing; restore from upstream.
1385 fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem {
1386 self.with_parent_def(i.id, |this| {
1388 id: this.lower_node_id(i.id),
1389 name: this.lower_ident(i.ident),
1390 attrs: this.lower_attrs(&i.attrs),
1391 node: match i.node {
1392 TraitItemKind::Const(ref ty, ref default) => {
1393 hir::TraitItemKind::Const(this.lower_ty(ty),
1394 default.as_ref().map(|x| {
1395 let value = this.lower_expr(x);
1396 this.record_body(value, None)
// Required method: no body, only the argument names are kept.
1399 TraitItemKind::Method(ref sig, None) => {
1400 let names = this.lower_fn_args_to_names(&sig.decl);
1401 hir::TraitItemKind::Method(this.lower_method_sig(sig),
1402 hir::TraitMethod::Required(names))
// Provided method: default body is lowered and recorded.
1404 TraitItemKind::Method(ref sig, Some(ref body)) => {
1405 let body = this.lower_block(body, false);
1406 let expr = this.expr_block(body, ThinVec::new());
1407 let body_id = this.record_body(expr, Some(&sig.decl));
1408 hir::TraitItemKind::Method(this.lower_method_sig(sig),
1409 hir::TraitMethod::Provided(body_id))
1411 TraitItemKind::Type(ref bounds, ref default) => {
1412 hir::TraitItemKind::Type(this.lower_bounds(bounds),
1413 default.as_ref().map(|x| this.lower_ty(x)))
1415 TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"),
1422 fn lower_trait_item_ref(&mut self, i: &TraitItem) -> hir::TraitItemRef {
1423 let (kind, has_default) = match i.node {
1424 TraitItemKind::Const(_, ref default) => {
1425 (hir::AssociatedItemKind::Const, default.is_some())
1427 TraitItemKind::Type(_, ref default) => {
1428 (hir::AssociatedItemKind::Type, default.is_some())
1430 TraitItemKind::Method(ref sig, ref default) => {
1431 (hir::AssociatedItemKind::Method {
1432 has_self: sig.decl.has_self(),
1433 }, default.is_some())
1435 TraitItemKind::Macro(..) => unimplemented!(),
1438 id: hir::TraitItemId { node_id: i.id },
1439 name: self.lower_ident(i.ident),
1441 defaultness: self.lower_defaultness(Defaultness::Default, has_default),
// Lowers an impl item (associated const / method / associated type).
// Runs under `with_parent_def` for correct resolution of nested defs.
// Const and method bodies are lowered into the body table; see [1] for
// why defaultness is always passed `true`.
// NOTE(review): extraction gaps — the `hir::ImplItem {` wrapper, `span`
// field, arm closers and closing braces are missing; restore from upstream.
1446 fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem {
1447 self.with_parent_def(i.id, |this| {
1449 id: this.lower_node_id(i.id),
1450 name: this.lower_ident(i.ident),
1451 attrs: this.lower_attrs(&i.attrs),
1452 vis: this.lower_visibility(&i.vis, None),
1453 defaultness: this.lower_defaultness(i.defaultness, true /* [1] */),
1454 node: match i.node {
1455 ImplItemKind::Const(ref ty, ref expr) => {
1456 let value = this.lower_expr(expr);
1457 let body_id = this.record_body(value, None);
1458 hir::ImplItemKind::Const(this.lower_ty(ty), body_id)
1460 ImplItemKind::Method(ref sig, ref body) => {
1461 let body = this.lower_block(body, false);
1462 let expr = this.expr_block(body, ThinVec::new());
1463 let body_id = this.record_body(expr, Some(&sig.decl));
1464 hir::ImplItemKind::Method(this.lower_method_sig(sig), body_id)
1466 ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(this.lower_ty(ty)),
1467 ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"),
1473 // [1] since `default impl` is not yet implemented, this is always true in impls
1476 fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
1478 id: hir::ImplItemId { node_id: i.id },
1479 name: self.lower_ident(i.ident),
1481 vis: self.lower_visibility(&i.vis, Some(i.id)),
1482 defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
1483 kind: match i.node {
1484 ImplItemKind::Const(..) => hir::AssociatedItemKind::Const,
1485 ImplItemKind::Type(..) => hir::AssociatedItemKind::Type,
1486 ImplItemKind::Method(ref sig, _) => hir::AssociatedItemKind::Method {
1487 has_self: sig.decl.has_self(),
1489 ImplItemKind::Macro(..) => unimplemented!(),
1493 // [1] since `default impl` is not yet implemented, this is always true in impls
1496 fn lower_mod(&mut self, m: &Mod) -> hir::Mod {
1499 item_ids: m.items.iter().flat_map(|x| self.lower_item_id(x)).collect(),
1503 fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
1505 ItemKind::Use(ref view_path) => {
1506 if let ViewPathList(_, ref imports) = view_path.node {
1507 return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
1508 .map(|id| hir::ItemId { id: id }).collect();
1511 ItemKind::MacroDef(..) => return SmallVector::new(),
1514 SmallVector::one(hir::ItemId { id: i.id })
// Entry point for lowering a single item. Name and visibility are
// mutable because `lower_item_kind` may rewrite them (e.g. `use` lists
// privatize their degenerate base item). Macro definitions are diverted
// into `exported_macros` instead of producing a HIR item.
// NOTE(review): extraction gaps — the MacroDef early-return tail, the
// `Some(hir::Item { .. })` construction after `id:` and the closing
// braces are missing; restore from upstream.
1517 pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
1518 let mut name = i.ident.name;
1519 let mut vis = self.lower_visibility(&i.vis, None);
1520 let attrs = self.lower_attrs(&i.attrs);
1521 if let ItemKind::MacroDef(ref def) = i.node {
// Only non-legacy or explicitly `#[macro_export]`ed macros are exported.
1522 if !def.legacy || i.attrs.iter().any(|attr| attr.path == "macro_export") {
1523 self.exported_macros.push(hir::MacroDef {
1536 let node = self.with_parent_def(i.id, |this| {
1537 this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node)
1541 id: self.lower_node_id(i.id),
// Lowers an item inside an `extern` block: a foreign fn (declaration
// only — note arg names are kept separately since there is no body) or a
// foreign static.
// NOTE(review): extraction gaps — the `hir::ForeignItem {` wrapper,
// `name`/`span` fields, arm closers and closing braces are missing;
// restore from upstream.
1550 fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
1551 self.with_parent_def(i.id, |this| {
1553 id: this.lower_node_id(i.id),
1555 attrs: this.lower_attrs(&i.attrs),
1556 node: match i.node {
1557 ForeignItemKind::Fn(ref fdec, ref generics) => {
1558 hir::ForeignItemFn(this.lower_fn_decl(fdec),
1559 this.lower_fn_args_to_names(fdec),
1560 this.lower_generics(generics))
1562 ForeignItemKind::Static(ref t, m) => {
1563 hir::ForeignItemStatic(this.lower_ty(t), m)
1566 vis: this.lower_visibility(&i.vis, None),
1572 fn lower_method_sig(&mut self, sig: &MethodSig) -> hir::MethodSig {
1574 generics: self.lower_generics(&sig.generics),
1576 unsafety: self.lower_unsafety(sig.unsafety),
1577 constness: self.lower_constness(sig.constness),
1578 decl: self.lower_fn_decl(&sig.decl),
1582 fn lower_unsafety(&mut self, u: Unsafety) -> hir::Unsafety {
1584 Unsafety::Unsafe => hir::Unsafety::Unsafe,
1585 Unsafety::Normal => hir::Unsafety::Normal,
1589 fn lower_constness(&mut self, c: Spanned<Constness>) -> hir::Constness {
1591 Constness::Const => hir::Constness::Const,
1592 Constness::NotConst => hir::Constness::NotConst,
1596 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
1598 UnOp::Deref => hir::UnDeref,
1599 UnOp::Not => hir::UnNot,
1600 UnOp::Neg => hir::UnNeg,
1604 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
1606 node: match b.node {
1607 BinOpKind::Add => hir::BiAdd,
1608 BinOpKind::Sub => hir::BiSub,
1609 BinOpKind::Mul => hir::BiMul,
1610 BinOpKind::Div => hir::BiDiv,
1611 BinOpKind::Rem => hir::BiRem,
1612 BinOpKind::And => hir::BiAnd,
1613 BinOpKind::Or => hir::BiOr,
1614 BinOpKind::BitXor => hir::BiBitXor,
1615 BinOpKind::BitAnd => hir::BiBitAnd,
1616 BinOpKind::BitOr => hir::BiBitOr,
1617 BinOpKind::Shl => hir::BiShl,
1618 BinOpKind::Shr => hir::BiShr,
1619 BinOpKind::Eq => hir::BiEq,
1620 BinOpKind::Lt => hir::BiLt,
1621 BinOpKind::Le => hir::BiLe,
1622 BinOpKind::Ne => hir::BiNe,
1623 BinOpKind::Ge => hir::BiGe,
1624 BinOpKind::Gt => hir::BiGt,
// Lowers a pattern. The subtle case is `PatKind::Ident`: the AST cannot
// distinguish a fresh binding (`x`) from a path pattern naming a unit
// struct/const (`None`), so resolution results are consulted — locals
// (or unresolved names in body-less signatures) become `Binding`,
// anything else becomes a resolved `Path` pattern.
// NOTE(review): extraction gaps throughout (embedded line numbers jump) —
// `P(hir::Pat {` wrapper, several closers, the path-pattern fields and
// the final `span` are missing; restore from upstream before compiling.
1630 fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
1632 id: self.lower_node_id(p.id),
1633 node: match p.node {
1634 PatKind::Wild => hir::PatKind::Wild,
1635 PatKind::Ident(ref binding_mode, pth1, ref sub) => {
1636 self.with_parent_def(p.id, |this| {
1637 match this.resolver.get_resolution(p.id).map(|d| d.base_def()) {
1638 // `None` can occur in body-less function signatures
1639 def @ None | def @ Some(Def::Local(_)) => {
// Fall back to this pattern's own local def id when unresolved.
1640 let def_id = def.map(|d| d.def_id()).unwrap_or_else(|| {
1641 this.resolver.definitions().local_def_id(p.id)
1643 hir::PatKind::Binding(this.lower_binding_mode(binding_mode),
1645 respan(pth1.span, pth1.node.name),
1646 sub.as_ref().map(|x| this.lower_pat(x)))
// Resolved to a non-local: this "binding" is really a path pattern.
1649 hir::PatKind::Path(hir::QPath::Resolved(None, P(hir::Path {
1653 hir::PathSegment::from_name(pth1.node.name)
1660 PatKind::Lit(ref e) => hir::PatKind::Lit(P(self.lower_expr(e))),
1661 PatKind::TupleStruct(ref path, ref pats, ddpos) => {
1662 let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional);
1663 hir::PatKind::TupleStruct(qpath,
1664 pats.iter().map(|x| self.lower_pat(x)).collect(),
1667 PatKind::Path(ref qself, ref path) => {
1668 hir::PatKind::Path(self.lower_qpath(p.id, qself, path, ParamMode::Optional))
1670 PatKind::Struct(ref path, ref fields, etc) => {
1671 let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional);
1673 let fs = fields.iter()
1677 node: hir::FieldPat {
1678 name: self.lower_ident(f.node.ident),
1679 pat: self.lower_pat(&f.node.pat),
1680 is_shorthand: f.node.is_shorthand,
1685 hir::PatKind::Struct(qpath, fs, etc)
1687 PatKind::Tuple(ref elts, ddpos) => {
1688 hir::PatKind::Tuple(elts.iter().map(|x| self.lower_pat(x)).collect(), ddpos)
1690 PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
1691 PatKind::Ref(ref inner, mutbl) => {
1692 hir::PatKind::Ref(self.lower_pat(inner), self.lower_mutability(mutbl))
1694 PatKind::Range(ref e1, ref e2, ref end) => {
1695 hir::PatKind::Range(P(self.lower_expr(e1)),
1696 P(self.lower_expr(e2)),
1697 self.lower_range_end(end))
1699 PatKind::Slice(ref before, ref slice, ref after) => {
1700 hir::PatKind::Slice(before.iter().map(|x| self.lower_pat(x)).collect(),
1701 slice.as_ref().map(|x| self.lower_pat(x)),
1702 after.iter().map(|x| self.lower_pat(x)).collect())
// Macros in patterns are expanded before lowering.
1704 PatKind::Mac(_) => panic!("Shouldn't exist here"),
1710 fn lower_range_end(&mut self, e: &RangeEnd) -> hir::RangeEnd {
1712 RangeEnd::Included => hir::RangeEnd::Included,
1713 RangeEnd::Excluded => hir::RangeEnd::Excluded,
1717 fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
1718 let kind = match e.node {
1720 // Eventually a desugaring for `box EXPR`
1721 // (similar to the desugaring above for `in PLACE BLOCK`)
1722 // should go here, desugaring
1726 // let mut place = BoxPlace::make_place();
1727 // let raw_place = Place::pointer(&mut place);
1728 // let value = $value;
1730 // ::std::ptr::write(raw_place, value);
1731 // Boxed::finalize(place)
1734 // But for now there are type-inference issues doing that.
1735 ExprKind::Box(ref inner) => {
1736 hir::ExprBox(P(self.lower_expr(inner)))
1739 // Desugar ExprBox: `in (PLACE) EXPR`
1740 ExprKind::InPlace(ref placer, ref value_expr) => {
1744 // let mut place = Placer::make_place(p);
1745 // let raw_place = Place::pointer(&mut place);
1747 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1748 // InPlace::finalize(place)
1750 let placer_expr = P(self.lower_expr(placer));
1751 let value_expr = P(self.lower_expr(value_expr));
1753 let placer_ident = self.str_to_ident("placer");
1754 let place_ident = self.str_to_ident("place");
1755 let p_ptr_ident = self.str_to_ident("p_ptr");
1757 let make_place = ["ops", "Placer", "make_place"];
1758 let place_pointer = ["ops", "Place", "pointer"];
1759 let move_val_init = ["intrinsics", "move_val_init"];
1760 let inplace_finalize = ["ops", "InPlace", "finalize"];
1762 let unstable_span = self.allow_internal_unstable("<-", e.span);
1763 let make_call = |this: &mut LoweringContext, p, args| {
1764 let path = P(this.expr_std_path(unstable_span, p, ThinVec::new()));
1765 P(this.expr_call(e.span, path, args))
1768 let mk_stmt_let = |this: &mut LoweringContext, bind, expr| {
1769 this.stmt_let(e.span, false, bind, expr)
1772 let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| {
1773 this.stmt_let(e.span, true, bind, expr)
1776 // let placer = <placer_expr> ;
1777 let (s1, placer_binding) = {
1778 mk_stmt_let(self, placer_ident, placer_expr)
1781 // let mut place = Placer::make_place(placer);
1782 let (s2, place_binding) = {
1783 let placer = self.expr_ident(e.span, placer_ident, placer_binding);
1784 let call = make_call(self, &make_place, hir_vec![placer]);
1785 mk_stmt_let_mut(self, place_ident, call)
1788 // let p_ptr = Place::pointer(&mut place);
1789 let (s3, p_ptr_binding) = {
1790 let agent = P(self.expr_ident(e.span, place_ident, place_binding));
1791 let args = hir_vec![self.expr_mut_addr_of(e.span, agent)];
1792 let call = make_call(self, &place_pointer, args);
1793 mk_stmt_let(self, p_ptr_ident, call)
1796 // pop_unsafe!(EXPR));
1797 let pop_unsafe_expr = {
1798 self.signal_block_expr(hir_vec![],
1801 hir::PopUnsafeBlock(hir::CompilerGenerated),
1806 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1807 // InPlace::finalize(place)
1810 let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding);
1811 let call_move_val_init =
1813 make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]),
1815 let call_move_val_init = respan(e.span, call_move_val_init);
1817 let place = self.expr_ident(e.span, place_ident, place_binding);
1818 let call = make_call(self, &inplace_finalize, hir_vec![place]);
1819 P(self.signal_block_expr(hir_vec![call_move_val_init],
1822 hir::PushUnsafeBlock(hir::CompilerGenerated),
1826 let block = self.block_all(e.span, hir_vec![s1, s2, s3], Some(expr));
1827 hir::ExprBlock(P(block))
1830 ExprKind::Array(ref exprs) => {
1831 hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect())
1833 ExprKind::Repeat(ref expr, ref count) => {
1834 let expr = P(self.lower_expr(expr));
1835 let count = self.lower_expr(count);
1836 hir::ExprRepeat(expr, self.record_body(count, None))
1838 ExprKind::Tup(ref elts) => {
1839 hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect())
1841 ExprKind::Call(ref f, ref args) => {
1842 let f = P(self.lower_expr(f));
1843 hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect())
1845 ExprKind::MethodCall(i, ref tps, ref args) => {
1846 let tps = tps.iter().map(|x| self.lower_ty(x)).collect();
1847 let args = args.iter().map(|x| self.lower_expr(x)).collect();
1848 hir::ExprMethodCall(respan(i.span, self.lower_ident(i.node)), tps, args)
1850 ExprKind::Binary(binop, ref lhs, ref rhs) => {
1851 let binop = self.lower_binop(binop);
1852 let lhs = P(self.lower_expr(lhs));
1853 let rhs = P(self.lower_expr(rhs));
1854 hir::ExprBinary(binop, lhs, rhs)
1856 ExprKind::Unary(op, ref ohs) => {
1857 let op = self.lower_unop(op);
1858 let ohs = P(self.lower_expr(ohs));
1859 hir::ExprUnary(op, ohs)
1861 ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())),
1862 ExprKind::Cast(ref expr, ref ty) => {
1863 let expr = P(self.lower_expr(expr));
1864 hir::ExprCast(expr, self.lower_ty(ty))
1866 ExprKind::Type(ref expr, ref ty) => {
1867 let expr = P(self.lower_expr(expr));
1868 hir::ExprType(expr, self.lower_ty(ty))
1870 ExprKind::AddrOf(m, ref ohs) => {
1871 let m = self.lower_mutability(m);
1872 let ohs = P(self.lower_expr(ohs));
1873 hir::ExprAddrOf(m, ohs)
1875 // More complicated than you might expect because the else branch
1876 // might be `if let`.
1877 ExprKind::If(ref cond, ref blk, ref else_opt) => {
1878 let else_opt = else_opt.as_ref().map(|els| {
1880 ExprKind::IfLet(..) => {
1881 // wrap the if-let expr in a block
1882 let span = els.span;
1883 let els = P(self.lower_expr(els));
1884 let id = self.next_id();
1885 let blk = P(hir::Block {
1889 rules: hir::DefaultBlock,
1891 targeted_by_break: false,
1893 P(self.expr_block(blk, ThinVec::new()))
1895 _ => P(self.lower_expr(els)),
1899 let then_blk = self.lower_block(blk, false);
1900 let then_expr = self.expr_block(then_blk, ThinVec::new());
1902 hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
1904 ExprKind::While(ref cond, ref body, opt_ident) => {
1905 self.with_loop_scope(e.id, |this|
1907 this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
1908 this.lower_block(body, false),
1909 this.lower_opt_sp_ident(opt_ident)))
1911 ExprKind::Loop(ref body, opt_ident) => {
1912 self.with_loop_scope(e.id, |this|
1913 hir::ExprLoop(this.lower_block(body, false),
1914 this.lower_opt_sp_ident(opt_ident),
1915 hir::LoopSource::Loop))
1917 ExprKind::Catch(ref body) => {
1918 self.with_catch_scope(body.id, |this|
1919 hir::ExprBlock(this.lower_block(body, true)))
1921 ExprKind::Match(ref expr, ref arms) => {
1922 hir::ExprMatch(P(self.lower_expr(expr)),
1923 arms.iter().map(|x| self.lower_arm(x)).collect(),
1924 hir::MatchSource::Normal)
1926 ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
1927 self.with_new_scopes(|this| {
1928 this.with_parent_def(e.id, |this| {
1929 let expr = this.lower_expr(body);
1930 hir::ExprClosure(this.lower_capture_clause(capture_clause),
1931 this.lower_fn_decl(decl),
1932 this.record_body(expr, Some(decl)),
1937 ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk, false)),
1938 ExprKind::Assign(ref el, ref er) => {
1939 hir::ExprAssign(P(self.lower_expr(el)), P(self.lower_expr(er)))
1941 ExprKind::AssignOp(op, ref el, ref er) => {
1942 hir::ExprAssignOp(self.lower_binop(op),
1943 P(self.lower_expr(el)),
1944 P(self.lower_expr(er)))
1946 ExprKind::Field(ref el, ident) => {
1947 hir::ExprField(P(self.lower_expr(el)),
1948 respan(ident.span, self.lower_ident(ident.node)))
1950 ExprKind::TupField(ref el, ident) => {
1951 hir::ExprTupField(P(self.lower_expr(el)), ident)
1953 ExprKind::Index(ref el, ref er) => {
1954 hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
1956 ExprKind::Range(ref e1, ref e2, lims) => {
1957 use syntax::ast::RangeLimits::*;
1959 let path = match (e1, e2, lims) {
1960 (&None, &None, HalfOpen) => "RangeFull",
1961 (&Some(..), &None, HalfOpen) => "RangeFrom",
1962 (&None, &Some(..), HalfOpen) => "RangeTo",
1963 (&Some(..), &Some(..), HalfOpen) => "Range",
1964 (&None, &Some(..), Closed) => "RangeToInclusive",
1965 (&Some(..), &Some(..), Closed) => "RangeInclusive",
1966 (_, &None, Closed) =>
1967 panic!(self.diagnostic().span_fatal(
1968 e.span, "inclusive range with no end")),
1972 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e)))
1974 let expr = P(self.lower_expr(&e));
1975 let unstable_span = self.allow_internal_unstable("...", e.span);
1976 self.field(Symbol::intern(s), expr, unstable_span)
1977 }).collect::<P<[hir::Field]>>();
1979 let is_unit = fields.is_empty();
1980 let unstable_span = self.allow_internal_unstable("...", e.span);
1982 iter::once("ops").chain(iter::once(path))
1983 .collect::<Vec<_>>();
1984 let struct_path = self.std_path(unstable_span, &struct_path, is_unit);
1985 let struct_path = hir::QPath::Resolved(None, P(struct_path));
1988 id: self.lower_node_id(e.id),
1990 hir::ExprPath(struct_path)
1992 hir::ExprStruct(struct_path, fields, None)
1994 span: unstable_span,
1995 attrs: e.attrs.clone(),
1998 ExprKind::Path(ref qself, ref path) => {
1999 hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional))
2001 ExprKind::Break(opt_ident, ref opt_expr) => {
2002 let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
2005 target_id: hir::ScopeTarget::Loop(
2006 Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
2009 self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident)))
2013 opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
2015 ExprKind::Continue(opt_ident) =>
2017 if self.is_in_loop_condition && opt_ident.is_none() {
2020 target_id: hir::ScopeTarget::Loop(Err(
2021 hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
2024 self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident)))
2026 ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
2027 ExprKind::InlineAsm(ref asm) => {
2028 let hir_asm = hir::InlineAsm {
2029 inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
2030 outputs: asm.outputs.iter().map(|out| {
2031 hir::InlineAsmOutput {
2032 constraint: out.constraint.clone(),
2034 is_indirect: out.is_indirect,
2037 asm: asm.asm.clone(),
2038 asm_str_style: asm.asm_str_style,
2039 clobbers: asm.clobbers.clone().into(),
2040 volatile: asm.volatile,
2041 alignstack: asm.alignstack,
2042 dialect: asm.dialect,
2046 asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect();
2048 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect();
2049 hir::ExprInlineAsm(P(hir_asm), outputs, inputs)
2051 ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
2052 hir::ExprStruct(self.lower_qpath(e.id, &None, path, ParamMode::Optional),
2053 fields.iter().map(|x| self.lower_field(x)).collect(),
2054 maybe_expr.as_ref().map(|x| P(self.lower_expr(x))))
2056 ExprKind::Paren(ref ex) => {
2057 let mut ex = self.lower_expr(ex);
2058 // include parens in span, but only if it is a super-span.
2059 if e.span.contains(ex.span) {
2062 // merge attributes into the inner expression.
2063 let mut attrs = e.attrs.clone();
2064 attrs.extend::<Vec<_>>(ex.attrs.into());
2069 // Desugar ExprIfLet
2070 // From: `if let <pat> = <sub_expr> <body> [<else_opt>]`
2071 ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => {
2074 // match <sub_expr> {
2076 // _ => [<else_opt> | ()]
2079 let mut arms = vec![];
2081 // `<pat> => <body>`
2083 let body = self.lower_block(body, false);
2084 let body_expr = P(self.expr_block(body, ThinVec::new()));
2085 let pat = self.lower_pat(pat);
2086 arms.push(self.arm(hir_vec![pat], body_expr));
2089 // _ => [<else_opt>|()]
2091 let wildcard_arm: Option<&Expr> = else_opt.as_ref().map(|p| &**p);
2092 let wildcard_pattern = self.pat_wild(e.span);
2093 let body = if let Some(else_expr) = wildcard_arm {
2094 P(self.lower_expr(else_expr))
2096 self.expr_tuple(e.span, hir_vec![])
2098 arms.push(self.arm(hir_vec![wildcard_pattern], body));
2101 let contains_else_clause = else_opt.is_some();
2103 let sub_expr = P(self.lower_expr(sub_expr));
2108 hir::MatchSource::IfLetDesugar {
2109 contains_else_clause: contains_else_clause,
2113 // Desugar ExprWhileLet
2114 // From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
2115 ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
2118 // [opt_ident]: loop {
2119 // match <sub_expr> {
2125 // Note that the block AND the condition are evaluated in the loop scope.
2126 // This is done to allow `break` from inside the condition of the loop.
2127 let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| (
2128 this.lower_block(body, false),
2129 this.expr_break(e.span, ThinVec::new()),
2130 this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))),
2133 // `<pat> => <body>`
2135 let body_expr = P(self.expr_block(body, ThinVec::new()));
2136 let pat = self.lower_pat(pat);
2137 self.arm(hir_vec![pat], body_expr)
2142 let pat_under = self.pat_wild(e.span);
2143 self.arm(hir_vec![pat_under], break_expr)
2146 // `match <sub_expr> { ... }`
2147 let arms = hir_vec![pat_arm, break_arm];
2148 let match_expr = self.expr(e.span,
2149 hir::ExprMatch(sub_expr,
2151 hir::MatchSource::WhileLetDesugar),
2154 // `[opt_ident]: loop { ... }`
2155 let loop_block = P(self.block_expr(P(match_expr)));
2156 let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
2157 hir::LoopSource::WhileLet);
2158 // add attributes to the outer returned expr node
2162 // Desugar ExprForLoop
2163 // From: `[opt_ident]: for <pat> in <head> <body>`
2164 ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
2168 // let result = match ::std::iter::IntoIterator::into_iter(<head>) {
2170 // [opt_ident]: loop {
2171 // let <pat> = match ::std::iter::Iterator::next(&mut iter) {
2172 // ::std::option::Option::Some(val) => val,
2173 // ::std::option::Option::None => break
2175 // SemiExpr(<body>);
2183 let head = self.lower_expr(head);
2185 let iter = self.str_to_ident("iter");
2187 // `::std::option::Option::Some(val) => val`
2189 let val_ident = self.str_to_ident("val");
2190 let val_pat = self.pat_ident(e.span, val_ident);
2191 let val_expr = P(self.expr_ident(e.span, val_ident, val_pat.id));
2192 let some_pat = self.pat_some(e.span, val_pat);
2193 self.arm(hir_vec![some_pat], val_expr)
2196 // `::std::option::Option::None => break`
2198 let break_expr = self.with_loop_scope(e.id, |this|
2199 this.expr_break(e.span, ThinVec::new()));
2200 let pat = self.pat_none(e.span);
2201 self.arm(hir_vec![pat], break_expr)
2205 let iter_pat = self.pat_ident_binding_mode(e.span, iter,
2206 hir::BindByValue(hir::MutMutable));
2208 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
2210 let iter = P(self.expr_ident(e.span, iter, iter_pat.id));
2211 let ref_mut_iter = self.expr_mut_addr_of(e.span, iter);
2212 let next_path = &["iter", "Iterator", "next"];
2213 let next_path = P(self.expr_std_path(e.span, next_path, ThinVec::new()));
2214 let next_expr = P(self.expr_call(e.span, next_path,
2215 hir_vec![ref_mut_iter]));
2216 let arms = hir_vec![pat_arm, break_arm];
2219 hir::ExprMatch(next_expr, arms,
2220 hir::MatchSource::ForLoopDesugar),
2224 let pat = self.lower_pat(pat);
2225 let pat_let = self.stmt_let_pat(e.span,
2228 hir::LocalSource::ForLoopDesugar);
2230 let body_block = self.with_loop_scope(e.id,
2231 |this| this.lower_block(body, false));
2232 let body_expr = P(self.expr_block(body_block, ThinVec::new()));
2233 let body_stmt = respan(e.span, hir::StmtExpr(body_expr, self.next_id()));
2235 let loop_block = P(self.block_all(e.span, hir_vec![pat_let, body_stmt], None));
2237 // `[opt_ident]: loop { ... }`
2238 let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
2239 hir::LoopSource::ForLoop);
2240 let loop_expr = P(hir::Expr {
2241 id: self.lower_node_id(e.id),
2244 attrs: ThinVec::new(),
2247 // `mut iter => { ... }`
2248 let iter_arm = self.arm(hir_vec![iter_pat], loop_expr);
2250 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
2251 let into_iter_expr = {
2252 let into_iter_path = &["iter", "IntoIterator", "into_iter"];
2253 let into_iter = P(self.expr_std_path(e.span, into_iter_path,
2255 P(self.expr_call(e.span, into_iter, hir_vec![head]))
2258 let match_expr = P(self.expr_match(e.span,
2261 hir::MatchSource::ForLoopDesugar));
2263 // `{ let _result = ...; _result }`
2264 // underscore prevents an unused_variables lint if the head diverges
2265 let result_ident = self.str_to_ident("_result");
2266 let (let_stmt, let_stmt_binding) =
2267 self.stmt_let(e.span, false, result_ident, match_expr);
2269 let result = P(self.expr_ident(e.span, result_ident, let_stmt_binding));
2270 let block = P(self.block_all(e.span, hir_vec![let_stmt], Some(result)));
2271 // add the attributes to the outer returned expr node
2272 return self.expr_block(block, e.attrs.clone());
2275 // Desugar ExprKind::Try
2277 ExprKind::Try(ref sub_expr) => {
2280 // match Carrier::translate(<expr>) {
2281 // Ok(val) => #[allow(unreachable_code)] val,
2282 // Err(err) => #[allow(unreachable_code)]
2283 // // If there is an enclosing `catch {...}`
2284 // break 'catch_target Carrier::from_error(From::from(err)),
2286 // return Carrier::from_error(From::from(err)),
2289 let unstable_span = self.allow_internal_unstable("?", e.span);
2291 // Carrier::translate(<expr>)
2294 let sub_expr = self.lower_expr(sub_expr);
2296 let path = &["ops", "Carrier", "translate"];
2297 let path = P(self.expr_std_path(unstable_span, path, ThinVec::new()));
2298 P(self.expr_call(e.span, path, hir_vec![sub_expr]))
2301 // #[allow(unreachable_code)]
2303 // allow(unreachable_code)
2305 let allow_ident = self.str_to_ident("allow");
2306 let uc_ident = self.str_to_ident("unreachable_code");
2307 let uc_meta_item = attr::mk_spanned_word_item(e.span, uc_ident);
2308 let uc_nested = NestedMetaItemKind::MetaItem(uc_meta_item);
2309 let uc_spanned = respan(e.span, uc_nested);
2310 attr::mk_spanned_list_item(e.span, allow_ident, vec![uc_spanned])
2312 attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow)
2314 let attrs = vec![attr];
2316 // Ok(val) => #[allow(unreachable_code)] val,
2318 let val_ident = self.str_to_ident("val");
2319 let val_pat = self.pat_ident(e.span, val_ident);
2320 let val_expr = P(self.expr_ident_with_attrs(e.span,
2323 ThinVec::from(attrs.clone())));
2324 let ok_pat = self.pat_ok(e.span, val_pat);
2326 self.arm(hir_vec![ok_pat], val_expr)
2329 // Err(err) => #[allow(unreachable_code)]
2330 // return Carrier::from_error(From::from(err)),
2332 let err_ident = self.str_to_ident("err");
2333 let err_local = self.pat_ident(e.span, err_ident);
2335 let path = &["convert", "From", "from"];
2336 let from = P(self.expr_std_path(e.span, path, ThinVec::new()));
2337 let err_expr = self.expr_ident(e.span, err_ident, err_local.id);
2339 self.expr_call(e.span, from, hir_vec![err_expr])
2341 let from_err_expr = {
2342 let path = &["ops", "Carrier", "from_error"];
2343 let from_err = P(self.expr_std_path(unstable_span, path,
2345 P(self.expr_call(e.span, from_err, hir_vec![from_expr]))
2348 let thin_attrs = ThinVec::from(attrs);
2349 let catch_scope = self.catch_scopes.last().map(|x| *x);
2350 let ret_expr = if let Some(catch_node) = catch_scope {
2356 target_id: hir::ScopeTarget::Block(catch_node),
2363 hir::Expr_::ExprRet(Some(from_err_expr)),
2368 let err_pat = self.pat_err(e.span, err_local);
2369 self.arm(hir_vec![err_pat], ret_expr)
2372 hir::ExprMatch(discr,
2373 hir_vec![err_arm, ok_arm],
2374 hir::MatchSource::TryDesugar)
2377 ExprKind::Mac(_) => panic!("Shouldn't exist here"),
2381 id: self.lower_node_id(e.id),
2384 attrs: e.attrs.clone(),
/// Lowers an AST statement to HIR statements.
/// Item statements may expand to several HIR statements (one per lowered
/// item id); every other kind lowers one-to-one.
2388 fn lower_stmt(&mut self, s: &Stmt) -> SmallVector<hir::Stmt> {
2389 SmallVector::one(match s.node {
// `let` statement: wrap the lowered local in a `StmtDecl`.
2390 StmtKind::Local(ref l) => Spanned {
2391 node: hir::StmtDecl(P(Spanned {
2392 node: hir::DeclLocal(self.lower_local(l)),
2394 }), self.lower_node_id(s.id)),
// Item statement: may lower to multiple item ids, hence the early
// `return` of a mapped collection instead of `SmallVector::one`.
2397 StmtKind::Item(ref it) => {
2398 // Can only use the ID once.
2399 let mut id = Some(s.id);
2400 return self.lower_item_id(it).into_iter().map(|item_id| Spanned {
2401 node: hir::StmtDecl(P(Spanned {
2402 node: hir::DeclItem(item_id),
// First statement reuses the AST id; later ones get fresh ids
// (ids must stay unique across the HIR).
2405 .map(|id| self.lower_node_id(id))
2406 .unwrap_or_else(|| self.next_id())),
// Expression statement without a trailing semicolon.
2410 StmtKind::Expr(ref e) => {
2412 node: hir::StmtExpr(P(self.lower_expr(e)),
2413 self.lower_node_id(s.id)),
// Expression statement terminated by a semicolon.
2417 StmtKind::Semi(ref e) => {
2419 node: hir::StmtSemi(P(self.lower_expr(e)),
2420 self.lower_node_id(s.id)),
// Macros are fully expanded before lowering; hitting one here is a bug.
2424 StmtKind::Mac(..) => panic!("Shouldn't exist here"),
/// Maps an AST closure capture clause (`move` vs. by-reference) to its
/// direct HIR equivalent.
2428 fn lower_capture_clause(&mut self, c: CaptureBy) -> hir::CaptureClause {
2430 CaptureBy::Value => hir::CaptureByValue,
2431 CaptureBy::Ref => hir::CaptureByRef,
2435 /// If an `explicit_owner` is given, this method allocates the `HirId` in
2436 /// the address space of that item instead of the item currently being
2437 /// lowered. This can happen during `lower_impl_item_ref()` where we need to
2438 /// lower a `Visibility` value although we haven't lowered the owning
2439 /// `ImplItem` in question yet.
2440 fn lower_visibility(&mut self,
2442 explicit_owner: Option<NodeId>)
2443 -> hir::Visibility {
2445 Visibility::Public => hir::Public,
2446 Visibility::Crate(_) => hir::Visibility::Crate,
// `pub(in path)`: lower the restriction path, and pick which item owns
// the lowered id depending on `explicit_owner` (see doc comment above).
2447 Visibility::Restricted { ref path, id } => {
2448 hir::Visibility::Restricted {
2449 path: P(self.lower_path(id, path, ParamMode::Explicit, true)),
2450 id: if let Some(owner) = explicit_owner {
2451 self.lower_node_id_with_owner(id, owner)
2453 self.lower_node_id(id)
2457 Visibility::Inherited => hir::Inherited,
/// Lowers `default`-ness of an impl item; `has_value` records whether the
/// item actually provides a body/value.
2461 fn lower_defaultness(&mut self, d: Defaultness, has_value: bool) -> hir::Defaultness {
2463 Defaultness::Default => hir::Defaultness::Default { has_value: has_value },
2464 Defaultness::Final => {
2466 hir::Defaultness::Final
/// Maps a block's safety mode (`unsafe { .. }` vs. default) to HIR.
2471 fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode {
2473 BlockCheckMode::Default => hir::DefaultBlock,
2474 BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(self.lower_unsafe_source(u)),
/// Maps a pattern binding mode (`ref`/by-value, with mutability) to HIR.
2478 fn lower_binding_mode(&mut self, b: &BindingMode) -> hir::BindingMode {
2480 BindingMode::ByRef(m) => hir::BindByRef(self.lower_mutability(m)),
2481 BindingMode::ByValue(m) => hir::BindByValue(self.lower_mutability(m)),
/// Maps the provenance of an `unsafe` block (compiler-generated vs.
/// user-written) to its HIR equivalent.
2485 fn lower_unsafe_source(&mut self, u: UnsafeSource) -> hir::UnsafeSource {
2487 CompilerGenerated => hir::CompilerGenerated,
2488 UserProvided => hir::UserProvided,
/// Maps impl polarity (`impl Trait` vs. `impl !Trait`) to HIR.
2492 fn lower_impl_polarity(&mut self, i: ImplPolarity) -> hir::ImplPolarity {
2494 ImplPolarity::Positive => hir::ImplPolarity::Positive,
2495 ImplPolarity::Negative => hir::ImplPolarity::Negative,
/// Maps a trait bound modifier (plain bound vs. `?Trait`) to HIR.
2499 fn lower_trait_bound_modifier(&mut self, f: TraitBoundModifier) -> hir::TraitBoundModifier {
2501 TraitBoundModifier::None => hir::TraitBoundModifier::None,
2502 TraitBoundModifier::Maybe => hir::TraitBoundModifier::Maybe,
2506 // Helper methods for building HIR.
/// Builds a match arm from the given patterns and arm body expression.
2508 fn arm(&mut self, pats: hir::HirVec<P<hir::Pat>>, expr: P<hir::Expr>) -> hir::Arm {
/// Builds a (non-shorthand) struct field initializer `name: expr`.
2517 fn field(&mut self, name: Name, expr: P<hir::Expr>, span: Span) -> hir::Field {
2525 is_shorthand: false,
/// Builds a plain `break` expression (no label, no value) targeting the
/// current loop scope.
2529 fn expr_break(&mut self, span: Span, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
2530 let expr_break = hir::ExprBreak(self.lower_loop_destination(None), None);
2531 P(self.expr(span, expr_break, attrs))
/// Builds a call expression `e(args...)` with no attributes.
2534 fn expr_call(&mut self, span: Span, e: P<hir::Expr>, args: hir::HirVec<hir::Expr>)
2536 self.expr(span, hir::ExprCall(e, args), ThinVec::new())
/// Builds a path expression referring to the local `binding`; convenience
/// wrapper around `expr_ident_with_attrs` with no attributes.
2539 fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
2540 self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
/// Builds a single-segment path expression naming a local variable, with
/// the path's `Def` resolved to the given `binding` node.
2543 fn expr_ident_with_attrs(&mut self, span: Span,
2546 attrs: ThinVec<Attribute>) -> hir::Expr {
// Resolve the reference to the local definition of `binding`.
2548 let defs = self.resolver.definitions();
2549 Def::Local(defs.local_def_id(binding))
2552 let expr_path = hir::ExprPath(hir::QPath::Resolved(None, P(hir::Path {
2555 segments: hir_vec![hir::PathSegment::from_name(id)],
2558 self.expr(span, expr_path, attrs)
/// Builds a mutable borrow expression `&mut e`.
2561 fn expr_mut_addr_of(&mut self, span: Span, e: P<hir::Expr>) -> hir::Expr {
2562 self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new())
/// Builds a path expression to a standard-library item given its path
/// `components` (see `std_path`), resolved in the value namespace.
2565 fn expr_std_path(&mut self,
2567 components: &[&str],
2568 attrs: ThinVec<Attribute>)
2570 let path = self.std_path(span, components, true);
2571 self.expr(span, hir::ExprPath(hir::QPath::Resolved(None, P(path))), attrs)
/// Builds a `match` expression over `arg` with the given arms, tagged with
/// the given `MatchSource` (used to distinguish desugarings in diagnostics).
2574 fn expr_match(&mut self,
2577 arms: hir::HirVec<hir::Arm>,
2578 source: hir::MatchSource)
2580 self.expr(span, hir::ExprMatch(arg, arms, source), ThinVec::new())
/// Wraps a block in a block expression, reusing the block's span.
2583 fn expr_block(&mut self, b: P<hir::Block>, attrs: ThinVec<Attribute>) -> hir::Expr {
2584 self.expr(b.span, hir::ExprBlock(b), attrs)
/// Builds a tuple expression `(e1, e2, ...)` with no attributes.
2587 fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<hir::Expr>) -> P<hir::Expr> {
2588 P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new()))
/// Base constructor for HIR expressions; all `expr_*` helpers funnel here.
/// Allocates a fresh id for the new node (see the id-uniqueness rules in
/// the module comment).
2591 fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> hir::Expr {
/// Builds a `let <pat> = <expr>;` statement, tagged with the given
/// `LocalSource` so diagnostics can tell desugared locals from user code.
2600 fn stmt_let_pat(&mut self,
2604 source: hir::LocalSource)
2606 let local = P(hir::Local {
2612 attrs: ThinVec::new(),
// Wrap the local in a declaration statement; both get fresh spans/ids
// derived from `sp` / `next_id`.
2615 let decl = respan(sp, hir::DeclLocal(local));
2616 respan(sp, hir::StmtDecl(P(decl), self.next_id()))
/// Builds a `let ident = ex;` (or `let mut ident = ex;` when `mutbl`)
/// statement and returns it together with the id of the new binding, so
/// callers can reference the bound variable afterwards.
2619 fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P<hir::Expr>)
2620 -> (hir::Stmt, NodeId) {
2621 let pat = if mutbl {
2622 self.pat_ident_binding_mode(sp, ident, hir::BindByValue(hir::MutMutable))
2624 self.pat_ident(sp, ident)
2626 let pat_id = pat.id;
2627 (self.stmt_let_pat(sp, ex, pat, hir::LocalSource::Normal), pat_id)
/// Builds a block containing only a tail expression (no statements).
2630 fn block_expr(&mut self, expr: P<hir::Expr>) -> hir::Block {
2631 self.block_all(expr.span, hir::HirVec::new(), Some(expr))
/// Base constructor for HIR blocks: given statements and an optional tail
/// expression, builds a default (non-unsafe) block not targeted by `break`.
2634 fn block_all(&mut self, span: Span, stmts: hir::HirVec<hir::Stmt>, expr: Option<P<hir::Expr>>)
2640 rules: hir::DefaultBlock,
2642 targeted_by_break: false,
/// Builds the pattern `::std::result::Result::Ok(pat)`.
2646 fn pat_ok(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2647 self.pat_std_enum(span, &["result", "Result", "Ok"], hir_vec![pat])
/// Builds the pattern `::std::result::Result::Err(pat)`.
2650 fn pat_err(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2651 self.pat_std_enum(span, &["result", "Result", "Err"], hir_vec![pat])
/// Builds the pattern `::std::option::Option::Some(pat)`.
2654 fn pat_some(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
2655 self.pat_std_enum(span, &["option", "Option", "Some"], hir_vec![pat])
/// Builds the pattern `::std::option::Option::None` (no sub-patterns).
2658 fn pat_none(&mut self, span: Span) -> P<hir::Pat> {
2659 self.pat_std_enum(span, &["option", "Option", "None"], hir_vec![])
/// Builds a pattern for a standard-library enum variant: a bare path
/// pattern for unit variants (empty `subpats`) or a tuple-struct pattern
/// otherwise.
2662 fn pat_std_enum(&mut self,
2664 components: &[&str],
2665 subpats: hir::HirVec<P<hir::Pat>>)
2667 let path = self.std_path(span, components, true);
2668 let qpath = hir::QPath::Resolved(None, P(path));
2669 let pt = if subpats.is_empty() {
2670 hir::PatKind::Path(qpath)
2672 hir::PatKind::TupleStruct(qpath, subpats, None)
/// Builds an immutable by-value binding pattern `name`.
2677 fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
2678 self.pat_ident_binding_mode(span, name, hir::BindByValue(hir::MutImmutable))
/// Builds a binding pattern `name` with the given binding mode, creating a
/// fresh `Def` for the new binding under the current item's definition.
2681 fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingMode)
2683 let id = self.next_id();
// NOTE(review): unwrap assumes a parent def is always set while lowering
// patterns — appears to be an invariant of the lowering context.
2684 let parent_def = self.parent_def.unwrap();
// Register the binding in the definition table so the new pattern node
// has a `DefId` like any parser-produced binding.
2686 let defs = self.resolver.definitions();
2687 let def_path_data = DefPathData::Binding(Ident::with_empty_ctxt(name));
2688 let def_index = defs
2689 .create_def_with_parent(parent_def, id, def_path_data, REGULAR_SPACE, Mark::root());
2690 DefId::local(def_index)
2695 node: hir::PatKind::Binding(bm,
/// Builds the wildcard pattern `_`.
2706 fn pat_wild(&mut self, span: Span) -> P<hir::Pat> {
2707 self.pat(span, hir::PatKind::Wild)
/// Base constructor for HIR patterns; all `pat_*` helpers funnel here.
2710 fn pat(&mut self, span: Span, pat: hir::PatKind) -> P<hir::Pat> {
2718 /// Given suffix ["b","c","d"], returns path `::std::b::c::d` when
2719 /// `fld.cx.use_std`, and `::core::b::c::d` otherwise.
2720 /// The path is also resolved according to `is_value`.
2721 fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path {
2722 let mut path = hir::Path {
// Segments: crate root, then `std`/`core` (from `crate_root`), then the
// requested components, all interned as path segments.
2725 segments: iter::once(keywords::CrateRoot.name()).chain({
2726 self.crate_root.into_iter().chain(components.iter().cloned()).map(Symbol::intern)
2727 }).map(hir::PathSegment::from_name).collect(),
// Ask the resolver to attach a `Def` to the freshly built path,
// resolving in the value or type namespace per `is_value`.
2730 self.resolver.resolve_hir_path(&mut path, is_value);
/// Builds a block expression from statements with the given block check
/// mode (e.g. compiler-generated `unsafe`) and attributes.
2734 fn signal_block_expr(&mut self,
2735 stmts: hir::HirVec<hir::Stmt>,
2738 rule: hir::BlockCheckMode,
2739 attrs: ThinVec<Attribute>)
2741 let id = self.next_id();
2742 let block = P(hir::Block {
2748 targeted_by_break: false,
2750 self.expr_block(block, attrs)
/// Builds a HIR type node from a (possibly qualified) path. A plain path
/// resolving to a trait becomes a `TyTraitObject` rather than a `TyPath`.
2753 fn ty_path(&mut self, id: NodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
2755 let node = match qpath {
2756 hir::QPath::Resolved(None, path) => {
2757 // Turn trait object paths into `TyTraitObject` instead.
2758 if let Def::Trait(_) = path.def {
2759 let principal = hir::PolyTraitRef {
2760 bound_lifetimes: hir_vec![],
2761 trait_ref: hir::TraitRef {
2762 path: path.and_then(|path| path),
2768 // The original ID is taken by the `PolyTraitRef`,
2769 // so the `Ty` itself needs a different one.
2770 id = self.next_id();
2772 hir::TyTraitObject(hir_vec![principal], self.elided_lifetime(span))
2774 hir::TyPath(hir::QPath::Resolved(None, path))
// Any other qualified path lowers straight to a path type.
2777 _ => hir::TyPath(qpath)
2779 P(hir::Ty { id, node, span })
/// Builds an elided-lifetime placeholder (named with the `Invalid`
/// keyword) for positions where no lifetime was written.
2782 fn elided_lifetime(&mut self, span: Span) -> hir::Lifetime {
2786 name: keywords::Invalid.name()
2791 fn body_ids(bodies: &BTreeMap<hir::BodyId, hir::Body>) -> Vec<hir::BodyId> {
2792 // Sorting by span ensures that we get things in order within a
2793 // file, and also puts the files in a sensible order.
2794 let mut body_ids: Vec<_> = bodies.keys().cloned().collect();
2795 body_ids.sort_by_key(|b| bodies[b].value.span);