2 use crate::build::scope::{CachedBlock, DropKind};
3 use crate::hair::cx::Cx;
4 use crate::hair::{LintLevel, BindingMode, PatternKind};
6 use crate::transform::MirSource;
7 use crate::util as mir_util;
10 use rustc::hir::def_id::DefId;
11 use rustc::middle::region;
13 use rustc::mir::visit::{MutVisitor, TyContext};
14 use rustc::ty::{self, Ty, TyCtxt};
15 use rustc::ty::subst::Substs;
16 use rustc::util::nodemap::NodeMap;
17 use rustc_target::spec::PanicStrategy;
18 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
21 use rustc_target::spec::abi::Abi;
23 use syntax::attr::{self, UnwindAttr};
24 use syntax::symbol::keywords;
29 /// Construct the MIR for a given `DefId`.
///
/// Dispatches on the kind of HIR node that owns the body: tuple-struct and
/// enum-variant constructors are handled by `create_constructor_shim`;
/// fn-like owners (fns, methods, closures, generators) go through
/// `build::construct_fn`; consts/statics/anon-consts go through
/// `build::construct_const`. The resulting MIR is lifted to the global
/// tcx (see `GlobalizeMir`), dumped via `dump_mir`, and linted.
30 pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'tcx> {
31 let id = tcx.hir().as_local_node_id(def_id).unwrap();
33 // Figure out what primary body this item has.
34 let (body_id, return_ty_span) = match tcx.hir().get(id) {
35 Node::Variant(variant) =>
36 return create_constructor_shim(tcx, id, &variant.node.data),
37 Node::StructCtor(ctor) =>
38 return create_constructor_shim(tcx, id, ctor),
// Fn-like owners: closures, free fns, impl methods, and provided trait
// methods all carry an fn decl whose output span becomes the
// `return_ty_span`.
40 Node::Expr(hir::Expr { node: hir::ExprKind::Closure(_, decl, body_id, _, _), .. })
41 | Node::Item(hir::Item { node: hir::ItemKind::Fn(decl, _, _, body_id), .. })
44 node: hir::ImplItemKind::Method(hir::MethodSig { decl, .. }, body_id),
50 node: hir::TraitItemKind::Method(
51 hir::MethodSig { decl, .. },
52 hir::TraitMethod::Provided(body_id),
57 (*body_id, decl.output.span())
// Const-like owners: the span of the declared type serves as the
// return-type span.
59 Node::Item(hir::Item { node: hir::ItemKind::Static(ty, _, body_id), .. })
60 | Node::Item(hir::Item { node: hir::ItemKind::Const(ty, body_id), .. })
61 | Node::ImplItem(hir::ImplItem { node: hir::ImplItemKind::Const(ty, body_id), .. })
63 hir::TraitItem { node: hir::TraitItemKind::Const(ty, Some(body_id)), .. }
67 Node::AnonConst(hir::AnonConst { body, hir_id, .. }) => {
68 (*body, tcx.hir().span_by_hir_id(*hir_id))
71 _ => span_bug!(tcx.hir().span(id), "can't build MIR for {:?}", def_id),
// MIR is built inside an inference context; the finished MIR is then
// lifted out of it (see the `GlobalizeMir` pass below).
74 tcx.infer_ctxt().enter(|infcx| {
75 let cx = Cx::new(&infcx, id);
76 let mut mir = if cx.tables().tainted_by_errors {
// Type errors were already reported: emit a placeholder body
// instead of lowering likely-inconsistent HIR.
77 build::construct_error(cx, body_id)
78 } else if cx.body_owner_kind.is_fn_or_closure() {
79 // fetch the fully liberated fn signature (that is, all bound
80 // types/lifetimes replaced)
81 let fn_hir_id = tcx.hir().node_to_hir_id(id);
82 let fn_sig = cx.tables().liberated_fn_sigs()[fn_hir_id].clone();
83 let fn_def_id = tcx.hir().local_def_id(id);
85 let ty = tcx.type_of(fn_def_id);
86 let mut abi = fn_sig.abi;
87 let implicit_argument = match ty.sty {
89 // HACK(eddyb) Avoid having RustCall on closures,
90 // as it adds unnecessary (and wrong) auto-tupling.
92 Some(ArgInfo(liberated_closure_env_ty(tcx, id, body_id), None, None, None))
94 ty::Generator(..) => {
// A generator takes the generator object itself as its sole
// implicit argument.
95 let gen_ty = tcx.body_tables(body_id).node_type(fn_hir_id);
96 Some(ArgInfo(gen_ty, None, None, None))
101 let safety = match fn_sig.unsafety {
102 hir::Unsafety::Normal => Safety::Safe,
103 hir::Unsafety::Unsafe => Safety::FnUnsafe,
106 let body = tcx.hir().body(body_id);
// Pair each declared argument with its type span and pattern,
// noting implicit-`self` arguments so diagnostics can treat them
// specially.
107 let explicit_arguments =
111 .map(|(index, arg)| {
112 let owner_id = tcx.hir().body_owner(body_id);
115 if let Some(ref fn_decl) = tcx.hir().fn_decl(owner_id) {
116 let ty_hir_id = fn_decl.inputs[index].hir_id;
117 let ty_span = tcx.hir().span_by_hir_id(ty_hir_id);
118 opt_ty_info = Some(ty_span);
119 self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() {
120 match fn_decl.implicit_self {
121 hir::ImplicitSelfKind::Imm => Some(ImplicitSelfKind::Imm),
122 hir::ImplicitSelfKind::Mut => Some(ImplicitSelfKind::Mut),
123 hir::ImplicitSelfKind::ImmRef => Some(ImplicitSelfKind::ImmRef),
124 hir::ImplicitSelfKind::MutRef => Some(ImplicitSelfKind::MutRef),
134 ArgInfo(fn_sig.inputs()[index], opt_ty_info, Some(&*arg.pat), self_arg)
137 let arguments = implicit_argument.into_iter().chain(explicit_arguments);
139 let (yield_ty, return_ty) = if body.is_generator {
// For generators, the signature recorded on the generator type
// supplies both the yield type and the return type.
140 let gen_sig = match ty.sty {
141 ty::Generator(gen_def_id, gen_substs, ..) =>
142 gen_substs.sig(gen_def_id, tcx),
144 span_bug!(tcx.hir().span(id), "generator w/o generator type: {:?}", ty),
146 (Some(gen_sig.yield_ty), gen_sig.return_ty)
148 (None, fn_sig.output())
151 build::construct_fn(cx, id, arguments, safety, abi,
152 return_ty, yield_ty, return_ty_span, body)
154 build::construct_const(cx, body_id, return_ty_span)
157 // Convert the Mir to global types.
158 let mut globalizer = GlobalizeMir {
162 globalizer.visit_mir(&mut mir);
// The transmute is sound only because `GlobalizeMir` has just replaced
// every inference type/region with its global-tcx equivalent (it
// `bug!`s out otherwise); see its doc comment.
164 mem::transmute::<Mir<'_>, Mir<'tcx>>(mir)
167 mir_util::dump_mir(tcx, None, "mir_map", &0,
168 MirSource::item(def_id), &mir, |_, _| Ok(()) );
170 lints::check(tcx, &mir, def_id);
176 /// A pass to lift all the types and substitutions in a MIR
177 /// to the global tcx. Sadly, we don't have a "folder" that
178 /// can change `'tcx` so we have to transmute afterwards.
179 struct GlobalizeMir<'a, 'gcx: 'a> {
180 tcx: TyCtxt<'a, 'gcx, 'gcx>,
// Each `visit_*` below follows the same pattern: try to `lift` the
// value out of the inference context into the global tcx; if lifting
// fails, the value still contains inference types/regions, which is a
// compiler bug at this stage, so we bail out loudly.
184 impl<'a, 'gcx: 'tcx, 'tcx> MutVisitor<'tcx> for GlobalizeMir<'a, 'gcx> {
185 fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
186 if let Some(lifted) = self.tcx.lift(ty) {
190 "found type `{:?}` with inference types/regions in MIR",
195 fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) {
196 if let Some(lifted) = self.tcx.lift(region) {
200 "found region `{:?}` with inference types/regions in MIR",
205 fn visit_const(&mut self, constant: &mut &'tcx ty::LazyConst<'tcx>, _: Location) {
206 if let Some(lifted) = self.tcx.lift(constant) {
210 "found constant `{:?}` with inference types/regions in MIR",
215 fn visit_substs(&mut self, substs: &mut &'tcx Substs<'tcx>, _: Location) {
216 if let Some(lifted) = self.tcx.lift(substs) {
220 "found substs `{:?}` with inference types/regions in MIR",
/// Builds the MIR shim for a tuple-struct or tuple-variant constructor.
/// `span_bug!`s on any non-tuple variant, since only tuple constructors
/// have a function-like body to synthesize.
226 fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
227 ctor_id: ast::NodeId,
228 v: &'tcx hir::VariantData)
231 let span = tcx.hir().span(ctor_id);
232 if let hir::VariantData::Tuple(ref fields, ctor_id, _) = *v {
233 tcx.infer_ctxt().enter(|infcx| {
234 let mut mir = shim::build_adt_ctor(&infcx, ctor_id, fields, span);
236 // Convert the Mir to global types.
237 let tcx = infcx.tcx.global_tcx();
238 let mut globalizer = GlobalizeMir {
242 globalizer.visit_mir(&mut mir);
// Sound only because `GlobalizeMir` has replaced all inference
// types/regions with global-tcx ones (same pattern as `mir_build`).
244 mem::transmute::<Mir<'_>, Mir<'tcx>>(mir)
247 mir_util::dump_mir(tcx, None, "mir_map", &0,
248 MirSource::item(tcx.hir().local_def_id(ctor_id)),
249 &mir, |_, _| Ok(()) );
254 span_bug!(span, "attempting to create MIR for non-tuple variant {:?}", v);
258 ///////////////////////////////////////////////////////////////////////////
259 // BuildMir -- walks a crate, looking for fn items and methods to build MIR from
/// Computes the type of the closure environment (implicit `self`)
/// argument for a closure body, with late-bound regions liberated so
/// the body can be built against free regions.
261 fn liberated_closure_env_ty<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
262 closure_expr_id: ast::NodeId,
263 body_id: hir::BodyId)
265 let closure_expr_hir_id = tcx.hir().node_to_hir_id(closure_expr_id);
266 let closure_ty = tcx.body_tables(body_id).node_type(closure_expr_hir_id);
// The closure expression must have been assigned a closure type by
// typeck; anything else is a compiler bug.
268 let (closure_def_id, closure_substs) = match closure_ty.sty {
269 ty::Closure(closure_def_id, closure_substs) => (closure_def_id, closure_substs),
270 _ => bug!("closure expr does not have closure type: {:?}", closure_ty)
273 let closure_env_ty = tcx.closure_env_ty(closure_def_id, closure_substs).unwrap();
274 tcx.liberate_late_bound_regions(closure_def_id, &closure_env_ty)
277 #[derive(Debug, PartialEq, Eq)]
278 pub enum BlockFrame {
279 /// Evaluation is currently within a statement.
281 /// Examples include:
283 /// 2. `let _ = EXPR;`
284 /// 3. `let x = EXPR;`
286 /// If true, then statement discards result from evaluating
287 /// the expression (such as examples 1 and 2 above).
288 ignores_expr_result: bool
291 /// Evaluation is currently within the tail expression of a block.
293 /// Example: `{ STMT_1; STMT_2; EXPR }`
295 /// If true, then the surrounding context of the block ignores
296 /// the result of evaluating the block's tail expression.
298 /// Example: `let _ = { STMT_1; EXPR };`
299 tail_result_is_ignored: bool
302 /// Generic mark meaning that the block occurred as a subexpression
303 /// where the result might be used.
305 /// Examples: `foo(EXPR)`, `match EXPR { ... }`
// Convenience predicates over the frame kinds above. Both spell out
// every variant so that adding a new variant forces these to be
// revisited.
310 fn is_tail_expr(&self) -> bool {
312 BlockFrame::TailExpr { .. } => true,
314 BlockFrame::Statement { .. } |
315 BlockFrame::SubExpr => false,
318 fn is_statement(&self) -> bool {
320 BlockFrame::Statement { .. } => true,
322 BlockFrame::TailExpr { .. } |
323 BlockFrame::SubExpr => false,
// Stack of `BlockFrame`s recording what kind of expression context the
// builder is currently lowering; see the `block_context` field of
// `Builder` and the methods on `BlockContext` below.
329 struct BlockContext(Vec<BlockFrame>);
331 struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
/// The HAIR lowering context (typeck tables, tcx access, etc.).
332 hir: Cx<'a, 'gcx, 'tcx>,
338 /// The current set of scopes, updated as we traverse;
339 /// see the `scope` module for more details.
340 scopes: Vec<scope::Scope<'tcx>>,
342 /// The block-context: each time we build the code within an hair::Block,
343 /// we push a frame here tracking whether we are building a statement or
344 /// if we are pushing the tail expression of the block. This is used to
345 /// embed information in generated temps about whether they were created
346 /// for a block tail expression or not.
348 /// It would be great if we could fold this into `self.scopes`
349 /// somehow, but right now I think that is very tightly tied to
350 /// the code generation in ways that we cannot (or should not)
351 /// start just throwing new entries onto that vector in order to
352 /// distinguish the context of EXPR1 from the context of EXPR2 in
353 /// `{ STMTS; EXPR1 } + EXPR2`.
354 block_context: BlockContext,
356 /// The current unsafe block in scope, even if it is hidden by
357 /// a `PushUnsafeBlock`.
358 unpushed_unsafe: Safety,
360 /// The number of `push_unsafe_block` levels in scope.
361 push_unsafe_count: usize,
363 /// The current set of breakables; see the `scope` module for more
365 breakable_scopes: Vec<scope::BreakableScope<'tcx>>,
367 /// The vector of all scopes that we have created thus far;
368 /// we track this for debuginfo later.
369 source_scopes: IndexVec<SourceScope, SourceScopeData>,
/// Per-source-scope data kept alongside `source_scopes`.
370 source_scope_local_data: IndexVec<SourceScope, SourceScopeLocalData>,
/// The source scope that newly-built statements are attributed to.
371 source_scope: SourceScope,
373 /// The guard-context: each time we build the guard expression for
374 /// a match arm, we push onto this stack, and then pop when we
375 /// finish building it.
376 guard_context: Vec<GuardFrame>,
378 /// Maps `NodeId`s of variable bindings to the `Local`s created for them.
379 /// (A match binding can have two locals; the 2nd is for the arm's guard.)
380 var_indices: NodeMap<LocalsForNode>,
/// Declarations for every local in the body, indexed by `Local`.
381 local_decls: IndexVec<Local, LocalDecl<'tcx>>,
/// User-written type ascriptions collected during building.
382 canonical_user_type_annotations: ty::CanonicalUserTypeAnnotations<'tcx>,
/// Declarations for captured upvars (non-empty only for closures).
383 upvar_decls: Vec<UpvarDecl>,
/// Lazily-created `()` temporary; see `get_unit_temp`.
384 unit_temp: Option<Place<'tcx>>,
386 /// Cached block with the `RESUME` terminator; this is created
387 /// when first set of cleanups are built.
388 cached_resume_block: Option<BasicBlock>,
389 /// Cached block with the `RETURN` terminator.
390 cached_return_block: Option<BasicBlock>,
391 /// Cached block with the `UNREACHABLE` terminator.
392 cached_unreachable_block: Option<BasicBlock>,
395 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Returns true if `id` is a pattern binding referenced from any
/// match-arm guard currently being built (i.e. present in some frame
/// of `guard_context`).
396 fn is_bound_var_in_guard(&self, id: ast::NodeId) -> bool {
397 self.guard_context.iter().any(|frame| frame.locals.iter().any(|local| local.id == id))
/// Resolves a binding's `NodeId` to the `Local` appropriate for the
/// requested guard context (see `LocalsForNode` / `ForGuard`).
400 fn var_local_id(&self, id: ast::NodeId, for_guard: ForGuard) -> Local {
401 self.var_indices[&id].local_id(for_guard)
// Basic stack operations on the block-context stack.
406 fn new() -> Self { BlockContext(vec![]) }
407 fn push(&mut self, bf: BlockFrame) { self.0.push(bf); }
408 fn pop(&mut self) -> Option<BlockFrame> { self.0.pop() }
410 /// Traverses the frames on the `BlockContext`, searching for either
411 /// the first block-tail expression frame with no intervening
414 /// Notably, this skips over `SubExpr` frames; this method is
415 /// meant to be used in the context of understanding the
416 /// relationship of a temp (created within some complicated
417 /// expression) with its containing expression, and whether the
418 /// value of that *containing expression* (not the temp!) is
420 fn currently_in_block_tail(&self) -> Option<BlockTailInfo> {
// Walk innermost-first: `SubExpr` frames are transparent; hitting a
// `Statement` frame means we are not in a tail position at all.
421 for bf in self.0.iter().rev() {
423 BlockFrame::SubExpr => continue,
424 BlockFrame::Statement { .. } => break,
425 &BlockFrame::TailExpr { tail_result_is_ignored } =>
426 return Some(BlockTailInfo { tail_result_is_ignored })
433 /// Looks at the topmost frame on the BlockContext and reports
434 /// whether its one that would discard a block tail result.
436 /// Unlike `currently_within_ignored_tail_expression`, this does
437 /// *not* skip over `SubExpr` frames: here, we want to know
438 /// whether the block result itself is discarded.
439 fn currently_ignores_tail_results(&self) -> bool {
440 match self.0.last() {
441 // no context: conservatively assume result is read
444 // sub-expression: block result feeds into some computation
445 Some(BlockFrame::SubExpr) => false,
447 // otherwise: use accumulated is_ignored state.
448 Some(BlockFrame::TailExpr { tail_result_is_ignored: ignored }) |
449 Some(BlockFrame::Statement { ignores_expr_result: ignored }) => *ignored,
456 /// In the usual case, a `NodeId` for an identifier maps to at most
457 /// one `Local` declaration.
460 /// The exceptional case is identifiers in a match arm's pattern
461 /// that are referenced in a guard of that match arm. For these,
462 /// we have `2` Locals.
464 /// * `for_arm_body` is the Local used in the arm body (which is
465 /// just like the `One` case above),
467 /// * `ref_for_guard` is the Local used in the arm's guard (which
468 /// is a reference to a temp that is an alias of
470 ForGuard { ref_for_guard: Local, for_arm_body: Local },
// A single pattern binding tracked while a match-arm guard is built;
// matched against in `Builder::is_bound_var_in_guard` via its `id`.
474 struct GuardFrameLocal {
478 impl GuardFrameLocal {
// NOTE(review): the binding mode parameter is unused here (leading
// underscore) — presumably retained for future checks; confirm before
// removing it.
479 fn new(id: ast::NodeId, _binding_mode: BindingMode) -> Self {
488 /// These are the id's of names that are bound by patterns of the
489 /// arm of *this* guard.
491 /// (Frames higher up the stack will have the id's bound in arms
492 /// further out, such as in a case like:
495 /// P1(id1) if (... (match E2 { P2(id2) if ... => B2 })) => B1,
498 /// here, when building for FIXME.
499 locals: Vec<GuardFrameLocal>,
502 /// `ForGuard` indicates whether we are talking about:
503 /// 1. The variable for use outside of guard expressions, or
504 /// 2. The temp that holds reference to (1.), which is actually what the
505 /// guard expressions see.
506 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
/// Projects the `Local` for the requested guard context out of a
/// `LocalsForNode`; asking for the within-guard local of a `One`
/// binding is a bug.
513 fn local_id(&self, for_guard: ForGuard) -> Local {
514 match (self, for_guard) {
515 (&LocalsForNode::One(local_id), ForGuard::OutsideGuard) |
516 (&LocalsForNode::ForGuard { ref_for_guard: local_id, .. }, ForGuard::RefWithinGuard) |
517 (&LocalsForNode::ForGuard { for_arm_body: local_id, .. }, ForGuard::OutsideGuard) =>
520 (&LocalsForNode::One(_), ForGuard::RefWithinGuard) =>
521 bug!("anything with one local should never be within a guard."),
// The control-flow graph being built: just the vector of basic blocks.
527 basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
// NOTE(review): the `{ .. }` body is not plain Rust — this line appears
// to be the argument of an index-newtype macro invocation (e.g.
// `newtype_index!`) whose surrounding lines are not visible here.
531 pub struct ScopeId { .. }
534 ///////////////////////////////////////////////////////////////////////////
535 /// The `BlockAnd` "monad" packages up the new basic block along with a
536 /// produced value (sometimes just unit, of course). The `unpack!`
537 /// macro (and methods below) makes working with `BlockAnd` much more
540 #[must_use = "if you don't use one of these results, you're leaving a dangling edge"]
541 struct BlockAnd<T>(BasicBlock, T);
// Extension trait so any `BasicBlock` can be paired with a produced
// value (`block.and(v)`) or with unit (`block.unit()`).
543 trait BlockAndExtension {
544 fn and<T>(self, v: T) -> BlockAnd<T>;
545 fn unit(self) -> BlockAnd<()>;
548 impl BlockAndExtension for BasicBlock {
549 fn and<T>(self, v: T) -> BlockAnd<T> {
553 fn unit(self) -> BlockAnd<()> {
558 /// Update a block pointer and return the value.
559 /// Use it like `let x = unpack!(block = self.foo(block, foo))`.
560 macro_rules! unpack {
// Two arms: one destructures a carried value and yields it, the other
// handles the unit-only case.
561 ($x:ident = $c:expr) => {
563 let BlockAnd(b, v) = $c;
571 let BlockAnd(b, ()) = $c;
/// Decides whether a function with the given ABI should abort rather
/// than unwind when it panics, based on the ABI, the session's panic
/// strategy, landing-pad availability, and any `#[unwind(...)]`
/// attribute on the function.
577 fn should_abort_on_panic<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
581 // Not callable from C, so we can safely unwind through these
582 if abi == Abi::Rust || abi == Abi::RustCall { return false; }
584 // We never unwind, so it's not relevant to stop an unwind
585 if tcx.sess.panic_strategy() != PanicStrategy::Unwind { return false; }
587 // We cannot add landing pads, so don't add one
588 if tcx.sess.no_landing_pads() { return false; }
590 // This is a special case: some functions have a C abi but are meant to
591 // unwind anyway. Don't stop them.
592 let attrs = &tcx.get_attrs(fn_def_id);
593 match attr::find_unwind_attr(Some(tcx.sess.diagnostic()), attrs) {
595 Some(UnwindAttr::Allowed) => false,
596 Some(UnwindAttr::Aborts) => true,
600 ///////////////////////////////////////////////////////////////////////////
601 /// the main entry point for building MIR for a function
// Per-argument bundle passed into `construct_fn`: the argument's type,
// an optional span for its declared type, the binding pattern (if
// any), and whether it is an implicit `self` argument (see the
// `ArgInfo(...)` construction sites in `mir_build`).
603 struct ArgInfo<'gcx>(Ty<'gcx>,
605 Option<&'gcx hir::Pat>,
606 Option<ImplicitSelfKind>);
/// Lowers a fn-like body (fn, method, closure, or generator) to MIR.
/// `arguments` yields one `ArgInfo` per parameter, including any
/// implicit closure-env/generator argument prepended by the caller.
608 fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
614 yield_ty: Option<Ty<'gcx>>,
615 return_ty_span: Span,
616 body: &'gcx hir::Body)
618 where A: Iterator<Item=ArgInfo<'gcx>>
620 let arguments: Vec<_> = arguments.collect();
623 let tcx_hir = tcx.hir();
624 let span = tcx_hir.span(fn_id);
626 let hir_tables = hir.tables();
627 let fn_def_id = tcx_hir.local_def_id(fn_id);
629 // Gather the upvars of a closure, if any.
630 // In analyze_closure() in upvar.rs we gathered a list of upvars used by a
631 // closure and we stored in a map called upvar_list in TypeckTables indexed
632 // with the closure's DefId. Here, we run through that vec of UpvarIds for
633 // the given closure and use the necessary information to create UpvarDecl.
634 let upvar_decls: Vec<_> = hir_tables
640 let var_hir_id = upvar_id.var_path.hir_id;
641 let var_node_id = tcx_hir.hir_to_node_id(var_hir_id);
642 let capture = hir_tables.upvar_capture(*upvar_id);
643 let by_ref = match capture {
644 ty::UpvarCapture::ByValue => false,
645 ty::UpvarCapture::ByRef(..) => true,
647 let mut decl = UpvarDecl {
// Placeholder name and mutability; both are refined from the
// actual binding pattern just below (if it can be found).
648 debug_name: keywords::Invalid.name(),
649 var_hir_id: ClearCrossCrate::Set(var_hir_id),
651 mutability: Mutability::Not,
653 if let Some(Node::Binding(pat)) = tcx_hir.find(var_node_id) {
654 if let hir::PatKind::Binding(_, _, _, ident, _) = pat.node {
655 decl.debug_name = ident.name;
656 if let Some(&bm) = hir.tables.pat_binding_modes().get(pat.hir_id) {
657 if bm == ty::BindByValue(hir::MutMutable) {
658 decl.mutability = Mutability::Mut;
660 decl.mutability = Mutability::Not;
663 tcx.sess.delay_span_bug(pat.span, "missing binding mode");
671 let mut builder = Builder::new(hir,
// The call-site scope encloses the entire call; the argument scope
// encloses the argument bindings. Both are keyed on the body's root
// expression.
679 let call_site_scope = region::Scope {
680 id: body.value.hir_id.local_id,
681 data: region::ScopeData::CallSite
683 let arg_scope = region::Scope {
684 id: body.value.hir_id.local_id,
685 data: region::ScopeData::Arguments
687 let mut block = START_BLOCK;
688 let source_info = builder.source_info(span);
689 let call_site_s = (call_site_scope, source_info);
690 unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, block, |builder| {
691 if should_abort_on_panic(tcx, fn_def_id, abi) {
692 builder.schedule_abort();
695 let arg_scope_s = (arg_scope, source_info);
696 unpack!(block = builder.in_scope(arg_scope_s, LintLevel::Inherited, block, |builder| {
697 builder.args_and_body(block, &arguments, arg_scope, &body.value)
699 // Attribute epilogue to function's closing brace
700 let fn_end = span.shrink_to_hi();
701 let source_info = builder.source_info(fn_end);
702 let return_block = builder.return_block();
703 builder.cfg.terminate(block, source_info,
704 TerminatorKind::Goto { target: return_block });
705 builder.cfg.terminate(return_block, source_info,
706 TerminatorKind::Return);
707 // Attribute any unreachable codepaths to the function's closing brace
708 if let Some(unreachable_block) = builder.cached_unreachable_block {
709 builder.cfg.terminate(unreachable_block, source_info,
710 TerminatorKind::Unreachable);
714 assert_eq!(block, builder.return_block());
716 let mut spread_arg = None;
717 if abi == Abi::RustCall {
718 // RustCall pseudo-ABI untuples the last argument.
719 spread_arg = Some(Local::new(arguments.len()));
721 let closure_expr_id = tcx_hir.local_def_id(fn_id);
722 info!("fn_id {:?} has attrs {:?}", closure_expr_id,
723 tcx.get_attrs(closure_expr_id));
725 let mut mir = builder.finish(yield_ty);
726 mir.spread_arg = spread_arg;
/// Lowers a constant body (const/static/anon-const) to MIR: evaluate
/// the body expression directly into the return place, then return.
730 fn construct_const<'a, 'gcx, 'tcx>(
731 hir: Cx<'a, 'gcx, 'tcx>,
732 body_id: hir::BodyId,
736 let ast_expr = &tcx.hir().body(body_id).value;
737 let ty = hir.tables().expr_ty_adjusted(ast_expr);
738 let owner_id = tcx.hir().body_owner(body_id);
739 let span = tcx.hir().span(owner_id);
740 let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, ty_span,vec![]);
742 let mut block = START_BLOCK;
743 let expr = builder.hir.mirror(ast_expr);
744 unpack!(block = builder.into_expr(&Place::Local(RETURN_PLACE), block, expr));
746 let source_info = builder.source_info(span);
747 builder.cfg.terminate(block, source_info, TerminatorKind::Return);
749 // Constants can't `return` so a return block should not be created.
750 assert_eq!(builder.cached_return_block, None);
752 // Constants may be match expressions in which case an unreachable block may
753 // be created, so terminate it properly.
754 if let Some(unreachable_block) = builder.cached_unreachable_block {
755 builder.cfg.terminate(unreachable_block, source_info,
756 TerminatorKind::Unreachable);
/// Builds a minimal placeholder MIR body for an item whose typeck
/// tables are tainted by errors: a single start block terminated with
/// `Unreachable`, using the error type as the return type.
762 fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
763 body_id: hir::BodyId)
765 let owner_id = hir.tcx().hir().body_owner(body_id);
766 let span = hir.tcx().hir().span(owner_id);
767 let ty = hir.tcx().types.err;
768 let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, span, vec![]);
769 let source_info = builder.source_info(span);
770 builder.cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable);
774 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Creates a fresh builder: allocates the return place as local 0,
/// creates `START_BLOCK`, and opens the outermost source scope.
775 fn new(hir: Cx<'a, 'gcx, 'tcx>,
781 upvar_decls: Vec<UpvarDecl>)
782 -> Builder<'a, 'gcx, 'tcx> {
783 let lint_level = LintLevel::Explicit(hir.root_lint_level);
784 let mut builder = Builder {
786 cfg: CFG { basic_blocks: IndexVec::new() },
790 block_context: BlockContext::new(),
791 source_scopes: IndexVec::new(),
792 source_scope: OUTERMOST_SOURCE_SCOPE,
793 source_scope_local_data: IndexVec::new(),
794 guard_context: vec![],
795 push_unsafe_count: 0,
796 unpushed_unsafe: safety,
797 breakable_scopes: vec![],
// Local 0 is always the return place.
798 local_decls: IndexVec::from_elem_n(
799 LocalDecl::new_return_place(return_ty, return_span),
802 canonical_user_type_annotations: IndexVec::new(),
804 var_indices: Default::default(),
806 cached_resume_block: None,
807 cached_return_block: None,
808 cached_unreachable_block: None,
// The very first block created must be START_BLOCK, and the first
// source scope must be the outermost one (with no parent).
811 assert_eq!(builder.cfg.start_new_block(), START_BLOCK);
813 builder.new_source_scope(span, lint_level, Some(safety)),
814 OUTERMOST_SOURCE_SCOPE);
815 builder.source_scopes[OUTERMOST_SOURCE_SCOPE].parent_scope = None;
// finish(yield_ty): consumes the builder and assembles the final `Mir`
// value, first asserting that every basic block has been given a
// terminator (a block without one would be a builder bug).
821 yield_ty: Option<Ty<'tcx>>)
823 for (index, block) in self.cfg.basic_blocks.iter().enumerate() {
824 if block.terminator.is_none() {
825 span_bug!(self.fn_span, "no terminator on block {:?}", index);
830 self.cfg.basic_blocks,
832 ClearCrossCrate::Set(self.source_scope_local_data),
836 self.canonical_user_type_annotations,
840 self.hir.control_flow_destroyed(),
/// Allocates and binds the argument locals, then lowers the function
/// body expression into the return place.
844 fn args_and_body(&mut self,
845 mut block: BasicBlock,
846 arguments: &[ArgInfo<'gcx>],
847 argument_scope: region::Scope,
848 ast_body: &'gcx hir::Expr)
851 // Allocate locals for the function arguments
852 for &ArgInfo(ty, _, pattern, _) in arguments.iter() {
853 // If this is a simple binding pattern, give the local a name for
854 // debuginfo and so that error reporting knows that this is a user
855 // variable. For any other pattern the pattern introduces new
856 // variables which will be named instead.
858 if let Some(pat) = pattern {
860 hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, _, _, ident, _)
861 | hir::PatKind::Binding(hir::BindingAnnotation::Mutable, _, _, ident, _) => {
862 name = Some(ident.name);
868 let source_info = SourceInfo {
869 scope: OUTERMOST_SOURCE_SCOPE,
870 span: pattern.map_or(self.fn_span, |pat| pat.span)
872 self.local_decls.push(LocalDecl {
873 mutability: Mutability::Mut,
875 user_ty: UserTypeProjections::none(),
877 visibility_scope: source_info.scope,
880 is_user_variable: None,
885 let mut scope = None;
886 // Bind the argument patterns
887 for (index, arg_info) in arguments.iter().enumerate() {
888 // Function arguments always get the first Local indices after the return place
889 let local = Local::new(index + 1);
890 let place = Place::Local(local);
891 let &ArgInfo(ty, opt_ty_info, pattern, ref self_binding) = arg_info;
893 // Make sure we drop (parts of) the argument even when not matched on.
895 pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
896 argument_scope, &place, ty,
897 DropKind::Value { cached_block: CachedBlock::default() },
900 if let Some(pattern) = pattern {
901 let pattern = self.hir.pattern_from_hir(pattern);
902 let span = pattern.span;
904 match *pattern.kind {
905 // Don't introduce extra copies for simple bindings
906 PatternKind::Binding { mutability, var, mode: BindingMode::ByValue, .. } => {
907 self.local_decls[local].mutability = mutability;
908 self.local_decls[local].is_user_variable =
909 if let Some(kind) = self_binding {
910 Some(ClearCrossCrate::Set(BindingForm::ImplicitSelf(*kind)))
912 let binding_mode = ty::BindingMode::BindByValue(mutability.into());
913 Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
916 opt_match_place: Some((Some(place.clone()), span)),
920 self.var_indices.insert(var, LocalsForNode::One(local));
// Non-trivial patterns: declare the pattern's bindings and
// compile a match of the pattern against the argument place.
923 scope = self.declare_bindings(scope, ast_body.span,
924 LintLevel::Inherited, &pattern,
925 matches::ArmHasGuard(false),
926 Some((Some(&place), span)));
927 unpack!(block = self.place_into_pattern(block, pattern, &place, false));
933 // Enter the argument pattern bindings source scope, if it exists.
934 if let Some(source_scope) = scope {
935 self.source_scope = source_scope;
938 let body = self.hir.mirror(ast_body);
939 self.into(&Place::Local(RETURN_PLACE), block, body)
/// Returns the cached `()` temporary, creating it on first use.
942 fn get_unit_temp(&mut self) -> Place<'tcx> {
943 match self.unit_temp {
944 Some(ref tmp) => tmp.clone(),
946 let ty = self.hir.unit_ty();
947 let fn_span = self.fn_span;
948 let tmp = self.temp(ty, fn_span);
949 self.unit_temp = Some(tmp.clone());
/// Returns the block that will hold the `Return` terminator, creating
/// and caching it on first use.
955 fn return_block(&mut self) -> BasicBlock {
956 match self.cached_return_block {
959 let rb = self.cfg.start_new_block();
960 self.cached_return_block = Some(rb);
/// Returns the block that will hold the `Unreachable` terminator,
/// creating and caching it on first use.
966 fn unreachable_block(&mut self) -> BasicBlock {
967 match self.cached_unreachable_block {
970 let ub = self.cfg.start_new_block();
971 self.cached_unreachable_block = Some(ub);
978 ///////////////////////////////////////////////////////////////////////////
979 // Builder methods are broken up into modules, depending on what kind
980 // of thing is being lowered. Note that they use the `unpack` macro
981 // above extensively.