1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 use build::scope::{CachedBlock, DropKind};
15 use hair::{LintLevel, BindingMode, PatternKind};
17 use rustc::hir::def_id::{DefId, LocalDefId};
18 use rustc::middle::region;
20 use rustc::mir::visit::{MutVisitor, TyContext};
21 use rustc::ty::{self, Ty, TyCtxt};
22 use rustc::ty::subst::Substs;
23 use rustc::util::nodemap::NodeMap;
24 use rustc_target::spec::PanicStrategy;
25 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
29 use rustc_target::spec::abi::Abi;
31 use syntax::attr::{self, UnwindAttr};
32 use syntax::symbol::keywords;
34 use transform::MirSource;
37 /// Construct the MIR for a given def-id.
38 pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'tcx> {
// Map the DefId back to an AST NodeId; MIR is only built for local items,
// so a failure here would be a compiler bug (hence the unwrap).
39 let id = tcx.hir.as_local_node_id(def_id).unwrap();
// Closure that ICEs for item kinds that cannot have MIR built for them.
40 let unsupported = || {
41 span_bug!(tcx.hir.span(id), "can't build MIR for {:?}", def_id);
44 // Figure out what primary body this item has.
45 let body_id = match tcx.hir.get(id) {
// Tuple-variant / tuple-struct constructors have no user-written body;
// a synthesized constructor shim is built for them instead.
46 hir::map::NodeVariant(variant) =>
47 return create_constructor_shim(tcx, id, &variant.node.data),
48 hir::map::NodeStructCtor(ctor) =>
49 return create_constructor_shim(tcx, id, ctor),
51 _ => match tcx.hir.maybe_body_owned_by(id) {
53 None => unsupported(),
// Build inside an inference context; the resulting MIR is lifted to the
// global tcx (see GlobalizeMir) before being returned.
57 tcx.infer_ctxt().enter(|infcx| {
58 let cx = Cx::new(&infcx, id);
59 let mut mir = if cx.tables().tainted_by_errors {
// Typeck already failed somewhere in this body: produce a dummy
// error body rather than risking an ICE during lowering.
60 build::construct_error(cx, body_id)
61 } else if let hir::BodyOwnerKind::Fn = cx.body_owner_kind {
62 // fetch the fully liberated fn signature (that is, all bound
63 // types/lifetimes replaced)
64 let fn_hir_id = tcx.hir.node_to_hir_id(id);
65 let fn_sig = cx.tables().liberated_fn_sigs()[fn_hir_id].clone();
67 let ty = tcx.type_of(tcx.hir.local_def_id(id));
68 let mut abi = fn_sig.abi;
// Closures and generators take an implicit environment argument that
// does not appear in the HIR argument list; synthesize it here.
69 let implicit_argument = match ty.sty {
70 ty::TyClosure(..) => {
71 // HACK(eddyb) Avoid having RustCall on closures,
72 // as it adds unnecessary (and wrong) auto-tupling.
74 Some(ArgInfo(liberated_closure_env_ty(tcx, id, body_id), None, None, None))
76 ty::TyGenerator(..) => {
77 let gen_ty = tcx.body_tables(body_id).node_id_to_type(fn_hir_id);
78 Some(ArgInfo(gen_ty, None, None, None))
83 // FIXME: safety in closures
84 let safety = match fn_sig.unsafety {
85 hir::Unsafety::Normal => Safety::Safe,
86 hir::Unsafety::Unsafe => Safety::FnUnsafe,
89 let body = tcx.hir.body(body_id);
// For each declared argument, collect its type, the span of its type
// annotation (if available), its pattern, and whether it is `self`.
90 let explicit_arguments =
95 let owner_id = tcx.hir.body_owner(body_id);
98 if let Some(ref fn_decl) = tcx.hir.fn_decl(owner_id) {
99 let ty_hir_id = fn_decl.inputs[index].hir_id;
100 let ty_span = tcx.hir.span(tcx.hir.hir_to_node_id(ty_hir_id));
101 opt_ty_info = Some(ty_span);
// Only the first argument of a method with implicit self is `self`.
102 self_arg = if index == 0 && fn_decl.has_implicit_self {
103 Some(ImplicitSelfBinding)
111 ArgInfo(fn_sig.inputs()[index], opt_ty_info, Some(&*arg.pat), self_arg)
// Implicit env argument (if any) comes first, then the explicit ones.
114 let arguments = implicit_argument.into_iter().chain(explicit_arguments);
// Generators yield values in addition to returning; extract both types
// from the generator signature. Non-generators have no yield type.
116 let (yield_ty, return_ty) = if body.is_generator {
117 let gen_sig = match ty.sty {
118 ty::TyGenerator(gen_def_id, gen_substs, ..) =>
119 gen_substs.sig(gen_def_id, tcx),
121 span_bug!(tcx.hir.span(id), "generator w/o generator type: {:?}", ty),
123 (Some(gen_sig.yield_ty), gen_sig.return_ty)
125 (None, fn_sig.output())
128 build::construct_fn(cx, id, arguments, safety, abi,
129 return_ty, yield_ty, body)
// Non-fn body owners (consts, statics, etc.) go through construct_const.
131 build::construct_const(cx, body_id)
134 // Convert the Mir to global types.
135 let mut globalizer = GlobalizeMir {
139 globalizer.visit_mir(&mut mir);
// After GlobalizeMir has lifted every type/region/const/substs to the
// global tcx, the inference-context lifetime can be erased by transmute
// (see the GlobalizeMir doc comment for why a transmute is needed).
141 mem::transmute::<Mir, Mir<'tcx>>(mir)
// Dump the freshly built MIR for `-Z dump-mir` debugging.
144 mir_util::dump_mir(tcx, None, "mir_map", &0,
145 MirSource::item(def_id), &mir, |_, _| Ok(()) );
151 /// A pass to lift all the types and substitutions in a Mir
152 /// to the global tcx. Sadly, we don't have a "folder" that
153 /// can change 'tcx so we have to transmute afterwards.
154 struct GlobalizeMir<'a, 'gcx: 'a> {
// The global type context that values are lifted into.
155 tcx: TyCtxt<'a, 'gcx, 'gcx>,
// Each visit_* hook attempts to lift the visited value into the global tcx;
// a value that still contains inference types/regions cannot be lifted and
// is reported as a bug. (The success/failure branches after each `lift`
// call are elided in this view — presumably the lifted value is written
// back in place; confirm against the full file.)
159 impl<'a, 'gcx: 'tcx, 'tcx> MutVisitor<'tcx> for GlobalizeMir<'a, 'gcx> {
160 fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
161 if let Some(lifted) = self.tcx.lift(ty) {
165 "found type `{:?}` with inference types/regions in MIR",
170 fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) {
171 if let Some(lifted) = self.tcx.lift(region) {
175 "found region `{:?}` with inference types/regions in MIR",
180 fn visit_const(&mut self, constant: &mut &'tcx ty::Const<'tcx>, _: Location) {
181 if let Some(lifted) = self.tcx.lift(constant) {
185 "found constant `{:?}` with inference types/regions in MIR",
190 fn visit_substs(&mut self, substs: &mut &'tcx Substs<'tcx>, _: Location) {
191 if let Some(lifted) = self.tcx.lift(substs) {
195 "found substs `{:?}` with inference types/regions in MIR",
// Build the synthesized MIR body for a tuple-struct / tuple-variant
// constructor; there is no user-written body to lower for these.
201 fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
202 ctor_id: ast::NodeId,
203 v: &'tcx hir::VariantData)
206 let span = tcx.hir.span(ctor_id);
// Only tuple-like variant data carries a constructor; the shim is built
// from the field list inside a fresh inference context.
207 if let hir::VariantData::Tuple(ref fields, ctor_id) = *v {
208 tcx.infer_ctxt().enter(|infcx| {
209 let mut mir = shim::build_adt_ctor(&infcx, ctor_id, fields, span);
211 // Convert the Mir to global types.
212 let tcx = infcx.tcx.global_tcx();
213 let mut globalizer = GlobalizeMir {
217 globalizer.visit_mir(&mut mir);
// As in mir_build: once everything has been lifted to the global tcx,
// erase the inference-context lifetime via transmute.
219 mem::transmute::<Mir, Mir<'tcx>>(mir)
// Dump the shim for `-Z dump-mir` debugging.
222 mir_util::dump_mir(tcx, None, "mir_map", &0,
223 MirSource::item(tcx.hir.local_def_id(ctor_id)),
224 &mir, |_, _| Ok(()) );
// Calling this on anything but a tuple variant is a compiler bug.
229 span_bug!(span, "attempting to create MIR for non-tuple variant {:?}", v);
233 ///////////////////////////////////////////////////////////////////////////
234 // BuildMir -- walks a crate, looking for fn items and methods to build MIR from
// Compute the type of a closure's implicit environment argument, with its
// late-bound regions liberated so it can be used directly in MIR.
236 fn liberated_closure_env_ty<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
237 closure_expr_id: ast::NodeId,
238 body_id: hir::BodyId)
240 let closure_expr_hir_id = tcx.hir.node_to_hir_id(closure_expr_id);
// The type of the closure expression itself, from the body's typeck tables.
241 let closure_ty = tcx.body_tables(body_id).node_id_to_type(closure_expr_hir_id);
243 let (closure_def_id, closure_substs) = match closure_ty.sty {
244 ty::TyClosure(closure_def_id, closure_substs) => (closure_def_id, closure_substs),
// Reaching here with a non-closure type is a compiler bug.
245 _ => bug!("closure expr does not have closure type: {:?}", closure_ty)
// `closure_env_ty` returns the env type wrapped in a binder; liberate its
// late-bound regions before handing it back to the caller.
248 let closure_env_ty = tcx.closure_env_ty(closure_def_id, closure_substs).unwrap();
249 tcx.liberate_late_bound_regions(closure_def_id, &closure_env_ty)
// Central state for lowering one HIR body into MIR: the control-flow graph
// under construction plus all scope/binding/caching bookkeeping.
252 struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Lowering context giving access to the typeck tables and the tcx.
253 hir: Cx<'a, 'gcx, 'tcx>,
259 /// the current set of scopes, updated as we traverse;
260 /// see the `scope` module for more details
261 scopes: Vec<scope::Scope<'tcx>>,
263 /// The current unsafe block in scope, even if it is hidden by
264 /// a PushUnsafeBlock
265 unpushed_unsafe: Safety,
267 /// The number of `push_unsafe_block` levels in scope
268 push_unsafe_count: usize,
270 /// the current set of breakables; see the `scope` module for more
272 breakable_scopes: Vec<scope::BreakableScope<'tcx>>,
274 /// the vector of all scopes that we have created thus far;
275 /// we track this for debuginfo later
276 source_scopes: IndexVec<SourceScope, SourceScopeData>,
// Per-scope lint/safety data, parallel to `source_scopes`.
277 source_scope_local_data: IndexVec<SourceScope, SourceScopeLocalData>,
// The source scope new statements are currently attributed to.
278 source_scope: SourceScope,
280 /// the guard-context: each time we build the guard expression for
281 /// a match arm, we push onto this stack, and then pop when we
282 /// finish building it.
283 guard_context: Vec<GuardFrame>,
285 /// Maps node ids of variable bindings to the `Local`s created for them.
286 /// (A match binding can have two locals; the 2nd is for the arm's guard.)
287 var_indices: NodeMap<LocalsForNode>,
// All locals of the body, including the return place and arguments.
288 local_decls: IndexVec<Local, LocalDecl<'tcx>>,
// Captured variables when the body belongs to a closure.
289 upvar_decls: Vec<UpvarDecl>,
// Lazily created unit-typed temporary; see `get_unit_temp`.
290 unit_temp: Option<Place<'tcx>>,
292 /// cached block with the RESUME terminator; this is created
293 /// when first set of cleanups are built.
294 cached_resume_block: Option<BasicBlock>,
295 /// cached block with the RETURN terminator
296 cached_return_block: Option<BasicBlock>,
297 /// cached block with the UNREACHABLE terminator
298 cached_unreachable_block: Option<BasicBlock>,
301 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// True if `id` is a pattern binding belonging to any match-arm guard
// currently being built (i.e. present in any frame of `guard_context`).
302 fn is_bound_var_in_guard(&self, id: ast::NodeId) -> bool {
303 self.guard_context.iter().any(|frame| frame.locals.iter().any(|local| local.id == id))
// Look up the `Local` created for a variable binding, choosing between
// the guard locals and the arm-body local according to `for_guard`.
306 fn var_local_id(&self, id: ast::NodeId, for_guard: ForGuard) -> Local {
307 self.var_indices[&id].local_id(for_guard)
// Variant of `LocalsForNode` (the enum header is elided in this view): a
// match binding with three locals — a by-value and a by-ref local used
// while evaluating the guard, plus the local used in the arm body.
314 Three { val_for_guard: Local, ref_for_guard: Local, for_arm_body: Local },
// One pattern binding tracked inside a guard frame (fields elided here;
// at least an `id: ast::NodeId` is read by `is_bound_var_in_guard`).
318 struct GuardFrameLocal {
322 impl GuardFrameLocal {
// Constructor; the binding mode is accepted but currently unused (note
// the leading underscore on the parameter).
323 fn new(id: ast::NodeId, _binding_mode: BindingMode) -> Self {
332 /// These are the id's of names that are bound by patterns of the
333 /// arm of *this* guard.
335 /// (Frames higher up the stack will have the id's bound in arms
336 /// further out, such as in a case like:
339 /// P1(id1) if (... (match E2 { P2(id2) if ... => B2 })) => B1,
342 /// here, when building for FIXME
343 locals: Vec<GuardFrameLocal>,
346 /// ForGuard indicates whether we are talking about:
347 /// 1. the temp for a local binding used solely within guard expressions,
348 /// 2. the temp that holds reference to (1.), which is actually what the
349 /// guard expressions see, or
350 /// 3. the temp for use outside of guard expressions.
351 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
// Resolve which of a node's locals to use for the requested guard context.
// (This is a method of `LocalsForNode`; the impl header is elided here.)
359 fn local_id(&self, for_guard: ForGuard) -> Local {
360 match (self, for_guard) {
// Every valid combination binds `local_id` to the matching local and
// returns it (the arm body is elided below).
361 (&LocalsForNode::One(local_id), ForGuard::OutsideGuard) |
362 (&LocalsForNode::Three { val_for_guard: local_id, .. }, ForGuard::ValWithinGuard) |
363 (&LocalsForNode::Three { ref_for_guard: local_id, .. }, ForGuard::RefWithinGuard) |
364 (&LocalsForNode::Three { for_arm_body: local_id, .. }, ForGuard::OutsideGuard) =>
// Requesting a guard local for a single-local node is a compiler bug:
// one-local nodes are never bound inside a guard.
367 (&LocalsForNode::One(_), ForGuard::ValWithinGuard) |
368 (&LocalsForNode::One(_), ForGuard::RefWithinGuard) =>
369 bug!("anything with one local should never be within a guard."),
// The basic blocks of the CFG being built (field of the `CFG` struct,
// whose header is elided in this view).
375 basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
// Dedicated index type for scopes, generated by the newtype_index! macro.
378 newtype_index!(ScopeId);
380 ///////////////////////////////////////////////////////////////////////////
381 /// The `BlockAnd` "monad" packages up the new basic block along with a
382 /// produced value (sometimes just unit, of course). The `unpack!`
383 /// macro (and methods below) makes working with `BlockAnd` much more
386 #[must_use = "if you don't use one of these results, you're leaving a dangling edge"]
387 struct BlockAnd<T>(BasicBlock, T);
// Convenience constructors for building a `BlockAnd` from a `BasicBlock`.
389 trait BlockAndExtension {
390 fn and<T>(self, v: T) -> BlockAnd<T>;
391 fn unit(self) -> BlockAnd<()>;
394 impl BlockAndExtension for BasicBlock {
// `bb.and(v)`: pair this block with a payload value.
395 fn and<T>(self, v: T) -> BlockAnd<T> {
// `bb.unit()`: pair this block with `()`.
399 fn unit(self) -> BlockAnd<()> {
404 /// Update a block pointer and return the value.
405 /// Use it like `let x = unpack!(block = self.foo(block, foo))`.
406 macro_rules! unpack {
// Arm with a payload: write the block into `$x`, evaluate to the value.
407 ($x:ident = $c:expr) => {
409 let BlockAnd(b, v) = $c;
// Unit arm (macro pattern elided): only the block pointer is updated.
417 let BlockAnd(b, ()) = $c;
// Decide whether a panic unwinding out of this function should abort the
// process instead of propagating, based on its ABI, the session's panic
// strategy, and any `#[unwind]` attribute. (Parts of the signature —
// `fn_def_id` and `abi` parameters — are elided in this view.)
423 fn should_abort_on_panic<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
427 // Not callable from C, so we can safely unwind through these
428 if abi == Abi::Rust || abi == Abi::RustCall { return false; }
430 // We never unwind, so it's not relevant to stop an unwind
431 if tcx.sess.panic_strategy() != PanicStrategy::Unwind { return false; }
433 // We cannot add landing pads, so don't add one
434 if tcx.sess.no_landing_pads() { return false; }
436 // This is a special case: some functions have a C abi but are meant to
437 // unwind anyway. Don't stop them.
438 let attrs = &tcx.get_attrs(fn_def_id);
439 match attr::find_unwind_attr(Some(tcx.sess.diagnostic()), attrs) {
441 // FIXME(rust-lang/rust#48251) -- Had to disable
442 // abort-on-panic for backwards compatibility reasons.
// Explicit attribute wins: `#[unwind(allowed)]` permits unwinding,
// `#[unwind(aborts)]` forces an abort shim.
446 Some(UnwindAttr::Allowed) => false,
447 Some(UnwindAttr::Aborts) => true,
451 ///////////////////////////////////////////////////////////////////////////
452 /// the main entry point for building MIR for a function
// Marker stored in `ArgInfo` when the argument is an implicit `self`.
454 struct ImplicitSelfBinding;
// Everything known about one function argument: its type, the span of its
// type annotation (that field's line is elided in this view), its HIR
// pattern if any, and the implicit-self marker.
456 struct ArgInfo<'gcx>(Ty<'gcx>,
458 Option<&'gcx hir::Pat>,
459 Option<ImplicitSelfBinding>);
// Lower a function/closure/generator body to MIR. (Several signature
// lines — fn_id, arguments, safety, abi, return_ty — are elided here.)
461 fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
467 yield_ty: Option<Ty<'gcx>>,
468 body: &'gcx hir::Body)
470 where A: Iterator<Item=ArgInfo<'gcx>>
// Materialize the argument iterator; the list is walked more than once.
472 let arguments: Vec<_> = arguments.collect();
475 let span = tcx.hir.span(fn_id);
477 // Gather the upvars of a closure, if any.
478 let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| {
479 freevars.iter().map(|fv| {
480 let var_id = fv.var_id();
481 let var_hir_id = tcx.hir.node_to_hir_id(var_id);
482 let closure_expr_id = tcx.hir.local_def_id(fn_id);
// How this free variable is captured (by value or by reference),
// according to the typeck tables.
483 let capture = hir.tables().upvar_capture(ty::UpvarId {
485 closure_expr_id: LocalDefId::from_def_id(closure_expr_id),
487 let by_ref = match capture {
488 ty::UpvarCapture::ByValue => false,
489 ty::UpvarCapture::ByRef(..) => true
// Start with placeholder name/mutability and refine below from the
// binding pattern, when one can be found.
491 let mut decl = UpvarDecl {
492 debug_name: keywords::Invalid.name(),
493 var_hir_id: ClearCrossCrate::Set(var_hir_id),
495 mutability: Mutability::Not,
497 if let Some(hir::map::NodeBinding(pat)) = tcx.hir.find(var_id) {
498 if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
// Use the bound identifier as the debuginfo name.
499 decl.debug_name = ident.name;
501 if let Some(&bm) = hir.tables.pat_binding_modes().get(pat.hir_id) {
// Only a by-value `mut` binding makes the upvar mutable.
502 if bm == ty::BindByValue(hir::MutMutable) {
503 decl.mutability = Mutability::Mut;
505 decl.mutability = Mutability::Not;
// Missing binding mode is an error, but recoverable: report it
// lazily rather than ICE-ing immediately.
508 tcx.sess.delay_span_bug(pat.span, "missing binding mode");
516 let mut builder = Builder::new(hir.clone(),
523 let fn_def_id = tcx.hir.local_def_id(fn_id);
// The call-site and argument scopes wrap the whole body; drops for
// arguments are scheduled in the argument scope.
524 let call_site_scope = region::Scope::CallSite(body.value.hir_id.local_id);
525 let arg_scope = region::Scope::Arguments(body.value.hir_id.local_id);
526 let mut block = START_BLOCK;
527 let source_info = builder.source_info(span);
528 let call_site_s = (call_site_scope, source_info);
529 unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, block, |builder| {
// If panics must abort for this fn (see should_abort_on_panic),
// schedule the abort shim before lowering the body.
530 if should_abort_on_panic(tcx, fn_def_id, abi) {
531 builder.schedule_abort();
534 let arg_scope_s = (arg_scope, source_info);
535 unpack!(block = builder.in_scope(arg_scope_s, LintLevel::Inherited, block, |builder| {
536 builder.args_and_body(block, &arguments, arg_scope, &body.value)
538 // Attribute epilogue to function's closing brace
539 let fn_end = span.shrink_to_hi();
540 let source_info = builder.source_info(fn_end);
541 let return_block = builder.return_block();
542 builder.cfg.terminate(block, source_info,
543 TerminatorKind::Goto { target: return_block });
544 builder.cfg.terminate(return_block, source_info,
545 TerminatorKind::Return);
546 // Attribute any unreachable codepaths to the function's closing brace
547 if let Some(unreachable_block) = builder.cached_unreachable_block {
548 builder.cfg.terminate(unreachable_block, source_info,
549 TerminatorKind::Unreachable);
553 assert_eq!(block, builder.return_block());
555 let mut spread_arg = None;
556 if abi == Abi::RustCall {
557 // RustCall pseudo-ABI untuples the last argument.
558 spread_arg = Some(Local::new(arguments.len()));
560 let closure_expr_id = tcx.hir.local_def_id(fn_id);
561 info!("fn_id {:?} has attrs {:?}", closure_expr_id,
562 tcx.get_attrs(closure_expr_id));
// Finalize the CFG and record the spread-arg for RustCall untupling.
564 let mut mir = builder.finish(yield_ty);
565 mir.spread_arg = spread_arg;
// Lower a constant/static body (any non-fn body owner) to MIR: a single
// evaluation of the body expression into the return place.
569 fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
570 body_id: hir::BodyId)
573 let ast_expr = &tcx.hir.body(body_id).value;
// The type after adjustments is the type of the constant itself.
574 let ty = hir.tables().expr_ty_adjusted(ast_expr);
575 let owner_id = tcx.hir.body_owner(body_id);
576 let span = tcx.hir.span(owner_id);
// Constants have no arguments (arg_count = 0) and no unsafety context.
577 let mut builder = Builder::new(hir.clone(), span, 0, Safety::Safe, ty, vec![]);
579 let mut block = START_BLOCK;
580 let expr = builder.hir.mirror(ast_expr);
// Evaluate the whole body expression directly into the return place.
581 unpack!(block = builder.into_expr(&Place::Local(RETURN_PLACE), block, expr));
583 let source_info = builder.source_info(span);
584 builder.cfg.terminate(block, source_info, TerminatorKind::Return);
586 // Constants can't `return` so a return block should not be created.
587 assert_eq!(builder.cached_return_block, None);
589 // Constants may be match expressions in which case an unreachable block may
590 // be created, so terminate it properly.
591 if let Some(unreachable_block) = builder.cached_unreachable_block {
592 builder.cfg.terminate(unreachable_block, source_info,
593 TerminatorKind::Unreachable);
// Build a minimal placeholder MIR body for an item whose typeck tables are
// tainted by errors: a single START_BLOCK terminated by Unreachable, with
// the error type as its "return type".
599 fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
600 body_id: hir::BodyId)
602 let owner_id = hir.tcx().hir.body_owner(body_id);
603 let span = hir.tcx().hir.span(owner_id);
604 let ty = hir.tcx().types.err;
605 let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, vec![]);
606 let source_info = builder.source_info(span);
// Nothing in this body can execute; compilation will fail anyway.
607 builder.cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable);
611 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// Create a Builder with an empty CFG, reserving START_BLOCK and the
// outermost source scope. (Some parameters — span, arg_count, safety,
// return_ty — are elided in this view.)
612 fn new(hir: Cx<'a, 'gcx, 'tcx>,
617 upvar_decls: Vec<UpvarDecl>)
618 -> Builder<'a, 'gcx, 'tcx> {
619 let lint_level = LintLevel::Explicit(hir.root_lint_level);
620 let mut builder = Builder {
622 cfg: CFG { basic_blocks: IndexVec::new() },
626 source_scopes: IndexVec::new(),
627 source_scope: OUTERMOST_SOURCE_SCOPE,
628 source_scope_local_data: IndexVec::new(),
629 guard_context: vec![],
630 push_unsafe_count: 0,
631 unpushed_unsafe: safety,
632 breakable_scopes: vec![],
// Local 0 is always the return place; argument locals follow it.
633 local_decls: IndexVec::from_elem_n(LocalDecl::new_return_place(return_ty,
636 var_indices: NodeMap(),
638 cached_resume_block: None,
639 cached_return_block: None,
640 cached_unreachable_block: None,
// The first allocated block must be START_BLOCK, and the first source
// scope must be the outermost one — both invariants asserted here.
643 assert_eq!(builder.cfg.start_new_block(), START_BLOCK);
645 builder.new_source_scope(span, lint_level, Some(safety)),
646 OUTERMOST_SOURCE_SCOPE);
647 builder.source_scopes[OUTERMOST_SOURCE_SCOPE].parent_scope = None;
// Finalize into a Mir value; the fn header is elided in this view.
653 yield_ty: Option<Ty<'tcx>>)
// Every block must have been terminated by now; a missing terminator
// means some lowering path forgot to close its block.
655 for (index, block) in self.cfg.basic_blocks.iter().enumerate() {
656 if block.terminator.is_none() {
657 span_bug!(self.fn_span, "no terminator on block {:?}", index);
661 Mir::new(self.cfg.basic_blocks,
663 ClearCrossCrate::Set(self.source_scope_local_data),
// Allocate and bind argument locals, schedule their drops, then lower
// the body expression into the return place.
673 fn args_and_body(&mut self,
674 mut block: BasicBlock,
675 arguments: &[ArgInfo<'gcx>],
676 argument_scope: region::Scope,
677 ast_body: &'gcx hir::Expr)
680 // Allocate locals for the function arguments
681 for &ArgInfo(ty, _, pattern, _) in arguments.iter() {
682 // If this is a simple binding pattern, give the local a name for
683 // debuginfo and so that error reporting knows that this is a user
684 // variable. For any other pattern the pattern introduces new
685 // variables which will be named instead.
687 if let Some(pat) = pattern {
689 hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, _)
690 | hir::PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, _) => {
691 name = Some(ident.name);
697 let source_info = SourceInfo {
698 scope: OUTERMOST_SOURCE_SCOPE,
// Fall back to the fn span when the argument has no pattern
// (e.g. the implicit closure environment argument).
699 span: pattern.map_or(self.fn_span, |pat| pat.span)
701 self.local_decls.push(LocalDecl {
702 mutability: Mutability::Mut,
705 visibility_scope: source_info.scope,
708 is_user_variable: None,
712 let mut scope = None;
713 // Bind the argument patterns
714 for (index, arg_info) in arguments.iter().enumerate() {
715 // Function arguments always get the first Local indices after the return place
716 let local = Local::new(index + 1);
717 let place = Place::Local(local);
718 let &ArgInfo(ty, opt_ty_info, pattern, ref self_binding) = arg_info;
720 if let Some(pattern) = pattern {
721 let pattern = self.hir.pattern_from_hir(pattern);
722 let span = pattern.span;
724 match *pattern.kind {
725 // Don't introduce extra copies for simple bindings
726 PatternKind::Binding { mutability, var, mode: BindingMode::ByValue, .. } => {
727 self.local_decls[local].mutability = mutability;
// Record how the user wrote the binding so borrowck can
// produce better diagnostics later.
728 self.local_decls[local].is_user_variable =
729 if let Some(ImplicitSelfBinding) = self_binding {
730 Some(ClearCrossCrate::Set(BindingForm::ImplicitSelf))
732 let binding_mode = ty::BindingMode::BindByValue(mutability.into());
733 Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
736 opt_match_place: Some((Some(place.clone()), span)),
// Simple binding: the argument local doubles as the variable.
739 self.var_indices.insert(var, LocalsForNode::One(local));
// Complex pattern: declare its bindings and destructure the
// argument place into them (an irrefutable match).
742 scope = self.declare_bindings(scope, ast_body.span,
743 LintLevel::Inherited, &pattern,
744 matches::ArmHasGuard(false),
745 Some((Some(&place), span)));
746 unpack!(block = self.place_into_pattern(block, pattern, &place, false));
751 // Make sure we drop (parts of) the argument even when not matched on.
753 pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
754 argument_scope, &place, ty,
755 DropKind::Value { cached_block: CachedBlock::default() },
759 // Enter the argument pattern bindings source scope, if it exists.
760 if let Some(source_scope) = scope {
761 self.source_scope = source_scope;
// Lower the body expression into the return place.
764 let body = self.hir.mirror(ast_body);
765 self.into(&Place::Local(RETURN_PLACE), block, body)
// Return the shared unit-typed temporary, creating it on first use.
768 fn get_unit_temp(&mut self) -> Place<'tcx> {
769 match self.unit_temp {
770 Some(ref tmp) => tmp.clone(),
772 let ty = self.hir.unit_ty();
773 let fn_span = self.fn_span;
774 let tmp = self.temp(ty, fn_span);
775 self.unit_temp = Some(tmp.clone());
// Return the single shared RETURN block, creating it on first use.
781 fn return_block(&mut self) -> BasicBlock {
782 match self.cached_return_block {
785 let rb = self.cfg.start_new_block();
786 self.cached_return_block = Some(rb);
// Return the single shared UNREACHABLE block, creating it on first use.
792 fn unreachable_block(&mut self) -> BasicBlock {
793 match self.cached_unreachable_block {
796 let ub = self.cfg.start_new_block();
797 self.cached_unreachable_block = Some(ub);
804 ///////////////////////////////////////////////////////////////////////////
805 // Builder methods are broken up into modules, depending on what kind
806 // of thing is being lowered. Note that they use the `unpack` macro
807 // above extensively.