//! Inlining pass for MIR functions
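//!
//! Replaces suitable `Call` terminators in the caller with the callee's
//! substituted and normalized MIR body when a simple cost model judges
//! inlining profitable; the pass only runs at `-Z mir-opt-level=2` or higher.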

use rustc::hir::CodegenFnAttrFlags;
use rustc::hir::def_id::DefId;

use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};

use rustc::mir::*;
use rustc::mir::visit::*;
use rustc::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc::ty::subst::{Subst, SubstsRef};

use std::collections::VecDeque;
use std::iter;
use crate::transform::{MirPass, MirSource};
use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use rustc_target::spec::abi::Abi;

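// Rough cost model used by `should_inline`: a callee gets a budget of
// `DEFAULT_THRESHOLD` cost units (`HINT_THRESHOLD` when an `#[inline]` hint
// is present), most statements cost `INSTR_COST`, calls, asserts, and drops
// of types that actually need dropping cost `CALL_PENALTY`, and locals whose
// size cannot be computed are charged `UNKNOWN_SIZE_COST`.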
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;

pub struct Inline;

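/// A call site considered for inlining: the resolved callee, its
/// substitutions, and the block and source location of the call.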
#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: SubstsRef<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl MirPass for Inline {
    fn run_pass<'a, 'tcx>(&self,
                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          source: MirSource<'tcx>,
                          mir: &mut Body<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(mir);
        }
    }
}

struct Inliner<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: MirSource<'tcx>,
}

impl<'a, 'tcx> Inliner<'a, 'tcx> {
    fn run_pass(&self, caller_mir: &mut Body<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try and fetch the fully optimized MIR of a
        // callee; if that succeeds, we can inline it and we know
        // that it does not call us. Otherwise, we just don't try
        // to inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file. =)

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env(self.source.def_id());

        // Only do inlining into fn bodies.
        let id = self.tcx.hir().as_local_hir_id(self.source.def_id()).unwrap();
        if !self.tcx.hir().body_owner_kind_by_hir_id(id).is_fn_or_closure()
            || self.source.promoted.is_some()
        {
            return;
        }

        for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
            if let Some(callsite) = self.get_valid_function_call(bb,
                                                                 bb_data,
                                                                 caller_mir,
                                                                 param_env) {
                callsites.push_back(callsite);
            }
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
                    continue;
                }

                let self_node_id = self.tcx.hir().as_local_node_id(self.source.def_id()).unwrap();
                let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee);

                let callee_mir = if let Some(callee_node_id) = callee_node_id {
                    // Avoid a cycle here by using `optimized_mir` only if we have
                    // a lower node id than the callee. This ensures that the callee will
                    // not inline us. This trick only works without incremental compilation,
                    // so don't do it if incremental is enabled.
                    if !self.tcx.dep_graph.is_fully_enabled()
                        && self_node_id.as_u32() < callee_node_id.as_u32() {
                        self.tcx.optimized_mir(callsite.callee)
                    } else {
                        continue;
                    }
                } else {
                    // This cannot result in a cycle since the callee MIR is from another crate
                    // and is already optimized.
                    self.tcx.optimized_mir(callsite.callee)
                };

                let callee_mir = if self.consider_optimizing(callsite, callee_mir) {
                    self.tcx.subst_and_normalize_erasing_regions(
                        &callsite.substs,
                        param_env,
                        callee_mir,
                    )
                } else {
                    continue;
                };

                let start = caller_mir.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);
                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    if let Some(new_callsite) = self.get_valid_function_call(bb,
                                                                             bb_data,
                                                                             caller_mir,
                                                                             param_env) {
                        // Don't inline the same function multiple times.
                        if callsite.callee != new_callsite.callee {
                            callsites.push_back(new_callsite);
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("Running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
    }

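    /// Returns a `CallSite` if `bb` ends in a direct call that resolves to a
    /// concrete (non-virtual) instance; skips cleanup blocks and returns
    /// `None` otherwise.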
    fn get_valid_function_call(&self,
                               bb: BasicBlock,
                               bb_data: &BasicBlockData<'tcx>,
                               caller_mir: &Body<'tcx>,
                               param_env: ParamEnv<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Don't inline calls that are in cleanup blocks.
        if bb_data.is_cleanup { return None; }

        // Only consider direct calls to functions.
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
            if let ty::FnDef(callee_def_id, substs) = op.ty(caller_mir, self.tcx).sty {
                let instance = Instance::resolve(self.tcx,
                                                 param_env,
                                                 callee_def_id,
                                                 substs)?;

                if let InstanceDef::Virtual(..) = instance.def {
                    return None;
                }

                return Some(CallSite {
                    callee: instance.def_id(),
                    substs: instance.substs,
                    bb,
                    location: terminator.source_info
                });
            }
        }

        None
    }

    fn consider_optimizing(&self,
                           callsite: CallSite<'tcx>,
                           callee_mir: &Body<'tcx>)
                           -> bool
    {
        debug!("consider_optimizing({:?})", callsite);
        self.should_inline(callsite, callee_mir)
            && self.tcx.consider_optimizing(|| format!("Inline {:?} into {:?}",
                                                       callee_mir.span,
                                                       callsite))
    }

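    /// Decides whether inlining `callsite` is likely to be profitable: first
    /// rules out cases we cannot handle, then weighs an estimated cost of the
    /// callee body against a threshold.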
    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Body<'tcx>)
                     -> bool
    {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Don't inline closures that have capture debuginfo.
        // FIXME: Handle closures better
        if !callee_mir.__upvar_debuginfo_codegen_only_do_not_use.is_empty() {
            debug!("    upvar debuginfo present - not inlining");
            return false;
        }

        // Cannot inline generators which haven't been transformed yet.
        if callee_mir.yield_ty.is_some() {
            debug!("    yield ty present - not inlining");
            return false;
        }

        // Do not inline the {u,i}128 lang items: codegen and const eval depend
        // on detecting calls to these lang items and intercepting them.
        if tcx.is_binop_lang_item(callsite.callee).is_some() {
            debug!("    not inlining 128bit integer lang item");
            return false;
        }

        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);

        let hinted = match codegen_fn_attrs.inline {
            // Just treat inline(always) as a hint for now,
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("#[inline(never)] present - not inlining");
                return false
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.non_erasable_generics().count() == 0 && !hinted {
                debug!("    callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions.
        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks; we normally
        // have two or three blocks even for very small functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller

        let param_env = tcx.param_env(self.source.def_id());

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs).ty;
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero.
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::FnDef(def_id, _) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls.
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps: if we know the size,
        // use that; otherwise charge a moderately-large dummy cost.
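        // For example, on a 64-bit target a local of type `[u64; 4]` (32 bytes)
        // adds 32 / 8 = 4 to the cost.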

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // it.
            if let Some(size) = type_size_of(tcx, param_env, ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else if cost <= threshold {
            debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
            true
        } else {
            debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
            false
        }
    }

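    /// Splices `callee_mir` into `caller_mir` at `callsite`: callee blocks,
    /// locals, scopes, and promoteds are appended and remapped, and the call
    /// terminator is replaced by a `Goto` into the inlined entry block.
    /// Returns `false` (leaving the caller unchanged) for calls we cannot
    /// handle, such as diverging calls.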
    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Body<'tcx>,
                   mut callee_mir: Body<'tcx>) -> bool {
        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, self.source);

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.source_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.source_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    }

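                    // Note: this unconditionally overwrites the scope's span,
                    // including the one just set above for root scopes.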
                    scope.span = callsite.location.span;

                    let idx = caller_mir.source_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;
                    local.visibility_scope = scope_map[local.visibility_scope];

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                promoted_map.extend(
                    callee_mir.promoted.iter().cloned().map(|p| caller_mir.promoted.push(p))
                );

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: &Place<'_>) -> bool {
                    place.iterate(|place_base, place_projection| {
                        for proj in place_projection {
                            match proj.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => return true,
                                _ => {}
                            }
                        }

                        match place_base {
                            // Static variables need a borrow because the callee
                            // might modify the same static.
                            PlaceBase::Static(_) => true,
                            _ => false
                        }
                    })
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.lifetimes.re_erased,
                        BorrowKind::Mut { allow_two_phase_borrow: false },
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Place::Base(PlaceBase::Local(tmp));

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), box dest)
                    };
                    caller_mir[callsite.bb].statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_mir);

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind,
                });
                false
            }
        }
    }

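    /// Copies the call's arguments into caller temporaries (reusing existing
    /// temporaries where possible), unpacking the "rust-call" argument tuple
    /// when the callee is a closure.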
    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Body<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            assert!(args.next().is_none());

            let tuple = Place::Base(PlaceBase::Local(tuple));
            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_mir, tcx).ty.sty {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args =
                tuple_tys.iter().enumerate().map(|(i, ty)| {
                    // This is e.g., `tuple_tmp.0` in our example above.
                    let tuple_field = Operand::Move(tuple.clone().field(
                        Field::new(i),
                        ty.expect_ty(),
                    ));

                    // Spill to a local to make e.g., `tmp0`.
                    self.create_temp_if_necessary(tuple_field, callsite, caller_mir)
                });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_mir))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Body<'tcx>,
    ) -> Local {
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.

        if let Operand::Move(Place::Base(PlaceBase::Local(local))) = arg {
            if caller_mir.local_kind(local) == LocalKind::Temp {
                // Reuse the operand if it's a temporary already.
                return local;
            }
        }

        debug!("Creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg.
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_mir, self.tcx);

        let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let arg_tmp = caller_mir.local_decls.push(arg_tmp);

        let stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(Place::Base(PlaceBase::Local(arg_tmp)), box arg),
        };
        caller_mir[callsite.bb].statements.push(stmt);
        arg_tmp
    }
}

fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          param_env: ty::ParamEnv<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/// Integrates blocks from the callee function into the calling function.
/// Updates block indices, references to locals, and other control-flow
/// related data.
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Local],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<SourceScope, SourceScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Place<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_local(&mut self,
                   local: &mut Local,
                   _ctxt: PlaceContext,
                   _location: Location) {
        if *local == RETURN_PLACE {
            match self.destination {
                Place::Base(PlaceBase::Local(l)) => {
                    *local = l;
                    return;
                },
                ref place => bug!("Return place is {:?}, not local", place)
            }
        }
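        // Callee locals are laid out as [return place, arguments..., vars and
        // temps...]; the return place was handled above, arguments map to the
        // caller-side temporaries in `args`, and everything else goes through
        // `local_map`.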
        let idx = local.index() - 1;
        if idx < self.args.len() {
            *local = self.args[idx];
            return;
        }
        *local = self.local_map[Local::new(idx - self.args.len())];
    }

    fn visit_place(&mut self,
                   place: &mut Place<'tcx>,
                   _ctxt: PlaceContext,
                   _location: Location) {
        match place {
            Place::Base(PlaceBase::Local(RETURN_PLACE)) => {
                // Return pointer; update the place itself
                *place = self.destination.clone();
            },
            Place::Base(
                PlaceBase::Static(box Static { kind: StaticKind::Promoted(promoted), .. })
            ) => {
                if let Some(p) = self.promoted_map.get(*promoted).cloned() {
                    *promoted = p;
                }
            },
            _ => self.super_place(place, _ctxt, _location)
        }
    }

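    // Track whether we are visiting a cleanup block, so the terminator
    // rewrites below know not to add new unwind edges out of cleanup code.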
    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(
        &mut self,
        kind: &mut RetagKind,
        place: &mut Place<'tcx>,
        loc: Location,
    ) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_terminator_kind(&mut self,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(kind, loc);

        match *kind {
            TerminatorKind::GeneratorDrop |
            TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => { }
            TerminatorKind::Unreachable => { }
            TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
                *real_target = self.update_target(*real_target);
                for target in imaginary_targets {
                    *target = self.update_target(*target);
                }
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
                // see the ordering of passes in the optimized_mir query.
                bug!("False unwinds should have been removed before inlining")
        }
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.scope_map[*scope];
    }
}