// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Inlining pass for MIR functions

use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc_data_structures::graph;

use rustc::dep_graph::DepNode;
use rustc::mir::*;
use rustc::mir::transform::{MirCtxt, MirSource, Pass, PassId};
use rustc::mir::visit::*;
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::maps::Multi;
use rustc::ty::steal::Steal;
use rustc::ty::subst::{Subst, Substs};
use rustc::util::nodemap::DefIdSet;

use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use syntax::abi::Abi;

use callgraph;
use transform::interprocedural::InterproceduralCx;

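// Rough cost model: each counted statement costs INSTR_COST, calls and
// meaningful drops cost CALL_PENALTY, and locals of unknown size are charged
// UNKNOWN_SIZE_COST. A callee is inlined only when its estimated cost stays
// within DEFAULT_THRESHOLD, or HINT_THRESHOLD if it carries an inline hint.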
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;

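/// The MIR inlining pass: builds a call graph over the local crate and
/// inlines eligible call sites one strongly connected component at a time.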
pub struct Inline;

impl Pass for Inline {
    fn run_pass<'a, 'tcx: 'a>(&self, mir_cx: &MirCtxt<'a, 'tcx>)
                              -> Multi<PassId, &'tcx Steal<Mir<'tcx>>> {
        let tcx = mir_cx.tcx();
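        // Inlining is only enabled at -Z mir-opt-level=2 and above; below
        // that, pass the previous MIR through untouched.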
        if tcx.sess.opts.debugging_opts.mir_opt_level < 2 {
            return Multi::from(tcx.alloc_steal_mir(mir_cx.steal_previous_mir()));
        }

        let mut cx = InterproceduralCx::new(mir_cx);

        let callgraph = callgraph::CallGraph::build(&mut cx);

        let mut inliner = Inliner { tcx };

        for scc in callgraph.scc_iter() {
            inliner.inline_scc(&mut cx, &callgraph, &scc);
        }

        Multi::from(cx.into_local_mirs())
    }
}

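/// Carries the state shared across the inlining of all SCCs; currently just
/// the type context.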
struct Inliner<'mir, 'tcx: 'mir> {
    tcx: TyCtxt<'mir, 'tcx, 'tcx>,
}

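/// A single direct call site found in a caller's MIR, recording everything
/// needed to decide whether to inline the callee and to perform the inlining.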
#[derive(Copy, Clone)]
struct CallSite<'tcx> {
    caller: DefId,
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl<'mir, 'tcx> Inliner<'mir, 'tcx> {
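    /// Collects all direct call sites in the functions of one strongly
    /// connected component of the call graph, then repeatedly inlines the
    /// profitable ones until no more changes occur. Returns whether any
    /// caller's MIR was changed.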
    fn inline_scc<'a>(&mut self,
                      cx: &mut InterproceduralCx<'a, 'mir, 'tcx>,
                      callgraph: &callgraph::CallGraph,
                      scc: &[graph::NodeIndex]) -> bool {
        let tcx = self.tcx;
        let mut callsites = Vec::new();
        let mut in_scc = DefIdSet();

        let mut inlined_into = DefIdSet();

        for &node in scc {
            let def_id = callgraph.def_id(node);

            // Don't inspect functions from other crates
            let id = if let Some(id) = tcx.hir.as_local_node_id(def_id) {
                id
            } else {
                continue;
            };
            let src = MirSource::from_node(tcx, id);
            if let MirSource::Fn(_) = src {
                if let Some(mir) = cx.ensure_mir_and_read(def_id) {
                    for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
                        // Don't inline calls that are in cleanup blocks.
                        if bb_data.is_cleanup { continue; }

                        // Only consider direct calls to functions
                        let terminator = bb_data.terminator();
                        if let TerminatorKind::Call {
                            func: Operand::Constant(ref f), .. } = terminator.kind {
                            if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
                                callsites.push(CallSite {
                                    caller: def_id,
                                    callee: callee_def_id,
                                    substs: substs,
                                    bb: bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }

                    in_scc.insert(def_id);
                }
            }
        }

        // Move call sites that are within the SCC to the end, so that
        // they're inlined after calls to functions outside the SCC.
        let mut first_call_in_scc = callsites.len();

        let mut i = 0;
        while i < first_call_in_scc {
            let f = callsites[i].caller;
            if in_scc.contains(&f) {
                first_call_in_scc -= 1;
                callsites.swap(i, first_call_in_scc);
            } else {
                i += 1;
            }
        }

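        // Inline until a fixed point is reached: inlining a callee can expose
        // new call sites (collected below), which are reconsidered on the
        // next iteration.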
        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            let mut csi = 0;
            while csi < callsites.len() {
                let callsite = callsites[csi];
                csi += 1;

                let _task = tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));
                tcx.dep_graph.write(DepNode::Mir(callsite.caller));

                let callee_mir = {
                    if let Some(callee_mir) = cx.ensure_mir_and_read(callsite.callee) {
                        if !self.should_inline(callsite, &callee_mir) {
                            continue;
                        }

                        callee_mir.subst(tcx, callsite.substs)
                    } else {
                        continue;
                    }
                };

                let caller_mir = cx.mir_mut(callsite.caller);

                let start = caller_mir.basic_blocks().len();

                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    continue;
                }

                inlined_into.insert(callsite.caller);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
                            // Don't inline the same function multiple times.
                            if callsite.callee != callee_def_id {
                                callsites.push(CallSite {
                                    caller: callsite.caller,
                                    callee: callee_def_id,
                                    substs: substs,
                                    bb: bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                csi -= 1;
                if scc.len() == 1 {
                    callsites.swap_remove(csi);
                } else {
                    callsites.remove(csi);
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify functions we inlined into.
        for def_id in inlined_into {
            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
            tcx.dep_graph.write(DepNode::Mir(def_id));

            let caller_mir = cx.mir_mut(def_id);

            debug!("Running simplify cfg on {:?}", def_id);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
        changed
    }

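    /// Decides whether it is worth inlining `callsite.callee` into its
    /// caller, estimating the callee's size with a simple per-statement cost
    /// model and comparing the result against a threshold derived from any
    /// `#[inline]` attributes.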
    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        let tcx = self.tcx;

        // Don't inline closures that have captures
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            return false;
        }

        let attrs = tcx.get_attrs(callsite.callee);
        let hint = attr::find_inline_attr(None, &attrs[..]);

        let hinted = match hint {
            // Just treat inline(always) as a hint for now; there are
            // cases that prevent inlining that we need to check for
            // first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => return false,
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for
        // cross-crate inlining. This is to ensure that the final crate
        // doesn't have MIR that references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions
        if attr::contains_name(&attrs[..], "cold") {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even very small
        // functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }

        // FIXME: Give a bonus to functions with only a single caller

        let id = tcx.hir.as_local_node_id(callsite.caller).expect("Caller not local");
        let param_env = ty::ParameterEnvironment::for_item(tcx, id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(&callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, &param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(.., f) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in &term.successors()[..] {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps: if we know the
        // size, use that; otherwise charge a moderately large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // it.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        debug!("Inline cost for {:?} is {}", callsite.callee, cost);

        if let attr::InlineAttr::Always = hint {
            true
        } else {
            cost <= threshold
        }
    }

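    /// Performs the actual inlining of `callee_mir` into `caller_mir` at
    /// `callsite`: the callee's blocks, locals, scopes and promoted constants
    /// are appended to the caller and rewritten by the `Integrator` visitor,
    /// and the original `Call` terminator is replaced with a `Goto` into the
    /// inlined body. Returns `false` if the call could not be inlined.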
    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        // Don't inline a function into itself
        if callsite.caller == callsite.callee { return false; }

        let _task = self.tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));

        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, callsite.caller);

                let is_box_free = Some(callsite.callee) == self.tcx.lang_items.box_free_fn();

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    }

                    scope.span = callsite.location.span;

                    let idx = caller_mir.visibility_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Lvalue could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the lvalue and pass the destination as `*temp` instead.
                fn dest_needs_borrow(lval: &Lvalue) -> bool {
                    match *lval {
                        Lvalue::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Lvalue::Static(_) => true,
                        _ => false
                    }
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut,
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Lvalue::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb].statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                let args: Vec<_> = if is_box_free {
                    assert!(args.len() == 1);
                    // box_free takes a Box, but is defined with a *mut T, inlining
                    // needs to generate the cast.
                    // FIXME: we should probably just generate correct MIR in the first place...

                    let arg = if let Operand::Consume(ref lval) = args[0] {
                        lval.clone()
                    } else {
                        bug!("Constant arg to \"box_free\"");
                    };

                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
                } else {
                    // Copy the arguments if needed.
                    self.make_call_args(args, &callsite, caller_mir)
                };

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map: local_map,
                    scope_map: scope_map,
                    promoted_map: promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block: return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind: kind
                });
                false
            }
        }
    }

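    /// `box_free` takes a `Box<T>` at the call site but is defined to take a
    /// `*mut T`, so inlining it must insert a cast: re-borrow the contents of
    /// the box, cast that borrow to a raw pointer, and return the resulting
    /// temporary to use as the call argument.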
    fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
        let arg = Rvalue::Ref(
            self.tcx.types.re_erased,
            BorrowKind::Mut,
            arg.deref());

        let ty = arg.ty(caller_mir, self.tcx);
        let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
        let ref_tmp = Lvalue::Local(ref_tmp);

        let ref_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(ref_tmp.clone(), arg)
        };

        caller_mir[callsite.bb].statements.push(ref_stmt);

        let pointee_ty = match ptr_ty.sty {
            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
        };
        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);

        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);

        let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
        let cast_tmp = caller_mir.local_decls.push(cast_tmp);
        let cast_tmp = Lvalue::Local(cast_tmp);

        let cast_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
        };

        caller_mir[callsite.bb].statements.push(cast_stmt);

        Operand::Consume(cast_tmp)
    }

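    /// Ensures each argument operand is a caller temporary, creating a fresh
    /// temporary and an assignment for any operand that is not one already.
    /// The `Integrator` later substitutes these operands directly for the
    /// callee's argument locals, so they need to behave like locals that
    /// nothing else writes to.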
    fn make_call_args(&self, args: Vec<Operand<'tcx>>,
                      callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Vec<Operand<'tcx>> {
        let tcx = self.tcx;
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.
        args.into_iter().map(|a| {
            if let Operand::Consume(Lvalue::Local(local)) = a {
                if caller_mir.local_kind(local) == LocalKind::Temp {
                    // Reuse the operand if it's a temporary already
                    return a;
                }
            }

            debug!("Creating temp for argument");
            // Otherwise, create a temporary for the arg
            let arg = Rvalue::Use(a);

            let ty = arg.ty(caller_mir, tcx);

            let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
            let arg_tmp = caller_mir.local_decls.push(arg_tmp);
            let arg_tmp = Lvalue::Local(arg_tmp);

            let stmt = Statement {
                source_info: callsite.location,
                kind: StatementKind::Assign(arg_tmp.clone(), arg)
            };
            caller_mir[callsite.bb].statements.push(stmt);
            Operand::Consume(arg_tmp)
        }).collect()
    }
}

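/// Computes the size of `ty` in bytes, or returns `None` if the layout
/// cannot be determined (for example, when `ty` still contains type
/// parameters).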
fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParameterEnvironment<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    tcx.infer_ctxt(param_env, traits::Reveal::All).enter(|infcx| {
        ty.layout(&infcx).ok().map(|layout| {
            layout.size(&tcx.data_layout).bytes()
        })
    })
}

/**
 * Integrator.
 *
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals and other control-flow
 * details.
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Operand<'tcx>],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Lvalue<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
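    /// Translates a basic block index from the callee's MIR into the caller,
    /// where the callee's blocks were appended starting at `block_idx`.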
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

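    /// Translates a callee var or temp into the corresponding caller local.
    /// Returns `None` for the return pointer and the argument locals
    /// (indices `0..=args.len()`), which are substituted separately.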
    fn update_local(&self, local: Local) -> Option<Local> {
        let idx = local.index();
        if idx < (self.args.len() + 1) {
            return None;
        }
        let idx = idx - (self.args.len() + 1);
        let local = Local::new(idx);
        self.local_map.get(local).cloned()
    }

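    /// If `arg` is one of the callee's argument locals, returns its
    /// zero-based index into `args`.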
    fn arg_index(&self, arg: Local) -> Option<usize> {
        let idx = arg.index();
        if idx > 0 && idx <= self.args.len() {
            Some(idx - 1)
        } else {
            None
        }
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_lvalue(&mut self,
                    lvalue: &mut Lvalue<'tcx>,
                    _ctxt: LvalueContext<'tcx>,
                    _location: Location) {
        if let Lvalue::Local(ref mut local) = *lvalue {
            if let Some(l) = self.update_local(*local) {
                // Temp or Var; update the local reference
                *local = l;
                return;
            }
        }
        if let Lvalue::Local(local) = *lvalue {
            if local == RETURN_POINTER {
                // Return pointer; update the lvalue itself
                *lvalue = self.destination.clone();
            } else if local.index() < (self.args.len() + 1) {
                // Argument; once again, update the lvalue itself
                let idx = local.index() - 1;
                if let Operand::Consume(ref lval) = self.args[idx] {
                    *lvalue = lval.clone();
                } else {
                    bug!("Arg operand `{:?}` is not an Lvalue use.", idx)
                }
            }
        } else {
            self.super_lvalue(lvalue, _ctxt, _location)
        }
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        if let Operand::Consume(Lvalue::Local(arg)) = *operand {
            if let Some(idx) = self.arg_index(arg) {
                let new_arg = self.args[idx].clone();
                *operand = new_arg;
                return;
            }
        }
        self.super_operand(operand, location);
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Unreachable => { }
        }
    }

    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
        *scope = self.scope_map[*scope];
    }

    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        } else {
            self.super_literal(literal, loc);
        }
    }
}