// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Inlining pass for MIR functions
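//!
//! The pass builds a call graph for the local crate, iterates over its
//! strongly connected components, and splices callee MIR into callers when
//! a simple cost model (see the constants below) deems it profitable. It is
//! a no-op below `-Z mir-opt-level=2`.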

use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc_data_structures::graph;

use rustc::dep_graph::DepNode;
use rustc::mir::*;
use rustc::mir::transform::{MirCtxt, MirSource, PassId};
use rustc::mir::visit::*;
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::maps::Multi;
use rustc::ty::steal::Steal;
use rustc::ty::subst::{Subst, Substs};
use rustc::util::nodemap::DefIdSet;

use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use syntax::abi::Abi;

use callgraph;
use transform::interprocedural::InterproceduralCx;

const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;

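// A rough worked example of the cost model in `should_inline` (illustrative
// only, and ignoring the per-local size costs added at the end): a callee
// with eight counted statements and one ordinary call costs
// 8 * INSTR_COST + CALL_PENALTY = 65, which exceeds DEFAULT_THRESHOLD (50),
// so it is inlined only when an #[inline] hint raises the threshold to
// HINT_THRESHOLD (100).
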
pub struct Inline;

pub trait Pass {
    fn run_pass<'a, 'tcx: 'a>(&self, mir_cx: &MirCtxt<'a, 'tcx>)
                              -> Multi<PassId, &'tcx Steal<Mir<'tcx>>>;
}

impl Pass for Inline {
    fn run_pass<'a, 'tcx: 'a>(&self, mir_cx: &MirCtxt<'a, 'tcx>)
                              -> Multi<PassId, &'tcx Steal<Mir<'tcx>>> {
        let tcx = mir_cx.tcx();
        if tcx.sess.opts.debugging_opts.mir_opt_level < 2 {
            return Multi::from(tcx.alloc_steal_mir(mir_cx.steal_previous_mir()));
        }

        let mut cx = InterproceduralCx::new(mir_cx);

        let callgraph = callgraph::CallGraph::build(&mut cx);

        let mut inliner = Inliner { tcx };

        for scc in callgraph.scc_iter() {
            inliner.inline_scc(&mut cx, &callgraph, &scc);
        }

        Multi::from(cx.into_local_mirs())
    }
}

struct Inliner<'mir, 'tcx: 'mir> {
    tcx: TyCtxt<'mir, 'tcx, 'tcx>,
}

#[derive(Copy, Clone)]
struct CallSite<'tcx> {
    caller: DefId,
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl<'mir, 'tcx> Inliner<'mir, 'tcx> {
    fn inline_scc<'a>(&mut self,
                      cx: &mut InterproceduralCx<'a, 'mir, 'tcx>,
                      callgraph: &callgraph::CallGraph,
                      scc: &[graph::NodeIndex]) -> bool {
        let tcx = self.tcx;
        let mut callsites = Vec::new();
        let mut in_scc = DefIdSet();

        let mut inlined_into = DefIdSet();

        for &node in scc {
            let def_id = callgraph.def_id(node);

            // Don't inspect functions from other crates
            let id = if let Some(id) = tcx.hir.as_local_node_id(def_id) {
                id
            } else {
                continue;
            };
            let src = MirSource::from_node(tcx, id);
            if let MirSource::Fn(_) = src {
                if let Some(mir) = cx.ensure_mir_and_read(def_id) {
                    for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
                        // Don't inline calls that are in cleanup blocks.
                        if bb_data.is_cleanup { continue; }

                        // Only consider direct calls to functions
                        let terminator = bb_data.terminator();
                        if let TerminatorKind::Call {
                            func: Operand::Constant(ref f), .. } = terminator.kind {
                            if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
                                callsites.push(CallSite {
                                    caller: def_id,
                                    callee: callee_def_id,
                                    substs: substs,
                                    bb: bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }

                    in_scc.insert(def_id);
                }
            }
        }

        // Move callsites that are in the SCC to the end so
        // they're inlined after calls to outside the SCC
        let mut first_call_in_scc = callsites.len();

        let mut i = 0;
        while i < first_call_in_scc {
            let f = callsites[i].caller;
            if in_scc.contains(&f) {
                first_call_in_scc -= 1;
                callsites.swap(i, first_call_in_scc);
            } else {
                i += 1;
            }
        }
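
        // Note: the swaps above don't preserve the relative order of the
        // moved entries, which is fine: the loop below keeps re-scanning
        // `callsites` until it reaches a fixed point.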

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            let mut csi = 0;
            while csi < callsites.len() {
                let callsite = callsites[csi];
                csi += 1;

                let _task = tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));
                tcx.dep_graph.write(DepNode::Mir(callsite.caller));

                let callee_mir = {
                    if let Some(callee_mir) = cx.ensure_mir_and_read(callsite.callee) {
                        if !self.should_inline(callsite, &callee_mir) {
                            continue;
                        }

                        callee_mir.subst(tcx, callsite.substs)
                    } else {
                        continue;
                    }
                };

                let caller_mir = cx.mir_mut(callsite.caller);

                let start = caller_mir.basic_blocks().len();

                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    continue;
                }

                inlined_into.insert(callsite.caller);

                // Add callsites from the inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
                            // Don't inline the same function multiple times.
                            if callsite.callee != callee_def_id {
                                callsites.push(CallSite {
                                    caller: callsite.caller,
                                    callee: callee_def_id,
                                    substs: substs,
                                    bb: bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                csi -= 1;
                if scc.len() == 1 {
                    callsites.swap_remove(csi);
                } else {
                    callsites.remove(csi);
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify functions we inlined into.
        for def_id in inlined_into {
            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
            tcx.dep_graph.write(DepNode::Mir(def_id));

            let caller_mir = cx.mir_mut(def_id);

            debug!("Running simplify cfg on {:?}", def_id);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
        changed
    }

    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        let tcx = self.tcx;

        // Don't inline closures that have captures
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            return false;
        }

        let attrs = tcx.get_attrs(callsite.callee);
        let hint = attr::find_inline_attr(None, &attrs[..]);

        let hinted = match hint {
            // Just treat inline(always) as a hint for now; there are
            // cases that prevent inlining that we need to check for
            // first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => return false,
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions
        if attr::contains_name(&attrs[..], "cold") {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even
        // very small functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }

        // FIXME: Give a bonus to functions with only a single caller

        let id = tcx.hir.as_local_node_id(callsite.caller).expect("Caller not local");
        let param_env = ty::ParameterEnvironment::for_item(tcx, id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(&callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, &param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(.., f) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in &term.successors()[..] {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps. If we know the size,
        // use that; otherwise use a moderately large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // it.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        debug!("Inline cost for {:?} is {}", callsite.callee, cost);

        if let attr::InlineAttr::Always = hint {
            true
        } else {
            cost <= threshold
        }
    }

    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        // Don't inline a function into itself
        if callsite.caller == callsite.callee { return false; }

        let _task = self.tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));

        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, callsite.caller);

                let is_box_free = Some(callsite.callee) == self.tcx.lang_items.box_free_fn();

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    }

                    scope.span = callsite.location.span;

                    let idx = caller_mir.visibility_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Lvalue could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the lvalue and pass the destination as `*temp` instead.
                fn dest_needs_borrow(lval: &Lvalue) -> bool {
                    match *lval {
                        Lvalue::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Lvalue::Static(_) => true,
                        _ => false
                    }
                }
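
                // To make the comment above concrete: for a destination like
                // `a[*i]`, the code below emits `tmp = &mut a[*i]` in the
                // caller and uses `(*tmp)` as the inlined return place, so a
                // write to `i` inside the inlined body can no longer change
                // where the result is stored.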

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut,
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Lvalue::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb]
                        .statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                let args: Vec<_> = if is_box_free {
                    assert!(args.len() == 1);
                    // box_free takes a Box, but is defined with a *mut T, so
                    // inlining needs to generate the cast.
                    // FIXME: we should probably just generate correct MIR in the first place...

                    let arg = if let Operand::Consume(ref lval) = args[0] {
                        lval.clone()
                    } else {
                        bug!("Constant arg to \"box_free\"");
                    };

                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
                } else {
                    // Copy the arguments if needed.
                    self.make_call_args(args, &callsite, caller_mir)
                };

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map: local_map,
                    scope_map: scope_map,
                    promoted_map: promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block: return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind: kind
                });
                false
            }
        }
    }

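    // A sketch of what this helper emits, in MIR-like pseudocode (a reading
    // aid, not text from the original source): for `box_free(b)` with
    // `b: Box<T>`, it appends
    //
    //     ref_tmp = &mut (*b);
    //     cast_tmp = ref_tmp as *mut T;
    //
    // to the call's block and returns `cast_tmp` as the sole argument.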
    fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
        let arg = Rvalue::Ref(
            self.tcx.types.re_erased,
            BorrowKind::Mut,
            arg.deref());

        let ty = arg.ty(caller_mir, self.tcx);
        let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
        let ref_tmp = Lvalue::Local(ref_tmp);

        let ref_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(ref_tmp.clone(), arg)
        };

        caller_mir[callsite.bb]
            .statements.push(ref_stmt);

        let pointee_ty = match ptr_ty.sty {
            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
        };
        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);

        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);

        let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
        let cast_tmp = caller_mir.local_decls.push(cast_tmp);
        let cast_tmp = Lvalue::Local(cast_tmp);

        let cast_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
        };

        caller_mir[callsite.bb]
            .statements.push(cast_stmt);

        Operand::Consume(cast_tmp)
    }

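    // Why the temporaries: after inlining, an argument operand is substituted
    // for the callee's parameter at every use, so a non-temporary lvalue
    // (e.g. a user variable `x`) could be observed after the inlined body
    // mutates it. Copying each such operand into a fresh temp first
    // preserves the call's by-value semantics.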
    fn make_call_args(&self, args: Vec<Operand<'tcx>>,
                      callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Vec<Operand<'tcx>> {
        let tcx = self.tcx;
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.
        args.into_iter().map(|a| {
            if let Operand::Consume(Lvalue::Local(local)) = a {
                if caller_mir.local_kind(local) == LocalKind::Temp {
                    // Reuse the operand if it's a temporary already
                    return a;
                }
            }

            debug!("Creating temp for argument");
            // Otherwise, create a temporary for the arg
            let arg = Rvalue::Use(a);

            let ty = arg.ty(caller_mir, tcx);

            let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
            let arg_tmp = caller_mir.local_decls.push(arg_tmp);
            let arg_tmp = Lvalue::Local(arg_tmp);

            let stmt = Statement {
                source_info: callsite.location,
                kind: StatementKind::Assign(arg_tmp.clone(), arg)
            };
            caller_mir[callsite.bb].statements.push(stmt);
            Operand::Consume(arg_tmp)
        }).collect()
    }
}

fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParameterEnvironment<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    tcx.infer_ctxt(param_env, traits::Reveal::All).enter(|infcx| {
        ty.layout(&infcx).ok().map(|layout| {
            layout.size(&tcx.data_layout).bytes()
        })
    })
}

/**
 * Integrator.
 *
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals, and other control-flow
 * details.
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Operand<'tcx>],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Lvalue<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
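    // Callee blocks are appended to the caller starting at `block_idx` (the
    // caller's old block count), so a callee target `bbN` simply becomes
    // `bb(N + block_idx)`; e.g. with block_idx = 10, the callee's bb2 maps
    // to the caller's bb12.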
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

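    // Local numbering in the callee MIR: index 0 is the return pointer and
    // indices 1..=args.len() are the arguments, so only locals past that
    // range are vars/temps with entries in `local_map`.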
    fn update_local(&self, local: Local) -> Option<Local> {
        let idx = local.index();
        if idx < (self.args.len() + 1) {
            return None;
        }
        let idx = idx - (self.args.len() + 1);
        let local = Local::new(idx);
        self.local_map.get(local).cloned()
    }

    fn arg_index(&self, arg: Local) -> Option<usize> {
        let idx = arg.index();
        if idx > 0 && idx <= self.args.len() {
            Some(idx - 1)
        } else {
            None
        }
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_lvalue(&mut self,
                    lvalue: &mut Lvalue<'tcx>,
                    _ctxt: LvalueContext<'tcx>,
                    _location: Location) {
        if let Lvalue::Local(ref mut local) = *lvalue {
            if let Some(l) = self.update_local(*local) {
                // Temp or Var; update the local reference
                *local = l;
                return;
            }
        }
        if let Lvalue::Local(local) = *lvalue {
            if local == RETURN_POINTER {
                // Return pointer; update the lvalue itself
                *lvalue = self.destination.clone();
            } else if local.index() < (self.args.len() + 1) {
                // Argument; once again, update the lvalue itself
                let idx = local.index() - 1;
                if let Operand::Consume(ref lval) = self.args[idx] {
                    *lvalue = lval.clone();
                } else {
                    bug!("Arg operand `{:?}` is not an Lvalue use.", idx)
                }
            }
        } else {
            self.super_lvalue(lvalue, _ctxt, _location)
        }
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        if let Operand::Consume(Lvalue::Local(arg)) = *operand {
            if let Some(idx) = self.arg_index(arg) {
                let new_arg = self.args[idx].clone();
                *operand = new_arg;
                return;
            }
        }
        self.super_operand(operand, location);
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Unreachable => { }
        }
    }

    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
        *scope = self.scope_map[*scope];
    }

    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        } else {
            self.super_literal(literal, loc);
        }
    }
}
801 }