//! Inlining pass for MIR functions

use rustc_attr as attr;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_session::config::Sanitizer;
use rustc_target::spec::abi::Abi;

use super::simplify::{remove_dead_blocks, CfgSimplifier};
use crate::transform::{MirPass, MirSource};
use std::collections::VecDeque;
use std::iter;

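// Rough cost model used by `should_inline` below: most retained MIR statements
// and terminators cost `INSTR_COST`, non-intrinsic calls and asserts cost
// `CALL_PENALTY`, unwind edges add `LANDINGPAD_PENALTY` or `RESUME_PENALTY`,
// and locals of unknown size add `UNKNOWN_SIZE_COST`. The total is compared
// against a threshold: `HINT_THRESHOLD` for `#[inline]`-hinted callees,
// `DEFAULT_THRESHOLD` otherwise.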
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const LANDINGPAD_PENALTY: usize = 50;
const RESUME_PENALTY: usize = 45;

const UNKNOWN_SIZE_COST: usize = 10;

pub struct Inline;

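/// A single direct, resolved call in the caller body that is a candidate for
/// inlining.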
#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: SubstsRef<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl<'tcx> MirPass<'tcx> for Inline {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) {
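        // Inlining is only attempted at `-Z mir-opt-level=2` or higher.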
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(body);
        }
    }
}

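/// Per-body context for the inlining pass: the `TyCtxt` plus the `MirSource`
/// currently being optimized.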
struct Inliner<'tcx> {
    tcx: TyCtxt<'tcx>,
    source: MirSource<'tcx>,
}

impl Inliner<'tcx> {
    fn run_pass(&self, caller_body: &mut Body<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try to fetch the fully optimized MIR of a
        // callee; if that succeeds, we can inline it, knowing that it
        // does not call back into us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file. =)

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env(self.source.def_id()).with_reveal_all();

        // Only do inlining into fn bodies.
        let id = self.tcx.hir().as_local_hir_id(self.source.def_id().expect_local());
        if self.tcx.hir().body_owner_kind(id).is_fn_or_closure() && self.source.promoted.is_none() {
            for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated() {
                if let Some(callsite) =
                    self.get_valid_function_call(bb, bb_data, caller_body, param_env)
                {
                    callsites.push_back(callsite);
                }
            }
        } else {
            return;
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
                    continue;
                }

                let callee_body = if let Some(callee_def_id) = callsite.callee.as_local() {
                    let callee_hir_id = self.tcx.hir().as_local_hir_id(callee_def_id);
                    let self_hir_id =
                        self.tcx.hir().as_local_hir_id(self.source.def_id().expect_local());
                    // Avoid a cycle here by only using `optimized_mir` if we have
                    // a lower `HirId` than the callee. This ensures that the callee will
                    // not inline us. This trick only works without incremental compilation,
                    // so don't do it if that is enabled.
                    if !self.tcx.dep_graph.is_fully_enabled() && self_hir_id < callee_hir_id {
                        self.tcx.optimized_mir(callsite.callee)
                    } else {
                        continue;
                    }
                } else {
                    // This cannot result in a cycle since the callee MIR is from another crate
                    // and is already optimized.
                    self.tcx.optimized_mir(callsite.callee)
                };

                let callee_body = if self.consider_optimizing(callsite, callee_body) {
                    self.tcx.subst_and_normalize_erasing_regions(
                        &callsite.substs,
                        param_env,
                        callee_body,
                    )
                } else {
                    continue;
                };

                // Copy only unevaluated constants from the callee_body into the caller_body.
                // Although only `ConstKind::Unevaluated` consts are pushed to
                // `required_consts`, the call to `subst_and_normalize_erasing_regions` above
                // may have turned some of them into other kinds, so filter again here.
                caller_body.required_consts.extend(
                    callee_body.required_consts.iter().copied().filter(|&constant| {
                        matches!(constant.literal.val, ConstKind::Unevaluated(_, _, _))
                    }),
                );

                let start = caller_body.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - body={:?}", callsite, callee_body);
                if !self.inline_call(callsite, caller_body, callee_body) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from the inlined function.
                for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated().skip(start) {
                    if let Some(new_callsite) =
                        self.get_valid_function_call(bb, bb_data, caller_body, param_env)
                    {
                        // Don't inline the same function multiple times.
                        if callsite.callee != new_callsite.callee {
                            callsites.push_back(new_callsite);
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_body).simplify();
            remove_dead_blocks(caller_body);
        }
    }

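    /// Produces a `CallSite` for the terminator of `bb` if it is a direct,
    /// resolvable, non-virtual `Call` and the block is not a cleanup block.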
    fn get_valid_function_call(
        &self,
        bb: BasicBlock,
        bb_data: &BasicBlockData<'tcx>,
        caller_body: &Body<'tcx>,
        param_env: ParamEnv<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Don't inline calls that are in cleanup blocks.
        if bb_data.is_cleanup {
            return None;
        }

        // Only consider direct calls to functions
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
            if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).kind {
                let instance =
                    Instance::resolve(self.tcx, param_env, callee_def_id, substs).ok().flatten()?;

                if let InstanceDef::Virtual(..) = instance.def {
                    return None;
                }

                return Some(CallSite {
                    callee: instance.def_id(),
                    substs: instance.substs,
                    bb,
                    location: terminator.source_info,
                });
            }
        }

        None
    }

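    /// Gates `should_inline` behind `tcx.consider_optimizing`, so inlining can
    /// also be switched off globally (e.g. when optimization fuel runs out).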
    fn consider_optimizing(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
        debug!("consider_optimizing({:?})", callsite);
        self.should_inline(callsite, callee_body)
            && self.tcx.consider_optimizing(|| {
                format!("Inline {:?} into {:?}", callee_body.span, callsite)
            })
    }

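    /// The size-based inlining heuristic: rejects callees that must never be
    /// inlined (generators, `#[track_caller]`, `#[inline(never)]`, certain
    /// `no_sanitize` functions), then estimates the callee's cost and compares
    /// it against the threshold computed below.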
    fn should_inline(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Cannot inline generators which haven't been transformed yet.
        if callee_body.yield_ty.is_some() {
            debug!("    yield ty present - not inlining");
            return false;
        }

        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);

        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::TRACK_CALLER) {
            debug!("`#[track_caller]` present - not inlining");
            return false;
        }

        // Avoid inlining functions marked as no_sanitize if a sanitizer is enabled,
        // since instrumentation might be enabled and performed on the caller.
        match self.tcx.sess.opts.debugging_opts.sanitizer {
            Some(Sanitizer::Address) => {
                if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_SANITIZE_ADDRESS) {
                    return false;
                }
            }
            Some(Sanitizer::Memory) => {
                if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_SANITIZE_MEMORY) {
                    return false;
                }
            }
            Some(Sanitizer::Thread) => {
                if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_SANITIZE_THREAD) {
                    return false;
                }
            }
            Some(Sanitizer::Leak) => {}
            None => {}
        }

        let hinted = match codegen_fn_attrs.inline {
            // Just treat inline(always) as a hint for now; there are cases
            // that prevent inlining that we need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("`#[inline(never)]` present - not inlining");
                return false;
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.non_erasable_generics().count() == 0 && !hinted {
                debug!("    callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted { HINT_THRESHOLD } else { DEFAULT_THRESHOLD };

        // Significantly lower the threshold for inlining cold functions.
        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks.
        // We normally have two or three blocks for even very small functions.
        if callee_body.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);
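        // For a rough sense of scale: with the default threshold of 50, a
        // callee whose cost amounts to four ordinary statements plus one
        // non-intrinsic call (4 * INSTR_COST + CALL_PENALTY = 45) fits, while
        // one extra unwind edge (+ LANDINGPAD_PENALTY) would push it over.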

        // FIXME: Give a bonus to functions with only a single caller.

        let param_env = tcx.param_env(self.source.def_id());

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) {
                continue;
            }
            let blk = &callee_body.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Nop => {}
                    _ => cost += INSTR_COST,
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind }
                | TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_body, tcx).subst(tcx, callsite.substs).ty;
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            cost += LANDINGPAD_PENALTY;
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
                    if first_block =>
                {
                    // If the function always diverges, don't inline
                    // unless the cost is zero.
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
                    if let ty::FnDef(def_id, _) = f.literal.ty.kind {
                        // Don't give intrinsics the extra penalty for calls.
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    } else {
                        cost += CALL_PENALTY;
                    }
                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Assert { cleanup, .. } => {
                    cost += CALL_PENALTY;

                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Resume => cost += RESUME_PENALTY,
                _ => cost += INSTR_COST,
            }

            if !is_drop {
                for &succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps: if we know the size,
        // use that; otherwise use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_body.vars_and_temps_iter() {
            let v = &callee_body.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine words, if we know it.
            if let Some(size) = type_size_of(tcx, param_env, ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                true
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                false
            }
        }
    }

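    /// Performs the actual inlining: splices the (already substituted) callee
    /// blocks into `caller_body`, remapping scopes and locals via `Integrator`,
    /// and replaces the `Call` terminator with a `Goto` into the inlined code.
    /// Returns `false` (leaving the terminator intact) if the call shape is not
    /// supported, e.g. for diverging calls.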
    fn inline_call(
        &self,
        callsite: CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        mut callee_body: Body<'tcx>,
    ) -> bool {
        let terminator = caller_body[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("inlined {:?} into {:?}", callsite.callee, self.source);

                let mut local_map = IndexVec::with_capacity(callee_body.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_body.source_scopes.len());

                for mut scope in callee_body.source_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        // FIXME(eddyb) is this really needed?
                        // (also note that it's always overwritten below)
                        scope.span = callee_body.span;
                    }

                    // FIXME(eddyb) this doesn't seem right at all.
                    // The inlined source scopes should probably be annotated as
                    // such, but also contain all of the original information.
                    scope.span = callsite.location.span;

                    let idx = caller_body.source_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_body.vars_and_temps_iter() {
                    let mut local = callee_body.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_body.local_decls.push(local);
                    local_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i: &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: Place<'_>) -> bool {
                    for elem in place.projection.iter() {
                        match elem {
                            ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
                            _ => {}
                        }
                    }

                    false
                }

                let dest = if dest_needs_borrow(destination.0) {
                    debug!("creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.lifetimes.re_erased,
                        BorrowKind::Mut { allow_two_phase_borrow: false },
                        destination.0,
                    );

                    let ty = dest.ty(caller_body, self.tcx);

                    let temp = LocalDecl::new(ty, callsite.location.span);

                    let tmp = caller_body.local_decls.push(temp);
                    let tmp = Place::from(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(box (tmp, dest)),
                    };
                    caller_body[callsite.bb].statements.push(stmt);
                    self.tcx.mk_place_deref(tmp)
                } else {
                    destination.0
                };

                let return_block = destination.1;

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body);

                let bb_len = caller_body.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false,
                    tcx: self.tcx,
                };

                for mut var_debug_info in callee_body.var_debug_info.drain(..) {
                    integrator.visit_var_debug_info(&mut var_debug_info);
                    caller_body.var_debug_info.push(var_debug_info);
                }

                for (bb, mut block) in callee_body.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_body.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) },
                };

                caller_body[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_body[callsite.bb].terminator =
                    Some(Terminator { source_info: terminator.source_info, kind });
                false
            }
        }
    }

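    /// Lowers the call's argument operands into a flat list of caller locals,
    /// spilling to fresh temporaries where necessary. For closure calls using
    /// the "rust-call" ABI, the tuple argument is additionally unpacked into
    /// one local per field, as described below.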
    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            assert!(args.next().is_none());

            let tuple = Place::from(tuple);
            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.kind {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
                // This is e.g., `tuple_tmp.0` in our example above.
                let tuple_field =
                    Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));

                // Spill to a local to make e.g., `tmp0`.
                self.create_temp_if_necessary(tuple_field, callsite, caller_body)
            });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
    ) -> Local {
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.

        if let Operand::Move(place) = &arg {
            if let Some(local) = place.as_local() {
                if caller_body.local_kind(local) == LocalKind::Temp {
                    // Reuse the operand if it's a temporary already.
                    return local;
                }
            }
        }

        debug!("creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg.
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_body, self.tcx);

        let arg_tmp = LocalDecl::new(ty, callsite.location.span);
        let arg_tmp = caller_body.local_decls.push(arg_tmp);

        let stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(box (Place::from(arg_tmp), arg)),
        };
        caller_body[callsite.bb].statements.push(stmt);
        arg_tmp
    }
}

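/// Returns the size of `ty` in bytes under `param_env`, if its layout can be
/// computed.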
fn type_size_of<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/// Integrator.
///
/// Integrates blocks from the callee function into the calling function.
/// Updates block indices, references to locals and other control flow
/// stuff.
struct Integrator<'a, 'tcx> {
    block_idx: usize,
    args: &'a [Local],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<SourceScope, SourceScope>,
    destination: Place<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
    tcx: TyCtxt<'tcx>,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
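    /// Shifts a block index from the callee body by the number of blocks the
    /// caller had before inlining, yielding the block's index in the combined
    /// body.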
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

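    /// Maps a local of the callee body into the caller body: `_0` (the return
    /// place) is redirected to the call destination, the argument locals map to
    /// the spilled argument temporaries in `args`, and every other local goes
    /// through `local_map`.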
    fn make_integrate_local(&self, local: Local) -> Local {
        if local == RETURN_PLACE {
            return self.destination.local;
        }

        let idx = local.index() - 1;
        if idx < self.args.len() {
            return self.args[idx];
        }

        self.local_map[Local::new(idx - self.args.len())]
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
        *local = self.make_integrate_local(*local);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
        let dest_proj_len = self.destination.projection.len();
        if place.local == RETURN_PLACE && dest_proj_len > 0 {
            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
            projs.extend(self.destination.projection);
            projs.extend(place.projection);

            place.projection = self.tcx.intern_place_elems(&*projs);
        }
        // Handles integrating any locals that occur in the base
        // or projections
        self.super_place(place, context, location)
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_terminator_kind(&mut self, kind: &mut TerminatorKind<'tcx>, loc: Location) {
        // Don't try to modify the implicit `_0` access on return (`return` terminators are
        // replaced down below anyway).
        if !matches!(kind, TerminatorKind::Return) {
            self.super_terminator_kind(kind, loc);
        }

        match *kind {
            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => {}
            TerminatorKind::Unreachable => {}
            TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_target } => {
                *real_target = self.update_target(*real_target);
                *imaginary_target = self.update_target(*imaginary_target);
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
            // see the ordering of passes in the optimized_mir query.
            {
                bug!("False unwinds should have been removed before inlining")
            }
        }
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.scope_map[*scope];
    }
}