// compiler/rustc_mir/src/transform/inline.rs
//! Inlining pass for MIR functions
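//!
//! At a high level: the pass walks every basic block of the caller looking for
//! direct `Call` terminators, uses a simple cost model to decide whether the
//! resolved callee is worth inlining, and, if it is, splices the renumbered
//! callee body into the caller and recurses into the freshly added blocks.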

use rustc_attr as attr;
use rustc_hir as hir;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_span::{hygiene::ExpnKind, ExpnData, Span};
use rustc_target::spec::abi::Abi;

use super::simplify::{remove_dead_blocks, CfgSimplifier};
use crate::transform::MirPass;
use std::iter;
use std::ops::{Range, RangeFrom};

crate mod cycle;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const LANDINGPAD_PENALTY: usize = 50;
const RESUME_PENALTY: usize = 45;

const UNKNOWN_SIZE_COST: usize = 10;

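// A rough illustration of how these weights combine (the precise accounting is
// in `should_inline` below): a callee made up of `n` ordinary statements, one
// non-intrinsic call with a cleanup edge, and a `Return` costs about
// `n * INSTR_COST + CALL_PENALTY + LANDINGPAD_PENALTY + INSTR_COST`, plus one
// unit per machine word of sized local storage.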
pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: Instance<'tcx>,
    fn_sig: ty::PolyFnSig<'tcx>,
    block: BasicBlock,
    target: Option<BasicBlock>,
    source_info: SourceInfo,
}

impl<'tcx> MirPass<'tcx> for Inline {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // If you change this optimization level, also change the level in
        // `mir_drops_elaborated_and_const_checked` for the call to `mir_inliner_callees`.
        // Otherwise you will get an ICE about stolen MIR.
        if tcx.sess.opts.debugging_opts.mir_opt_level < 2 {
            return;
        }

        if tcx.sess.opts.debugging_opts.instrument_coverage {
            // Since `Inline` happens after `InstrumentCoverage`, the function-specific coverage
            // counters can be invalidated, such as by merging coverage counter statements from
            // a pre-inlined function into a different function. This kind of change is invalid,
            // so inlining must be skipped. Note: This check is performed here so inlining can
            // be disabled without preventing other optimizations (regardless of `mir_opt_level`).
            return;
        }

        let span = trace_span!("inline", body = %tcx.def_path_str(body.source.def_id()));
        let _guard = span.enter();
        if inline(tcx, body) {
            debug!("running simplify cfg on {:?}", body.source);
            CfgSimplifier::new(body).simplify();
            remove_dead_blocks(body);
        }
    }
}

fn inline(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
    let def_id = body.source.def_id();
    let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());

    // Only do inlining into fn bodies.
    if !tcx.hir().body_owner_kind(hir_id).is_fn_or_closure() {
        return false;
    }
    if body.source.promoted.is_some() {
        return false;
    }

    let mut this = Inliner {
        tcx,
        param_env: tcx.param_env_reveal_all_normalized(body.source.def_id()),
        codegen_fn_attrs: tcx.codegen_fn_attrs(body.source.def_id()),
        hir_id,
        history: Vec::new(),
        changed: false,
    };
    let blocks = BasicBlock::new(0)..body.basic_blocks().next_index();
    this.process_blocks(body, blocks);
    this.changed
}

struct Inliner<'tcx> {
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// Caller codegen attributes.
    codegen_fn_attrs: &'tcx CodegenFnAttrs,
    /// Caller `HirId`.
    hir_id: hir::HirId,
    /// Stack of inlined Instances.
    history: Vec<ty::Instance<'tcx>>,
    /// Indicates that the caller body has been modified.
    changed: bool,
}

impl Inliner<'tcx> {
    fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBlock>) {
        for bb in blocks {
            let callsite = match self.get_valid_function_call(bb, &caller_body[bb], caller_body) {
                None => continue,
                Some(it) => it,
            };
            let span = trace_span!("process_blocks", %callsite.callee, ?bb);
            let _guard = span.enter();

            trace!(
                "checking for self recursion ({:?} vs body_source: {:?})",
                callsite.callee.def_id(),
                caller_body.source.def_id()
            );
            if callsite.callee.def_id() == caller_body.source.def_id() {
                debug!("Not inlining a function into itself");
                continue;
            }

            if !self.is_mir_available(callsite.callee, caller_body) {
                debug!("MIR unavailable {}", callsite.callee);
                continue;
            }

            let span = trace_span!("instance_mir", %callsite.callee);
            let instance_mir_guard = span.enter();
            let callee_body = self.tcx.instance_mir(callsite.callee.def);
            drop(instance_mir_guard);
            if !self.should_inline(callsite, callee_body) {
                continue;
            }

            if !self.tcx.consider_optimizing(|| {
                format!("Inline {:?} into {}", callee_body.span, callsite.callee)
            }) {
                return;
            }

            let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions(
                self.tcx,
                self.param_env,
                callee_body.clone(),
            );

            let old_blocks = caller_body.basic_blocks().next_index();
            self.inline_call(callsite, caller_body, callee_body);
            let new_blocks = old_blocks..caller_body.basic_blocks().next_index();
            self.changed = true;

            self.history.push(callsite.callee);
            self.process_blocks(caller_body, new_blocks);
            self.history.pop();
        }
    }

    #[instrument(skip(self, caller_body))]
    fn is_mir_available(&self, callee: Instance<'tcx>, caller_body: &Body<'tcx>) -> bool {
        match callee.def {
            InstanceDef::Item(_) => {
                // If there is no MIR available (either because it was not in metadata or
                // because it is an extern function with no MIR body), then the inliner
                // won't cause cycles on this.
                if !self.tcx.is_mir_available(callee.def_id()) {
                    return false;
                }
            }
            // These have no callable MIR of their own.
            InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => return false,
            // This cannot result in an immediate cycle since the callee MIR is a shim, which does
            // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we
            // do not need to catch this here; we can wait until the inliner decides to continue
            // inlining a second time.
            InstanceDef::VtableShim(_)
            | InstanceDef::ReifyShim(_)
            | InstanceDef::FnPtrShim(..)
            | InstanceDef::ClosureOnceShim { .. }
            | InstanceDef::DropGlue(..)
            | InstanceDef::CloneShim(..) => return true,
        }

        if self.tcx.is_constructor(callee.def_id()) {
            trace!("constructors always have MIR");
            // Constructor functions cannot cause a query cycle.
            return true;
        }

        if let Some(callee_def_id) = callee.def_id().as_local() {
            let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id);
            // Avoid inlining into generators,
            // since their `optimized_mir` is used for layout computation, which can
            // create a cycle, even when no attempt is made to inline the function
            // in the other direction.
            caller_body.generator_kind.is_none()
                && (
                    // Avoid a cycle here by only using `instance_mir` if we have
                    // a lower `HirId` than the callee. This ensures that the callee will
                    // not inline us. This trick only works without incremental compilation,
                    // so don't do it if that is enabled.
                    !self.tcx.dep_graph.is_fully_enabled()
                && self.hir_id < callee_hir_id
                // If we know for sure that the function we're calling will itself try to
                // call us, then we avoid inlining that function.
                || !self.tcx.mir_callgraph_reachable((callee, caller_body.source.def_id().expect_local()))
                )
        } else {
            // This cannot result in an immediate cycle since the callee MIR is from another crate
            // and is already optimized. Any subsequent inlining may cause cycles, but we do
            // not need to catch this here; we can wait until the inliner decides to continue
            // inlining a second time.
            trace!("functions from other crates always have MIR");
            true
        }
    }

    fn get_valid_function_call(
        &self,
        bb: BasicBlock,
        bb_data: &BasicBlockData<'tcx>,
        caller_body: &Body<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Don't inline calls that are in cleanup blocks.
        if bb_data.is_cleanup {
            return None;
        }

        // Only consider direct calls to functions.
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {
            let func_ty = func.ty(caller_body, self.tcx);
            if let ty::FnDef(def_id, substs) = *func_ty.kind() {
                // To resolve an instance, its substs have to be fully normalized.
                let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                let callee =
                    Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;

                if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                    return None;
                }

                let fn_sig = self.tcx.fn_sig(def_id).subst(self.tcx, substs);

                return Some(CallSite {
                    callee,
                    fn_sig,
                    block: bb,
                    target: destination.map(|(_, target)| target),
                    source_info: terminator.source_info,
                });
            }
        }

        None
    }

    #[instrument(skip(self, callee_body))]
    fn should_inline(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
        let tcx = self.tcx;

        if callsite.fn_sig.c_variadic() {
            debug!("callee is variadic - not inlining");
            return false;
        }

        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee.def_id());

        let self_features = &self.codegen_fn_attrs.target_features;
        let callee_features = &codegen_fn_attrs.target_features;
        if callee_features.iter().any(|feature| !self_features.contains(feature)) {
            debug!("callee has extra target features - not inlining");
            return false;
        }

        if self.codegen_fn_attrs.no_sanitize != codegen_fn_attrs.no_sanitize {
            debug!("callee has incompatible no_sanitize attribute - not inlining");
            return false;
        }

        if self.codegen_fn_attrs.instruction_set != codegen_fn_attrs.instruction_set {
            debug!("callee has incompatible instruction set - not inlining");
            return false;
        }

        let hinted = match codegen_fn_attrs.inline {
            // Just treat inline(always) as a hint for now;
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("`#[inline(never)]` present - not inlining");
                return false;
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.def_id().is_local() {
            if callsite.callee.substs.non_erasable_generics().count() == 0 && !hinted {
                debug!("    callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted {
            self.tcx.sess.opts.debugging_opts.inline_mir_hint_threshold
        } else {
            self.tcx.sess.opts.debugging_opts.inline_mir_threshold
        };

        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
            debug!("#[naked] present - not inlining");
            return false;
        }

        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            debug!("#[cold] present - not inlining");
            return false;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even
        // very small functions.
        if callee_body.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
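        // For example, a threshold of 50 is bumped to 62 here (integer division)
        // for a callee with at most three basic blocks.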
        debug!("    final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller.
        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) {
                continue;
            }
            let blk = &callee_body.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Nop => {}
                    _ => cost += INSTR_COST,
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref place, target, unwind }
                | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the place doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = callsite.callee.subst_mir(self.tcx, &place.ty(callee_body, tcx).ty);
                    if ty.needs_drop(tcx, self.param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            cost += LANDINGPAD_PENALTY;
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
                    if first_block =>
                {
                    // If the function always diverges, don't inline
                    // unless the cost is zero.
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
                    if let ty::FnDef(def_id, substs) =
                        *callsite.callee.subst_mir(self.tcx, &f.literal.ty).kind()
                    {
                        let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                        if let Ok(Some(instance)) =
                            Instance::resolve(self.tcx, self.param_env, def_id, substs)
                        {
                            if callsite.callee.def_id() == instance.def_id()
                                || self.history.contains(&instance)
                            {
                                debug!("callee is recursive - not inlining");
                                return false;
                            }
                        }
                        // Don't give intrinsics the extra penalty for calls.
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    } else {
                        cost += CALL_PENALTY;
                    }
                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Assert { cleanup, .. } => {
                    cost += CALL_PENALTY;

                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Resume => cost += RESUME_PENALTY,
                _ => cost += INSTR_COST,
            }

            if !is_drop {
                for &succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps. If we know the size,
        // use that; otherwise, use a moderately large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_body.vars_and_temps_iter() {
            let ty = callsite.callee.subst_mir(self.tcx, &callee_body.local_decls[v].ty);
            // Cost of the var is the size in machine words, if we know it.
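            // (The word count below rounds up: with 8-byte pointers, a 12-byte
            // local counts as 2 machine words, for example.)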
            if let Some(size) = type_size_of(tcx, self.param_env, ty) {
                cost += ((size + ptr_size - 1) / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                true
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                false
            }
        }
    }

    fn inline_call(
        &self,
        callsite: CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        mut callee_body: Body<'tcx>,
    ) {
        let terminator = caller_body[callsite.block].terminator.take().unwrap();
        match terminator.kind {
            TerminatorKind::Call { args, destination, cleanup, .. } => {
                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this, we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: Place<'_>) -> bool {
                    for elem in place.projection.iter() {
                        match elem {
                            ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
                            _ => {}
                        }
                    }

                    false
                }

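                // A sketch of the borrowed case: the caller gains a statement
                // roughly like `temp = &mut a[*i]` ahead of the inlined body,
                // and the body writes through `*temp`, so the index is
                // evaluated exactly once.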
                let dest = if let Some((destination_place, _)) = destination {
                    if dest_needs_borrow(destination_place) {
                        trace!("creating temp for return destination");
                        let dest = Rvalue::Ref(
                            self.tcx.lifetimes.re_erased,
                            BorrowKind::Mut { allow_two_phase_borrow: false },
                            destination_place,
                        );
                        let dest_ty = dest.ty(caller_body, self.tcx);
                        let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::Assign(box (temp, dest)),
                        });
                        self.tcx.mk_place_deref(temp)
                    } else {
                        destination_place
                    }
                } else {
                    trace!("creating temp for return place");
                    Place::from(self.new_call_temp(caller_body, &callsite, callee_body.return_ty()))
                };

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);

                let mut integrator = Integrator {
                    args: &args,
                    new_locals: Local::new(caller_body.local_decls.len())..,
                    new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
                    new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
                    destination: dest,
                    return_block: callsite.target,
                    cleanup_block: cleanup,
                    in_cleanup_block: false,
                    tcx: self.tcx,
                    callsite_span: callsite.source_info.span,
                    body_span: callee_body.span,
                    always_live_locals: BitSet::new_filled(callee_body.local_decls.len()),
                };

                // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones
                // (or existing ones, in a few special cases) in the caller.
                integrator.visit_body(&mut callee_body);

                for scope in &mut callee_body.source_scopes {
                    // FIXME(eddyb) move this into a `fn visit_scope_data` in `Integrator`.
                    if scope.parent_scope.is_none() {
                        let callsite_scope = &caller_body.source_scopes[callsite.source_info.scope];

                        // Attach the outermost callee scope as a child of the callsite
                        // scope, via the `parent_scope` and `inlined_parent_scope` chains.
                        scope.parent_scope = Some(callsite.source_info.scope);
                        assert_eq!(scope.inlined_parent_scope, None);
                        scope.inlined_parent_scope = if callsite_scope.inlined.is_some() {
                            Some(callsite.source_info.scope)
                        } else {
                            callsite_scope.inlined_parent_scope
                        };

                        // Mark the outermost callee scope as an inlined one.
                        assert_eq!(scope.inlined, None);
                        scope.inlined = Some((callsite.callee, callsite.source_info.span));
                    } else if scope.inlined_parent_scope.is_none() {
                        // Make it easy to find the scope with `inlined` set above.
                        scope.inlined_parent_scope =
                            Some(integrator.map_scope(OUTERMOST_SOURCE_SCOPE));
                    }
                }

                // If there are any locals without storage markers, give them storage only for the
                // duration of the call.
                for local in callee_body.vars_and_temps_iter() {
                    if integrator.always_live_locals.contains(local) {
                        let new_local = integrator.map_local(local);
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::StorageLive(new_local),
                        });
                    }
                }
                if let Some(block) = callsite.target {
                    // To avoid repeated O(n) insert, push any new statements to the end and rotate
                    // the slice once.
                    let mut n = 0;
                    for local in callee_body.vars_and_temps_iter().rev() {
                        if integrator.always_live_locals.contains(local) {
                            let new_local = integrator.map_local(local);
                            caller_body[block].statements.push(Statement {
                                source_info: callsite.source_info,
                                kind: StatementKind::StorageDead(new_local),
                            });
                            n += 1;
                        }
                    }
                    caller_body[block].statements.rotate_right(n);
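                    // `rotate_right(n)` moves those `n` trailing `StorageDead`
                    // statements to the front of the target block in one pass.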
                }

                // Insert all of the (mapped) parts of the callee body into the caller.
                caller_body.local_decls.extend(
                    // FIXME(eddyb) make `Range<Local>` iterable so that we can use
                    // `callee_body.local_decls.drain(callee_body.vars_and_temps())`
                    callee_body
                        .vars_and_temps_iter()
                        .map(|local| callee_body.local_decls[local].clone()),
                );
                caller_body.source_scopes.extend(callee_body.source_scopes.drain(..));
                caller_body.var_debug_info.extend(callee_body.var_debug_info.drain(..));
                caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));

                caller_body[callsite.block].terminator = Some(Terminator {
                    source_info: callsite.source_info,
                    kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) },
                });

                // Copy only unevaluated constants from the callee_body into the caller_body.
                // Although we are only pushing `ConstKind::Unevaluated` consts to
                // `required_consts`, here we may not only have `ConstKind::Unevaluated`
                // because we are calling `subst_and_normalize_erasing_regions`.
                caller_body.required_consts.extend(
                    callee_body.required_consts.iter().copied().filter(|&constant| {
                        matches!(constant.literal.val, ConstKind::Unevaluated(_, _, _))
                    }),
                );
            }
            kind => bug!("unexpected terminator kind {:?}", kind),
        }
    }

    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        callee_body: &Body<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
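        //
        // (At the source level this corresponds to a call like `closure(a, b, c)`
        // being dispatched through `Fn::call(&closure, (a, b, c))`.)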
        if callsite.fn_sig.abi() == Abi::RustCall && callee_body.spread_arg.is_none() {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            assert!(args.next().is_none());

            let tuple = Place::from(tuple);
            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.kind() {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
                // This is e.g., `tuple_tmp.0` in our example above.
                let tuple_field =
                    Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));

                // Spill to a local to make e.g., `tmp0`.
                self.create_temp_if_necessary(tuple_field, callsite, caller_body)
            });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
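    ///
    /// For example, a constant argument or a copy of a user variable is spilled
    /// into a fresh temporary, while a moved MIR temporary is reused as-is.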
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
    ) -> Local {
        // Reuse the operand if it is a moved temporary.
        if let Operand::Move(place) = &arg {
            if let Some(local) = place.as_local() {
                if caller_body.local_kind(local) == LocalKind::Temp {
                    return local;
                }
            }
        }

        // Otherwise, create a temporary for the argument.
        trace!("creating temp for argument {:?}", arg);
        let arg_ty = arg.ty(caller_body, self.tcx);
        let local = self.new_call_temp(caller_body, callsite, arg_ty);
        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::Assign(box (Place::from(local), Rvalue::Use(arg))),
        });
        local
    }

    /// Introduces a new temporary into the caller body that is live for the duration of the call.
    fn new_call_temp(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        ty: Ty<'tcx>,
    ) -> Local {
        let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span));

        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::StorageLive(local),
        });

        if let Some(block) = callsite.target {
            caller_body[block].statements.insert(
                0,
                Statement {
                    source_info: callsite.source_info,
                    kind: StatementKind::StorageDead(local),
                },
            );
        }

        local
    }
}

fn type_size_of<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/**
 * Integrator.
 *
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals, and other control-flow
 * bookkeeping.
*/
struct Integrator<'a, 'tcx> {
    args: &'a [Local],
    new_locals: RangeFrom<Local>,
    new_scopes: RangeFrom<SourceScope>,
    new_blocks: RangeFrom<BasicBlock>,
    destination: Place<'tcx>,
    return_block: Option<BasicBlock>,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
    tcx: TyCtxt<'tcx>,
    callsite_span: Span,
    body_span: Span,
    always_live_locals: BitSet<Local>,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
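    // Example mapping for a call with two arguments: `_0` maps to the call's
    // destination local, `_1` and `_2` map to the prepared argument locals, and
    // every later callee local is shifted up into the caller's `new_locals`.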
    fn map_local(&self, local: Local) -> Local {
        let new = if local == RETURN_PLACE {
            self.destination.local
        } else {
            let idx = local.index() - 1;
            if idx < self.args.len() {
                self.args[idx]
            } else {
                Local::new(self.new_locals.start.index() + (idx - self.args.len()))
            }
        };
        trace!("mapping local `{:?}` to `{:?}`", local, new);
        new
    }

    fn map_scope(&self, scope: SourceScope) -> SourceScope {
        let new = SourceScope::new(self.new_scopes.start.index() + scope.index());
        trace!("mapping scope `{:?}` to `{:?}`", scope, new);
        new
    }

    fn map_block(&self, block: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(self.new_blocks.start.index() + block.index());
        trace!("mapping block `{:?}` to `{:?}`", block, new);
        new
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
        *local = self.map_local(*local);
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.map_scope(*scope);
    }

    fn visit_span(&mut self, span: &mut Span) {
        let mut expn_data =
            ExpnData::default(ExpnKind::Inlined, *span, self.tcx.sess.edition(), None);
        expn_data.def_site = self.body_span;
        // Make sure that all spans track the fact that they were inlined.
        *span = self.callsite_span.fresh_expansion(expn_data);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        for elem in place.projection {
            // FIXME: Make sure that return place is not used in an indexing projection, since it
            // won't be rebased as it is supposed to be.
            assert_ne!(ProjectionElem::Index(RETURN_PLACE), elem);
        }

        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
        let dest_proj_len = self.destination.projection.len();
        if place.local == RETURN_PLACE && dest_proj_len > 0 {
            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
            projs.extend(self.destination.projection);
            projs.extend(place.projection);

            place.projection = self.tcx.intern_place_elems(&*projs);
        }
        // Handles integrating any locals that occur in the base
        // or projections.
        self.super_place(place, context, location)
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::StorageLive(local) | StatementKind::StorageDead(local) =
            statement.kind
        {
            self.always_live_locals.remove(local);
        }
        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, loc: Location) {
        // Don't try to modify the implicit `_0` access on return (`return` terminators are
        // replaced down below anyway).
        if !matches!(terminator.kind, TerminatorKind::Return) {
            self.super_terminator(terminator, loc);
        }

        match terminator.kind {
            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.map_block(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets.all_targets_mut() {
                    *tgt = self.map_block(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.map_block(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block.
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.map_block(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block.
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.map_block(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block.
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                terminator.kind = if let Some(tgt) = self.return_block {
                    TerminatorKind::Goto { target: tgt }
                } else {
                    TerminatorKind::Unreachable
                }
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    terminator.kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => {}
            TerminatorKind::Unreachable => {}
            TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
                *real_target = self.map_block(*real_target);
                *imaginary_target = self.map_block(*imaginary_target);
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
            // See the ordering of passes in the optimized_mir query.
            {
                bug!("False unwinds should have been removed before inlining")
            }
            TerminatorKind::InlineAsm { ref mut destination, .. } => {
                if let Some(ref mut tgt) = *destination {
                    *tgt = self.map_block(*tgt);
                }
            }
        }
    }
}