//! Inlining pass for MIR functions

use rustc_attr::InlineAttr;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_span::{hygiene::ExpnKind, ExpnData, LocalExpnId, Span};
use rustc_target::spec::abi::Abi;

use super::simplify::{remove_dead_blocks, CfgSimplifier};
use crate::MirPass;
use std::iter;
use std::ops::{Range, RangeFrom};

crate mod cycle;

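// Abstract cost weights used by `check_mir_body` when comparing a callee against the
// inline threshold: ordinary statements and terminators cost `INSTR_COST`, calls,
// asserts, and meaningful drops cost `CALL_PENALTY`, and unwind-related edges cost
// `LANDINGPAD_PENALTY` / `RESUME_PENALTY`. `UNKNOWN_SIZE_COST` is charged for each
// local whose layout (and hence size) cannot be computed.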
const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const LANDINGPAD_PENALTY: usize = 50;
const RESUME_PENALTY: usize = 45;

const UNKNOWN_SIZE_COST: usize = 10;

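/// The MIR inlining pass: replaces suitable `Call` terminators with the callee's body.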
pub struct Inline;

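/// A resolved direct call that is a candidate for inlining.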
#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: Instance<'tcx>,
    fn_sig: ty::PolyFnSig<'tcx>,
    block: BasicBlock,
    target: Option<BasicBlock>,
    source_info: SourceInfo,
}

impl<'tcx> MirPass<'tcx> for Inline {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        if let Some(enabled) = sess.opts.debugging_opts.inline_mir {
            return enabled;
        }

        sess.opts.mir_opt_level() >= 3
    }

    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        let span = trace_span!("inline", body = %tcx.def_path_str(body.source.def_id()));
        let _guard = span.enter();
        if inline(tcx, body) {
            debug!("running simplify cfg on {:?}", body.source);
            CfgSimplifier::new(body).simplify();
            remove_dead_blocks(tcx, body);
        }
    }
}

fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
    let def_id = body.source.def_id().expect_local();

    // Only do inlining into fn bodies.
    if !tcx.hir().body_owner_kind(def_id).is_fn_or_closure() {
        return false;
    }
    if body.source.promoted.is_some() {
        return false;
    }
    // Avoid inlining into generators, since their `optimized_mir` is used for layout computation,
    // which can create a cycle, even when no attempt is made to inline the function in the other
    // direction.
    if body.generator.is_some() {
        return false;
    }

    let param_env = tcx.param_env_reveal_all_normalized(def_id);
    let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
    let param_env = rustc_trait_selection::traits::normalize_param_env_or_error(
        tcx,
        def_id.to_def_id(),
        param_env,
        ObligationCause::misc(body.span, hir_id),
    );

    let mut this = Inliner {
        tcx,
        param_env,
        codegen_fn_attrs: tcx.codegen_fn_attrs(def_id),
        history: Vec::new(),
        changed: false,
    };
    let blocks = BasicBlock::new(0)..body.basic_blocks().next_index();
    this.process_blocks(body, blocks);
    this.changed
}

struct Inliner<'tcx> {
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// Caller codegen attributes.
    codegen_fn_attrs: &'tcx CodegenFnAttrs,
    /// Stack of inlined Instances.
    history: Vec<ty::Instance<'tcx>>,
    /// Indicates that the caller body has been modified.
    changed: bool,
}

impl<'tcx> Inliner<'tcx> {
    fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBlock>) {
        for bb in blocks {
            let bb_data = &caller_body[bb];
            if bb_data.is_cleanup {
                continue;
            }

            let Some(callsite) = self.resolve_callsite(caller_body, bb, bb_data) else {
                continue;
            };

            let span = trace_span!("process_blocks", %callsite.callee, ?bb);
            let _guard = span.enter();

            match self.try_inlining(caller_body, &callsite) {
                Err(reason) => {
                    debug!("not-inlined {} [{}]", callsite.callee, reason);
                    continue;
                }
                Ok(new_blocks) => {
                    debug!("inlined {}", callsite.callee);
                    self.changed = true;
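                    // Recurse into the freshly inlined blocks so nested calls are considered
                    // too; `history` records the inlined instance so that `check_mir_body`
                    // can reject it if it shows up again further down this call chain.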
                    self.history.push(callsite.callee);
                    self.process_blocks(caller_body, new_blocks);
                    self.history.pop();
                }
            }
        }
    }

    /// Attempts to inline a callsite into the caller body. When successful, returns the range of
    /// basic blocks containing the inlined body. Otherwise, returns an error describing why
    /// inlining didn't take place.
    fn try_inlining(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
    ) -> Result<std::ops::Range<BasicBlock>, &'static str> {
        let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id());
        self.check_codegen_attributes(callsite, callee_attrs)?;
        self.check_mir_is_available(caller_body, &callsite.callee)?;
        let callee_body = self.tcx.instance_mir(callsite.callee.def);
        self.check_mir_body(callsite, callee_body, callee_attrs)?;

        if !self.tcx.consider_optimizing(|| {
            format!("Inline {:?} into {:?}", callsite.callee, caller_body.source)
        }) {
            return Err("optimization fuel exhausted");
        }

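        // Clone the callee body and substitute the callsite's generic arguments into it,
        // normalizing projections and erasing regions, so the MIR we splice in is fully
        // monomorphic with respect to this particular call.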
        let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions(
            self.tcx,
            self.param_env,
            callee_body.clone(),
        );

        let old_blocks = caller_body.basic_blocks().next_index();
        self.inline_call(caller_body, &callsite, callee_body);
        let new_blocks = old_blocks..caller_body.basic_blocks().next_index();

        Ok(new_blocks)
    }

    fn check_mir_is_available(
        &self,
        caller_body: &Body<'tcx>,
        callee: &Instance<'tcx>,
    ) -> Result<(), &'static str> {
        let caller_def_id = caller_body.source.def_id();
        let callee_def_id = callee.def_id();
        if callee_def_id == caller_def_id {
            return Err("self-recursion");
        }

        match callee.def {
            InstanceDef::Item(_) => {
                // If there is no MIR available (either because it was not in metadata or
                // because it has no MIR because it's an extern function), then the inliner
                // won't cause cycles on this.
                if !self.tcx.is_mir_available(callee_def_id) {
                    return Err("item MIR unavailable");
                }
            }
            // These have no callable MIR of their own.
            InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => {
                return Err("instance without MIR (intrinsic / virtual)");
            }
            // This cannot result in an immediate cycle since the callee MIR is a shim, which does
            // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we
            // do not need to catch this here, we can wait until the inliner decides to continue
            // inlining a second time.
            InstanceDef::VtableShim(_)
            | InstanceDef::ReifyShim(_)
            | InstanceDef::FnPtrShim(..)
            | InstanceDef::ClosureOnceShim { .. }
            | InstanceDef::DropGlue(..)
            | InstanceDef::CloneShim(..) => return Ok(()),
        }

        if self.tcx.is_constructor(callee_def_id) {
            trace!("constructors always have MIR");
            // Constructor functions cannot cause a query cycle.
            return Ok(());
        }

        if callee_def_id.is_local() {
            // Avoid a cycle here by using `instance_mir` only if we have a lower
            // `DefPathHash` than the callee. This ensures that the callee will
            // not inline us. This trick even works with incremental compilation,
            // since `DefPathHash` is stable.
            if self.tcx.def_path_hash(caller_def_id).local_hash()
                < self.tcx.def_path_hash(callee_def_id).local_hash()
            {
                return Ok(());
            }

            // If we know for sure that the function we're calling will itself try to
            // call us, then we avoid inlining that function.
            if self.tcx.mir_callgraph_reachable((*callee, caller_def_id.expect_local())) {
                return Err("caller might be reachable from callee (query cycle avoidance)");
            }

            Ok(())
        } else {
            // This cannot result in an immediate cycle since the callee MIR is from another crate
            // and is already optimized. Any subsequent inlining may cause cycles, but we do
            // not need to catch this here, we can wait until the inliner decides to continue
            // inlining a second time.
            trace!("functions from other crates always have MIR");
            Ok(())
        }
    }

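    /// Resolves the callee of a terminator to a concrete `Instance`, returning a `CallSite`
    /// only for direct calls whose instance has callable MIR (i.e. not virtual calls or
    /// intrinsics).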
    fn resolve_callsite(
        &self,
        caller_body: &Body<'tcx>,
        bb: BasicBlock,
        bb_data: &BasicBlockData<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Only consider direct calls to functions
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {
            let func_ty = func.ty(caller_body, self.tcx);
            if let ty::FnDef(def_id, substs) = *func_ty.kind() {
                // To resolve an instance, its substs have to be fully normalized.
                let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                let callee =
                    Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;

                if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                    return None;
                }

                let fn_sig = self.tcx.bound_fn_sig(def_id).subst(self.tcx, substs);

                return Some(CallSite {
                    callee,
                    fn_sig,
                    block: bb,
                    target: destination.map(|(_, target)| target),
                    source_info: terminator.source_info,
                });
            }
        }

        None
    }

    /// Returns an error if inlining is not possible based on codegen attributes alone. A success
    /// indicates that the inlining decision should be based on other criteria.
    fn check_codegen_attributes(
        &self,
        callsite: &CallSite<'tcx>,
        callee_attrs: &CodegenFnAttrs,
    ) -> Result<(), &'static str> {
        if let InlineAttr::Never = callee_attrs.inline {
            return Err("never inline hint");
        }

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.def_id().is_local() {
            let is_generic = callsite.callee.substs.non_erasable_generics().next().is_some();
            if !is_generic && !callee_attrs.requests_inline() {
                return Err("not exported");
            }
        }

        if callsite.fn_sig.c_variadic() {
            return Err("C variadic");
        }

        if callee_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
            return Err("naked");
        }

        if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            return Err("cold");
        }

        if callee_attrs.no_sanitize != self.codegen_fn_attrs.no_sanitize {
            return Err("incompatible sanitizer set");
        }

        if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set {
            return Err("incompatible instruction set");
        }

        for feature in &callee_attrs.target_features {
            if !self.codegen_fn_attrs.target_features.contains(feature) {
                return Err("incompatible target feature");
            }
        }

        Ok(())
    }

    /// Returns the inlining decision based on an examination of the callee's MIR body.
    /// Assumes that codegen attributes have been checked for compatibility already.
    #[instrument(level = "debug", skip(self, callee_body))]
    fn check_mir_body(
        &self,
        callsite: &CallSite<'tcx>,
        callee_body: &Body<'tcx>,
        callee_attrs: &CodegenFnAttrs,
    ) -> Result<(), &'static str> {
        let tcx = self.tcx;

        let mut threshold = if callee_attrs.requests_inline() {
            self.tcx.sess.opts.debugging_opts.inline_mir_hint_threshold.unwrap_or(100)
        } else {
            self.tcx.sess.opts.debugging_opts.inline_mir_threshold.unwrap_or(50)
        };

        // Give a bonus to functions with a small number of blocks.
        // We normally have two or three blocks for even
        // very small functions.
        if callee_body.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller
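        // `first_block` tracks whether we are still looking at the entry block; the
        // diverging-terminator check in the loop below applies only there.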
        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) {
                continue;
            }
            let blk = &callee_body.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Deinit(_)
                    | StatementKind::Nop => {}
                    _ => cost += INSTR_COST,
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref place, target, unwind }
                | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the place doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = callsite.callee.subst_mir(self.tcx, &place.ty(callee_body, tcx).ty);
                    if ty.needs_drop(tcx, self.param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            cost += LANDINGPAD_PENALTY;
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
                    if first_block =>
                {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
                    if let ty::FnDef(def_id, substs) =
                        *callsite.callee.subst_mir(self.tcx, &f.literal.ty()).kind()
                    {
                        let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                        if let Ok(Some(instance)) =
                            Instance::resolve(self.tcx, self.param_env, def_id, substs)
                        {
                            if callsite.callee.def_id() == instance.def_id() {
                                return Err("self-recursion");
                            } else if self.history.contains(&instance) {
                                return Err("already inlined");
                            }
                        }
                        // Don't give intrinsics the extra penalty for calls
                        if tcx.is_intrinsic(def_id) {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    } else {
                        cost += CALL_PENALTY;
                    }
                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Assert { cleanup, .. } => {
                    cost += CALL_PENALTY;

                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Resume => cost += RESUME_PENALTY,
                TerminatorKind::InlineAsm { cleanup, .. } => {
                    cost += INSTR_COST;

                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                _ => cost += INSTR_COST,
            }

            if !is_drop {
                for succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps. If we know the size,
        // use that; otherwise, use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_body.vars_and_temps_iter() {
            let ty = callsite.callee.subst_mir(self.tcx, &callee_body.local_decls[v].ty);
            // Cost of the var is the size in machine-words, if we know it.
            if let Some(size) = type_size_of(tcx, self.param_env, ty) {
                cost += ((size + ptr_size - 1) / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let InlineAttr::Always = callee_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            Ok(())
        } else if cost <= threshold {
            debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
            Ok(())
        } else {
            debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
            Err("cost above threshold")
        }
    }

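    /// Splices the already-substituted callee body into the caller: maps its locals, scopes,
    /// and blocks into the caller's index spaces and rewires the call terminator into a `Goto`
    /// targeting the inlined entry block.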
    fn inline_call(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        mut callee_body: Body<'tcx>,
    ) {
        let terminator = caller_body[callsite.block].terminator.take().unwrap();
        match terminator.kind {
            TerminatorKind::Call { args, destination, cleanup, .. } => {
                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: Place<'_>) -> bool {
                    for elem in place.projection.iter() {
                        match elem {
                            ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
                            _ => {}
                        }
                    }

                    false
                }

                let dest = if let Some((destination_place, _)) = destination {
                    if dest_needs_borrow(destination_place) {
                        trace!("creating temp for return destination");
                        let dest = Rvalue::Ref(
                            self.tcx.lifetimes.re_erased,
                            BorrowKind::Mut { allow_two_phase_borrow: false },
                            destination_place,
                        );
                        let dest_ty = dest.ty(caller_body, self.tcx);
                        let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::Assign(Box::new((temp, dest))),
                        });
                        self.tcx.mk_place_deref(temp)
                    } else {
                        destination_place
                    }
                } else {
                    trace!("creating temp for return place");
                    Place::from(self.new_call_temp(caller_body, &callsite, callee_body.return_ty()))
                };

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);

                let mut expn_data = ExpnData::default(
                    ExpnKind::Inlined,
                    callsite.source_info.span,
                    self.tcx.sess.edition(),
                    None,
                    None,
                );
                expn_data.def_site = callee_body.span;
                let expn_data =
                    LocalExpnId::fresh(expn_data, self.tcx.create_stable_hashing_context());
                let mut integrator = Integrator {
                    args: &args,
                    new_locals: Local::new(caller_body.local_decls.len())..,
                    new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
                    new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
                    destination: dest,
                    callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(),
                    callsite,
                    cleanup_block: cleanup,
                    in_cleanup_block: false,
                    tcx: self.tcx,
                    expn_data,
                    always_live_locals: BitSet::new_filled(callee_body.local_decls.len()),
                };

                // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones
                // (or existing ones, in a few special cases) in the caller.
                integrator.visit_body(&mut callee_body);

                // If there are any locals without storage markers, give them storage only for the
                // duration of the call.
                for local in callee_body.vars_and_temps_iter() {
                    if integrator.always_live_locals.contains(local) {
                        let new_local = integrator.map_local(local);
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::StorageLive(new_local),
                        });
                    }
                }
                if let Some(block) = callsite.target {
                    // To avoid repeated O(n) insert, push any new statements to the end and rotate
                    // the slice once.
                    let mut n = 0;
                    for local in callee_body.vars_and_temps_iter().rev() {
                        if integrator.always_live_locals.contains(local) {
                            let new_local = integrator.map_local(local);
                            caller_body[block].statements.push(Statement {
                                source_info: callsite.source_info,
                                kind: StatementKind::StorageDead(new_local),
                            });
                            n += 1;
                        }
                    }
                    caller_body[block].statements.rotate_right(n);
                }

                // Insert all of the (mapped) parts of the callee body into the caller.
                caller_body.local_decls.extend(callee_body.drain_vars_and_temps());
                caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..));
                caller_body.var_debug_info.append(&mut callee_body.var_debug_info);
                caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));

                caller_body[callsite.block].terminator = Some(Terminator {
                    source_info: callsite.source_info,
                    kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) },
                });

                // Copy only unevaluated constants from the callee_body into the caller_body.
                // Although we are only pushing `ConstKind::Unevaluated` consts to
                // `required_consts`, here we may not only have `ConstKind::Unevaluated`
                // because we are calling `subst_and_normalize_erasing_regions`.
                caller_body.required_consts.extend(
                    callee_body.required_consts.iter().copied().filter(|&ct| {
                        match ct.literal.const_for_ty() {
                            Some(ct) => matches!(ct.val(), ConstKind::Unevaluated(_)),
                            None => true,
                        }
                    }),
                );
            }
            kind => bug!("unexpected terminator kind {:?}", kind),
        }
    }

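    /// Materializes each call argument into a caller local, untupling the second argument of
    /// "rust-call" ABI calls so that callee argument locals can be mapped one-to-one.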
    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        callee_body: &Body<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if callsite.fn_sig.abi() == Abi::RustCall && callee_body.spread_arg.is_none() {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            assert!(args.next().is_none());

            let tuple = Place::from(tuple);
            let ty::Tuple(tuple_tys) = tuple.ty(caller_body, tcx).ty.kind() else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
                // This is e.g., `tuple_tmp.0` in our example above.
                let tuple_field = Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty));

                // Spill to a local to make e.g., `tmp0`.
                self.create_temp_if_necessary(tuple_field, callsite, caller_body)
            });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
    ) -> Local {
        // Reuse the operand if it is a moved temporary.
        if let Operand::Move(place) = &arg
            && let Some(local) = place.as_local()
            && caller_body.local_kind(local) == LocalKind::Temp
        {
            return local;
        }

        // Otherwise, create a temporary for the argument.
        trace!("creating temp for argument {:?}", arg);
        let arg_ty = arg.ty(caller_body, self.tcx);
        let local = self.new_call_temp(caller_body, callsite, arg_ty);
        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::Assign(Box::new((Place::from(local), Rvalue::Use(arg)))),
        });
        local
    }

    /// Introduces a new temporary into the caller body that is live for the duration of the call.
    fn new_call_temp(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        ty: Ty<'tcx>,
    ) -> Local {
        let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span));

        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::StorageLive(local),
        });

        if let Some(block) = callsite.target {
            caller_body[block].statements.insert(
                0,
                Statement {
                    source_info: callsite.source_info,
                    kind: StatementKind::StorageDead(local),
                },
            );
        }

        local
    }
}

fn type_size_of<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/// Integrates blocks from the callee function into the calling function.
/// Updates block indices, references to locals, and other control-flow data
/// so that the inlined body fits into the caller's index spaces.
struct Integrator<'a, 'tcx> {
    args: &'a [Local],
    new_locals: RangeFrom<Local>,
    new_scopes: RangeFrom<SourceScope>,
    new_blocks: RangeFrom<BasicBlock>,
    destination: Place<'tcx>,
    callsite_scope: SourceScopeData<'tcx>,
    callsite: &'a CallSite<'tcx>,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
    tcx: TyCtxt<'tcx>,
    expn_data: LocalExpnId,
    always_live_locals: BitSet<Local>,
}

impl Integrator<'_, '_> {
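    /// Maps a callee local into the caller body: the return place becomes the call
    /// destination, argument locals become the caller-side argument temporaries, and all
    /// remaining locals are shifted into the freshly allocated `new_locals` range.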
    fn map_local(&self, local: Local) -> Local {
        let new = if local == RETURN_PLACE {
            self.destination.local
        } else {
            let idx = local.index() - 1;
            if idx < self.args.len() {
                self.args[idx]
            } else {
                Local::new(self.new_locals.start.index() + (idx - self.args.len()))
            }
        };
        trace!("mapping local `{:?}` to `{:?}`", local, new);
        new
    }

    fn map_scope(&self, scope: SourceScope) -> SourceScope {
        let new = SourceScope::new(self.new_scopes.start.index() + scope.index());
        trace!("mapping scope `{:?}` to `{:?}`", scope, new);
        new
    }

    fn map_block(&self, block: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(self.new_blocks.start.index() + block.index());
        trace!("mapping block `{:?}` to `{:?}`", block, new);
        new
    }
}

impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
        *local = self.map_local(*local);
    }

    fn visit_source_scope_data(&mut self, scope_data: &mut SourceScopeData<'tcx>) {
        self.super_source_scope_data(scope_data);
        if scope_data.parent_scope.is_none() {
            // Attach the outermost callee scope as a child of the callsite
            // scope, via the `parent_scope` and `inlined_parent_scope` chains.
            scope_data.parent_scope = Some(self.callsite.source_info.scope);
            assert_eq!(scope_data.inlined_parent_scope, None);
            scope_data.inlined_parent_scope = if self.callsite_scope.inlined.is_some() {
                Some(self.callsite.source_info.scope)
            } else {
                self.callsite_scope.inlined_parent_scope
            };

            // Mark the outermost callee scope as an inlined one.
            assert_eq!(scope_data.inlined, None);
            scope_data.inlined = Some((self.callsite.callee, self.callsite.source_info.span));
        } else if scope_data.inlined_parent_scope.is_none() {
            // Make it easy to find the scope with `inlined` set above.
            scope_data.inlined_parent_scope = Some(self.map_scope(OUTERMOST_SOURCE_SCOPE));
        }
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.map_scope(*scope);
    }

    fn visit_span(&mut self, span: &mut Span) {
        // Make sure that all spans track the fact that they were inlined.
        *span = span.fresh_expansion(self.expn_data);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        for elem in place.projection {
            // FIXME: Make sure that return place is not used in an indexing projection, since it
            // won't be rebased as it is supposed to be.
            assert_ne!(ProjectionElem::Index(RETURN_PLACE), elem);
        }

        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
        let dest_proj_len = self.destination.projection.len();
        if place.local == RETURN_PLACE && dest_proj_len > 0 {
            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
            projs.extend(self.destination.projection);
            projs.extend(place.projection);

            place.projection = self.tcx.intern_place_elems(&*projs);
        }
        // Handles integrating any locals that occur in the base
        // or projections
        self.super_place(place, context, location)
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::StorageLive(local) | StatementKind::StorageDead(local) =
            statement.kind
        {
            self.always_live_locals.remove(local);
        }
        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, loc: Location) {
        // Don't try to modify the implicit `_0` access on return (`return` terminators are
        // replaced down below anyways).
        if !matches!(terminator.kind, TerminatorKind::Return) {
            self.super_terminator(terminator, loc);
        }

        match terminator.kind {
            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.map_block(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets.all_targets_mut() {
                    *tgt = self.map_block(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.map_block(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.map_block(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.map_block(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                terminator.kind = if let Some(tgt) = self.callsite.target {
                    TerminatorKind::Goto { target: tgt }
                } else {
                    TerminatorKind::Unreachable
                }
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    terminator.kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => {}
            TerminatorKind::Unreachable => {}
            TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
                *real_target = self.map_block(*real_target);
                *imaginary_target = self.map_block(*imaginary_target);
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
            // see the ordering of passes in the optimized_mir query.
            {
                bug!("False unwinds should have been removed before inlining")
            }
            TerminatorKind::InlineAsm { ref mut destination, ref mut cleanup, .. } => {
                if let Some(ref mut tgt) = *destination {
                    *tgt = self.map_block(*tgt);
                } else if !self.in_cleanup_block {
                    // Unless this inline asm is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
        }
    }
}