//! Inlining pass for MIR functions
use crate::deref_separator::deref_finder;
use rustc_attr::InlineAttr;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_session::config::OptLevel;
use rustc_span::def_id::DefId;
use rustc_span::{hygiene::ExpnKind, ExpnData, LocalExpnId, Span};
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi::Abi;

use crate::simplify::{remove_dead_blocks, CfgSimplifier};
use crate::util;
use crate::MirPass;
use std::iter;
use std::ops::{Range, RangeFrom};

pub(crate) mod cycle;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const LANDINGPAD_PENALTY: usize = 50;
const RESUME_PENALTY: usize = 45;

const UNKNOWN_SIZE_COST: usize = 10;
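
// An illustrative sketch (not real compiler output) of how these weights
// combine in the cost checker below: a callee with four ordinary statements,
// one call with a cleanup edge, and one known 8-byte local would cost
//
//     4 * INSTR_COST + CALL_PENALTY + LANDINGPAD_PENALTY + 1
//         = 20 + 25 + 50 + 1 = 96
//
// which `check_mir_body` then compares against the inlining threshold.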

pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: Instance<'tcx>,
    fn_sig: ty::PolyFnSig<'tcx>,
    block: BasicBlock,
    target: Option<BasicBlock>,
    source_info: SourceInfo,
}

impl<'tcx> MirPass<'tcx> for Inline {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        if let Some(enabled) = sess.opts.unstable_opts.inline_mir {
            return enabled;
        }

        match sess.mir_opt_level() {
            0 | 1 => false,
            2 => {
                (sess.opts.optimize == OptLevel::Default
                    || sess.opts.optimize == OptLevel::Aggressive)
                    && sess.opts.incremental == None
            }
            _ => true,
        }
    }
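
    // A sketch of how the gating above plays out in terms of rustc
    // invocations (flag spellings taken from the option names used here):
    //
    //     rustc -Zinline-mir=yes ...    // force-enables this pass
    //     rustc -Zmir-opt-level=1 ...   // pass disabled
    //     rustc -O ...                  // mir-opt-level defaults to 2;
    //                                   // enabled for non-incremental builds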

    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        let span = trace_span!("inline", body = %tcx.def_path_str(body.source.def_id()));
        let _guard = span.enter();
        if inline(tcx, body) {
            debug!("running simplify cfg on {:?}", body.source);
            CfgSimplifier::new(body).simplify();
            remove_dead_blocks(tcx, body);
            deref_finder(tcx, body);
        }
    }
}

fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
    let def_id = body.source.def_id().expect_local();

    // Only do inlining into fn bodies.
    if !tcx.hir().body_owner_kind(def_id).is_fn_or_closure() {
        return false;
    }
    if body.source.promoted.is_some() {
        return false;
    }
    // Avoid inlining into generators, since their `optimized_mir` is used for layout computation,
    // which can create a cycle, even when no attempt is made to inline the function in the other
    // direction.
    if body.generator.is_some() {
        return false;
    }

    let param_env = tcx.param_env_reveal_all_normalized(def_id);

    let mut this = Inliner {
        tcx,
        param_env,
        codegen_fn_attrs: tcx.codegen_fn_attrs(def_id),
        history: Vec::new(),
        changed: false,
    };
    let blocks = BasicBlock::new(0)..body.basic_blocks.next_index();
    this.process_blocks(body, blocks);
    this.changed
}

struct Inliner<'tcx> {
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// Caller codegen attributes.
    codegen_fn_attrs: &'tcx CodegenFnAttrs,
    /// Stack of inlined instances.
    /// We only check the `DefId` and not the substs because we want to
    /// avoid inlining cases of polymorphic recursion.
    /// The number of `DefId`s is finite, so checking history is enough
    /// to ensure that we do not loop endlessly while inlining.
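    /// For example (an illustrative sketch, not code from this crate),
    /// polymorphic recursion such as `fn rec<T>(t: T) { rec((t,)) }` produces
    /// a fresh substitution at every call but always the same `DefId`, so a
    /// `DefId`-keyed history is what guarantees termination here.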
    history: Vec<DefId>,
    /// Indicates that the caller body has been modified.
    changed: bool,
}

impl<'tcx> Inliner<'tcx> {
    fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBlock>) {
        for bb in blocks {
            let bb_data = &caller_body[bb];
            if bb_data.is_cleanup {
                continue;
            }

            let Some(callsite) = self.resolve_callsite(caller_body, bb, bb_data) else {
                continue;
            };

            let span = trace_span!("process_blocks", %callsite.callee, ?bb);
            let _guard = span.enter();

            match self.try_inlining(caller_body, &callsite) {
                Err(reason) => {
                    debug!("not-inlined {} [{}]", callsite.callee, reason);
                    continue;
                }
                Ok(new_blocks) => {
                    debug!("inlined {}", callsite.callee);
                    self.changed = true;
                    self.history.push(callsite.callee.def_id());
                    self.process_blocks(caller_body, new_blocks);
                    self.history.pop();
                }
            }
        }
    }

    /// Attempts to inline a callsite into the caller body. When successful, returns the basic
    /// blocks containing the inlined body. Otherwise, returns an error describing why inlining
    /// didn't take place.
    fn try_inlining(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
    ) -> Result<std::ops::Range<BasicBlock>, &'static str> {
        let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id());
        self.check_codegen_attributes(callsite, callee_attrs)?;
        self.check_mir_is_available(caller_body, &callsite.callee)?;
        let callee_body = self.tcx.instance_mir(callsite.callee.def);
        self.check_mir_body(callsite, callee_body, callee_attrs)?;

        if !self.tcx.consider_optimizing(|| {
            format!("Inline {:?} into {:?}", callsite.callee, caller_body.source)
        }) {
            return Err("optimization fuel exhausted");
        }

        let Ok(callee_body) = callsite.callee.try_subst_mir_and_normalize_erasing_regions(
            self.tcx,
            self.param_env,
            callee_body.clone(),
        ) else {
            return Err("failed to normalize callee body");
        };

        // Check call signature compatibility.
        // Normally, this shouldn't be required, but trait normalization failure can create a
        // validation ICE.
        let terminator = caller_body[callsite.block].terminator.as_ref().unwrap();
        let TerminatorKind::Call { args, destination, .. } = &terminator.kind else { bug!() };
        let destination_ty = destination.ty(&caller_body.local_decls, self.tcx).ty;
        let output_type = callee_body.return_ty();
        if !util::is_subtype(self.tcx, self.param_env, output_type, destination_ty) {
            trace!(?output_type, ?destination_ty);
            return Err("failed to normalize return type");
        }
        if callsite.fn_sig.abi() == Abi::RustCall {
            let (arg_tuple, skipped_args) = match &args[..] {
                [arg_tuple] => (arg_tuple, 0),
                [_, arg_tuple] => (arg_tuple, 1),
                _ => bug!("Expected `rust-call` to have 1 or 2 args"),
            };

            let arg_tuple_ty = arg_tuple.ty(&caller_body.local_decls, self.tcx);
            let ty::Tuple(arg_tuple_tys) = arg_tuple_ty.kind() else {
                bug!("Closure arguments are not passed as a tuple");
            };

            for (arg_ty, input) in
                arg_tuple_tys.iter().zip(callee_body.args_iter().skip(skipped_args))
            {
                let input_type = callee_body.local_decls[input].ty;
                if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
                    trace!(?arg_ty, ?input_type);
                    return Err("failed to normalize tuple argument type");
                }
            }
        } else {
            for (arg, input) in args.iter().zip(callee_body.args_iter()) {
                let input_type = callee_body.local_decls[input].ty;
                let arg_ty = arg.ty(&caller_body.local_decls, self.tcx);
                if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
                    trace!(?arg_ty, ?input_type);
                    return Err("failed to normalize argument type");
                }
            }
        }

        let old_blocks = caller_body.basic_blocks.next_index();
        self.inline_call(caller_body, &callsite, callee_body);
        let new_blocks = old_blocks..caller_body.basic_blocks.next_index();

        Ok(new_blocks)
    }

    fn check_mir_is_available(
        &self,
        caller_body: &Body<'tcx>,
        callee: &Instance<'tcx>,
    ) -> Result<(), &'static str> {
        let caller_def_id = caller_body.source.def_id();
        let callee_def_id = callee.def_id();
        if callee_def_id == caller_def_id {
            return Err("self-recursion");
        }

        match callee.def {
            InstanceDef::Item(_) => {
                // If there is no MIR available (either because it was not in metadata or
                // because it's an extern function with no MIR), then the inliner
                // won't cause cycles on this.
                if !self.tcx.is_mir_available(callee_def_id) {
                    return Err("item MIR unavailable");
                }
            }
            // These have no callable MIR of their own.
            InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => {
                return Err("instance without MIR (intrinsic / virtual)");
            }
            // This cannot result in an immediate cycle since the callee MIR is a shim, which does
            // not get any optimizations run on it. Any subsequent inlining may cause cycles, but
            // we do not need to catch this here; we can wait until the inliner decides to continue
            // inlining a second time.
            InstanceDef::VTableShim(_)
            | InstanceDef::ReifyShim(_)
            | InstanceDef::FnPtrShim(..)
            | InstanceDef::ClosureOnceShim { .. }
            | InstanceDef::DropGlue(..)
            | InstanceDef::CloneShim(..) => return Ok(()),
        }

        if self.tcx.is_constructor(callee_def_id) {
            trace!("constructors always have MIR");
            // Constructor functions cannot cause a query cycle.
            return Ok(());
        }

        if callee_def_id.is_local() {
            // Avoid a cycle here by using `instance_mir` only if we have
            // a lower `DefPathHash` than the callee. This ensures that the callee will
            // not inline us. This trick even works with incremental compilation,
            // since `DefPathHash` is stable.
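            //
            // An illustrative sketch with two hypothetical local functions
            // that call each other:
            //
            //     fn a() { b() }   // suppose hash(a) < hash(b)
            //     fn b() { a() }
            //
            // `a` has the lower hash and may inline `b` right away, while `b`
            // falls through to the call graph check below, which rejects the
            // `b` -> `a` inline and thereby breaks the cycle.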
            if self.tcx.def_path_hash(caller_def_id).local_hash()
                < self.tcx.def_path_hash(callee_def_id).local_hash()
            {
                return Ok(());
            }

            // If we know for sure that the function we're calling will itself try to
            // call us, then we avoid inlining that function.
            if self.tcx.mir_callgraph_reachable((*callee, caller_def_id.expect_local())) {
                return Err("caller might be reachable from callee (query cycle avoidance)");
            }

            Ok(())
        } else {
            // This cannot result in an immediate cycle since the callee MIR is from another crate
            // and is already optimized. Any subsequent inlining may cause cycles, but we do
            // not need to catch this here; we can wait until the inliner decides to continue
            // inlining a second time.
            trace!("functions from other crates always have MIR");
            Ok(())
        }
    }

    fn resolve_callsite(
        &self,
        caller_body: &Body<'tcx>,
        bb: BasicBlock,
        bb_data: &BasicBlockData<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Only consider direct calls to functions.
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { ref func, target, .. } = terminator.kind {
            let func_ty = func.ty(caller_body, self.tcx);
            if let ty::FnDef(def_id, substs) = *func_ty.kind() {
                // To resolve an instance, its substs have to be fully normalized.
                let substs = self.tcx.try_normalize_erasing_regions(self.param_env, substs).ok()?;
                let callee =
                    Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;

                if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                    return None;
                }

                if self.history.contains(&callee.def_id()) {
                    return None;
                }

                let fn_sig = self.tcx.bound_fn_sig(def_id).subst(self.tcx, substs);

                return Some(CallSite {
                    callee,
                    fn_sig,
                    block: bb,
                    target,
                    source_info: terminator.source_info,
                });
            }
        }

        None
    }

    /// Returns an error if inlining is not possible based on codegen attributes alone. A success
    /// indicates that the inlining decision should be based on other criteria.
    fn check_codegen_attributes(
        &self,
        callsite: &CallSite<'tcx>,
        callee_attrs: &CodegenFnAttrs,
    ) -> Result<(), &'static str> {
        match callee_attrs.inline {
            InlineAttr::Never => return Err("never inline hint"),
            InlineAttr::Always | InlineAttr::Hint => {}
            InlineAttr::None => {
                if self.tcx.sess.mir_opt_level() <= 2 {
                    return Err("at mir-opt-level=2, only #[inline] is inlined");
                }
            }
        }

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.def_id().is_local() {
            let is_generic = callsite.callee.substs.non_erasable_generics().next().is_some();
            if !is_generic && !callee_attrs.requests_inline() {
                return Err("not exported");
            }
        }

        if callsite.fn_sig.c_variadic() {
            return Err("C variadic");
        }

        if callee_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
            return Err("naked");
        }

        if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            return Err("cold");
        }

        if callee_attrs.no_sanitize != self.codegen_fn_attrs.no_sanitize {
            return Err("incompatible sanitizer set");
        }

        // Two functions are compatible if the callee has no attribute (meaning
        // that it's codegen agnostic), or sets an attribute that is identical
        // to this function's attribute.
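        //
        // A hypothetical sketch of the resulting cases, using the ARM
        // `instruction_set(arm::a32)` / `instruction_set(arm::t32)` values:
        //
        //     caller t32, callee unset -> compatible (re-checked for asm in
        //                                 `check_mir_body`)
        //     caller t32, callee t32   -> compatible
        //     caller t32, callee a32   -> rejected here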
        if callee_attrs.instruction_set.is_some()
            && callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set
        {
            return Err("incompatible instruction set");
        }

        for feature in &callee_attrs.target_features {
            if !self.codegen_fn_attrs.target_features.contains(feature) {
                return Err("incompatible target feature");
            }
        }

        Ok(())
    }

    /// Returns an inlining decision based on an examination of the callee MIR body.
    /// Assumes that codegen attributes have been checked for compatibility already.
    #[instrument(level = "debug", skip(self, callee_body))]
    fn check_mir_body(
        &self,
        callsite: &CallSite<'tcx>,
        callee_body: &Body<'tcx>,
        callee_attrs: &CodegenFnAttrs,
    ) -> Result<(), &'static str> {
        let tcx = self.tcx;

        let mut threshold = if callee_attrs.requests_inline() {
            self.tcx.sess.opts.unstable_opts.inline_mir_hint_threshold.unwrap_or(100)
        } else {
            self.tcx.sess.opts.unstable_opts.inline_mir_threshold.unwrap_or(50)
        };

        // Give a bonus to functions with a small number of blocks; we normally
        // have two or three blocks for even very small functions.
        if callee_body.basic_blocks.len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);
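
        // An illustrative example of the arithmetic above, using the default
        // thresholds: an `#[inline]` callee with at most 3 blocks gets
        // 100 + 100 / 4 = 125, while an unannotated callee with more blocks
        // keeps the plain 50.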

        // FIXME: Give a bonus to functions with only a single caller
        let diverges = matches!(
            callee_body.basic_blocks[START_BLOCK].terminator().kind,
            TerminatorKind::Unreachable | TerminatorKind::Call { target: None, .. }
        );
        if diverges && !matches!(callee_attrs.inline, InlineAttr::Always) {
            return Err("callee diverges unconditionally");
        }

        let mut checker = CostChecker {
            tcx: self.tcx,
            param_env: self.param_env,
            instance: callsite.callee,
            callee_body,
            cost: 0,
            validation: Ok(()),
        };

        // Traverse the MIR manually so we can account for the effects of inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_body.basic_blocks.len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) {
                continue;
            }

            let blk = &callee_body.basic_blocks[bb];
            checker.visit_basic_block_data(bb, blk);

            let term = blk.terminator();
            if let TerminatorKind::Drop { ref place, target, unwind }
            | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } = term.kind
            {
                work_list.push(target);

                // If the place doesn't actually need dropping, treat it like a regular goto.
                let ty = callsite.callee.subst_mir(self.tcx, &place.ty(callee_body, tcx).ty);
                if ty.needs_drop(tcx, self.param_env) && let Some(unwind) = unwind {
                    work_list.push(unwind);
                }
            } else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set
                && matches!(term.kind, TerminatorKind::InlineAsm { .. })
            {
                // During the attribute checking stage we allow a callee with no
                // instruction_set assigned to count as compatible with a function that does
                // assign one. However, during this stage we require an exact match when any
                // inline-asm is detected. LLVM may still inline later on if the no-attribute
                // function ends up with the same instruction set anyway.
                return Err("cannot move inline-asm across instruction sets");
            } else {
                work_list.extend(term.successors())
            }
        }

        // Count up the cost of local variables and temps; if we know the size,
        // use that, otherwise use a moderately-large dummy cost.
        for v in callee_body.vars_and_temps_iter() {
            checker.visit_local_decl(v, &callee_body.local_decls[v]);
        }

        // Abort if type validation found anything fishy.
        checker.validation?;

        let cost = checker.cost;
        if let InlineAttr::Always = callee_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            Ok(())
        } else if cost <= threshold {
            debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
            Ok(())
        } else {
            debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
            Err("cost above threshold")
        }
    }

    fn inline_call(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        mut callee_body: Body<'tcx>,
    ) {
        let terminator = caller_body[callsite.block].terminator.take().unwrap();
        match terminator.kind {
            TerminatorKind::Call { args, destination, cleanup, .. } => {
                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
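                //
                // A sketch (hypothetical MIR, not real output) of the rewrite
                // for `a[*i] = f(i)`:
                //
                //     _tmp = &mut a[*i];   // borrow taken before the call
                //     (*_tmp) = ...        // inlined body writes through it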
                fn dest_needs_borrow(place: Place<'_>) -> bool {
                    for elem in place.projection.iter() {
                        match elem {
                            ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
                            _ => {}
                        }
                    }

                    false
                }

                let dest = if dest_needs_borrow(destination) {
                    trace!("creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.lifetimes.re_erased,
                        BorrowKind::Mut { allow_two_phase_borrow: false },
                        destination,
                    );
                    let dest_ty = dest.ty(caller_body, self.tcx);
                    let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
                    caller_body[callsite.block].statements.push(Statement {
                        source_info: callsite.source_info,
                        kind: StatementKind::Assign(Box::new((temp, dest))),
                    });
                    self.tcx.mk_place_deref(temp)
                } else {
                    destination
                };

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);

                let mut expn_data = ExpnData::default(
                    ExpnKind::Inlined,
                    callsite.source_info.span,
                    self.tcx.sess.edition(),
                    None,
                    None,
                );
                expn_data.def_site = callee_body.span;
                let expn_data =
                    self.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
                let mut integrator = Integrator {
                    args: &args,
                    new_locals: Local::new(caller_body.local_decls.len())..,
                    new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
                    new_blocks: BasicBlock::new(caller_body.basic_blocks.len())..,
                    destination: dest,
                    callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(),
                    callsite,
                    cleanup_block: cleanup,
                    in_cleanup_block: false,
                    tcx: self.tcx,
                    expn_data,
                    always_live_locals: BitSet::new_filled(callee_body.local_decls.len()),
                };

                // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones
                // (or existing ones, in a few special cases) in the caller.
                integrator.visit_body(&mut callee_body);

                // If there are any locals without storage markers, give them storage only for the
                // duration of the call.
                for local in callee_body.vars_and_temps_iter() {
                    if !callee_body.local_decls[local].internal
                        && integrator.always_live_locals.contains(local)
                    {
                        let new_local = integrator.map_local(local);
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::StorageLive(new_local),
                        });
                    }
                }
                if let Some(block) = callsite.target {
                    // To avoid repeated O(n) insert, push any new statements to the end and rotate
                    // the slice once.
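                    //
                    // Sketch: with existing statements `[s1, s2]` and two new
                    // `StorageDead`s pushed (in reverse local order) to give
                    // `[s1, s2, d2, d1]`, `rotate_right(2)` yields
                    // `[d2, d1, s1, s2]`.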
                    let mut n = 0;
                    for local in callee_body.vars_and_temps_iter().rev() {
                        if !callee_body.local_decls[local].internal
                            && integrator.always_live_locals.contains(local)
                        {
                            let new_local = integrator.map_local(local);
                            caller_body[block].statements.push(Statement {
                                source_info: callsite.source_info,
                                kind: StatementKind::StorageDead(new_local),
                            });
                            n += 1;
                        }
                    }
                    caller_body[block].statements.rotate_right(n);
                }

                // Insert all of the (mapped) parts of the callee body into the caller.
                caller_body.local_decls.extend(callee_body.drain_vars_and_temps());
                caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..));
                caller_body.var_debug_info.append(&mut callee_body.var_debug_info);
                caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));

                caller_body[callsite.block].terminator = Some(Terminator {
                    source_info: callsite.source_info,
                    kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) },
                });

                // Copy only unevaluated constants from the callee_body into the caller_body.
                // Although we are only pushing `ConstKind::Unevaluated` consts to
                // `required_consts`, here we may not only have `ConstKind::Unevaluated`
                // because we are calling `subst_and_normalize_erasing_regions`.
                caller_body.required_consts.extend(
                    callee_body.required_consts.iter().copied().filter(|&ct| match ct.literal {
                        ConstantKind::Ty(_) => {
                            bug!("should never encounter ty::UnevaluatedConst in `required_consts`")
                        }
                        ConstantKind::Val(..) | ConstantKind::Unevaluated(..) => true,
                    }),
                );
            }
            kind => bug!("unexpected terminator kind {:?}", kind),
        }
    }

    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        callee_body: &Body<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if callsite.fn_sig.abi() == Abi::RustCall && callee_body.spread_arg.is_none() {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            assert!(args.next().is_none());

            let tuple = Place::from(tuple);
            let ty::Tuple(tuple_tys) = tuple.ty(caller_body, tcx).ty.kind() else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
                // This is e.g., `tuple_tmp.0` in our example above.
                let tuple_field = Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty));

                // Spill to a local to make e.g., `tmp0`.
                self.create_temp_if_necessary(tuple_field, callsite, caller_body)
            });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
    ) -> Local {
        // Reuse the operand if it is a moved temporary.
        if let Operand::Move(place) = &arg
            && let Some(local) = place.as_local()
            && caller_body.local_kind(local) == LocalKind::Temp
        {
            return local;
        }

        // Otherwise, create a temporary for the argument.
        trace!("creating temp for argument {:?}", arg);
        let arg_ty = arg.ty(caller_body, self.tcx);
        let local = self.new_call_temp(caller_body, callsite, arg_ty);
        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::Assign(Box::new((Place::from(local), Rvalue::Use(arg)))),
        });
        local
    }

    /// Introduces a new temporary into the caller body that is live for the duration of the call.
    fn new_call_temp(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        ty: Ty<'tcx>,
    ) -> Local {
        let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span));

        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::StorageLive(local),
        });

        if let Some(block) = callsite.target {
            caller_body[block].statements.insert(
                0,
                Statement {
                    source_info: callsite.source_info,
                    kind: StatementKind::StorageDead(local),
                },
            );
        }

        local
    }
}

fn type_size_of<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/// Verify that the callee body is compatible with the caller.
///
/// This visitor mostly computes the inlining cost, but it also verifies that
/// types match, since normalization during substitution may have failed.
struct CostChecker<'b, 'tcx> {
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    cost: usize,
    callee_body: &'b Body<'tcx>,
    instance: ty::Instance<'tcx>,
    validation: Result<(), &'static str>,
}

impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        // Don't count StorageLive/StorageDead in the inlining cost.
        match statement.kind {
            StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Deinit(_)
            | StatementKind::Nop => {}
            _ => self.cost += INSTR_COST,
        }

        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        let tcx = self.tcx;
        match terminator.kind {
            TerminatorKind::Drop { ref place, unwind, .. }
            | TerminatorKind::DropAndReplace { ref place, unwind, .. } => {
                // If the place doesn't actually need dropping, treat it like a regular goto.
                let ty = self.instance.subst_mir(tcx, &place.ty(self.callee_body, tcx).ty);
                if ty.needs_drop(tcx, self.param_env) {
                    self.cost += CALL_PENALTY;
                    if unwind.is_some() {
                        self.cost += LANDINGPAD_PENALTY;
                    }
                } else {
                    self.cost += INSTR_COST;
                }
            }
            TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
                let fn_ty = self.instance.subst_mir(tcx, &f.literal.ty());
                self.cost += if let ty::FnDef(def_id, _) = *fn_ty.kind() && tcx.is_intrinsic(def_id) {
                    // Don't give intrinsics the extra penalty for calls
                    INSTR_COST
                } else {
                    CALL_PENALTY
                };
                if cleanup.is_some() {
                    self.cost += LANDINGPAD_PENALTY;
                }
            }
            TerminatorKind::Assert { cleanup, .. } => {
                self.cost += CALL_PENALTY;
                if cleanup.is_some() {
                    self.cost += LANDINGPAD_PENALTY;
                }
            }
            TerminatorKind::Resume => self.cost += RESUME_PENALTY,
            TerminatorKind::InlineAsm { cleanup, .. } => {
                self.cost += INSTR_COST;
                if cleanup.is_some() {
                    self.cost += LANDINGPAD_PENALTY;
                }
            }
            _ => self.cost += INSTR_COST,
        }

        self.super_terminator(terminator, location);
    }

    /// Count up the cost of local variables and temps; if we know the size,
    /// use that, otherwise use a moderately-large dummy cost.
    fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
        let tcx = self.tcx;
        let ptr_size = tcx.data_layout.pointer_size.bytes();

        let ty = self.instance.subst_mir(tcx, &local_decl.ty);
        // Cost of the var is the size in machine-words, if we know it.
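        //
        // E.g. on a hypothetical 64-bit target (ptr_size = 8), a local of
        // type `[u8; 20]` costs (20 + 8 - 1) / 8 = 3 machine-words.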
        if let Some(size) = type_size_of(tcx, self.param_env, ty) {
            self.cost += ((size + ptr_size - 1) / ptr_size) as usize;
        } else {
            self.cost += UNKNOWN_SIZE_COST;
        }

        self.super_local_decl(local, local_decl)
    }

    /// This method duplicates code from MIR validation in an attempt to detect type mismatches due
    /// to normalization failure.
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        if let ProjectionElem::Field(f, ty) = elem {
            let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
            let parent_ty = parent.ty(&self.callee_body.local_decls, self.tcx);
            let check_equal = |this: &mut Self, f_ty| {
                if !util::is_equal_up_to_subtyping(this.tcx, this.param_env, ty, f_ty) {
                    trace!(?ty, ?f_ty);
                    this.validation = Err("failed to normalize projection type");
                    return;
                }
            };

            let kind = match parent_ty.ty.kind() {
                &ty::Opaque(def_id, substs) => {
                    self.tcx.bound_type_of(def_id).subst(self.tcx, substs).kind()
                }
                kind => kind,
            };

            match kind {
                ty::Tuple(fields) => {
                    let Some(f_ty) = fields.get(f.as_usize()) else {
                        self.validation = Err("malformed MIR");
                        return;
                    };
                    check_equal(self, *f_ty);
                }
                ty::Adt(adt_def, substs) => {
                    let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
                    let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
                        self.validation = Err("malformed MIR");
                        return;
                    };
                    check_equal(self, field.ty(self.tcx, substs));
                }
                ty::Closure(_, substs) => {
                    let substs = substs.as_closure();
                    let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
                        self.validation = Err("malformed MIR");
                        return;
                    };
                    check_equal(self, f_ty);
                }
                &ty::Generator(def_id, substs, _) => {
                    let f_ty = if let Some(var) = parent_ty.variant_index {
                        let gen_body = if def_id == self.callee_body.source.def_id() {
                            self.callee_body
                        } else {
                            self.tcx.optimized_mir(def_id)
                        };

                        let Some(layout) = gen_body.generator_layout() else {
                            self.validation = Err("malformed MIR");
                            return;
                        };

                        let Some(&local) = layout.variant_fields[var].get(f) else {
                            self.validation = Err("malformed MIR");
                            return;
                        };

                        let Some(&f_ty) = layout.field_tys.get(local) else {
                            self.validation = Err("malformed MIR");
                            return;
                        };

                        f_ty
                    } else {
                        let Some(f_ty) = substs.as_generator().prefix_tys().nth(f.index()) else {
                            self.validation = Err("malformed MIR");
                            return;
                        };

                        f_ty
                    };

                    check_equal(self, f_ty);
                }
                _ => self.validation = Err("malformed MIR"),
            }
        }

        self.super_projection_elem(local, proj_base, elem, context, location);
    }
}

/**
 * Integrator.
 *
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals and other control flow
 * stuff.
 */
struct Integrator<'a, 'tcx> {
    args: &'a [Local],
    new_locals: RangeFrom<Local>,
    new_scopes: RangeFrom<SourceScope>,
    new_blocks: RangeFrom<BasicBlock>,
    destination: Place<'tcx>,
    callsite_scope: SourceScopeData<'tcx>,
    callsite: &'a CallSite<'tcx>,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
    tcx: TyCtxt<'tcx>,
    expn_data: LocalExpnId,
    always_live_locals: BitSet<Local>,
}

impl Integrator<'_, '_> {
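    /// Maps a callee local to a caller local. An illustrative sketch,
    /// assuming two arguments and `destination.local == _5` in the caller:
    ///
    ///     _0 (RETURN_PLACE) -> _5
    ///     _1                -> args[0]
    ///     _2                -> args[1]
    ///     _3                -> new_locals.start
    ///     _4                -> new_locals.start + 1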
    fn map_local(&self, local: Local) -> Local {
        let new = if local == RETURN_PLACE {
            self.destination.local
        } else {
            let idx = local.index() - 1;
            if idx < self.args.len() {
                self.args[idx]
            } else {
                Local::new(self.new_locals.start.index() + (idx - self.args.len()))
            }
        };
        trace!("mapping local `{:?}` to `{:?}`", local, new);
        new
    }

    fn map_scope(&self, scope: SourceScope) -> SourceScope {
        let new = SourceScope::new(self.new_scopes.start.index() + scope.index());
        trace!("mapping scope `{:?}` to `{:?}`", scope, new);
        new
    }

    fn map_block(&self, block: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(self.new_blocks.start.index() + block.index());
        trace!("mapping block `{:?}` to `{:?}`", block, new);
        new
    }

    fn map_unwind(&self, unwind: Option<BasicBlock>) -> Option<BasicBlock> {
        if self.in_cleanup_block {
            if unwind.is_some() {
                bug!("cleanup on cleanup block");
            }
            return unwind;
        }

        match unwind {
            Some(target) => Some(self.map_block(target)),
            // Add an unwind edge to the original call's cleanup block
            None => self.cleanup_block,
        }
    }
}

impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
        *local = self.map_local(*local);
    }

    fn visit_source_scope_data(&mut self, scope_data: &mut SourceScopeData<'tcx>) {
        self.super_source_scope_data(scope_data);
        if scope_data.parent_scope.is_none() {
            // Attach the outermost callee scope as a child of the callsite
            // scope, via the `parent_scope` and `inlined_parent_scope` chains.
            scope_data.parent_scope = Some(self.callsite.source_info.scope);
            assert_eq!(scope_data.inlined_parent_scope, None);
            scope_data.inlined_parent_scope = if self.callsite_scope.inlined.is_some() {
                Some(self.callsite.source_info.scope)
            } else {
                self.callsite_scope.inlined_parent_scope
            };

            // Mark the outermost callee scope as an inlined one.
            assert_eq!(scope_data.inlined, None);
            scope_data.inlined = Some((self.callsite.callee, self.callsite.source_info.span));
        } else if scope_data.inlined_parent_scope.is_none() {
            // Make it easy to find the scope with `inlined` set above.
            scope_data.inlined_parent_scope = Some(self.map_scope(OUTERMOST_SOURCE_SCOPE));
        }
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.map_scope(*scope);
    }

    fn visit_span(&mut self, span: &mut Span) {
        // Make sure that all spans track the fact that they were inlined.
        *span = span.fresh_expansion(self.expn_data);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        for elem in place.projection {
            // FIXME: Make sure that return place is not used in an indexing projection, since it
            // won't be rebased as it is supposed to be.
            assert_ne!(ProjectionElem::Index(RETURN_PLACE), elem);
        }

        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
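        //
        // Sketch: if the destination is `(*_2).field` and the callee wrote to
        // `_0.x`, the rebased place becomes `(*_2).field.x`.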
        let dest_proj_len = self.destination.projection.len();
        if place.local == RETURN_PLACE && dest_proj_len > 0 {
            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
            projs.extend(self.destination.projection);
            projs.extend(place.projection);

            place.projection = self.tcx.intern_place_elems(&*projs);
        }
        // Handles integrating any locals that occur in the base
        // or projections
        self.super_place(place, context, location)
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::StorageLive(local) | StatementKind::StorageDead(local) =
            statement.kind
        {
            self.always_live_locals.remove(local);
        }
        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, loc: Location) {
        // Don't try to modify the implicit `_0` access on return (`return` terminators are
        // replaced down below anyway).
        if !matches!(terminator.kind, TerminatorKind::Return) {
            self.super_terminator(terminator, loc);
        }

        match terminator.kind {
            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.map_block(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets.all_targets_mut() {
                    *tgt = self.map_block(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.map_block(*target);
                *unwind = self.map_unwind(*unwind);
            }
            TerminatorKind::Call { ref mut target, ref mut cleanup, .. } => {
                if let Some(ref mut tgt) = *target {
                    *tgt = self.map_block(*tgt);
                }
                *cleanup = self.map_unwind(*cleanup);
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.map_block(*target);
                *cleanup = self.map_unwind(*cleanup);
            }
            TerminatorKind::Return => {
                terminator.kind = if let Some(tgt) = self.callsite.target {
                    TerminatorKind::Goto { target: tgt }
                } else {
                    TerminatorKind::Unreachable
                }
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    terminator.kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => {}
            TerminatorKind::Unreachable => {}
            TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
                *real_target = self.map_block(*real_target);
                *imaginary_target = self.map_block(*imaginary_target);
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
            // see the ordering of passes in the optimized_mir query.
            {
                bug!("False unwinds should have been removed before inlining")
            }
            TerminatorKind::InlineAsm { ref mut destination, ref mut cleanup, .. } => {
                if let Some(ref mut tgt) = *destination {
                    *tgt = self.map_block(*tgt);
                }
                *cleanup = self.map_unwind(*cleanup);
            }
        }
    }
}