compiler/rustc_mir_transform/src/inline.rs
1 //! Inlining pass for MIR functions
2
3 use rustc_attr::InlineAttr;
4 use rustc_hir as hir;
5 use rustc_index::bit_set::BitSet;
6 use rustc_index::vec::Idx;
7 use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
8 use rustc_middle::mir::visit::*;
9 use rustc_middle::mir::*;
10 use rustc_middle::traits::ObligationCause;
11 use rustc_middle::ty::subst::Subst;
12 use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
13 use rustc_span::{hygiene::ExpnKind, ExpnData, Span};
14 use rustc_target::spec::abi::Abi;
15
16 use super::simplify::{remove_dead_blocks, CfgSimplifier};
17 use crate::MirPass;
18 use std::iter;
19 use std::ops::{Range, RangeFrom};
20
21 crate mod cycle;
22
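// Rough cost weights used by `check_mir_body` to estimate the size of a callee:
// ordinary statements and terminators count `INSTR_COST`, calls, asserts and drops
// of types that actually need dropping count `CALL_PENALTY`, unwind edges add
// `LANDINGPAD_PENALTY`, and `Resume` terminators add `RESUME_PENALTY`.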
23 const INSTR_COST: usize = 5;
24 const CALL_PENALTY: usize = 25;
25 const LANDINGPAD_PENALTY: usize = 50;
26 const RESUME_PENALTY: usize = 45;
27
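// Flat cost charged for a callee local whose layout (and hence size) cannot be computed.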
28 const UNKNOWN_SIZE_COST: usize = 10;
29
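/// The MIR inlining pass. Enabled by default only at `mir-opt-level >= 3`,
/// or explicitly via the `-Zinline-mir` flag (see `is_enabled`).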
30 pub struct Inline;
31
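/// A resolved direct call that the inliner considers for inlining: the resolved callee
/// instance, its substituted signature, the block whose terminator is the call, the
/// block the call returns to (if any), and the call's source info.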
32 #[derive(Copy, Clone, Debug)]
33 struct CallSite<'tcx> {
34     callee: Instance<'tcx>,
35     fn_sig: ty::PolyFnSig<'tcx>,
36     block: BasicBlock,
37     target: Option<BasicBlock>,
38     source_info: SourceInfo,
39 }
40
41 impl<'tcx> MirPass<'tcx> for Inline {
42     fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
43         if let Some(enabled) = sess.opts.debugging_opts.inline_mir {
44             return enabled;
45         }
46
47         sess.opts.mir_opt_level() >= 3
48     }
49
50     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
51         let span = trace_span!("inline", body = %tcx.def_path_str(body.source.def_id()));
52         let _guard = span.enter();
53         if inline(tcx, body) {
54             debug!("running simplify cfg on {:?}", body.source);
55             CfgSimplifier::new(body).simplify();
56             remove_dead_blocks(tcx, body);
57         }
58     }
59 }
60
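/// Runs inlining on `body`, returning `true` if the caller body was changed.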
61 fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
62     let def_id = body.source.def_id();
63     let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
64
65     // Only do inlining into fn bodies.
66     if !tcx.hir().body_owner_kind(hir_id).is_fn_or_closure() {
67         return false;
68     }
69     if body.source.promoted.is_some() {
70         return false;
71     }
72     // Avoid inlining into generators, since their `optimized_mir` is used for layout computation,
73     // which can create a cycle, even when no attempt is made to inline the function in the other
74     // direction.
75     if body.generator.is_some() {
76         return false;
77     }
78
79     let param_env = tcx.param_env_reveal_all_normalized(def_id);
80     let param_env = rustc_trait_selection::traits::normalize_param_env_or_error(
81         tcx,
82         def_id,
83         param_env,
84         ObligationCause::misc(body.span, hir_id),
85     );
86
87     let mut this = Inliner {
88         tcx,
89         param_env,
90         codegen_fn_attrs: tcx.codegen_fn_attrs(def_id),
91         hir_id,
92         history: Vec::new(),
93         changed: false,
94     };
95     let blocks = BasicBlock::new(0)..body.basic_blocks().next_index();
96     this.process_blocks(body, blocks);
97     this.changed
98 }
99
100 struct Inliner<'tcx> {
101     tcx: TyCtxt<'tcx>,
102     param_env: ParamEnv<'tcx>,
103     /// Caller codegen attributes.
104     codegen_fn_attrs: &'tcx CodegenFnAttrs,
105     /// Caller `HirId`.
106     hir_id: hir::HirId,
107     /// Stack of inlined Instances.
108     history: Vec<ty::Instance<'tcx>>,
109     /// Indicates that the caller body has been modified.
110     changed: bool,
111 }
112
113 impl<'tcx> Inliner<'tcx> {
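    /// Walks `blocks` in the caller body, attempting to inline every suitable direct call;
    /// blocks produced by a successful inlining are then processed recursively.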
114     fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBlock>) {
115         for bb in blocks {
116             let bb_data = &caller_body[bb];
117             if bb_data.is_cleanup {
118                 continue;
119             }
120
121             let callsite = match self.resolve_callsite(caller_body, bb, bb_data) {
122                 None => continue,
123                 Some(it) => it,
124             };
125
126             let span = trace_span!("process_blocks", %callsite.callee, ?bb);
127             let _guard = span.enter();
128
129             match self.try_inlining(caller_body, &callsite) {
130                 Err(reason) => {
131                     debug!("not-inlined {} [{}]", callsite.callee, reason);
132                     continue;
133                 }
134                 Ok(new_blocks) => {
135                     debug!("inlined {}", callsite.callee);
136                     self.changed = true;
137                     self.history.push(callsite.callee);
138                     self.process_blocks(caller_body, new_blocks);
139                     self.history.pop();
140                 }
141             }
142         }
143     }
144
145     /// Attempts to inline a callsite into the caller body. When successful, returns the range
146     /// of new basic blocks containing the inlined body. Otherwise, returns an error describing
147     /// why inlining didn't take place.
148     fn try_inlining(
149         &self,
150         caller_body: &mut Body<'tcx>,
151         callsite: &CallSite<'tcx>,
152     ) -> Result<std::ops::Range<BasicBlock>, &'static str> {
153         let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id());
154         self.check_codegen_attributes(callsite, callee_attrs)?;
155         self.check_mir_is_available(caller_body, &callsite.callee)?;
156         let callee_body = self.tcx.instance_mir(callsite.callee.def);
157         self.check_mir_body(callsite, callee_body, callee_attrs)?;
158
159         if !self.tcx.consider_optimizing(|| {
160             format!("Inline {:?} into {:?}", callsite.callee, caller_body.source)
161         }) {
162             return Err("optimization fuel exhausted");
163         }
164
165         let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions(
166             self.tcx,
167             self.param_env,
168             callee_body.clone(),
169         );
170
171         let old_blocks = caller_body.basic_blocks().next_index();
172         self.inline_call(caller_body, &callsite, callee_body);
173         let new_blocks = old_blocks..caller_body.basic_blocks().next_index();
174
175         Ok(new_blocks)
176     }
177
178     fn check_mir_is_available(
179         &self,
180         caller_body: &Body<'tcx>,
181         callee: &Instance<'tcx>,
182     ) -> Result<(), &'static str> {
183         if callee.def_id() == caller_body.source.def_id() {
184             return Err("self-recursion");
185         }
186
187         match callee.def {
188             InstanceDef::Item(_) => {
189                 // If there is no MIR available (either because it was not in metadata or
190                 // because it is an extern function with no MIR body), then the inliner
191                 // cannot cause a query cycle here.
192                 if !self.tcx.is_mir_available(callee.def_id()) {
193                     return Err("item MIR unavailable");
194                 }
195             }
196             // These have no callable MIR of their own.
197             InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => {
198                 return Err("instance without MIR (intrinsic / virtual)");
199             }
200             // This cannot result in an immediate cycle since the callee MIR is a shim, which does
201             // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we
202             // do not need to catch this here; we can wait until the inliner decides to continue
203             // inlining a second time.
204             InstanceDef::VtableShim(_)
205             | InstanceDef::ReifyShim(_)
206             | InstanceDef::FnPtrShim(..)
207             | InstanceDef::ClosureOnceShim { .. }
208             | InstanceDef::DropGlue(..)
209             | InstanceDef::CloneShim(..) => return Ok(()),
210         }
211
212         if self.tcx.is_constructor(callee.def_id()) {
213             trace!("constructors always have MIR");
214             // Constructor functions cannot cause a query cycle.
215             return Ok(());
216         }
217
218         if let Some(callee_def_id) = callee.def_id().as_local() {
219             let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id);
220             // Avoid a cycle here by using `instance_mir` only if we have a lower
221             // `HirId` than the callee. This ensures that the callee will not inline
222             // us. This trick only works without incremental compilation, so don't
223             // rely on it if that is enabled.
224             if !self.tcx.dep_graph.is_fully_enabled() && self.hir_id.index() < callee_hir_id.index()
225             {
226                 return Ok(());
227             }
228
229             // If we know for sure that the function we're calling will itself try to
230             // call us, then we avoid inlining that function.
231             if self
232                 .tcx
233                 .mir_callgraph_reachable((*callee, caller_body.source.def_id().expect_local()))
234             {
235                 return Err("caller might be reachable from callee (query cycle avoidance)");
236             }
237
238             Ok(())
239         } else {
240             // This cannot result in an immediate cycle since the callee MIR is from another crate
241             // and is already optimized. Any subsequent inlining may cause cycles, but we do
242             // not need to catch this here; we can wait until the inliner decides to continue
243             // inlining a second time.
244             trace!("functions from other crates always have MIR");
245             Ok(())
246         }
247     }
248
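    /// If the terminator of `bb` is a direct call that resolves to a concrete instance,
    /// returns the corresponding `CallSite`; virtual calls and intrinsics are skipped.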
249     fn resolve_callsite(
250         &self,
251         caller_body: &Body<'tcx>,
252         bb: BasicBlock,
253         bb_data: &BasicBlockData<'tcx>,
254     ) -> Option<CallSite<'tcx>> {
255         // Only consider direct calls to functions
256         let terminator = bb_data.terminator();
257         if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {
258             let func_ty = func.ty(caller_body, self.tcx);
259             if let ty::FnDef(def_id, substs) = *func_ty.kind() {
260                 // To resolve an instance, its substs have to be fully normalized.
261                 let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
262                 let callee =
263                     Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;
264
265                 if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
266                     return None;
267                 }
268
269                 let fn_sig = self.tcx.fn_sig(def_id).subst(self.tcx, substs);
270
271                 return Some(CallSite {
272                     callee,
273                     fn_sig,
274                     block: bb,
275                     target: destination.map(|(_, target)| target),
276                     source_info: terminator.source_info,
277                 });
278             }
279         }
280
281         None
282     }
283
284     /// Returns an error if inlining is not possible based on codegen attributes alone. A success
285     /// indicates that the inlining decision should be based on other criteria.
286     fn check_codegen_attributes(
287         &self,
288         callsite: &CallSite<'tcx>,
289         callee_attrs: &CodegenFnAttrs,
290     ) -> Result<(), &'static str> {
291         if let InlineAttr::Never = callee_attrs.inline {
292             return Err("never inline hint");
293         }
294
295         // Only inline local functions if they would be eligible for cross-crate
296         // inlining. This is to ensure that the final crate doesn't have MIR that
297         // references unexported symbols.
298         if callsite.callee.def_id().is_local() {
299             let is_generic = callsite.callee.substs.non_erasable_generics().next().is_some();
300             if !is_generic && !callee_attrs.requests_inline() {
301                 return Err("not exported");
302             }
303         }
304
305         if callsite.fn_sig.c_variadic() {
306             return Err("C variadic");
307         }
308
309         if callee_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
310             return Err("naked");
311         }
312
313         if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
314             return Err("cold");
315         }
316
317         if callee_attrs.no_sanitize != self.codegen_fn_attrs.no_sanitize {
318             return Err("incompatible sanitizer set");
319         }
320
321         if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set {
322             return Err("incompatible instruction set");
323         }
324
325         for feature in &callee_attrs.target_features {
326             if !self.codegen_fn_attrs.target_features.contains(feature) {
327                 return Err("incompatible target feature");
328             }
329         }
330
331         Ok(())
332     }
333
334     /// Returns the inlining decision based on an examination of the callee MIR body.
335     /// Assumes that codegen attributes have already been checked for compatibility.
336     #[instrument(level = "debug", skip(self, callee_body))]
337     fn check_mir_body(
338         &self,
339         callsite: &CallSite<'tcx>,
340         callee_body: &Body<'tcx>,
341         callee_attrs: &CodegenFnAttrs,
342     ) -> Result<(), &'static str> {
343         let tcx = self.tcx;
344
345         let mut threshold = if callee_attrs.requests_inline() {
346             self.tcx.sess.opts.debugging_opts.inline_mir_hint_threshold.unwrap_or(100)
347         } else {
348             self.tcx.sess.opts.debugging_opts.inline_mir_threshold.unwrap_or(50)
349         };
350
351         // Give a bonus to functions with a small number of blocks;
352         // we normally have two or three blocks even for
353         // very small functions.
354         if callee_body.basic_blocks().len() <= 3 {
355             threshold += threshold / 4;
356         }
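        // For example, with the default threshold of 50 this becomes 50 + 50 / 4 = 62.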
357         debug!("    final inline threshold = {}", threshold);
358
359         // FIXME: Give a bonus to functions with only a single caller
360         let mut first_block = true;
361         let mut cost = 0;
362
363         // Traverse the MIR manually so we can account for the effects of
364         // inlining on the CFG.
365         let mut work_list = vec![START_BLOCK];
366         let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
367         while let Some(bb) = work_list.pop() {
368             if !visited.insert(bb.index()) {
369                 continue;
370             }
371             let blk = &callee_body.basic_blocks()[bb];
372
373             for stmt in &blk.statements {
374                 // Don't count StorageLive/StorageDead in the inlining cost.
375                 match stmt.kind {
376                     StatementKind::StorageLive(_)
377                     | StatementKind::StorageDead(_)
378                     | StatementKind::Nop => {}
379                     _ => cost += INSTR_COST,
380                 }
381             }
382             let term = blk.terminator();
383             let mut is_drop = false;
384             match term.kind {
385                 TerminatorKind::Drop { ref place, target, unwind }
386                 | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } => {
387                     is_drop = true;
388                     work_list.push(target);
389                     // If the place doesn't actually need dropping, treat it like
390                     // a regular goto.
391                     let ty = callsite.callee.subst_mir(self.tcx, &place.ty(callee_body, tcx).ty);
392                     if ty.needs_drop(tcx, self.param_env) {
393                         cost += CALL_PENALTY;
394                         if let Some(unwind) = unwind {
395                             cost += LANDINGPAD_PENALTY;
396                             work_list.push(unwind);
397                         }
398                     } else {
399                         cost += INSTR_COST;
400                     }
401                 }
402
403                 TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
404                     if first_block =>
405                 {
406                     // If the function always diverges, don't inline
407                     // unless the cost is zero
408                     threshold = 0;
409                 }
410
411                 TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
412                     if let ty::FnDef(def_id, substs) =
413                         *callsite.callee.subst_mir(self.tcx, &f.literal.ty()).kind()
414                     {
415                         let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
416                         if let Ok(Some(instance)) =
417                             Instance::resolve(self.tcx, self.param_env, def_id, substs)
418                         {
419                             if callsite.callee.def_id() == instance.def_id() {
420                                 return Err("self-recursion");
421                             } else if self.history.contains(&instance) {
422                                 return Err("already inlined");
423                             }
424                         }
425                         // Don't give intrinsics the extra penalty for calls
426                         let f = tcx.fn_sig(def_id);
427                         if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
428                             cost += INSTR_COST;
429                         } else {
430                             cost += CALL_PENALTY;
431                         }
432                     } else {
433                         cost += CALL_PENALTY;
434                     }
435                     if cleanup.is_some() {
436                         cost += LANDINGPAD_PENALTY;
437                     }
438                 }
439                 TerminatorKind::Assert { cleanup, .. } => {
440                     cost += CALL_PENALTY;
441
442                     if cleanup.is_some() {
443                         cost += LANDINGPAD_PENALTY;
444                     }
445                 }
446                 TerminatorKind::Resume => cost += RESUME_PENALTY,
447                 TerminatorKind::InlineAsm { cleanup, .. } => {
448                     cost += INSTR_COST;
449
450                     if cleanup.is_some() {
451                         cost += LANDINGPAD_PENALTY;
452                     }
453                 }
454                 _ => cost += INSTR_COST,
455             }
456
457             if !is_drop {
458                 for &succ in term.successors() {
459                     work_list.push(succ);
460                 }
461             }
462
463             first_block = false;
464         }
465
466         // Count up the cost of local variables and temps; if we know the size,
467         // use that, otherwise we use a moderately large dummy cost.
468
469         let ptr_size = tcx.data_layout.pointer_size.bytes();
470
471         for v in callee_body.vars_and_temps_iter() {
472             let ty = callsite.callee.subst_mir(self.tcx, &callee_body.local_decls[v].ty);
473             // Cost of the var is the size in machine-words, if we know
474             // it.
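            // E.g. a 12-byte local with 8-byte pointers counts as (12 + 8 - 1) / 8 = 2 words.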
475             if let Some(size) = type_size_of(tcx, self.param_env, ty) {
476                 cost += ((size + ptr_size - 1) / ptr_size) as usize;
477             } else {
478                 cost += UNKNOWN_SIZE_COST;
479             }
480         }
481
482         if let InlineAttr::Always = callee_attrs.inline {
483             debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
484             Ok(())
485         } else {
486             if cost <= threshold {
487                 debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
488                 Ok(())
489             } else {
490                 debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
491                 Err("cost above threshold")
492             }
493         }
494     }
495
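    /// Splices the already-substituted callee body into the caller at `callsite`: spills the
    /// arguments into temporaries, appends the callee's locals, scopes and blocks to the
    /// caller, and replaces the original `Call` terminator with a `Goto` into the inlined
    /// entry block.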
496     fn inline_call(
497         &self,
498         caller_body: &mut Body<'tcx>,
499         callsite: &CallSite<'tcx>,
500         mut callee_body: Body<'tcx>,
501     ) {
502         let terminator = caller_body[callsite.block].terminator.take().unwrap();
503         match terminator.kind {
504             TerminatorKind::Call { args, destination, cleanup, .. } => {
505                 // If the call is something like `a[*i] = f(i)`, where
506                 // `i : &mut usize`, then just duplicating the `a[*i]`
507                 // Place could result in two different locations if `f`
508                 // writes to `i`. To prevent this we need to create a temporary
509                 // borrow of the place and pass the destination as `*temp` instead.
510                 fn dest_needs_borrow(place: Place<'_>) -> bool {
511                     for elem in place.projection.iter() {
512                         match elem {
513                             ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
514                             _ => {}
515                         }
516                     }
517
518                     false
519                 }
520
521                 let dest = if let Some((destination_place, _)) = destination {
522                     if dest_needs_borrow(destination_place) {
523                         trace!("creating temp for return destination");
524                         let dest = Rvalue::Ref(
525                             self.tcx.lifetimes.re_erased,
526                             BorrowKind::Mut { allow_two_phase_borrow: false },
527                             destination_place,
528                         );
529                         let dest_ty = dest.ty(caller_body, self.tcx);
530                         let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
531                         caller_body[callsite.block].statements.push(Statement {
532                             source_info: callsite.source_info,
533                             kind: StatementKind::Assign(Box::new((temp, dest))),
534                         });
535                         self.tcx.mk_place_deref(temp)
536                     } else {
537                         destination_place
538                     }
539                 } else {
540                     trace!("creating temp for return place");
541                     Place::from(self.new_call_temp(caller_body, &callsite, callee_body.return_ty()))
542                 };
543
544                 // Copy the arguments if needed.
545                 let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);
546
547                 let mut integrator = Integrator {
548                     args: &args,
549                     new_locals: Local::new(caller_body.local_decls.len())..,
550                     new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
551                     new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
552                     destination: dest,
553                     return_block: callsite.target,
554                     cleanup_block: cleanup,
555                     in_cleanup_block: false,
556                     tcx: self.tcx,
557                     callsite_span: callsite.source_info.span,
558                     body_span: callee_body.span,
559                     always_live_locals: BitSet::new_filled(callee_body.local_decls.len()),
560                 };
561
562                 // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones
563                 // (or existing ones, in a few special cases) in the caller.
564                 integrator.visit_body(&mut callee_body);
565
566                 for scope in &mut callee_body.source_scopes {
567                     // FIXME(eddyb) move this into a `fn visit_scope_data` in `Integrator`.
568                     if scope.parent_scope.is_none() {
569                         let callsite_scope = &caller_body.source_scopes[callsite.source_info.scope];
570
571                         // Attach the outermost callee scope as a child of the callsite
572                         // scope, via the `parent_scope` and `inlined_parent_scope` chains.
573                         scope.parent_scope = Some(callsite.source_info.scope);
574                         assert_eq!(scope.inlined_parent_scope, None);
575                         scope.inlined_parent_scope = if callsite_scope.inlined.is_some() {
576                             Some(callsite.source_info.scope)
577                         } else {
578                             callsite_scope.inlined_parent_scope
579                         };
580
581                         // Mark the outermost callee scope as an inlined one.
582                         assert_eq!(scope.inlined, None);
583                         scope.inlined = Some((callsite.callee, callsite.source_info.span));
584                     } else if scope.inlined_parent_scope.is_none() {
585                         // Make it easy to find the scope with `inlined` set above.
586                         scope.inlined_parent_scope =
587                             Some(integrator.map_scope(OUTERMOST_SOURCE_SCOPE));
588                     }
589                 }
590
591                 // If there are any locals without storage markers, give them storage only for the
592                 // duration of the call.
593                 for local in callee_body.vars_and_temps_iter() {
594                     if integrator.always_live_locals.contains(local) {
595                         let new_local = integrator.map_local(local);
596                         caller_body[callsite.block].statements.push(Statement {
597                             source_info: callsite.source_info,
598                             kind: StatementKind::StorageLive(new_local),
599                         });
600                     }
601                 }
602                 if let Some(block) = callsite.target {
603                     // To avoid repeated O(n) insert, push any new statements to the end and rotate
604                     // the slice once.
605                     let mut n = 0;
606                     for local in callee_body.vars_and_temps_iter().rev() {
607                         if integrator.always_live_locals.contains(local) {
608                             let new_local = integrator.map_local(local);
609                             caller_body[block].statements.push(Statement {
610                                 source_info: callsite.source_info,
611                                 kind: StatementKind::StorageDead(new_local),
612                             });
613                             n += 1;
614                         }
615                     }
616                     caller_body[block].statements.rotate_right(n);
617                 }
618
619                 // Insert all of the (mapped) parts of the callee body into the caller.
620                 caller_body.local_decls.extend(callee_body.drain_vars_and_temps());
621                 caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..));
622                 caller_body.var_debug_info.append(&mut callee_body.var_debug_info);
623                 caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));
624
625                 caller_body[callsite.block].terminator = Some(Terminator {
626                     source_info: callsite.source_info,
627                     kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) },
628                 });
629
630                 // Copy only unevaluated constants from the callee_body into the caller_body.
631                 // Although we are only pushing `ConstKind::Unevaluated` consts to
632                 // `required_consts`, we may also see other kinds of consts here because the
633                 // body went through `subst_mir_and_normalize_erasing_regions` above.
634                 caller_body.required_consts.extend(
635                     callee_body.required_consts.iter().copied().filter(|&ct| {
636                         match ct.literal.const_for_ty() {
637                             Some(ct) => matches!(ct.val(), ConstKind::Unevaluated(_)),
638                             None => true,
639                         }
640                     }),
641                 );
642             }
643             kind => bug!("unexpected terminator kind {:?}", kind),
644         }
645     }
646
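    /// Spills the call arguments into caller-side temporaries and returns the locals that the
    /// callee's argument locals will be mapped to. For the "rust-call" ABI the argument tuple
    /// is unpacked into one temporary per element, as explained below.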
647     fn make_call_args(
648         &self,
649         args: Vec<Operand<'tcx>>,
650         callsite: &CallSite<'tcx>,
651         caller_body: &mut Body<'tcx>,
652         callee_body: &Body<'tcx>,
653     ) -> Vec<Local> {
654         let tcx = self.tcx;
655
656         // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
657         // The caller provides the arguments wrapped up in a tuple:
658         //
659         //     tuple_tmp = (a, b, c)
660         //     Fn::call(closure_ref, tuple_tmp)
661         //
662         // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
663         // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
664         // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
665         // a vector like
666         //
667         //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
668         //
669         // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
670         // if we "spill" that into *another* temporary, so that we can map the argument
671         // variable in the callee MIR directly to an argument variable on our side.
672         // So we introduce temporaries like:
673         //
674         //     tmp0 = tuple_tmp.0
675         //     tmp1 = tuple_tmp.1
676         //     tmp2 = tuple_tmp.2
677         //
678         // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
679         if callsite.fn_sig.abi() == Abi::RustCall && callee_body.spread_arg.is_none() {
680             let mut args = args.into_iter();
681             let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
682             let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
683             assert!(args.next().is_none());
684
685             let tuple = Place::from(tuple);
686             let ty::Tuple(tuple_tys) = tuple.ty(caller_body, tcx).ty.kind() else {
687                 bug!("Closure arguments are not passed as a tuple");
688             };
689
690             // The `closure_ref` in our example above.
691             let closure_ref_arg = iter::once(self_);
692
693             // The `tmp0`, `tmp1`, and `tmp2` in our example above.
694             let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
695                 // This is e.g., `tuple_tmp.0` in our example above.
696                 let tuple_field =
697                     Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));
698
699                 // Spill to a local to make e.g., `tmp0`.
700                 self.create_temp_if_necessary(tuple_field, callsite, caller_body)
701             });
702
703             closure_ref_arg.chain(tuple_tmp_args).collect()
704         } else {
705             args.into_iter()
706                 .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
707                 .collect()
708         }
709     }
710
711     /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
712     /// temporary `T` and an instruction `T = arg`, and returns `T`.
713     fn create_temp_if_necessary(
714         &self,
715         arg: Operand<'tcx>,
716         callsite: &CallSite<'tcx>,
717         caller_body: &mut Body<'tcx>,
718     ) -> Local {
719         // Reuse the operand if it is a moved temporary.
720         if let Operand::Move(place) = &arg {
721             if let Some(local) = place.as_local() {
722                 if caller_body.local_kind(local) == LocalKind::Temp {
723                     return local;
724                 }
725             }
726         }
727
728         // Otherwise, create a temporary for the argument.
729         trace!("creating temp for argument {:?}", arg);
730         let arg_ty = arg.ty(caller_body, self.tcx);
731         let local = self.new_call_temp(caller_body, callsite, arg_ty);
732         caller_body[callsite.block].statements.push(Statement {
733             source_info: callsite.source_info,
734             kind: StatementKind::Assign(Box::new((Place::from(local), Rvalue::Use(arg)))),
735         });
736         local
737     }
738
739     /// Introduces a new temporary into the caller body that is live for the duration of the call.
740     fn new_call_temp(
741         &self,
742         caller_body: &mut Body<'tcx>,
743         callsite: &CallSite<'tcx>,
744         ty: Ty<'tcx>,
745     ) -> Local {
746         let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span));
747
748         caller_body[callsite.block].statements.push(Statement {
749             source_info: callsite.source_info,
750             kind: StatementKind::StorageLive(local),
751         });
752
753         if let Some(block) = callsite.target {
754             caller_body[block].statements.insert(
755                 0,
756                 Statement {
757                     source_info: callsite.source_info,
758                     kind: StatementKind::StorageDead(local),
759                 },
760             );
761         }
762
763         local
764     }
765 }
766
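/// Returns the size of `ty` in bytes, if its layout can be computed in `param_env`.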
767 fn type_size_of<'tcx>(
768     tcx: TyCtxt<'tcx>,
769     param_env: ty::ParamEnv<'tcx>,
770     ty: Ty<'tcx>,
771 ) -> Option<u64> {
772     tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
773 }
774
775 /**
776  * Integrator.
777  *
778  * Integrates blocks from the callee function into the calling function.
779  * Updates block indices, references to locals, and other
780  * control-flow data.
781 */
782 struct Integrator<'a, 'tcx> {
783     args: &'a [Local],
784     new_locals: RangeFrom<Local>,
785     new_scopes: RangeFrom<SourceScope>,
786     new_blocks: RangeFrom<BasicBlock>,
787     destination: Place<'tcx>,
788     return_block: Option<BasicBlock>,
789     cleanup_block: Option<BasicBlock>,
790     in_cleanup_block: bool,
791     tcx: TyCtxt<'tcx>,
792     callsite_span: Span,
793     body_span: Span,
794     always_live_locals: BitSet<Local>,
795 }
796
797 impl Integrator<'_, '_> {
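    /// Maps a callee local into the caller body. Callee locals are laid out as
    /// `[RETURN_PLACE, arguments.., vars and temps..]`: the return place maps to the call
    /// destination, argument locals map to the spilled argument temporaries in `args`, and
    /// the rest map to freshly appended caller locals starting at `new_locals.start`.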
798     fn map_local(&self, local: Local) -> Local {
799         let new = if local == RETURN_PLACE {
800             self.destination.local
801         } else {
802             let idx = local.index() - 1;
803             if idx < self.args.len() {
804                 self.args[idx]
805             } else {
806                 Local::new(self.new_locals.start.index() + (idx - self.args.len()))
807             }
808         };
809         trace!("mapping local `{:?}` to `{:?}`", local, new);
810         new
811     }
812
813     fn map_scope(&self, scope: SourceScope) -> SourceScope {
814         let new = SourceScope::new(self.new_scopes.start.index() + scope.index());
815         trace!("mapping scope `{:?}` to `{:?}`", scope, new);
816         new
817     }
818
819     fn map_block(&self, block: BasicBlock) -> BasicBlock {
820         let new = BasicBlock::new(self.new_blocks.start.index() + block.index());
821         trace!("mapping block `{:?}` to `{:?}`", block, new);
822         new
823     }
824 }
825
826 impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
827     fn tcx(&self) -> TyCtxt<'tcx> {
828         self.tcx
829     }
830
831     fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
832         *local = self.map_local(*local);
833     }
834
835     fn visit_source_scope(&mut self, scope: &mut SourceScope) {
836         *scope = self.map_scope(*scope);
837     }
838
839     fn visit_span(&mut self, span: &mut Span) {
840         let mut expn_data =
841             ExpnData::default(ExpnKind::Inlined, *span, self.tcx.sess.edition(), None, None);
842         expn_data.def_site = self.body_span;
843         // Make sure that all spans track the fact that they were inlined.
844         *span =
845             self.callsite_span.fresh_expansion(expn_data, self.tcx.create_stable_hashing_context());
846     }
847
848     fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
849         for elem in place.projection {
850             // FIXME: Make sure that return place is not used in an indexing projection, since it
851             // won't be rebased as it is supposed to be.
852             assert_ne!(ProjectionElem::Index(RETURN_PLACE), elem);
853         }
854
855         // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
856         let dest_proj_len = self.destination.projection.len();
857         if place.local == RETURN_PLACE && dest_proj_len > 0 {
858             let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
859             projs.extend(self.destination.projection);
860             projs.extend(place.projection);
861
862             place.projection = self.tcx.intern_place_elems(&*projs);
863         }
864         // Handles integrating any locals that occur in the base
865         // or projections
866         self.super_place(place, context, location)
867     }
868
869     fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
870         self.in_cleanup_block = data.is_cleanup;
871         self.super_basic_block_data(block, data);
872         self.in_cleanup_block = false;
873     }
874
875     fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
876         self.super_retag(kind, place, loc);
877
878         // We have to patch all inlined retags to be aware that they are no longer
879         // happening on function entry.
880         if *kind == RetagKind::FnEntry {
881             *kind = RetagKind::Default;
882         }
883     }
884
885     fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
886         if let StatementKind::StorageLive(local) | StatementKind::StorageDead(local) =
887             statement.kind
888         {
889             self.always_live_locals.remove(local);
890         }
891         self.super_statement(statement, location);
892     }
893
894     fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, loc: Location) {
895         // Don't try to modify the implicit `_0` access on return (`return` terminators are
896         // replaced down below anyway).
897         if !matches!(terminator.kind, TerminatorKind::Return) {
898             self.super_terminator(terminator, loc);
899         }
900
901         match terminator.kind {
902             TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
903             TerminatorKind::Goto { ref mut target } => {
904                 *target = self.map_block(*target);
905             }
906             TerminatorKind::SwitchInt { ref mut targets, .. } => {
907                 for tgt in targets.all_targets_mut() {
908                     *tgt = self.map_block(*tgt);
909                 }
910             }
911             TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
912             | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
913                 *target = self.map_block(*target);
914                 if let Some(tgt) = *unwind {
915                     *unwind = Some(self.map_block(tgt));
916                 } else if !self.in_cleanup_block {
917                     // Unless this drop is in a cleanup block, add an unwind edge to
918                     // the original call's cleanup block
919                     *unwind = self.cleanup_block;
920                 }
921             }
922             TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
923                 if let Some((_, ref mut tgt)) = *destination {
924                     *tgt = self.map_block(*tgt);
925                 }
926                 if let Some(tgt) = *cleanup {
927                     *cleanup = Some(self.map_block(tgt));
928                 } else if !self.in_cleanup_block {
929                     // Unless this call is in a cleanup block, add an unwind edge to
930                     // the original call's cleanup block
931                     *cleanup = self.cleanup_block;
932                 }
933             }
934             TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
935                 *target = self.map_block(*target);
936                 if let Some(tgt) = *cleanup {
937                     *cleanup = Some(self.map_block(tgt));
938                 } else if !self.in_cleanup_block {
939                     // Unless this assert is in a cleanup block, add an unwind edge to
940                     // the original call's cleanup block
941                     *cleanup = self.cleanup_block;
942                 }
943             }
944             TerminatorKind::Return => {
945                 terminator.kind = if let Some(tgt) = self.return_block {
946                     TerminatorKind::Goto { target: tgt }
947                 } else {
948                     TerminatorKind::Unreachable
949                 }
950             }
951             TerminatorKind::Resume => {
952                 if let Some(tgt) = self.cleanup_block {
953                     terminator.kind = TerminatorKind::Goto { target: tgt }
954                 }
955             }
956             TerminatorKind::Abort => {}
957             TerminatorKind::Unreachable => {}
958             TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
959                 *real_target = self.map_block(*real_target);
960                 *imaginary_target = self.map_block(*imaginary_target);
961             }
962             TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
963             // see the ordering of passes in the optimized_mir query.
964             {
965                 bug!("False unwinds should have been removed before inlining")
966             }
967             TerminatorKind::InlineAsm { ref mut destination, ref mut cleanup, .. } => {
968                 if let Some(ref mut tgt) = *destination {
969                     *tgt = self.map_block(*tgt);
970                 } else if !self.in_cleanup_block {
971                     // Unless this inline asm is in a cleanup block, add an unwind edge to
972                     // the original call's cleanup block
973                     *cleanup = self.cleanup_block;
974                 }
975             }
976         }
977     }
978 }