// compiler/rustc_mir/src/transform/inline.rs
//! Inlining pass for MIR functions

use rustc_attr::InlineAttr;
use rustc_hir as hir;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_span::{hygiene::ExpnKind, ExpnData, Span};
use rustc_target::spec::abi::Abi;

use super::simplify::{remove_dead_blocks, CfgSimplifier};
use crate::transform::MirPass;
use std::iter;
use std::ops::{Range, RangeFrom};

crate mod cycle;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const LANDINGPAD_PENALTY: usize = 50;
const RESUME_PENALTY: usize = 45;

const UNKNOWN_SIZE_COST: usize = 10;
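
// For intuition (a hedged worked example, not part of the algorithm): a
// hypothetical callee containing one ordinary statement, one call with a
// cleanup edge, and a `Resume` terminator would be costed roughly as
//
//     INSTR_COST + (CALL_PENALTY + LANDINGPAD_PENALTY) + RESUME_PENALTY
//   = 5 + (25 + 50) + 45 = 125
//
// which is already above the default thresholds used in `check_mir_body`.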

pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: Instance<'tcx>,
    fn_sig: ty::PolyFnSig<'tcx>,
    block: BasicBlock,
    target: Option<BasicBlock>,
    source_info: SourceInfo,
}

/// Returns true if MIR inlining is enabled in the current compilation session.
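///
/// As a sketch of the flag interaction (assuming the usual `-Z` spelling of
/// the options read below): `-Z inline-mir=yes` or `=no` overrides the
/// decision explicitly, while in the absence of the flag the pass only runs
/// at `-Z mir-opt-level=3` or higher.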
crate fn is_enabled(tcx: TyCtxt<'_>) -> bool {
    if let Some(enabled) = tcx.sess.opts.debugging_opts.inline_mir {
        return enabled;
    }

    tcx.sess.mir_opt_level() >= 3
}

impl<'tcx> MirPass<'tcx> for Inline {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        if !is_enabled(tcx) {
            return;
        }

        let span = trace_span!("inline", body = %tcx.def_path_str(body.source.def_id()));
        let _guard = span.enter();
        if inline(tcx, body) {
            debug!("running simplify cfg on {:?}", body.source);
            CfgSimplifier::new(body).simplify();
            remove_dead_blocks(tcx, body);
        }
    }
}

fn inline(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
    let def_id = body.source.def_id();
    let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());

    // Only do inlining into fn bodies.
    if !tcx.hir().body_owner_kind(hir_id).is_fn_or_closure() {
        return false;
    }
    if body.source.promoted.is_some() {
        return false;
    }

    let mut this = Inliner {
        tcx,
        param_env: tcx.param_env_reveal_all_normalized(body.source.def_id()),
        codegen_fn_attrs: tcx.codegen_fn_attrs(body.source.def_id()),
        hir_id,
        history: Vec::new(),
        changed: false,
    };
    let blocks = BasicBlock::new(0)..body.basic_blocks().next_index();
    this.process_blocks(body, blocks);
    this.changed
}

struct Inliner<'tcx> {
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// Codegen attributes of the caller.
    codegen_fn_attrs: &'tcx CodegenFnAttrs,
    /// `HirId` of the caller.
    hir_id: hir::HirId,
    /// Stack of instances inlined so far, used to avoid inlining cycles.
    history: Vec<ty::Instance<'tcx>>,
    /// Indicates that the caller body has been modified.
    changed: bool,
}

impl Inliner<'tcx> {
    fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBlock>) {
        for bb in blocks {
            let bb_data = &caller_body[bb];
            if bb_data.is_cleanup {
                continue;
            }

            let callsite = match self.resolve_callsite(caller_body, bb, bb_data) {
                None => continue,
                Some(it) => it,
            };

            let span = trace_span!("process_blocks", %callsite.callee, ?bb);
            let _guard = span.enter();

            match self.try_inlining(caller_body, &callsite) {
                Err(reason) => {
                    debug!("not-inlined {} [{}]", callsite.callee, reason);
                    continue;
                }
                Ok(new_blocks) => {
                    debug!("inlined {}", callsite.callee);
                    self.changed = true;
                    self.history.push(callsite.callee);
                    self.process_blocks(caller_body, new_blocks);
                    self.history.pop();
                }
            }
        }
    }

    /// Attempts to inline a callsite into the caller body. On success, returns the range of
    /// basic blocks containing the inlined body; otherwise returns an error describing why
    /// inlining didn't take place.
    fn try_inlining(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
    ) -> Result<std::ops::Range<BasicBlock>, &'static str> {
        let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id());
        self.check_codegen_attributes(callsite, callee_attrs)?;
        self.check_mir_is_available(caller_body, &callsite.callee)?;
        let callee_body = self.tcx.instance_mir(callsite.callee.def);
        self.check_mir_body(callsite, callee_body, callee_attrs)?;

        if !self.tcx.consider_optimizing(|| {
            format!("Inline {:?} into {}", callee_body.span, callsite.callee)
        }) {
            return Err("optimization fuel exhausted");
        }

        let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions(
            self.tcx,
            self.param_env,
            callee_body.clone(),
        );

        let old_blocks = caller_body.basic_blocks().next_index();
        self.inline_call(caller_body, &callsite, callee_body);
        let new_blocks = old_blocks..caller_body.basic_blocks().next_index();

        Ok(new_blocks)
    }

    fn check_mir_is_available(
        &self,
        caller_body: &Body<'tcx>,
        callee: &Instance<'tcx>,
    ) -> Result<(), &'static str> {
        if callee.def_id() == caller_body.source.def_id() {
            return Err("self-recursion");
        }

        match callee.def {
            InstanceDef::Item(_) => {
                // If there is no MIR available (either because it was not encoded in
                // metadata or because the item, e.g. an extern function, has no MIR),
                // then the inliner won't cause cycles on this.
                if !self.tcx.is_mir_available(callee.def_id()) {
                    return Err("item MIR unavailable");
                }
            }
            // These have no callable MIR of their own.
            InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => {
                return Err("instance without MIR (intrinsic / virtual)");
            }
            // This cannot result in an immediate cycle since the callee MIR is a shim, which
            // does not get any optimizations run on it. Any subsequent inlining may cause
            // cycles, but we do not need to catch that here; we can wait until the inliner
            // decides to continue inlining a second time.
            InstanceDef::VtableShim(_)
            | InstanceDef::ReifyShim(_)
            | InstanceDef::FnPtrShim(..)
            | InstanceDef::ClosureOnceShim { .. }
            | InstanceDef::DropGlue(..)
            | InstanceDef::CloneShim(..) => return Ok(()),
        }

        if self.tcx.is_constructor(callee.def_id()) {
            trace!("constructors always have MIR");
            // Constructor functions cannot cause a query cycle.
            return Ok(());
        }

        if let Some(callee_def_id) = callee.def_id().as_local() {
            let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id);
            // Avoid inlining into generators, since their `optimized_mir` is used for
            // layout computation, which can create a cycle, even when no attempt is made
            // to inline the function in the other direction.
            if caller_body.generator.is_some() {
                return Err("local generator (query cycle avoidance)");
            }

            // Avoid a cycle here by using `instance_mir` only if we have a lower `HirId`
            // than the callee. This ensures that the callee will not inline us. This trick
            // only works without incremental compilation, so don't do it if that is enabled.
            if !self.tcx.dep_graph.is_fully_enabled() && self.hir_id < callee_hir_id {
                return Ok(());
            }

            // If we know for sure that the function we're calling will itself try to
            // call us, then we avoid inlining that function.
            if self
                .tcx
                .mir_callgraph_reachable((*callee, caller_body.source.def_id().expect_local()))
            {
                return Err("caller might be reachable from callee (query cycle avoidance)");
            }

            Ok(())
        } else {
            // This cannot result in an immediate cycle since the callee MIR is from another
            // crate and is already optimized. Any subsequent inlining may cause cycles, but
            // we do not need to catch that here; we can wait until the inliner decides to
            // continue inlining a second time.
            trace!("functions from other crates always have MIR");
            Ok(())
        }
    }

    fn resolve_callsite(
        &self,
        caller_body: &Body<'tcx>,
        bb: BasicBlock,
        bb_data: &BasicBlockData<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Only consider direct calls to functions.
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {
            let func_ty = func.ty(caller_body, self.tcx);
            if let ty::FnDef(def_id, substs) = *func_ty.kind() {
                // To resolve an instance, its substs have to be fully normalized.
                let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                let callee =
                    Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;

                if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                    return None;
                }

                let fn_sig = self.tcx.fn_sig(def_id).subst(self.tcx, substs);

                return Some(CallSite {
                    callee,
                    fn_sig,
                    block: bb,
                    target: destination.map(|(_, target)| target),
                    source_info: terminator.source_info,
                });
            }
        }

        None
    }

    /// Returns an error if inlining is not possible based on codegen attributes alone. A
    /// success indicates that the inlining decision should be based on other criteria.
    fn check_codegen_attributes(
        &self,
        callsite: &CallSite<'tcx>,
        callee_attrs: &CodegenFnAttrs,
    ) -> Result<(), &'static str> {
        if let InlineAttr::Never = callee_attrs.inline {
            return Err("never inline hint");
        }

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.def_id().is_local() {
            let is_generic = callsite.callee.substs.non_erasable_generics().next().is_some();
            if !is_generic && !callee_attrs.requests_inline() {
                return Err("not exported");
            }
        }

        if callsite.fn_sig.c_variadic() {
            return Err("C variadic");
        }

        if callee_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
            return Err("naked");
        }

        if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            return Err("cold");
        }

        if callee_attrs.no_sanitize != self.codegen_fn_attrs.no_sanitize {
            return Err("incompatible sanitizer set");
        }

        if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set {
            return Err("incompatible instruction set");
        }

        for feature in &callee_attrs.target_features {
            if !self.codegen_fn_attrs.target_features.contains(feature) {
                return Err("incompatible target feature");
            }
        }

        Ok(())
    }

    /// Returns an inlining decision based on an examination of the callee's MIR body.
    /// Assumes that codegen attributes have been checked for compatibility already.
    #[instrument(level = "debug", skip(self, callee_body))]
    fn check_mir_body(
        &self,
        callsite: &CallSite<'tcx>,
        callee_body: &Body<'tcx>,
        callee_attrs: &CodegenFnAttrs,
    ) -> Result<(), &'static str> {
        let tcx = self.tcx;

        let mut threshold = if callee_attrs.requests_inline() {
            self.tcx.sess.opts.debugging_opts.inline_mir_hint_threshold.unwrap_or(100)
        } else {
            self.tcx.sess.opts.debugging_opts.inline_mir_threshold.unwrap_or(50)
        };

        // Give a bonus to functions with a small number of blocks; even very small
        // functions normally have two or three blocks.
        if callee_body.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
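        // Worked example (with the default thresholds above): an `#[inline]` callee
        // starts at 100 and, with at most three blocks, ends up at 100 + 100 / 4 = 125;
        // a non-hinted callee goes from 50 to 62.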
        debug!("    final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller
        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) {
                continue;
            }
            let blk = &callee_body.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Nop => {}
                    _ => cost += INSTR_COST,
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref place, target, unwind }
                | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the place doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = callsite.callee.subst_mir(self.tcx, &place.ty(callee_body, tcx).ty);
                    if ty.needs_drop(tcx, self.param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            cost += LANDINGPAD_PENALTY;
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
                    if first_block =>
                {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
                    if let ty::FnDef(def_id, substs) =
                        *callsite.callee.subst_mir(self.tcx, &f.literal.ty()).kind()
                    {
                        let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                        if let Ok(Some(instance)) =
                            Instance::resolve(self.tcx, self.param_env, def_id, substs)
                        {
                            if callsite.callee.def_id() == instance.def_id() {
                                return Err("self-recursion");
                            } else if self.history.contains(&instance) {
                                return Err("already inlined");
                            }
                        }
                        // Don't give intrinsics the extra penalty for calls
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    } else {
                        cost += CALL_PENALTY;
                    }
                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Assert { cleanup, .. } => {
                    cost += CALL_PENALTY;

                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Resume => cost += RESUME_PENALTY,
                _ => cost += INSTR_COST,
            }

            if !is_drop {
                for &succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps. If we know the size, use that;
        // otherwise use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_body.vars_and_temps_iter() {
            let ty = callsite.callee.subst_mir(self.tcx, &callee_body.local_decls[v].ty);
            // Cost of the var is the size in machine-words, if we know it.
            if let Some(size) = type_size_of(tcx, self.param_env, ty) {
                cost += ((size + ptr_size - 1) / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }
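        // Worked example (assuming a 64-bit target, so `ptr_size == 8`): a 24-byte
        // local costs (24 + 8 - 1) / 8 = 3, a 1-byte `bool` still costs 1, and a
        // local whose layout cannot be computed is charged UNKNOWN_SIZE_COST = 10.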

        if let InlineAttr::Always = callee_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            Ok(())
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                Ok(())
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                Err("cost above threshold")
            }
        }
    }

    fn inline_call(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        mut callee_body: Body<'tcx>,
    ) {
        let terminator = caller_body[callsite.block].terminator.take().unwrap();
        match terminator.kind {
            TerminatorKind::Call { args, destination, cleanup, .. } => {
                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: Place<'_>) -> bool {
                    for elem in place.projection.iter() {
                        match elem {
                            ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
                            _ => {}
                        }
                    }

                    false
                }
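
                // Sketch of the resulting rewrite for `a[*i] = f(i)` (illustrative MIR,
                // not taken verbatim from the pass):
                //
                //     temp = &mut a[*i];    // borrow the destination place once
                //     ... inlined body of `f`, writing its result through `*temp` ...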

                let dest = if let Some((destination_place, _)) = destination {
                    if dest_needs_borrow(destination_place) {
                        trace!("creating temp for return destination");
                        let dest = Rvalue::Ref(
                            self.tcx.lifetimes.re_erased,
                            BorrowKind::Mut { allow_two_phase_borrow: false },
                            destination_place,
                        );
                        let dest_ty = dest.ty(caller_body, self.tcx);
                        let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::Assign(box (temp, dest)),
                        });
                        self.tcx.mk_place_deref(temp)
                    } else {
                        destination_place
                    }
                } else {
                    trace!("creating temp for return place");
                    Place::from(self.new_call_temp(caller_body, &callsite, callee_body.return_ty()))
                };

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);

                let mut integrator = Integrator {
                    args: &args,
                    new_locals: Local::new(caller_body.local_decls.len())..,
                    new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
                    new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
                    destination: dest,
                    return_block: callsite.target,
                    cleanup_block: cleanup,
                    in_cleanup_block: false,
                    tcx: self.tcx,
                    callsite_span: callsite.source_info.span,
                    body_span: callee_body.span,
                    always_live_locals: BitSet::new_filled(callee_body.local_decls.len()),
                };

                // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones
                // (or existing ones, in a few special cases) in the caller.
                integrator.visit_body(&mut callee_body);

                for scope in &mut callee_body.source_scopes {
                    // FIXME(eddyb) move this into a `fn visit_scope_data` in `Integrator`.
                    if scope.parent_scope.is_none() {
                        let callsite_scope = &caller_body.source_scopes[callsite.source_info.scope];

                        // Attach the outermost callee scope as a child of the callsite
                        // scope, via the `parent_scope` and `inlined_parent_scope` chains.
                        scope.parent_scope = Some(callsite.source_info.scope);
                        assert_eq!(scope.inlined_parent_scope, None);
                        scope.inlined_parent_scope = if callsite_scope.inlined.is_some() {
                            Some(callsite.source_info.scope)
                        } else {
                            callsite_scope.inlined_parent_scope
                        };

                        // Mark the outermost callee scope as an inlined one.
                        assert_eq!(scope.inlined, None);
                        scope.inlined = Some((callsite.callee, callsite.source_info.span));
                    } else if scope.inlined_parent_scope.is_none() {
                        // Make it easy to find the scope with `inlined` set above.
                        scope.inlined_parent_scope =
                            Some(integrator.map_scope(OUTERMOST_SOURCE_SCOPE));
                    }
                }

                // If there are any locals without storage markers, give them storage only for the
                // duration of the call.
                for local in callee_body.vars_and_temps_iter() {
                    if integrator.always_live_locals.contains(local) {
                        let new_local = integrator.map_local(local);
                        caller_body[callsite.block].statements.push(Statement {
                            source_info: callsite.source_info,
                            kind: StatementKind::StorageLive(new_local),
                        });
                    }
                }
                if let Some(block) = callsite.target {
                    // To avoid repeated O(n) insert, push any new statements to the end and rotate
                    // the slice once.
                    let mut n = 0;
                    for local in callee_body.vars_and_temps_iter().rev() {
                        if integrator.always_live_locals.contains(local) {
                            let new_local = integrator.map_local(local);
                            caller_body[block].statements.push(Statement {
                                source_info: callsite.source_info,
                                kind: StatementKind::StorageDead(new_local),
                            });
                            n += 1;
                        }
                    }
                    caller_body[block].statements.rotate_right(n);
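                    // The `rotate_right(n)` moves the `n` `StorageDead`s pushed above
                    // to the front of the target block in one O(len) pass, instead of
                    // `n` separate O(len) `insert(0, ..)` calls.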
                }

                // Insert all of the (mapped) parts of the callee body into the caller.
                caller_body.local_decls.extend(
                    // FIXME(eddyb) make `Range<Local>` iterable so that we can use
                    // `callee_body.local_decls.drain(callee_body.vars_and_temps())`
                    callee_body
                        .vars_and_temps_iter()
                        .map(|local| callee_body.local_decls[local].clone()),
                );
                caller_body.source_scopes.extend(callee_body.source_scopes.drain(..));
                caller_body.var_debug_info.extend(callee_body.var_debug_info.drain(..));
                caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));

                caller_body[callsite.block].terminator = Some(Terminator {
                    source_info: callsite.source_info,
                    kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) },
                });

                // Copy only unevaluated constants from the callee_body into the caller_body.
                // Although we are only pushing `ConstKind::Unevaluated` consts to
                // `required_consts`, here we may not only have `ConstKind::Unevaluated`
                // because we are calling `subst_mir_and_normalize_erasing_regions` above.
                caller_body.required_consts.extend(
                    callee_body.required_consts.iter().copied().filter(|&ct| {
                        match ct.literal.const_for_ty() {
                            Some(ct) => matches!(ct.val, ConstKind::Unevaluated(_)),
                            None => true,
                        }
                    }),
                );
            }
            kind => bug!("unexpected terminator kind {:?}", kind),
        }
    }

    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        callee_body: &Body<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if callsite.fn_sig.abi() == Abi::RustCall && callee_body.spread_arg.is_none() {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
            assert!(args.next().is_none());

            let tuple = Place::from(tuple);
            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.kind() {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
                // This is e.g., `tuple_tmp.0` in our example above.
                let tuple_field =
                    Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));

                // Spill to a local to make e.g., `tmp0`.
                self.create_temp_if_necessary(tuple_field, callsite, caller_body)
            });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
    ) -> Local {
        // Reuse the operand if it is a moved temporary.
        if let Operand::Move(place) = &arg {
            if let Some(local) = place.as_local() {
                if caller_body.local_kind(local) == LocalKind::Temp {
                    return local;
                }
            }
        }

        // Otherwise, create a temporary for the argument.
        trace!("creating temp for argument {:?}", arg);
        let arg_ty = arg.ty(caller_body, self.tcx);
        let local = self.new_call_temp(caller_body, callsite, arg_ty);
        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::Assign(box (Place::from(local), Rvalue::Use(arg))),
        });
        local
    }

    /// Introduces a new temporary into the caller body that is live for the duration of the call.
    fn new_call_temp(
        &self,
        caller_body: &mut Body<'tcx>,
        callsite: &CallSite<'tcx>,
        ty: Ty<'tcx>,
    ) -> Local {
        let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span));

        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
            kind: StatementKind::StorageLive(local),
        });

        if let Some(block) = callsite.target {
            caller_body[block].statements.insert(
                0,
                Statement {
                    source_info: callsite.source_info,
                    kind: StatementKind::StorageDead(local),
                },
            );
        }

        local
    }
}

fn type_size_of<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/// Integrates blocks from the callee function into the calling function.
///
/// Updates block indices, references to locals, and other control-flow-related
/// state to account for their new position in the caller.
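///
/// For instance (illustrative only): if the caller already has 10 basic blocks,
/// `map_block` sends the callee's `bb0` to `bb10`, `bb1` to `bb11`, and so on;
/// locals and source scopes are shifted the same way.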
struct Integrator<'a, 'tcx> {
    args: &'a [Local],
    new_locals: RangeFrom<Local>,
    new_scopes: RangeFrom<SourceScope>,
    new_blocks: RangeFrom<BasicBlock>,
    destination: Place<'tcx>,
    return_block: Option<BasicBlock>,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
    tcx: TyCtxt<'tcx>,
    callsite_span: Span,
    body_span: Span,
    always_live_locals: BitSet<Local>,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
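    // Sketch of the local numbering (following MIR conventions): the callee's
    // `_0` (`RETURN_PLACE`) becomes the call destination, `_1 ..= _n` map to the
    // spilled argument locals, and every later local is shifted into the fresh
    // range of caller locals starting at `new_locals.start`.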
    fn map_local(&self, local: Local) -> Local {
        let new = if local == RETURN_PLACE {
            self.destination.local
        } else {
            let idx = local.index() - 1;
            if idx < self.args.len() {
                self.args[idx]
            } else {
                Local::new(self.new_locals.start.index() + (idx - self.args.len()))
            }
        };
        trace!("mapping local `{:?}` to `{:?}`", local, new);
        new
    }

    fn map_scope(&self, scope: SourceScope) -> SourceScope {
        let new = SourceScope::new(self.new_scopes.start.index() + scope.index());
        trace!("mapping scope `{:?}` to `{:?}`", scope, new);
        new
    }

    fn map_block(&self, block: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(self.new_blocks.start.index() + block.index());
        trace!("mapping block `{:?}` to `{:?}`", block, new);
        new
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
        *local = self.map_local(*local);
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.map_scope(*scope);
    }

    fn visit_span(&mut self, span: &mut Span) {
        let mut expn_data =
            ExpnData::default(ExpnKind::Inlined, *span, self.tcx.sess.edition(), None);
        expn_data.def_site = self.body_span;
        // Make sure that all spans track the fact that they were inlined.
        *span = self.callsite_span.fresh_expansion(expn_data);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        for elem in place.projection {
            // FIXME: Make sure that return place is not used in an indexing projection, since it
            // won't be rebased as it is supposed to be.
            assert_ne!(ProjectionElem::Index(RETURN_PLACE), elem);
        }

        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
        let dest_proj_len = self.destination.projection.len();
        if place.local == RETURN_PLACE && dest_proj_len > 0 {
            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
            projs.extend(self.destination.projection);
            projs.extend(place.projection);

            place.projection = self.tcx.intern_place_elems(&*projs);
        }
        // Handles integrating any locals that occur in the base
        // or projections
        self.super_place(place, context, location)
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::StorageLive(local) | StatementKind::StorageDead(local) =
            statement.kind
        {
            self.always_live_locals.remove(local);
        }
        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, loc: Location) {
        // Don't try to modify the implicit `_0` access on return (`return` terminators are
        // replaced down below anyways).
        if !matches!(terminator.kind, TerminatorKind::Return) {
            self.super_terminator(terminator, loc);
        }

        match terminator.kind {
            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.map_block(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets.all_targets_mut() {
                    *tgt = self.map_block(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.map_block(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.map_block(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.map_block(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.map_block(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                terminator.kind = if let Some(tgt) = self.return_block {
                    TerminatorKind::Goto { target: tgt }
                } else {
                    TerminatorKind::Unreachable
                }
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    terminator.kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => {}
            TerminatorKind::Unreachable => {}
            TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
                *real_target = self.map_block(*real_target);
                *imaginary_target = self.map_block(*imaginary_target);
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } => {
                // See the ordering of passes in the `optimized_mir` query.
                bug!("False unwinds should have been removed before inlining")
            }
            TerminatorKind::InlineAsm { ref mut destination, .. } => {
                if let Some(ref mut tgt) = *destination {
                    *tgt = self.map_block(*tgt);
                }
            }
        }
    }
}