// compiler/rustc_mir_transform/src/lib.rs
// (as of the commit "remap mir before running optimization passes")
#![allow(rustc::potential_query_instability)]
#![feature(box_patterns)]
#![feature(let_chains)]
#![feature(let_else)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
#![feature(never_type)]
#![feature(once_cell)]
#![feature(option_get_or_insert_default)]
#![feature(trusted_step)]
#![feature(try_blocks)]
#![feature(yeet_expr)]
#![feature(if_let_guard)]
#![recursion_limit = "256"]

#[macro_use]
extern crate tracing;
#[macro_use]
extern crate rustc_middle;

use required_consts::RequiredConstsVisitor;
use rustc_const_eval::util;
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::steal::Steal;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{self, Visitor};
use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{
    traversal, AnalysisPhase, Body, ConstQualifs, Constant, LocalDecl, MirPass, MirPhase, Operand,
    Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, SourceInfo, Statement, StatementKind,
    TerminatorKind,
};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
use rustc_span::sym;

#[macro_use]
mod pass_manager;

use pass_manager::{self as pm, Lint, MirLint, WithMinOptLevel};

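// The individual MIR transforms live in the modules below, roughly one module per pass, plus a
// few supporting modules such as `shim`, `dump_mir`, and `required_consts`.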
mod abort_unwinding_calls;
mod add_call_guards;
mod add_moves_for_packed_drops;
mod add_retag;
mod check_const_item_mutation;
mod check_packed_ref;
pub mod check_unsafety;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod cleanup_post_borrowck;
mod const_debuginfo;
mod const_goto;
mod const_prop;
mod const_prop_lint;
mod coverage;
mod dead_store_elimination;
mod deaggregator;
mod deduplicate_blocks;
mod deref_separator;
mod dest_prop;
pub mod dump_mir;
mod early_otherwise_branch;
mod elaborate_box_derefs;
mod elaborate_drops;
mod ffi_unwind_calls;
mod function_item_references;
mod generator;
mod inline;
mod instcombine;
mod lower_intrinsics;
mod lower_slice_len;
mod marker;
mod match_branches;
mod multiple_return_terminators;
mod normalize_array_len;
mod nrvo;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod remove_false_edges;
mod remove_noop_landing_pads;
mod remove_storage_markers;
mod remove_uninit_drops;
mod remove_unneeded_drops;
mod remove_zsts;
mod required_consts;
mod reveal_all;
mod separate_const_switch;
mod shim;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod simplify;
mod simplify_branches;
mod simplify_comparison_integral;
mod simplify_try;
mod uninhabited_enum_branching;
mod unreachable_prop;

use rustc_const_eval::transform::check_consts::{self, ConstCx};
use rustc_const_eval::transform::promote_consts;
use rustc_const_eval::transform::validate;
use rustc_mir_dataflow::rustc_peek;

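/// Registers this crate's MIR-related query providers, together with those of a few helper
/// modules (`check_unsafety`, `check_packed_ref`, `coverage`, `ffi_unwind_calls`, and `shim`).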
pub fn provide(providers: &mut Providers) {
    check_unsafety::provide(providers);
    check_packed_ref::provide(providers);
    coverage::query::provide(providers);
    ffi_unwind_calls::provide(providers);
    shim::provide(providers);
    *providers = Providers {
        mir_keys,
        mir_const,
        mir_const_qualif: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.mir_const_qualif_const_arg(def)
            } else {
                mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        mir_const_qualif_const_arg: |tcx, (did, param_did)| {
            mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        mir_promoted,
        mir_drops_elaborated_and_const_checked,
        mir_for_ctfe,
        mir_for_ctfe_of_const_arg,
        optimized_mir,
        is_mir_available,
        is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
        mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable,
        mir_inliner_callees: inline::cycle::mir_inliner_callees,
        promoted_mir: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.promoted_mir_of_const_arg(def)
            } else {
                promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        promoted_mir_of_const_arg: |tcx, (did, param_did)| {
            promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        ..*providers
    };
}

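/// Replaces calls to the `const_eval_select` intrinsic with a direct call to the selected
/// function, untupling the packed arguments along the way.
///
/// Roughly (a sketch, not literal MIR): `const_eval_select((a, b), f_const, f_rt)` becomes
/// `f_const(a, b)` when `context` is `hir::Constness::Const`, and `f_rt(a, b)` otherwise.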
fn remap_mir_for_const_eval_select<'tcx>(
    tcx: TyCtxt<'tcx>,
    mut body: Body<'tcx>,
    context: hir::Constness,
) -> Body<'tcx> {
    for bb in body.basic_blocks.as_mut().iter_mut() {
        let terminator = bb.terminator.as_mut().expect("invalid terminator");
        match terminator.kind {
            TerminatorKind::Call {
                func: Operand::Constant(box Constant { ref literal, .. }),
                ref mut args,
                destination,
                target,
                cleanup,
                fn_span,
                ..
            } if let ty::FnDef(def_id, _) = *literal.ty().kind()
                && tcx.item_name(def_id) == sym::const_eval_select
                && tcx.is_intrinsic(def_id) =>
            {
                let [tupled_args, called_in_const, called_at_rt]: [_; 3] = std::mem::take(args).try_into().unwrap();
                let ty = tupled_args.ty(&body.local_decls, tcx);
                let fields = ty.tuple_fields();
                let num_args = fields.len();
                let func = if context == hir::Constness::Const { called_in_const } else { called_at_rt };
                let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) = match tupled_args {
                    Operand::Constant(_) => {
                        // there is no good way of extracting a tuple arg from a constant (const generic stuff)
                        // so we just create a temporary and deconstruct that.
                        let local = body.local_decls.push(LocalDecl::new(ty, fn_span));
                        bb.statements.push(Statement {
                            source_info: SourceInfo::outermost(fn_span),
                            kind: StatementKind::Assign(Box::new((local.into(), Rvalue::Use(tupled_args.clone())))),
                        });
                        (Operand::Move, local.into())
                    }
                    Operand::Move(place) => (Operand::Move, place),
                    Operand::Copy(place) => (Operand::Copy, place),
                };
                let place_elems = place.projection;
                let arguments = (0..num_args).map(|x| {
                    let mut place_elems = place_elems.to_vec();
                    place_elems.push(ProjectionElem::Field(x.into(), fields[x]));
                    let projection = tcx.intern_place_elems(&place_elems);
                    let place = Place {
                        local: place.local,
                        projection,
                    };
                    method(place)
                }).collect();
                terminator.kind = TerminatorKind::Call { func, args: arguments, destination, target, cleanup, from_hir_call: false, fn_span };
            }
            _ => {}
        }
    }
    body
}

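/// Returns `true` if the crate-local item behind `def_id` has MIR associated with it, i.e. if it
/// is contained in [`mir_keys`].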
fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
    let def_id = def_id.expect_local();
    tcx.mir_keys(()).contains(&def_id)
}

/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet<LocalDefId> {
    let mut set = FxIndexSet::default();

    // All body-owners have MIR associated with them.
    set.extend(tcx.hir().body_owners());

    // Additionally, tuple struct/variant constructors have MIR, but
    // they don't have a BodyId, so we need to build them separately.
    struct GatherCtors<'a, 'tcx> {
        tcx: TyCtxt<'tcx>,
        set: &'a mut FxIndexSet<LocalDefId>,
    }
    impl<'tcx> Visitor<'tcx> for GatherCtors<'_, 'tcx> {
        fn visit_variant_data(&mut self, v: &'tcx hir::VariantData<'tcx>) {
            if let hir::VariantData::Tuple(_, hir_id) = *v {
                self.set.insert(self.tcx.hir().local_def_id(hir_id));
            }
            intravisit::walk_struct_def(self, v)
        }
    }
    tcx.hir().visit_all_item_likes_in_crate(&mut GatherCtors { tcx, set: &mut set });

    set
}

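/// Computes the `ConstQualifs` of the value in the body's return place by running the
/// const-checker. Non-const bodies, and bodies whose return type already contains errors,
/// yield `Default::default()`.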
fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
    let const_kind = tcx.hir().body_const_context(def.did);

    // No need to const-check a non-const `fn`.
    if const_kind.is_none() {
        return Default::default();
    }

    // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
    // cannot yet be stolen), because `mir_promoted()`, which steals
    // from `mir_const()`, forces this query to execute before
    // performing the steal.
    let body = &tcx.mir_const(def).borrow();

    if body.return_ty().references_error() {
        tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
        return Default::default();
    }

    let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };

    let mut validator = check_consts::check::Checker::new(&ccx);
    validator.check_body();

    // We return the qualifs in the return place for every MIR body, even though it is only used
    // when deciding to promote a reference to a `const` for now.
    validator.qualifs_in_return_place()
}

/// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
/// FIXME(oli-obk): it's unclear whether we still need this phase (and its corresponding query).
/// We used to have this for pre-miri MIR based const eval.
fn mir_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_const(def);
    }

    // Unsafety check uses the raw mir, so make sure it is run.
    if !tcx.sess.opts.unstable_opts.thir_unsafeck {
        if let Some(param_did) = def.const_param_did {
            tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
        } else {
            tcx.ensure().unsafety_check_result(def.did);
        }
    }

    // has_ffi_unwind_calls query uses the raw mir, so make sure it is run.
    tcx.ensure().has_ffi_unwind_calls(def.did);

    let mut body = tcx.mir_built(def).steal();

    rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));

    pm::run_passes(
        tcx,
        &mut body,
        &[
            // MIR-level lints.
            &Lint(check_packed_ref::CheckPackedRef),
            &Lint(check_const_item_mutation::CheckConstItemMutation),
            &Lint(function_item_references::FunctionItemReferences),
            // What we need in order to do constant evaluation.
            &simplify::SimplifyCfg::new("initial"),
            &rustc_peek::SanityCheck, // Just a lint
        ],
    );
    tcx.alloc_steal_mir(body)
}

/// Compute the main MIR body and the list of MIR bodies of the promoteds.
fn mir_promoted<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_promoted(def);
    }

    // Ensure that we compute the `mir_const_qualif` for constants at
    // this point, before we steal the mir-const result.
    // Also this means promotion can rely on all const checks having been done.
    let const_qualifs = tcx.mir_const_qualif_opt_const_arg(def);
    let mut body = tcx.mir_const(def).steal();
    if let Some(error_reported) = const_qualifs.tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }

    let mut required_consts = Vec::new();
    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
    for (bb, bb_data) in traversal::reverse_postorder(&body) {
        required_consts_visitor.visit_basic_block_data(bb, bb_data);
    }
    body.required_consts = required_consts;

    // What we need in order to run borrowck etc.
    let promote_pass = promote_consts::PromoteTemps::default();
    pm::run_passes(
        tcx,
        &mut body,
        &[
            &promote_pass,
            &simplify::SimplifyCfg::new("promote-consts"),
            &coverage::InstrumentCoverage,
        ],
    );

    let promoted = promote_pass.promoted_fragments.into_inner();
    (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
}

/// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
    let did = def_id.expect_local();
    if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
        tcx.mir_for_ctfe_of_const_arg(def)
    } else {
        tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
    }
}

/// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
/// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
/// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
/// the const parameter while type checking the main body, which in turn would try
/// to type check the main body again.
fn mir_for_ctfe_of_const_arg<'tcx>(
    tcx: TyCtxt<'tcx>,
    (did, param_did): (LocalDefId, DefId),
) -> &'tcx Body<'tcx> {
    tcx.arena.alloc(inner_mir_for_ctfe(
        tcx,
        ty::WithOptConstParam { did, const_param_did: Some(param_did) },
    ))
}

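/// Shared implementation of the `mir_for_ctfe` and `mir_for_ctfe_of_const_arg` queries.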
fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
    // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
    if tcx.is_constructor(def.did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, def.did.to_def_id());
    }

    let context = tcx
        .hir()
        .body_const_context(def.did)
        .expect("mir_for_ctfe should not be used for runtime functions");

    let body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();

    let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::Const);

    match context {
        // Do not const prop const functions: either they get executed at runtime or exported to
        // metadata, in which case const prop runs on them via `optimized_mir`, or they don't, in
        // which case we const evaluate some control flow paths of the function and any errors in
        // those paths will get emitted as const eval errors.
        hir::ConstContext::ConstFn => {}
        // Static items always get evaluated, so we can just let const eval see if any erroneous
        // control flow paths get executed.
        hir::ConstContext::Static(_) => {}
        // Associated constants get const prop run so we detect common failure situations in the
        // crate that defined the constant.
        // Technically we want to not run on regular const items, but oli-obk doesn't know how to
        // conveniently detect that at this point without looking at the HIR.
        hir::ConstContext::Const => {
            pm::run_passes(
                tcx,
                &mut body,
                &[
                    &const_prop::ConstProp,
                    &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::Optimized)),
                ],
            );
        }
    }

    debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");

    body
}

/// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
/// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
/// end up missing the source MIR because it has already been stolen.
fn mir_drops_elaborated_and_const_checked<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_drops_elaborated_and_const_checked(def);
    }

    let mir_borrowck = tcx.mir_borrowck_opt_const_arg(def);

    let is_fn_like = tcx.def_kind(def.did).is_fn_like();
    if is_fn_like {
        let did = def.did.to_def_id();
        let def = ty::WithOptConstParam::unknown(did);

        // Do not compute the mir call graph without said call graph actually being used.
        if inline::Inline.is_enabled(&tcx.sess) {
            let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
        }
    }

    let (body, _) = tcx.mir_promoted(def);
    let mut body = body.steal();
    if let Some(error_reported) = mir_borrowck.tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }

    run_analysis_to_runtime_passes(tcx, &mut body);

    tcx.alloc_steal_mir(body)
}

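/// Lowers `body` from the `Analysis(Initial)` phase to `Runtime(PostCleanup)` by running the
/// analysis cleanup, runtime lowering, and runtime cleanup pass groups in order.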
fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    assert!(body.phase == MirPhase::Analysis(AnalysisPhase::Initial));
    let did = body.source.def_id();

    debug!("analysis_mir_cleanup({:?})", did);
    run_analysis_cleanup_passes(tcx, body);
    assert!(body.phase == MirPhase::Analysis(AnalysisPhase::PostCleanup));

    // Do a little drop elaboration before const-checking if `const_precise_live_drops` is enabled.
    if check_consts::post_drop_elaboration::checking_enabled(&ConstCx::new(tcx, &body)) {
        pm::run_passes(
            tcx,
            body,
            &[
                &remove_uninit_drops::RemoveUninitDrops,
                &simplify::SimplifyCfg::new("remove-false-edges"),
            ],
        );
        check_consts::post_drop_elaboration::check_live_drops(tcx, &body); // FIXME: make this a MIR lint
    }

    debug!("runtime_mir_lowering({:?})", did);
    run_runtime_lowering_passes(tcx, body);
    assert!(body.phase == MirPhase::Runtime(RuntimePhase::Initial));

    debug!("runtime_mir_cleanup({:?})", did);
    run_runtime_cleanup_passes(tcx, body);
    assert!(body.phase == MirPhase::Runtime(RuntimePhase::PostCleanup));
}

// FIXME(JakobDegen): Can we make these lists of passes consts?

/// After this series of passes, no lifetime analysis based on borrowing can be done.
fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        &remove_false_edges::RemoveFalseEdges,
        &simplify_branches::SimplifyConstCondition::new("initial"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &cleanup_post_borrowck::CleanupNonCodegenStatements,
        &simplify::SimplifyCfg::new("early-opt"),
        &deref_separator::Derefer,
        &marker::PhaseChange(MirPhase::Analysis(AnalysisPhase::PostCleanup)),
    ];

    pm::run_passes(tcx, body, passes);
}

/// Runs the sequence of passes that lowers analysis MIR to runtime MIR.
fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        // These next passes must be executed together
        &add_call_guards::CriticalCallEdges,
        &elaborate_drops::ElaborateDrops,
        // This will remove extraneous landing pads which are no longer
        // necessary, as well as forcing any call in a non-unwinding
        // function calling a possibly-unwinding function to abort the process.
        &abort_unwinding_calls::AbortUnwindingCalls,
        // AddMovesForPackedDrops needs to run after drop
        // elaboration.
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
        // but before optimizations begin.
        &elaborate_box_derefs::ElaborateBoxDerefs,
        &generator::StateTransform,
        &add_retag::AddRetag,
        // Deaggregator is necessary for const prop. We may want to consider implementing
        // CTFE support for aggregates.
        &deaggregator::Deaggregator,
        &Lint(const_prop_lint::ConstProp),
        &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::Initial)),
    ];
    pm::run_passes_no_validate(tcx, body, passes);
}

/// Runs the sequence of passes that does the initial cleanup of runtime MIR.
fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        &elaborate_box_derefs::ElaborateBoxDerefs,
        &lower_intrinsics::LowerIntrinsics,
        &simplify::SimplifyCfg::new("elaborate-drops"),
        &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::PostCleanup)),
    ];

    pm::run_passes(tcx, body, passes);
}

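/// Runs the main MIR optimization pipeline, leaving the body in the `Runtime(Optimized)` phase.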
fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
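    // Wraps a pass so that it only runs at `-Zmir-opt-level=1` or higher (see `WithMinOptLevel`).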
    fn o1<T>(x: T) -> WithMinOptLevel<T> {
        WithMinOptLevel(1, x)
    }

    // The main optimizations that we do on MIR.
    pm::run_passes(
        tcx,
        body,
        &[
            &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
            &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise the actual call will almost always be inlined. Also simple, so it can just run first.
            &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
            &unreachable_prop::UnreachablePropagation,
            &uninhabited_enum_branching::UninhabitedEnumBranching,
            &o1(simplify::SimplifyCfg::new("after-uninhabited-enum-branching")),
            &inline::Inline,
            &remove_storage_markers::RemoveStorageMarkers,
            &remove_zsts::RemoveZsts,
            &const_goto::ConstGoto,
            &remove_unneeded_drops::RemoveUnneededDrops,
            &match_branches::MatchBranchSimplification,
            // InstCombine runs after MatchBranchSimplification to clean up `Ne(_1, false)`.
            &multiple_return_terminators::MultipleReturnTerminators,
            &instcombine::InstCombine,
            &separate_const_switch::SeparateConstSwitch,
            //
            // FIXME(#70073): This pass is responsible for both optimization and some lints.
            &const_prop::ConstProp,
            //
            // Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
            &const_debuginfo::ConstDebugInfo,
            &o1(simplify_branches::SimplifyConstCondition::new("after-const-prop")),
            &early_otherwise_branch::EarlyOtherwiseBranch,
            &simplify_comparison_integral::SimplifyComparisonIntegral,
            &simplify_try::SimplifyArmIdentity,
            &simplify_try::SimplifyBranchSame,
            &dead_store_elimination::DeadStoreElimination,
            &dest_prop::DestinationPropagation,
            &o1(simplify_branches::SimplifyConstCondition::new("final")),
            &o1(remove_noop_landing_pads::RemoveNoopLandingPads),
            &o1(simplify::SimplifyCfg::new("final")),
            &nrvo::RenameReturnPlace,
            &simplify::SimplifyLocals,
            &multiple_return_terminators::MultipleReturnTerminators,
            &deduplicate_blocks::DeduplicateBlocks,
            // Some cleanup necessary at least for LLVM and potentially other codegen backends.
            &add_call_guards::CriticalCallEdges,
            &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::Optimized)),
            // Dump the end result for testing and debugging purposes.
            &dump_mir::Marker("PreCodegen"),
        ],
    );
}

/// Optimize the MIR and prepare it for codegen.
fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
    let did = did.expect_local();
    assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
    tcx.arena.alloc(inner_optimized_mir(tcx, did))
}

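/// Shared implementation of the `optimized_mir` query: steals the drops-elaborated MIR, remaps
/// `const_eval_select` for runtime use, and runs the optimization pipeline.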
fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
    if tcx.is_constructor(did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, did.to_def_id());
    }

    match tcx.hir().body_const_context(did) {
        // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
        // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
        // computes and caches its result.
        Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
        None => {}
        Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
    }
    debug!("about to call mir_drops_elaborated...");
    let body =
        tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
    let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::NotConst);
    debug!("body: {:#?}", body);
    run_optimization_passes(tcx, &mut body);

    debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");

    body
}

/// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
/// constant evaluation once all substitutions become known.
fn promoted_mir<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
    if tcx.is_constructor(def.did.to_def_id()) {
        return tcx.arena.alloc(IndexVec::new());
    }

    let tainted_by_errors = tcx.mir_borrowck_opt_const_arg(def).tainted_by_errors;
    let mut promoted = tcx.mir_promoted(def).1.steal();

    for body in &mut promoted {
        if let Some(error_reported) = tainted_by_errors {
            body.tainted_by_errors = Some(error_reported);
        }
        run_analysis_to_runtime_passes(tcx, body);
    }

    debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");

    tcx.arena.alloc(promoted)
}