// compiler/rustc_mir_transform/src/lib.rs
#![allow(rustc::potential_query_instability)]
#![feature(box_patterns)]
#![feature(let_chains)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
#![feature(never_type)]
#![feature(once_cell)]
#![feature(option_get_or_insert_default)]
#![feature(trusted_step)]
#![feature(try_blocks)]
#![feature(yeet_expr)]
#![feature(if_let_guard)]
#![recursion_limit = "256"]

#[macro_use]
extern crate tracing;
#[macro_use]
extern crate rustc_middle;

use required_consts::RequiredConstsVisitor;
use rustc_const_eval::util;
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::steal::Steal;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{self, Visitor};
use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{
    traversal, AnalysisPhase, Body, ConstQualifs, Constant, LocalDecl, MirPass, MirPhase, Operand,
    Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, SourceInfo, Statement, StatementKind,
    TerminatorKind,
};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
use rustc_span::sym;

#[macro_use]
mod pass_manager;

use pass_manager::{self as pm, Lint, MirLint, WithMinOptLevel};

mod abort_unwinding_calls;
mod add_call_guards;
mod add_moves_for_packed_drops;
mod add_retag;
mod check_const_item_mutation;
mod check_packed_ref;
pub mod check_unsafety;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod cleanup_post_borrowck;
mod const_debuginfo;
mod const_goto;
mod const_prop;
mod const_prop_lint;
mod coverage;
mod dead_store_elimination;
mod deaggregator;
mod deduplicate_blocks;
mod deref_separator;
mod dest_prop;
pub mod dump_mir;
mod early_otherwise_branch;
mod elaborate_box_derefs;
mod elaborate_drops;
mod ffi_unwind_calls;
mod function_item_references;
mod generator;
mod inline;
mod instcombine;
mod lower_intrinsics;
mod lower_slice_len;
mod marker;
mod match_branches;
mod multiple_return_terminators;
mod normalize_array_len;
mod nrvo;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod remove_false_edges;
mod remove_noop_landing_pads;
mod remove_storage_markers;
mod remove_uninit_drops;
mod remove_unneeded_drops;
mod remove_zsts;
mod required_consts;
mod reveal_all;
mod separate_const_switch;
mod shim;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod simplify;
mod simplify_branches;
mod simplify_comparison_integral;
mod simplify_try;
mod uninhabited_enum_branching;
mod unreachable_prop;

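// Each module above typically defines one unit struct implementing `MirPass<'tcx>`
// (or `MirLint<'tcx>` for the pure lints wrapped in `Lint(..)` below). As a rough,
// illustrative sketch of that shape only (`NoopPass` is not a real pass in this crate):
//
//     pub struct NoopPass;
//
//     impl<'tcx> MirPass<'tcx> for NoopPass {
//         fn run_pass(&self, _tcx: TyCtxt<'tcx>, _body: &mut Body<'tcx>) {
//             // Transform the body in place.
//         }
//     }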
use rustc_const_eval::transform::check_consts::{self, ConstCx};
use rustc_const_eval::transform::promote_consts;
use rustc_const_eval::transform::validate;
use rustc_mir_dataflow::rustc_peek;

pub fn provide(providers: &mut Providers) {
    check_unsafety::provide(providers);
    check_packed_ref::provide(providers);
    coverage::query::provide(providers);
    ffi_unwind_calls::provide(providers);
    shim::provide(providers);
    *providers = Providers {
        mir_keys,
        mir_const,
        mir_const_qualif: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.mir_const_qualif_const_arg(def)
            } else {
                mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        mir_const_qualif_const_arg: |tcx, (did, param_did)| {
            mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        mir_promoted,
        mir_drops_elaborated_and_const_checked,
        mir_for_ctfe,
        mir_for_ctfe_of_const_arg,
        optimized_mir,
        is_mir_available,
        is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
        mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable,
        mir_inliner_callees: inline::cycle::mir_inliner_callees,
        promoted_mir: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.promoted_mir_of_const_arg(def)
            } else {
                promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        promoted_mir_of_const_arg: |tcx, (did, param_did)| {
            promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        ..*providers
    };
}
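// Illustrative only: an out-of-tree driver that wants these providers (for example to
// reuse the public cleanup passes above) would normally install them by overriding
// queries on its `rustc_interface::Config`, roughly as sketched below. The exact
// `Config` field names here are an assumption and may differ between compiler versions:
//
//     config.override_queries = Some(|_session, providers, _extern_providers| {
//         rustc_mir_transform::provide(providers);
//     });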

fn remap_mir_for_const_eval_select<'tcx>(
    tcx: TyCtxt<'tcx>,
    mut body: Body<'tcx>,
    context: hir::Constness,
) -> Body<'tcx> {
    for bb in body.basic_blocks.as_mut().iter_mut() {
        let terminator = bb.terminator.as_mut().expect("invalid terminator");
        match terminator.kind {
            TerminatorKind::Call {
                func: Operand::Constant(box Constant { ref literal, .. }),
                ref mut args,
                destination,
                target,
                cleanup,
                fn_span,
                ..
            } if let ty::FnDef(def_id, _) = *literal.ty().kind()
                && tcx.item_name(def_id) == sym::const_eval_select
                && tcx.is_intrinsic(def_id) =>
            {
                let [tupled_args, called_in_const, called_at_rt]: [_; 3] = std::mem::take(args).try_into().unwrap();
                let ty = tupled_args.ty(&body.local_decls, tcx);
                let fields = ty.tuple_fields();
                let num_args = fields.len();
                let func = if context == hir::Constness::Const { called_in_const } else { called_at_rt };
                let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) = match tupled_args {
                    Operand::Constant(_) => {
                        // there is no good way of extracting a tuple arg from a constant (const generic stuff)
                        // so we just create a temporary and deconstruct that.
                        let local = body.local_decls.push(LocalDecl::new(ty, fn_span));
                        bb.statements.push(Statement {
                            source_info: SourceInfo::outermost(fn_span),
                            kind: StatementKind::Assign(Box::new((local.into(), Rvalue::Use(tupled_args.clone())))),
                        });
                        (Operand::Move, local.into())
                    }
                    Operand::Move(place) => (Operand::Move, place),
                    Operand::Copy(place) => (Operand::Copy, place),
                };
                let place_elems = place.projection;
                let arguments = (0..num_args).map(|x| {
                    let mut place_elems = place_elems.to_vec();
                    place_elems.push(ProjectionElem::Field(x.into(), fields[x]));
                    let projection = tcx.intern_place_elems(&place_elems);
                    let place = Place {
                        local: place.local,
                        projection,
                    };
                    method(place)
                }).collect();
                terminator.kind = TerminatorKind::Call { func, args: arguments, destination, target, cleanup, from_hir_call: false, fn_span };
            }
            _ => {}
        }
    }
    body
}
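// For intuition only: a call such as `const_eval_select((x, y), const_fn, rt_fn)`
// (names hypothetical) is rewritten above into a direct call to `const_fn(x, y)` when
// the body is prepared for CTFE and into `rt_fn(x, y)` when it is prepared for runtime
// codegen; the argument tuple is taken apart field by field via `ProjectionElem::Field`.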

fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
    let def_id = def_id.expect_local();
    tcx.mir_keys(()).contains(&def_id)
}

/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet<LocalDefId> {
    let mut set = FxIndexSet::default();

    // All body-owners have MIR associated with them.
    set.extend(tcx.hir().body_owners());

    // Additionally, tuple struct/variant constructors have MIR, but
    // they don't have a BodyId, so we need to build them separately.
    struct GatherCtors<'a, 'tcx> {
        tcx: TyCtxt<'tcx>,
        set: &'a mut FxIndexSet<LocalDefId>,
    }
    impl<'tcx> Visitor<'tcx> for GatherCtors<'_, 'tcx> {
        fn visit_variant_data(&mut self, v: &'tcx hir::VariantData<'tcx>) {
            if let hir::VariantData::Tuple(_, hir_id) = *v {
                self.set.insert(self.tcx.hir().local_def_id(hir_id));
            }
            intravisit::walk_struct_def(self, v)
        }
    }
    tcx.hir().visit_all_item_likes_in_crate(&mut GatherCtors { tcx, set: &mut set });

    set
}
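// For example (illustrative): given `struct Wrapper(u32);`, the implicitly defined
// constructor function `Wrapper` has no HIR body of its own, yet it does get a MIR body
// (synthesized by `shim::build_adt_ctor`), so the visitor above records it here as well.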

fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
    let const_kind = tcx.hir().body_const_context(def.did);

    // No need to const-check a non-const `fn`.
    if const_kind.is_none() {
        return Default::default();
    }

    // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
    // cannot yet be stolen), because `mir_promoted()`, which steals
    // from `mir_const()`, forces this query to execute before
    // performing the steal.
    let body = &tcx.mir_const(def).borrow();

    if body.return_ty().references_error() {
        tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
        return Default::default();
    }

    let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };

    let mut validator = check_consts::check::Checker::new(&ccx);
    validator.check_body();

    // We return the qualifs in the return place for every MIR body, even though it is only used
    // when deciding to promote a reference to a `const` for now.
    validator.qualifs_in_return_place()
}
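// For intuition (illustrative): for something like `const C: Cell<u8> = Cell::new(0);`
// the returned qualifs would record that the value has interior mutability, which is
// exactly the kind of fact promotion later consults before turning a borrow of such a
// value into a `'static` reference.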

/// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
/// FIXME(oli-obk): it's unclear whether we still need this phase (and its corresponding query).
/// We used to have this for pre-miri MIR based const eval.
fn mir_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_const(def);
    }

    // Unsafety check uses the raw mir, so make sure it is run.
    if !tcx.sess.opts.unstable_opts.thir_unsafeck {
        if let Some(param_did) = def.const_param_did {
            tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
        } else {
            tcx.ensure().unsafety_check_result(def.did);
        }
    }

    // has_ffi_unwind_calls query uses the raw mir, so make sure it is run.
    tcx.ensure().has_ffi_unwind_calls(def.did);

    let mut body = tcx.mir_built(def).steal();

    rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));

    pm::run_passes(
        tcx,
        &mut body,
        &[
            // MIR-level lints.
            &Lint(check_packed_ref::CheckPackedRef),
            &Lint(check_const_item_mutation::CheckConstItemMutation),
            &Lint(function_item_references::FunctionItemReferences),
            // What we need for constant evaluation.
            &simplify::SimplifyCfg::new("initial"),
            &rustc_peek::SanityCheck, // Just a lint
        ],
    );
    tcx.alloc_steal_mir(body)
}

/// Compute the main MIR body and the list of MIR bodies of the promoteds.
fn mir_promoted<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_promoted(def);
    }

    // Ensure that we compute the `mir_const_qualif` for constants at
    // this point, before we steal the mir-const result.
    // Also this means promotion can rely on all const checks having been done.
    let const_qualifs = tcx.mir_const_qualif_opt_const_arg(def);
    let mut body = tcx.mir_const(def).steal();
    if let Some(error_reported) = const_qualifs.tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }

    let mut required_consts = Vec::new();
    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
    for (bb, bb_data) in traversal::reverse_postorder(&body) {
        required_consts_visitor.visit_basic_block_data(bb, bb_data);
    }
    body.required_consts = required_consts;

    // What we need to run borrowck etc.
    let promote_pass = promote_consts::PromoteTemps::default();
    pm::run_passes(
        tcx,
        &mut body,
        &[
            &promote_pass,
            &simplify::SimplifyCfg::new("promote-consts"),
            &coverage::InstrumentCoverage,
        ],
    );

    let promoted = promote_pass.promoted_fragments.into_inner();
    (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
}
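// Illustrative example of what ends up in the second half of the returned pair: in
// `fn f() -> &'static i32 { &(1 + 2) }` the temporary `1 + 2` is lifted by `PromoteTemps`
// into its own small MIR body, and that promoted body is what the `IndexVec<Promoted, _>`
// carries.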

/// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
    let did = def_id.expect_local();
    if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
        tcx.mir_for_ctfe_of_const_arg(def)
    } else {
        tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
    }
}

/// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
/// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
/// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
/// the const parameter while type checking the main body, which in turn would try
/// to type check the main body again.
fn mir_for_ctfe_of_const_arg<'tcx>(
    tcx: TyCtxt<'tcx>,
    (did, param_did): (LocalDefId, DefId),
) -> &'tcx Body<'tcx> {
    tcx.arena.alloc(inner_mir_for_ctfe(
        tcx,
        ty::WithOptConstParam { did, const_param_did: Some(param_did) },
    ))
}

fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
    // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
    if tcx.is_constructor(def.did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, def.did.to_def_id());
    }

    let context = tcx
        .hir()
        .body_const_context(def.did)
        .expect("mir_for_ctfe should not be used for runtime functions");

    let body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();

    let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::Const);

    match context {
        // Do not const-prop functions: either they get executed at runtime or exported to
        // metadata, in which case const prop runs on them anyway (via `optimized_mir`), or they
        // don't, in which case we const-evaluate some of their control-flow paths and any errors
        // on those paths get emitted as const-eval errors.
        hir::ConstContext::ConstFn => {}
        // Static items always get evaluated, so we can just let const eval see if any erroneous
        // control flow paths get executed.
        hir::ConstContext::Static(_) => {}
        // Associated constants get const prop run so we detect common failure situations in the
        // crate that defined the constant.
        // Technically we want to not run on regular const items, but oli-obk doesn't know how to
        // conveniently detect that at this point without looking at the HIR.
        hir::ConstContext::Const => {
            pm::run_passes(
                tcx,
                &mut body,
                &[
                    &const_prop::ConstProp,
                    &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::Optimized)),
                ],
            );
        }
    }

    debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");

    body
}

/// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
/// mir borrowck *before* doing so, to ensure that borrowck can still run and does not
/// end up missing the source MIR because it has already been stolen.
fn mir_drops_elaborated_and_const_checked<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_drops_elaborated_and_const_checked(def);
    }

    let mir_borrowck = tcx.mir_borrowck_opt_const_arg(def);

    let is_fn_like = tcx.def_kind(def.did).is_fn_like();
    if is_fn_like {
        let did = def.did.to_def_id();
        let def = ty::WithOptConstParam::unknown(did);

        // Do not compute the mir call graph without said call graph actually being used.
        if inline::Inline.is_enabled(&tcx.sess) {
            let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
        }
    }

    let (body, _) = tcx.mir_promoted(def);
    let mut body = body.steal();
    if let Some(error_reported) = mir_borrowck.tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }

    run_analysis_to_runtime_passes(tcx, &mut body);

    tcx.alloc_steal_mir(body)
}

fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    assert!(body.phase == MirPhase::Analysis(AnalysisPhase::Initial));
    let did = body.source.def_id();

    debug!("analysis_mir_cleanup({:?})", did);
    run_analysis_cleanup_passes(tcx, body);
    assert!(body.phase == MirPhase::Analysis(AnalysisPhase::PostCleanup));

    // Do a little drop elaboration before const-checking if `const_precise_live_drops` is enabled.
    if check_consts::post_drop_elaboration::checking_enabled(&ConstCx::new(tcx, &body)) {
        pm::run_passes(
            tcx,
            body,
            &[
                &remove_uninit_drops::RemoveUninitDrops,
                &simplify::SimplifyCfg::new("remove-false-edges"),
            ],
        );
        check_consts::post_drop_elaboration::check_live_drops(tcx, &body); // FIXME: make this a MIR lint
    }

    debug!("runtime_mir_lowering({:?})", did);
    run_runtime_lowering_passes(tcx, body);
    assert!(body.phase == MirPhase::Runtime(RuntimePhase::Initial));

    debug!("runtime_mir_cleanup({:?})", did);
    run_runtime_cleanup_passes(tcx, body);
    assert!(body.phase == MirPhase::Runtime(RuntimePhase::PostCleanup));
}
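// For orientation, this is the phase ladder the function above drives, with each
// `marker::PhaseChange` pass in the lists below advancing `body.phase` by one step:
//
//     Analysis(Initial) -> Analysis(PostCleanup) -> Runtime(Initial) -> Runtime(PostCleanup)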

// FIXME(JakobDegen): Can we make these lists of passes consts?

/// After this series of passes, no lifetime analysis based on borrowing can be done.
fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        &remove_false_edges::RemoveFalseEdges,
        &simplify_branches::SimplifyConstCondition::new("initial"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &cleanup_post_borrowck::CleanupNonCodegenStatements,
        &simplify::SimplifyCfg::new("early-opt"),
        &deref_separator::Derefer,
        &marker::PhaseChange(MirPhase::Analysis(AnalysisPhase::PostCleanup)),
    ];

    pm::run_passes(tcx, body, passes);
}

/// Runs the sequence of passes that lowers analysis to runtime MIR.
fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        // These next passes must be executed together
        &add_call_guards::CriticalCallEdges,
        &elaborate_drops::ElaborateDrops,
        // This will remove extraneous landing pads which are no longer
        // necessary, as well as forcing any call to a possibly-unwinding
        // function from within a non-unwinding function to abort the process.
        &abort_unwinding_calls::AbortUnwindingCalls,
        // AddMovesForPackedDrops needs to run after drop
        // elaboration.
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
        // but before optimizations begin.
        &elaborate_box_derefs::ElaborateBoxDerefs,
        &generator::StateTransform,
        &add_retag::AddRetag,
        // Deaggregator is necessary for const prop. We may want to consider implementing
        // CTFE support for aggregates.
        &deaggregator::Deaggregator,
        &Lint(const_prop_lint::ConstProp),
        &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::Initial)),
    ];
    pm::run_passes_no_validate(tcx, body, passes);
}

/// Runs the sequence of passes that does the initial cleanup of runtime MIR.
fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        &elaborate_box_derefs::ElaborateBoxDerefs,
        &lower_intrinsics::LowerIntrinsics,
        &simplify::SimplifyCfg::new("elaborate-drops"),
        &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::PostCleanup)),
    ];

    pm::run_passes(tcx, body, passes);
}

fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
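    // Helper: wrapping a pass in `WithMinOptLevel(1, ..)` gates it so that it only runs
    // at `mir-opt-level >= 1`, i.e. the `o1(..)` entries below are skipped at opt-level 0.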
    fn o1<T>(x: T) -> WithMinOptLevel<T> {
        WithMinOptLevel(1, x)
    }

    // The main optimizations that we do on MIR.
    pm::run_passes(
        tcx,
        body,
        &[
            &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
            &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first
            &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
            &unreachable_prop::UnreachablePropagation,
            &uninhabited_enum_branching::UninhabitedEnumBranching,
            &o1(simplify::SimplifyCfg::new("after-uninhabited-enum-branching")),
            &inline::Inline,
            &remove_storage_markers::RemoveStorageMarkers,
            &remove_zsts::RemoveZsts,
            &const_goto::ConstGoto,
            &remove_unneeded_drops::RemoveUnneededDrops,
            &match_branches::MatchBranchSimplification,
            // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
            &multiple_return_terminators::MultipleReturnTerminators,
            &instcombine::InstCombine,
            &separate_const_switch::SeparateConstSwitch,
            //
            // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
            &const_prop::ConstProp,
            //
            // Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
            &const_debuginfo::ConstDebugInfo,
            &o1(simplify_branches::SimplifyConstCondition::new("after-const-prop")),
            &early_otherwise_branch::EarlyOtherwiseBranch,
            &simplify_comparison_integral::SimplifyComparisonIntegral,
            &simplify_try::SimplifyArmIdentity,
            &simplify_try::SimplifyBranchSame,
            &dead_store_elimination::DeadStoreElimination,
            &dest_prop::DestinationPropagation,
            &o1(simplify_branches::SimplifyConstCondition::new("final")),
            &o1(remove_noop_landing_pads::RemoveNoopLandingPads),
            &o1(simplify::SimplifyCfg::new("final")),
            &nrvo::RenameReturnPlace,
            &simplify::SimplifyLocals,
            &multiple_return_terminators::MultipleReturnTerminators,
            &deduplicate_blocks::DeduplicateBlocks,
            // Some cleanup necessary at least for LLVM and potentially other codegen backends.
            &add_call_guards::CriticalCallEdges,
            &marker::PhaseChange(MirPhase::Runtime(RuntimePhase::Optimized)),
            // Dump the end result for testing and debugging purposes.
            &dump_mir::Marker("PreCodegen"),
        ],
    );
}

/// Optimize the MIR and prepare it for codegen.
fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
    let did = did.expect_local();
    assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
    tcx.arena.alloc(inner_optimized_mir(tcx, did))
}
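// (For orientation: downstream consumers such as codegen reach the optimized body through
// the `optimized_mir` query, e.g. `tcx.optimized_mir(def_id)`, rather than by calling
// these functions directly.)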

fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
    if tcx.is_constructor(did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, did.to_def_id());
    }

    match tcx.hir().body_const_context(did) {
        // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
        // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
        // computes and caches its result.
        Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
        None => {}
        Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
    }
    debug!("about to call mir_drops_elaborated...");
    let body =
        tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
    let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::NotConst);
    debug!("body: {:#?}", body);
    run_optimization_passes(tcx, &mut body);

    debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");

    body
}

/// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
/// constant evaluation once all substitutions become known.
fn promoted_mir<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
    if tcx.is_constructor(def.did.to_def_id()) {
        return tcx.arena.alloc(IndexVec::new());
    }

    let tainted_by_errors = tcx.mir_borrowck_opt_const_arg(def).tainted_by_errors;
    let mut promoted = tcx.mir_promoted(def).1.steal();

    for body in &mut promoted {
        if let Some(error_reported) = tainted_by_errors {
            body.tainted_by_errors = Some(error_reported);
        }
        run_analysis_to_runtime_passes(tcx, body);
    }

    debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");

    tcx.arena.alloc(promoted)
}