1 use crate::{shim, util};
2 use required_consts::RequiredConstsVisitor;
3 use rustc_data_structures::fx::FxHashSet;
4 use rustc_data_structures::steal::Steal;
6 use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
7 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
8 use rustc_index::vec::IndexVec;
9 use rustc_middle::mir::visit::Visitor as _;
10 use rustc_middle::mir::{traversal, Body, ConstQualifs, MirPhase, Promoted};
11 use rustc_middle::ty::query::Providers;
12 use rustc_middle::ty::{self, TyCtxt, TypeFoldable};
13 use rustc_span::{Span, Symbol};
16 pub mod add_call_guards;
17 pub mod add_moves_for_packed_drops;
19 pub mod check_const_item_mutation;
21 pub mod check_packed_ref;
22 pub mod check_unsafety;
23 pub mod cleanup_post_borrowck;
24 pub mod const_debuginfo;
29 pub mod deduplicate_blocks;
32 pub mod early_otherwise_branch;
33 pub mod elaborate_drops;
34 pub mod function_item_references;
38 pub mod lower_intrinsics;
39 pub mod match_branches;
40 pub mod multiple_return_terminators;
41 pub mod no_landing_pads;
43 pub mod promote_consts;
44 pub mod remove_noop_landing_pads;
45 pub mod remove_storage_markers;
46 pub mod remove_unneeded_drops;
48 pub mod required_consts;
51 pub mod simplify_branches;
52 pub mod simplify_comparison_integral;
54 pub mod uninhabited_enum_branching;
55 pub mod unreachable_prop;
58 pub use rustc_middle::mir::MirSource;
/// Registers this module's MIR-related query providers into the global
/// `Providers` table.
60 pub(crate) fn provide(providers: &mut Providers) {
61 self::check_unsafety::provide(providers);
62 self::check_packed_ref::provide(providers);
63 *providers = Providers {
// `mir_const_qualif`: if the item is a const generic argument with a known
// parent, dispatch to the const-arg variant; otherwise qualify it with an
// unknown const param.
66 mir_const_qualif: |tcx, def_id| {
67 let def_id = def_id.expect_local();
68 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
69 tcx.mir_const_qualif_const_arg(def)
// NOTE(review): the `} else {` arm appears elided from this excerpt.
71 mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
// The const-arg query key is a `(did, param_did)` tuple; rebuild the
// `WithOptConstParam` from it.
74 mir_const_qualif_const_arg: |tcx, (did, param_did)| {
75 mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
78 mir_drops_elaborated_and_const_checked,
80 mir_for_ctfe_of_const_arg,
83 is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
// `promoted_mir`: same const-arg dispatch pattern as `mir_const_qualif` above.
84 promoted_mir: |tcx, def_id| {
85 let def_id = def_id.expect_local();
86 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
87 tcx.promoted_mir_of_const_arg(def)
// NOTE(review): the `} else {` arm appears elided from this excerpt.
89 promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
92 promoted_mir_of_const_arg: |tcx, (did, param_did)| {
93 promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
// Coverage instrumentation registers its own query providers.
97 coverage::query::provide(providers);
/// Returns whether `def_id` has MIR, by checking membership in the crate's
/// `mir_keys` set. Panics (via `expect_local`) if `def_id` is not local.
100 fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
101 tcx.mir_keys(def_id.krate).contains(&def_id.expect_local())
104 /// Finds the full set of `DefId`s within the current crate that have
105 /// MIR associated with them.
106 fn mir_keys(tcx: TyCtxt<'_>, krate: CrateNum) -> FxHashSet<LocalDefId> {
// This query is only defined for the local crate.
107 assert_eq!(krate, LOCAL_CRATE);
109 let mut set = FxHashSet::default();
111 // All body-owners have MIR associated with them.
112 set.extend(tcx.body_owners());
114 // Additionally, tuple struct/variant constructors have MIR, but
115 // they don't have a BodyId, so we need to build them separately.
116 struct GatherCtors<'a, 'tcx> {
// NOTE(review): a `tcx` field appears elided from this excerpt (it is read in
// `visit_variant_data` below and supplied at the construction site).
118 set: &'a mut FxHashSet<LocalDefId>,
120 impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
121 fn visit_variant_data(
123 v: &'tcx hir::VariantData<'tcx>,
125 _: &'tcx hir::Generics<'tcx>,
// Tuple variants carry the constructor's `hir_id`; record its `LocalDefId`.
129 if let hir::VariantData::Tuple(_, hir_id) = *v {
130 self.set.insert(self.tcx.hir().local_def_id(hir_id));
132 intravisit::walk_struct_def(self, v)
// No nested visiting: item-likes are walked explicitly via
// `visit_all_item_likes` below.
134 type Map = intravisit::ErasedMap<'tcx>;
135 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
136 NestedVisitorMap::None
141 .visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set }.as_deep_visitor());
146 /// Generates a default name for the pass based on the name of the
// implementing type: everything after the final `:` of `type_name::<T>()`,
// i.e. the unqualified type name (borrowed; falls back to the full name when
// there is no `:` separator).
148 pub fn default_name<T: ?Sized>() -> Cow<'static, str> {
149 let name = std::any::type_name::<T>();
150 if let Some(tail) = name.rfind(':') { Cow::from(&name[tail + 1..]) } else { Cow::from(name) }
153 /// A streamlined trait that you can implement to create a pass; the
154 /// pass will be named after the type, and it will consist of a main
155 /// loop that goes over each available MIR and applies `run_pass`.
156 pub trait MirPass<'tcx> {
// Defaults to the unqualified type name of the implementing type.
157 fn name(&self) -> Cow<'_, str> {
158 default_name::<Self>()
// The pass's entry point: transform `body` in place.
161 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>);
// Runs the given groups of `passes` over `body`, advancing it to `mir_phase`,
// with optional inter-pass MIR validation and per-pass `dump_mir` hooks.
// NOTE(review): the `fn run_passes(` header and its leading parameters appear
// elided from this excerpt.
166 body: &mut Body<'tcx>,
168 passes: &[&[&dyn MirPass<'tcx>]],
170 let phase_index = mir_phase.phase_index();
// `validate_mir` (a debugging option) enables validation around passes.
171 let validate = tcx.sess.opts.debugging_opts.validate_mir;
// Skip all work if the body already reached (or passed) the target phase.
173 if body.phase >= mir_phase {
// Validate the input MIR before any pass runs.
178 validate::Validator { when: format!("input to phase {:?}", mir_phase), mir_phase }
179 .run_pass(tcx, body);
// Closure that runs one pass, dumping MIR before and after it.
183 let mut run_pass = |pass: &dyn MirPass<'tcx>| {
184 let run_hooks = |body: &_, index, is_after| {
185 dump_mir::on_mir_pass(
// Zero-padded phase/pass indices so dump file names sort correctly.
187 &format_args!("{:03}-{:03}", phase_index, index),
193 run_hooks(body, index, false);
194 pass.run_pass(tcx, body);
195 run_hooks(body, index, true);
// Re-validate after each individual pass when validation is enabled.
198 validate::Validator {
199 when: format!("after {} in phase {:?}", pass.name(), mir_phase),
202 .run_pass(tcx, body);
208 for pass_group in passes {
209 for pass in *pass_group {
// Record that the body has now reached the target phase.
214 body.phase = mir_phase;
// Always validate at the very end of the optimization pipeline.
216 if mir_phase == MirPhase::Optimization {
217 validate::Validator { when: format!("end of phase {:?}", mir_phase), mir_phase }
218 .run_pass(tcx, body);
/// Computes the `ConstQualifs` of `def`'s MIR by running the const-checking
/// validator over the `mir_const` body.
222 fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
223 let const_kind = tcx.hir().body_const_context(def.did);
225 // No need to const-check a non-const `fn`.
226 if const_kind.is_none() {
227 return Default::default();
230 // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
231 // cannot yet be stolen), because `mir_promoted()`, which steals
232 // from `mir_const()`, forces this query to execute before
233 // performing the steal.
234 let body = &tcx.mir_const(def).borrow();
// Don't const-check bodies whose return type already contains errors.
236 if body.return_ty().references_error() {
237 tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
238 return Default::default();
241 let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };
243 let mut validator = check_consts::validation::Validator::new(&ccx);
244 validator.check_body();
246 // We return the qualifs in the return place for every MIR body, even though it is only used
247 // when deciding to promote a reference to a `const` for now.
248 validator.qualifs_in_return_place()
251 /// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
// NOTE(review): the `fn mir_const<'tcx>(` header and `tcx` parameter appear
// elided from this excerpt.
254 def: ty::WithOptConstParam<LocalDefId>,
255 ) -> &'tcx Steal<Body<'tcx>> {
// For a const argument with a known parent, defer to the parent's copy of
// this query.
256 if let Some(def) = def.try_upgrade(tcx) {
257 return tcx.mir_const(def);
260 // Unsafety check uses the raw mir, so make sure it is run.
261 if let Some(param_did) = def.const_param_did {
262 tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
// NOTE(review): the `} else {` arm appears elided from this excerpt.
264 tcx.ensure().unsafety_check_result(def.did);
// Steal the freshly built MIR; `mir_built` can no longer be read after this.
267 let mut body = tcx.mir_built(def).steal();
269 util::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));
// Early lint-style checks plus initial CFG cleanup, run before const
// evaluation (pass-list construction partially elided in this excerpt).
277 &check_packed_ref::CheckPackedRef,
278 &check_const_item_mutation::CheckConstItemMutation,
279 &function_item_references::FunctionItemReferences,
280 // What we need to do constant evaluation.
281 &simplify::SimplifyCfg::new("initial"),
282 &rustc_peek::SanityCheck,
// Store the result behind `Steal` so `mir_promoted` can later take ownership.
285 tcx.alloc_steal_mir(body)
288 /// Compute the main MIR body and the list of MIR bodies of the promoteds.
// NOTE(review): the `fn mir_promoted<'tcx>(` header and `tcx` parameter appear
// elided from this excerpt.
291 def: ty::WithOptConstParam<LocalDefId>,
292 ) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
// For a const argument with a known parent, defer to the parent's copy of
// this query.
293 if let Some(def) = def.try_upgrade(tcx) {
294 return tcx.mir_promoted(def);
297 // Ensure that we compute the `mir_const_qualif` for constants at
298 // this point, before we steal the mir-const result.
299 // Also this means promotion can rely on all const checks having been done.
300 let _ = tcx.mir_const_qualif_opt_const_arg(def);
301 let _ = tcx.mir_abstract_const_opt_const_arg(def.to_global());
302 let mut body = tcx.mir_const(def).steal();
// Collect every constant mentioned in the body (in reverse postorder) so
// they can be evaluated, and errors reported, later.
304 let mut required_consts = Vec::new();
305 let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
306 for (bb, bb_data) in traversal::reverse_postorder(&body) {
307 required_consts_visitor.visit_basic_block_data(bb, bb_data);
309 body.required_consts = required_consts;
// Promote eligible temporaries into separate `promoted` bodies, then
// simplify the CFG.
311 let promote_pass = promote_consts::PromoteTemps::default();
312 let promote: &[&dyn MirPass<'tcx>] = &[
313 // What we need to run borrowck etc.
315 &simplify::SimplifyCfg::new("promote-consts"),
// Coverage instrumentation runs here only when enabled for the session.
318 let opt_coverage: &[&dyn MirPass<'tcx>] =
319 if tcx.sess.instrument_coverage() { &[&coverage::InstrumentCoverage] } else { &[] };
321 run_passes(tcx, &mut body, MirPhase::ConstPromotion, &[promote, opt_coverage]);
// The promote pass accumulated the fragments; hand both results out as
// stealable allocations.
323 let promoted = promote_pass.promoted_fragments.into_inner();
324 (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
327 /// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
328 fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
329 let did = def_id.expect_local();
// Const generic arguments use the dedicated `mir_for_ctfe_of_const_arg`
// query (which avoids typeck cycles — see its docs below).
330 if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
331 tcx.mir_for_ctfe_of_const_arg(def)
// NOTE(review): the `} else {` arm appears elided from this excerpt.
333 tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
337 /// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
338 /// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
339 /// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
340 /// the const parameter while type checking the main body, which in turn would try
341 /// to type check the main body again.
342 fn mir_for_ctfe_of_const_arg<'tcx>(
// NOTE(review): the `tcx: TyCtxt<'tcx>,` parameter appears elided from this
// excerpt.
344 (did, param_did): (LocalDefId, DefId),
345 ) -> &'tcx Body<'tcx> {
346 tcx.arena.alloc(inner_mir_for_ctfe(
// Rebuild the `WithOptConstParam` from the query key's tuple form.
348 ty::WithOptConstParam { did, const_param_did: Some(param_did) },
/// Shared implementation behind `mir_for_ctfe` / `mir_for_ctfe_of_const_arg`:
/// clones the drops-elaborated MIR and finishes preparing it for CTFE.
352 fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
353 // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
354 if tcx.is_constructor(def.did.to_def_id()) {
355 // There's no reason to run all of the MIR passes on constructors when
356 // we can just output the MIR we want directly. This also saves const
357 // qualification and borrow checking the trouble of special casing
359 return shim::build_adt_ctor(tcx, def.did.to_def_id());
// This query must only ever run on bodies that have a const context.
364 .body_const_context(def.did)
365 .expect("mir_for_ctfe should not be used for runtime functions");
// Clone the shared drops-elaborated result rather than stealing it.
367 let mut body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();
// Per-const-context decision about running const-prop (each arm's comment
// explains why). NOTE(review): the `match` header appears elided here.
370 // Do not const prop functions, either they get executed at runtime or exported to metadata,
371 // so we run const prop on them, or they don't, in which case we const evaluate some control
372 // flow paths of the function and any errors in those paths will get emitted as const eval
374 hir::ConstContext::ConstFn => {}
375 // Static items always get evaluated, so we can just let const eval see if any erroneous
376 // control flow paths get executed.
377 hir::ConstContext::Static(_) => {}
378 // Associated constants get const prop run so we detect common failure situations in the
379 // crate that defined the constant.
380 // Technically we want to not run on regular const items, but oli-obk doesn't know how to
381 // conveniently detect that at this point without looking at the HIR.
382 hir::ConstContext::Const => {
384 let optimizations: &[&dyn MirPass<'_>] = &[
385 &const_prop::ConstProp,
392 MirPhase::Optimization,
// Sanity check: MIR handed to CTFE must contain no free regions.
400 debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");
405 /// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
406 /// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
407 /// end up missing the source MIR due to stealing happening.
408 fn mir_drops_elaborated_and_const_checked<'tcx>(
// NOTE(review): the `tcx: TyCtxt<'tcx>,` parameter appears elided from this
// excerpt.
410 def: ty::WithOptConstParam<LocalDefId>,
411 ) -> &'tcx Steal<Body<'tcx>> {
// For a const argument with a known parent, defer to the parent's copy of
// this query.
412 if let Some(def) = def.try_upgrade(tcx) {
413 return tcx.mir_drops_elaborated_and_const_checked(def);
416 // (Mir-)Borrowck uses `mir_promoted`, so we have to force it to
417 // execute before we can steal.
418 if let Some(param_did) = def.const_param_did {
419 tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
// NOTE(review): the `} else {` arm appears elided from this excerpt.
421 tcx.ensure().mir_borrowck(def.did);
424 let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
425 use rustc_middle::hir::map::blocks::FnLikeNode;
// Whether this def is function-like (usage of the flag is elided here).
426 let is_fn_like = FnLikeNode::from_node(tcx.hir().get(hir_id)).is_some();
428 let did = def.did.to_def_id();
429 let def = ty::WithOptConstParam::unknown(did);
431 // Do not compute the mir call graph without said call graph actually being used.
432 if inline::is_enabled(tcx) {
433 let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
// Steal only the main body; the promoteds are left for `promoted_mir`.
437 let (body, _) = tcx.mir_promoted(def);
438 let mut body = body.steal();
440 run_post_borrowck_cleanup_passes(tcx, &mut body);
// Live-drop checking for const contexts can only happen after drop
// elaboration, hence it lives here.
441 check_consts::post_drop_elaboration::check_live_drops(tcx, &body);
442 tcx.alloc_steal_mir(body)
445 /// After this series of passes, no lifetime analysis based on borrowing can be done.
// Runs the `DropLowering`-phase cleanup pipeline on `body` in place.
446 fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
447 debug!("post_borrowck_cleanup({:?})", body.source.def_id());
449 let post_borrowck_cleanup: &[&dyn MirPass<'tcx>] = &[
450 // Remove all things only needed by analysis
451 &no_landing_pads::NoLandingPads,
452 &simplify_branches::SimplifyBranches::new("initial"),
453 &remove_noop_landing_pads::RemoveNoopLandingPads,
454 &cleanup_post_borrowck::CleanupNonCodegenStatements,
455 &simplify::SimplifyCfg::new("early-opt"),
456 // These next passes must be executed together
457 &add_call_guards::CriticalCallEdges,
458 &elaborate_drops::ElaborateDrops,
459 &no_landing_pads::NoLandingPads,
460 // AddMovesForPackedDrops needs to run after drop
// (elaboration — the rest of this comment line appears elided.)
462 &add_moves_for_packed_drops::AddMovesForPackedDrops,
463 // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
464 // but before optimizations begin.
465 &add_retag::AddRetag,
466 &lower_intrinsics::LowerIntrinsics,
467 &simplify::SimplifyCfg::new("elaborate-drops"),
468 // `Deaggregator` is conceptually part of MIR building, some backends rely on it happening
469 // and it can help optimizations.
470 &deaggregator::Deaggregator,
// Run the whole group, advancing the body to the `DropLowering` phase.
473 run_passes(tcx, body, MirPhase::DropLowering, &[post_borrowck_cleanup]);
/// Runs the full MIR optimization pipeline: generator lowering first, then
/// either the main optimizations or only the mandatory passes, depending on
/// the session's MIR opt level.
476 fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
477 let mir_opt_level = tcx.sess.mir_opt_level();
479 // Lowering generator control-flow and variables has to happen before we do anything else
480 // to them. We run some optimizations before that, because they may be harder to do on the state
481 // machine than on MIR with async primitives.
482 let optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[
483 &unreachable_prop::UnreachablePropagation,
484 &uninhabited_enum_branching::UninhabitedEnumBranching,
485 &simplify::SimplifyCfg::new("after-uninhabited-enum-branching"),
487 &generator::StateTransform,
490 // Even if we don't do optimizations, we still have to lower generators for codegen.
491 let no_optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[&generator::StateTransform];
493 // The main optimizations that we do on MIR.
494 let optimizations: &[&dyn MirPass<'tcx>] = &[
495 &remove_storage_markers::RemoveStorageMarkers,
496 &remove_zsts::RemoveZsts,
497 &const_goto::ConstGoto,
498 &remove_unneeded_drops::RemoveUnneededDrops,
499 &match_branches::MatchBranchSimplification,
500 // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
501 &multiple_return_terminators::MultipleReturnTerminators,
502 &instcombine::InstCombine,
503 &const_prop::ConstProp,
504 &simplify_branches::SimplifyBranches::new("after-const-prop"),
505 &early_otherwise_branch::EarlyOtherwiseBranch,
506 &simplify_comparison_integral::SimplifyComparisonIntegral,
507 &simplify_try::SimplifyArmIdentity,
508 &simplify_try::SimplifyBranchSame,
509 &dest_prop::DestinationPropagation,
510 &simplify_branches::SimplifyBranches::new("final"),
511 &remove_noop_landing_pads::RemoveNoopLandingPads,
512 &simplify::SimplifyCfg::new("final"),
513 &nrvo::RenameReturnPlace,
514 &const_debuginfo::ConstDebugInfo,
515 &simplify::SimplifyLocals,
516 &multiple_return_terminators::MultipleReturnTerminators,
517 &deduplicate_blocks::DeduplicateBlocks,
520 // Optimizations to run even if mir optimizations have been disabled.
521 let no_optimizations: &[&dyn MirPass<'tcx>] = &[
522 // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
523 &const_prop::ConstProp,
526 // Some cleanup necessary at least for LLVM and potentially other codegen backends.
527 let pre_codegen_cleanup: &[&dyn MirPass<'tcx>] = &[
528 &add_call_guards::CriticalCallEdges,
529 // Dump the end result for testing and debugging purposes.
530 &dump_mir::Marker("PreCodegen"),
533 // End of pass declarations, now actually run the passes.
534 // Generator Lowering
// NOTE(review): the `run_passes(` call wrapping the next lines appears
// partially elided from this excerpt.
539 MirPhase::GeneratorLowering,
541 if mir_opt_level > 0 {
542 optimizations_with_generators
// NOTE(review): the `} else {` arm appears elided from this excerpt.
544 no_optimizations_with_generators
549 // Main optimization passes
554 MirPhase::Optimization,
// Opt level 0 still runs `no_optimizations` (const-prop) for its lints.
556 if mir_opt_level > 0 { optimizations } else { no_optimizations },
562 /// Optimize the MIR and prepare it for codegen.
563 fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
564 let did = did.expect_local();
// This query is never used for const generic arguments; those go through
// `mir_for_ctfe_of_const_arg` instead.
565 assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
566 tcx.arena.alloc(inner_optimized_mir(tcx, did))
/// Implementation of `optimized_mir`: steals the drops-elaborated MIR and runs
/// the optimization pipeline over it.
569 fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
570 if tcx.is_constructor(did.to_def_id()) {
571 // There's no reason to run all of the MIR passes on constructors when
572 // we can just output the MIR we want directly. This also saves const
573 // qualification and borrow checking the trouble of special casing
575 return shim::build_adt_ctor(tcx, did.to_def_id());
578 match tcx.hir().body_const_context(did) {
579 // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
580 // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
581 // computes and caches its result.
582 Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
// NOTE(review): the `None => {}` arm appears elided from this excerpt.
584 Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
587 tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
588 run_optimization_passes(tcx, &mut body);
// Sanity check: optimized MIR must contain no free regions.
590 debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");
595 /// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
596 /// constant evaluation once all substitutions become known.
597 fn promoted_mir<'tcx>(
// NOTE(review): the `tcx: TyCtxt<'tcx>,` parameter appears elided from this
// excerpt.
599 def: ty::WithOptConstParam<LocalDefId>,
600 ) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
// Constructors have no promoteds; return an empty vec.
601 if tcx.is_constructor(def.did.to_def_id()) {
602 return tcx.arena.alloc(IndexVec::new());
// Borrowck reads `mir_promoted`, so force it to run before we steal below.
605 if let Some(param_did) = def.const_param_did {
606 tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
// NOTE(review): the `} else {` arm appears elided from this excerpt.
608 tcx.ensure().mir_borrowck(def.did);
610 let (_, promoted) = tcx.mir_promoted(def);
611 let mut promoted = promoted.steal();
// Run the same post-borrowck cleanup on every promoted body.
613 for body in &mut promoted {
614 run_post_borrowck_cleanup_passes(tcx, body);
// Sanity check: promoted MIR must contain no free regions.
617 debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");
619 tcx.arena.alloc(promoted)