1 use crate::{shim, util};
2 use required_consts::RequiredConstsVisitor;
3 use rustc_data_structures::fx::FxHashSet;
5 use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
6 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
7 use rustc_index::vec::IndexVec;
8 use rustc_middle::mir::visit::Visitor as _;
9 use rustc_middle::mir::{traversal, Body, ConstQualifs, MirPhase, Promoted};
10 use rustc_middle::ty::query::Providers;
11 use rustc_middle::ty::steal::Steal;
12 use rustc_middle::ty::{self, InstanceDef, TyCtxt, TypeFoldable};
13 use rustc_span::{Span, Symbol};
// MIR pass modules: each submodule defines one transform or check that is
// wired into the pass runners below.
// NOTE(review): the numeric prefixes on every line are listing artifacts,
// and gaps in them (e.g. 17 -> 19 -> 21) indicate module declarations were
// dropped from this excerpt — restore the full list from VCS before building.
16 pub mod add_call_guards;
17 pub mod add_moves_for_packed_drops;
19 pub mod check_const_item_mutation;
21 pub mod check_packed_ref;
22 pub mod check_unsafety;
23 pub mod cleanup_post_borrowck;
29 pub mod early_otherwise_branch;
30 pub mod elaborate_drops;
34 pub mod instrument_coverage;
35 pub mod match_branches;
36 pub mod no_landing_pads;
38 pub mod promote_consts;
39 pub mod qualify_min_const_fn;
40 pub mod remove_noop_landing_pads;
41 pub mod remove_unneeded_drops;
42 pub mod required_consts;
45 pub mod simplify_branches;
46 pub mod simplify_comparison_integral;
48 pub mod uninhabited_enum_branching;
49 pub mod unreachable_prop;
// Installs this crate's MIR queries into the global `Providers` table.
// The `*_const_arg` redirections route items that are anonymous const
// arguments (detected via `ty::WithOptConstParam::try_lookup`) to the
// query variant that also carries the parent's `DefId`.
// NOTE(review): several closing braces / `else` arms are missing from this
// excerpt (gaps in the line-number prefixes) — the visible code is incomplete.
52 pub(crate) fn provide(providers: &mut Providers) {
53 self::check_unsafety::provide(providers);
54 *providers = Providers {
// Const-qualification query: redirect const-args, otherwise run on the
// plain local def id.
57 mir_const_qualif: |tcx, def_id| {
58 let def_id = def_id.expect_local();
59 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
60 tcx.mir_const_qualif_const_arg(def)
// (else branch — missing from excerpt)
62 mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
// Const-arg variant: `param_did` is the parent generic parameter's DefId.
65 mir_const_qualif_const_arg: |tcx, (did, param_did)| {
66 mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
69 mir_drops_elaborated_and_const_checked,
71 optimized_mir_of_const_arg,
// Promoted-MIR query, with the same const-arg redirection pattern.
73 promoted_mir: |tcx, def_id| {
74 let def_id = def_id.expect_local();
75 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
76 tcx.promoted_mir_of_const_arg(def)
78 promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
81 promoted_mir_of_const_arg: |tcx, (did, param_did)| {
82 promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
// Let the coverage-instrumentation module register its own providers too.
86 instrument_coverage::provide(providers);
/// Returns `true` if `def_id` has MIR associated with it, i.e. it is a
/// member of the `mir_keys` set computed for its (local) crate.
89 fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
90 tcx.mir_keys(def_id.krate).contains(&def_id.expect_local())
93 /// Finds the full set of `DefId`s within the current crate that have
94 /// MIR associated with them.
///
/// This is the union of all body owners plus tuple-struct/tuple-variant
/// constructors, which have synthesized MIR but no `BodyId` and therefore
/// must be collected by walking the HIR separately.
95 fn mir_keys(tcx: TyCtxt<'_>, krate: CrateNum) -> FxHashSet<LocalDefId> {
// This query is only meaningful for the local crate.
96 assert_eq!(krate, LOCAL_CRATE);
98 let mut set = FxHashSet::default();
100 // All body-owners have MIR associated with them.
101 set.extend(tcx.body_owners());
103 // Additionally, tuple struct/variant constructors have MIR, but
104 // they don't have a BodyId, so we need to build them separately.
105 struct GatherCtors<'a, 'tcx> {
107 set: &'a mut FxHashSet<LocalDefId>,
109 impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
110 fn visit_variant_data(
112 v: &'tcx hir::VariantData<'tcx>,
114 _: &'tcx hir::Generics<'tcx>,
// A tuple variant's ctor is identified by the HirId stored in the
// `Tuple` variant data; record its LocalDefId.
118 if let hir::VariantData::Tuple(_, hir_id) = *v {
119 self.set.insert(self.tcx.hir().local_def_id(hir_id));
121 intravisit::walk_struct_def(self, v)
// No nested-body visiting needed: we only care about item-level ctors.
123 type Map = intravisit::ErasedMap<'tcx>;
124 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
125 NestedVisitorMap::None
130 .visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set }.as_deep_visitor());
135 /// Where a specific `mir::Body` comes from.
136 #[derive(Debug, Copy, Clone)]
137 pub struct MirSource<'tcx> {
// The instance (item, shim, etc.) this body belongs to.
138 pub instance: InstanceDef<'tcx>,
140 /// If `Some`, this is a promoted rvalue within the parent function.
141 pub promoted: Option<Promoted>,
144 impl<'tcx> MirSource<'tcx> {
// Constructs a `MirSource` for a plain (non-promoted) item with no
// const-param information.
145 pub fn item(def_id: DefId) -> Self {
147 instance: InstanceDef::Item(ty::WithOptConstParam::unknown(def_id)),
// Forwards to the instance's `WithOptConstParam` (def id + optional
// const-param parent).
152 pub fn with_opt_param(self) -> ty::WithOptConstParam<DefId> {
153 self.instance.with_opt_param()
// The `DefId` of the underlying instance.
157 pub fn def_id(&self) -> DefId {
158 self.instance.def_id()
162 /// Generates a default name for the pass based on the name of the
/// type `T`: the segment after the last `::` in `type_name::<T>()`,
/// so `foo::bar::Baz` becomes `Baz`.
164 pub fn default_name<T: ?Sized>() -> Cow<'static, str> {
165 let name = ::std::any::type_name::<T>();
// Strip everything up to and including the final ':' (i.e. the module path).
166 if let Some(tail) = name.rfind(':') { Cow::from(&name[tail + 1..]) } else { Cow::from(name) }
169 /// A streamlined trait that you can implement to create a pass; the
170 /// pass will be named after the type, and it will consist of a main
171 /// loop that goes over each available MIR and applies `run_pass`.
172 pub trait MirPass<'tcx> {
// Default pass name: derived from the implementing type's name.
173 fn name(&self) -> Cow<'_, str> {
174 default_name::<Self>()
// The actual transformation; mutates `body` in place.
177 fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>);
// Pass runner: applies the given groups of `MirPass`es to `body` and
// advances it to `mir_phase`. If the body is already at (or past) that
// phase, the passes are skipped. When `-Zvalidate-mir` is set, the MIR is
// validated on entry, after each pass, and at the end of the final phase.
// NOTE(review): the `fn` signature line is missing from this excerpt — the
// visible text starts mid parameter list. Confirm against the original file
// (upstream this is `pub fn run_passes(...)`).
182 body: &mut Body<'tcx>,
183 instance: InstanceDef<'tcx>,
184 promoted: Option<Promoted>,
186 passes: &[&[&dyn MirPass<'tcx>]],
188 let phase_index = mir_phase.phase_index();
189 let source = MirSource { instance, promoted };
190 let validate = tcx.sess.opts.debugging_opts.validate_mir;
// Idempotence guard: never re-run a phase the body has already reached.
192 if body.phase >= mir_phase {
// Validate the MIR we received as input to this phase (under -Zvalidate-mir).
197 validate::Validator { when: format!("input to phase {:?}", mir_phase), mir_phase }
198 .run_pass(tcx, source, body);
// Runs one pass: dump hooks before/after (for -Zdump-mir), the pass
// itself, then optional validation.
202 let mut run_pass = |pass: &dyn MirPass<'tcx>| {
203 let run_hooks = |body: &_, index, is_after| {
204 dump_mir::on_mir_pass(
// Dump-file disambiguator: "<phase>-<pass index>".
206 &format_args!("{:03}-{:03}", phase_index, index),
213 run_hooks(body, index, false);
214 pass.run_pass(tcx, source, body);
215 run_hooks(body, index, true);
218 validate::Validator {
219 when: format!("after {} in phase {:?}", pass.name(), mir_phase),
222 .run_pass(tcx, source, body);
// Run every pass in every group, in order.
228 for pass_group in passes {
229 for pass in *pass_group {
// Record that the body has now reached the target phase.
234 body.phase = mir_phase;
// Extra validation at the very end of the pipeline.
236 if mir_phase == MirPhase::Optimization {
237 validate::Validator { when: format!("end of phase {:?}", mir_phase), mir_phase }
238 .run_pass(tcx, source, body);
/// Const-checks the body of `def` and returns the qualifs (e.g. interior
/// mutability, drop) of its return place. Non-const bodies and erroneous
/// bodies return `Default::default()`.
242 fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
243 let const_kind = tcx.hir().body_const_context(def.did);
245 // No need to const-check a non-const `fn`.
246 if const_kind.is_none() {
247 return Default::default();
250 // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
251 // cannot yet be stolen), because `mir_promoted()`, which steals
252 // from `mir_const(), forces this query to execute before
253 // performing the steal.
254 let body = &tcx.mir_const(def).borrow();
// Bail out (with a delayed ICE) rather than const-checking broken MIR.
256 if body.return_ty().references_error() {
257 tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
258 return Default::default();
261 let ccx = check_consts::ConstCx {
266 param_env: tcx.param_env(def.did),
// Run the const-checking validator over the whole body.
269 let mut validator = check_consts::validation::Validator::new(&ccx);
270 validator.check_body();
272 // We return the qualifs in the return place for every MIR body, even though it is only used
273 // when deciding to promote a reference to a `const` for now.
274 validator.qualifs_in_return_place()
277 /// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
///
/// Steals the freshly-built MIR, runs the initial lint/cleanup passes, and
/// returns the result as a new `Steal` for downstream queries to take.
/// NOTE(review): the `fn mir_const(...)` signature line itself is missing
/// from this excerpt (text jumps from the doc comment to a parameter) —
/// confirm against the original file.
280 def: ty::WithOptConstParam<LocalDefId>,
281 ) -> &'tcx Steal<Body<'tcx>> {
// If this def turns out to be a const arg with a known parent, defer to
// the upgraded query key so results are shared.
282 if let Some(def) = def.try_upgrade(tcx) {
283 return tcx.mir_const(def);
286 // Unsafety check uses the raw mir, so make sure it is run.
287 if let Some(param_did) = def.const_param_did {
288 tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
290 tcx.ensure().unsafety_check_result(def.did);
// Take ownership of the freshly built MIR; `mir_built` may not be used again.
293 let mut body = tcx.mir_built(def).steal();
300 MirSource { instance: InstanceDef::Item(def.to_global()), promoted: None },
308 InstanceDef::Item(def.to_global()),
// Initial pass list: lints that need raw-ish MIR, then CFG cleanup.
313 &check_packed_ref::CheckPackedRef,
314 &check_const_item_mutation::CheckConstItemMutation,
315 // What we need to do constant evaluation.
316 &simplify::SimplifyCfg::new("initial"),
317 &rustc_peek::SanityCheck,
// Hand the processed body to the query system as a stealable value.
320 tcx.alloc_steal_mir(body)
// Promotes eligible temporaries out of the body, returning the main body
// and the promoted fragments as separate stealable values. Also collects
// the body's `required_consts` so they get evaluated during monomorphization.
// NOTE(review): the `fn mir_promoted(...)` signature line is missing from
// this excerpt — confirm against the original file.
325 def: ty::WithOptConstParam<LocalDefId>,
326 ) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
// Same const-arg key-upgrade dance as `mir_const`.
327 if let Some(def) = def.try_upgrade(tcx) {
328 return tcx.mir_promoted(def);
331 // Ensure that we compute the `mir_const_qualif` for constants at
332 // this point, before we steal the mir-const result.
333 // Also this means promotion can rely on all const checks having been done.
334 let _ = tcx.mir_const_qualif_opt_const_arg(def);
// Likewise force abstract-const construction before stealing.
335 let _ = if let Some(param_did) = def.const_param_did {
336 tcx.mir_abstract_const_of_const_arg((did, param_did))
338 tcx.mir_abstract_const(def.did.to_def_id())
340 let mut body = tcx.mir_const(def).steal();
// Record every constant mentioned in the body; these must be evaluable
// for codegen of this body to succeed.
342 let mut required_consts = Vec::new();
343 let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
344 for (bb, bb_data) in traversal::reverse_postorder(&body) {
345 required_consts_visitor.visit_basic_block_data(bb, bb_data);
347 body.required_consts = required_consts;
// The promotion pass owns the promoted fragments; we retrieve them after
// the pass runner finishes (see `promoted_fragments` below).
349 let promote_pass = promote_consts::PromoteTemps::default();
350 let promote: &[&dyn MirPass<'tcx>] = &[
351 // What we need to run borrowck etc.
353 &simplify::SimplifyCfg::new("promote-consts"),
// Coverage instrumentation is injected here only under
// `-Zinstrument-coverage`.
356 let opt_coverage: &[&dyn MirPass<'tcx>] = if tcx.sess.opts.debugging_opts.instrument_coverage {
357 &[&instrument_coverage::InstrumentCoverage]
365 InstanceDef::Item(def.to_global()),
367 MirPhase::ConstPromotion,
368 &[promote, opt_coverage],
// Extract the fragments the promotion pass produced and hand both parts
// to the query system.
371 let promoted = promote_pass.promoted_fragments.into_inner();
372 (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
/// Runs borrowck (indirectly, by forcing the query), steals the promoted
/// MIR, performs the post-borrowck cleanup passes (including drop
/// elaboration), and const-checks live drops.
375 fn mir_drops_elaborated_and_const_checked<'tcx>(
377 def: ty::WithOptConstParam<LocalDefId>,
378 ) -> &'tcx Steal<Body<'tcx>> {
// Const-arg key upgrade, as in the other MIR queries.
379 if let Some(def) = def.try_upgrade(tcx) {
380 return tcx.mir_drops_elaborated_and_const_checked(def);
383 // (Mir-)Borrowck uses `mir_promoted`, so we have to force it to
384 // execute before we can steal.
385 if let Some(param_did) = def.const_param_did {
386 tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
388 tcx.ensure().mir_borrowck(def.did);
// Steal only the main body; the promoted fragments are handled by
// `promoted_mir`.
391 let (body, _) = tcx.mir_promoted(def);
392 let mut body = body.steal();
394 run_post_borrowck_cleanup_passes(tcx, &mut body, def.did, None);
// `Drop` terminators surviving elaboration on const contexts are checked here.
395 check_consts::post_drop_elaboration::check_live_drops(tcx, def.did, &body);
396 tcx.alloc_steal_mir(body)
399 /// After this series of passes, no lifetime analysis based on borrowing can be done.
///
/// Advances `body` (or the promoted fragment identified by `promoted`) to
/// `MirPhase::DropLowering` by running the cleanup passes that remove
/// analysis-only constructs and elaborate drops.
400 fn run_post_borrowck_cleanup_passes<'tcx>(
402 body: &mut Body<'tcx>,
404 promoted: Option<Promoted>,
406 debug!("post_borrowck_cleanup({:?})", def_id);
408 let post_borrowck_cleanup: &[&dyn MirPass<'tcx>] = &[
409 // Remove all things only needed by analysis
410 &no_landing_pads::NoLandingPads::new(tcx),
411 &simplify_branches::SimplifyBranches::new("initial"),
412 &remove_noop_landing_pads::RemoveNoopLandingPads,
413 &cleanup_post_borrowck::CleanupNonCodegenStatements,
414 &simplify::SimplifyCfg::new("early-opt"),
415 // These next passes must be executed together
416 &add_call_guards::CriticalCallEdges,
417 &elaborate_drops::ElaborateDrops,
418 &no_landing_pads::NoLandingPads::new(tcx),
419 // AddMovesForPackedDrops needs to run after drop
421 &add_moves_for_packed_drops::AddMovesForPackedDrops,
422 // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
423 // but before optimizations begin.
424 &add_retag::AddRetag,
425 &simplify::SimplifyCfg::new("elaborate-drops"),
426 // `Deaggregator` is conceptually part of MIR building, some backends rely on it happening
427 // and it can help optimizations.
428 &deaggregator::Deaggregator,
434 InstanceDef::Item(ty::WithOptConstParam::unknown(def_id.to_def_id())),
436 MirPhase::DropLowering,
437 &[post_borrowck_cleanup],
// Runs the optimization pipeline on `body` (or the promoted fragment
// identified by `promoted`): first generator lowering (with or without
// pre-lowering optimizations depending on `-Zmir-opt-level`), then the main
// optimization passes, ending at `MirPhase::Optimization`.
441 fn run_optimization_passes<'tcx>(
443 body: &mut Body<'tcx>,
445 promoted: Option<Promoted>,
// 0 disables most optimizations; only the "no_optimizations" lists run.
447 let mir_opt_level = tcx.sess.opts.debugging_opts.mir_opt_level;
449 // Lowering generator control-flow and variables has to happen before we do anything else
450 // to them. We run some optimizations before that, because they may be harder to do on the state
451 // machine than on MIR with async primitives.
452 let optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[
453 &unreachable_prop::UnreachablePropagation,
454 &uninhabited_enum_branching::UninhabitedEnumBranching,
455 &simplify::SimplifyCfg::new("after-uninhabited-enum-branching"),
457 &generator::StateTransform,
460 // Even if we don't do optimizations, we still have to lower generators for codegen.
461 let no_optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[&generator::StateTransform];
463 // The main optimizations that we do on MIR.
464 let optimizations: &[&dyn MirPass<'tcx>] = &[
465 &remove_unneeded_drops::RemoveUnneededDrops,
466 &match_branches::MatchBranchSimplification,
467 // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
468 &instcombine::InstCombine,
469 &const_prop::ConstProp,
470 &simplify_branches::SimplifyBranches::new("after-const-prop"),
471 &early_otherwise_branch::EarlyOtherwiseBranch,
472 &simplify_comparison_integral::SimplifyComparisonIntegral,
473 &simplify_try::SimplifyArmIdentity,
474 &simplify_try::SimplifyBranchSame,
475 &dest_prop::DestinationPropagation,
476 &copy_prop::CopyPropagation,
477 &simplify_branches::SimplifyBranches::new("after-copy-prop"),
478 &remove_noop_landing_pads::RemoveNoopLandingPads,
479 &simplify::SimplifyCfg::new("after-remove-noop-landing-pads"),
480 &simplify::SimplifyCfg::new("final"),
481 &nrvo::RenameReturnPlace,
482 &simplify::SimplifyLocals,
485 // Optimizations to run even if mir optimizations have been disabled.
486 let no_optimizations: &[&dyn MirPass<'tcx>] = &[
487 // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
488 &const_prop::ConstProp,
491 // Some cleanup necessary at least for LLVM and potentially other codegen backends.
492 let pre_codegen_cleanup: &[&dyn MirPass<'tcx>] = &[
493 &add_call_guards::CriticalCallEdges,
494 // Dump the end result for testing and debugging purposes.
495 &dump_mir::Marker("PreCodegen"),
498 // End of pass declarations, now actually run the passes.
499 // Generator Lowering
504 InstanceDef::Item(ty::WithOptConstParam::unknown(def_id.to_def_id())),
506 MirPhase::GeneratorLowering,
// Pick the generator-lowering list based on the opt level.
508 if mir_opt_level > 0 {
509 optimizations_with_generators
511 no_optimizations_with_generators
516 // Main optimization passes
521 InstanceDef::Item(ty::WithOptConstParam::unknown(def_id.to_def_id())),
523 MirPhase::Optimization,
525 if mir_opt_level > 0 { optimizations } else { no_optimizations },
/// Query provider for `optimized_mir`: redirects const-arg items to the
/// const-arg variant, otherwise runs the full optimization pipeline via
/// `inner_optimized_mir` and arena-allocates the result.
531 fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
532 let did = did.expect_local();
533 if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
534 tcx.optimized_mir_of_const_arg(def)
536 tcx.arena.alloc(inner_optimized_mir(tcx, ty::WithOptConstParam::unknown(did)))
/// Const-arg variant of `optimized_mir`: `param_did` is the generic
/// parameter this anonymous const is an argument for.
540 fn optimized_mir_of_const_arg<'tcx>(
542 (did, param_did): (LocalDefId, DefId),
543 ) -> &'tcx Body<'tcx> {
544 tcx.arena.alloc(inner_optimized_mir(
546 ty::WithOptConstParam { did, const_param_did: Some(param_did) },
/// Produces the fully-optimized MIR for `def`: either a directly-built
/// constructor shim, or the drops-elaborated body run through the
/// optimization pipeline.
550 fn inner_optimized_mir(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
551 if tcx.is_constructor(def.did.to_def_id()) {
552 // There's no reason to run all of the MIR passes on constructors when
553 // we can just output the MIR we want directly. This also saves const
554 // qualification and borrow checking the trouble of special casing
556 return shim::build_adt_ctor(tcx, def.did.to_def_id());
// Steal the cleaned-up body and optimize it in place.
559 let mut body = tcx.mir_drops_elaborated_and_const_checked(def).steal();
560 run_optimization_passes(tcx, &mut body, def.did, None);
// Optimized MIR must be fully region-erased before it leaves this query.
562 debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");
/// Query provider for `promoted_mir`: forces borrowck, steals the promoted
/// fragments from `mir_promoted`, and runs cleanup + optimization passes on
/// each fragment. Constructors have no promoteds, so they get an empty vec.
567 fn promoted_mir<'tcx>(
569 def: ty::WithOptConstParam<LocalDefId>,
570 ) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
571 if tcx.is_constructor(def.did.to_def_id()) {
572 return tcx.arena.alloc(IndexVec::new());
// Borrowck reads `mir_promoted`, so force it before stealing (same
// ordering constraint as in `mir_drops_elaborated_and_const_checked`).
575 if let Some(param_did) = def.const_param_did {
576 tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
578 tcx.ensure().mir_borrowck(def.did);
580 let (_, promoted) = tcx.mir_promoted(def);
581 let mut promoted = promoted.steal();
// Each promoted fragment goes through the same post-borrowck cleanup and
// optimization pipeline as the parent body, tagged with its index `p`.
583 for (p, mut body) in promoted.iter_enumerated_mut() {
584 run_post_borrowck_cleanup_passes(tcx, &mut body, def.did, Some(p));
585 run_optimization_passes(tcx, &mut body, def.did, Some(p));
588 debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");
590 tcx.arena.alloc(promoted)