1 #![feature(box_patterns)]
2 #![feature(box_syntax)]
3 #![feature(crate_visibility_modifier)]
6 #![feature(map_try_insert)]
7 #![feature(min_specialization)]
8 #![feature(option_get_or_insert_default)]
10 #![feature(never_type)]
11 #![feature(trusted_step)]
12 #![feature(try_blocks)]
13 #![recursion_limit = "256"]
18 extern crate rustc_middle;
20 use required_consts::RequiredConstsVisitor;
21 use rustc_const_eval::util;
22 use rustc_data_structures::fx::FxHashSet;
23 use rustc_data_structures::steal::Steal;
25 use rustc_hir::def_id::{DefId, LocalDefId};
26 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
27 use rustc_index::vec::IndexVec;
28 use rustc_middle::mir::visit::Visitor as _;
29 use rustc_middle::mir::{traversal, Body, ConstQualifs, MirPass, MirPhase, Promoted};
30 use rustc_middle::ty::query::Providers;
31 use rustc_middle::ty::{self, TyCtxt, TypeFoldable};
32 use rustc_span::{Span, Symbol};
37 use pass_manager::{self as pm, Lint, MirLint, WithMinOptLevel};
39 mod abort_unwinding_calls;
41 mod add_moves_for_packed_drops;
43 mod check_const_item_mutation;
45 pub mod check_unsafety;
46 mod cleanup_post_borrowck;
52 mod deduplicate_blocks;
55 mod early_otherwise_branch;
57 mod function_item_references;
65 mod multiple_return_terminators;
66 mod normalize_array_len;
68 mod remove_false_edges;
69 mod remove_noop_landing_pads;
70 mod remove_storage_markers;
71 mod remove_uninit_drops;
72 mod remove_unneeded_drops;
76 mod separate_const_switch;
79 mod simplify_branches;
80 mod simplify_comparison_integral;
82 mod uninhabited_enum_branching;
85 use rustc_const_eval::transform::check_consts::{self, ConstCx};
86 use rustc_const_eval::transform::promote_consts;
87 use rustc_const_eval::transform::validate;
88 use rustc_mir_dataflow::rustc_peek;
// Registers this crate's MIR queries with the global `Providers` table.
// Sub-modules install their own providers first; the struct-update below then
// installs the entries defined in this file.
// NOTE(review): interior lines (else-arms, struct-update tail, closing braces)
// are elided in this view.
90 pub fn provide(providers: &mut Providers) {
91 check_unsafety::provide(providers);
92 check_packed_ref::provide(providers);
93 coverage::query::provide(providers);
94 shim::provide(providers);
95 *providers = Providers {
// Const-argument items with a known parent are routed to the `_const_arg`
// query variant so the parent's const parameter is available.
98 mir_const_qualif: |tcx, def_id| {
99 let def_id = def_id.expect_local();
100 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
101 tcx.mir_const_qualif_const_arg(def)
// (else branch, elided above) plain items use an "unknown" const param.
103 mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
106 mir_const_qualif_const_arg: |tcx, (did, param_did)| {
107 mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
110 mir_drops_elaborated_and_const_checked,
112 mir_for_ctfe_of_const_arg,
// CTFE availability is the same check as runtime MIR availability.
115 is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
116 mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable,
117 mir_inliner_callees: inline::cycle::mir_inliner_callees,
// Same const-arg dispatch pattern as `mir_const_qualif` above.
118 promoted_mir: |tcx, def_id| {
119 let def_id = def_id.expect_local();
120 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
121 tcx.promoted_mir_of_const_arg(def)
123 promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
126 promoted_mir_of_const_arg: |tcx, (did, param_did)| {
127 promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
// Provider for `is_mir_available`: an item has MIR iff its `LocalDefId` is in
// the `mir_keys` set computed below. `expect_local` asserts the query is only
// asked about items of the current crate.
133 fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
134 let def_id = def_id.expect_local();
135 tcx.mir_keys(()).contains(&def_id)
138 /// Finds the full set of `DefId`s within the current crate that have
139 /// MIR associated with them.
140 fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxHashSet<LocalDefId> {
141 let mut set = FxHashSet::default();
143 // All body-owners have MIR associated with them.
144 set.extend(tcx.hir().body_owners());
146 // Additionally, tuple struct/variant constructors have MIR, but
147 // they don't have a BodyId, so we need to build them separately.
// Helper visitor that records the `LocalDefId` of every tuple-like
// constructor it encounters. (Its `tcx` field line is elided in this view.)
148 struct GatherCtors<'a, 'tcx> {
150 set: &'a mut FxHashSet<LocalDefId>,
152 impl<'tcx> Visitor<'tcx> for GatherCtors<'_, 'tcx> {
153 fn visit_variant_data(
155 v: &'tcx hir::VariantData<'tcx>,
157 _: &'tcx hir::Generics<'tcx>,
// Tuple variants carry a ctor `HirId`; map it to a `LocalDefId` and record it.
161 if let hir::VariantData::Tuple(_, hir_id) = *v {
162 self.set.insert(self.tcx.hir().local_def_id(hir_id));
// Keep walking so nested variant data is visited too.
164 intravisit::walk_struct_def(self, v)
// No nested-body visiting needed: we only care about item-level variant data.
166 type Map = intravisit::ErasedMap<'tcx>;
167 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
168 NestedVisitorMap::None
171 tcx.hir().visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set }.as_deep_visitor());
// Computes the `ConstQualifs` (qualification flags used for promotion
// decisions) for the given body by running the const checker over it.
// Returns `Default::default()` for non-const bodies and for bodies whose
// return type already contains errors.
176 fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
177 let const_kind = tcx.hir().body_const_context(def.did);
179 // No need to const-check a non-const `fn`.
180 if const_kind.is_none() {
181 return Default::default();
184 // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
185 // cannot yet be stolen), because `mir_promoted()`, which steals
186 // from `mir_const(), forces this query to execute before
187 // performing the steal.
188 let body = &tcx.mir_const(def).borrow();
190 if body.return_ty().references_error() {
// Errors were already reported during type checking; just note the
// inconsistency and bail with empty qualifs.
191 tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
192 return Default::default();
195 let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };
197 let mut validator = check_consts::check::Checker::new(&ccx);
198 validator.check_body();
200 // We return the qualifs in the return place for every MIR body, even though it is only used
201 // when deciding to promote a reference to a `const` for now.
202 validator.qualifs_in_return_place()
205 /// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
// NOTE(review): the `fn mir_const<'tcx>(tcx: TyCtxt<'tcx>,` signature line is
// elided in this view; the parameter/return lines below belong to it.
208 def: ty::WithOptConstParam<LocalDefId>,
209 ) -> &'tcx Steal<Body<'tcx>> {
// If this is a const argument whose parent is known, defer to the query for
// the upgraded key so results are shared.
210 if let Some(def) = def.try_upgrade(tcx) {
211 return tcx.mir_const(def);
214 // Unsafety check uses the raw mir, so make sure it is run.
215 if !tcx.sess.opts.debugging_opts.thir_unsafeck {
216 if let Some(param_did) = def.const_param_did {
217 tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
// (else branch, elided above) non-const-arg items use the plain query.
219 tcx.ensure().unsafety_check_result(def.did);
// Stealing `mir_built` means no later query may ask for the raw built MIR.
223 let mut body = tcx.mir_built(def).steal();
225 rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));
// Lint-only passes plus the minimal cleanup needed before const checking.
// NOTE(review): the `pm::run_passes(` opener for this list is elided here.
232 &Lint(check_packed_ref::CheckPackedRef),
233 &Lint(check_const_item_mutation::CheckConstItemMutation),
234 &Lint(function_item_references::FunctionItemReferences),
235 // What we need to do constant evaluation.
236 &simplify::SimplifyCfg::new("initial"),
237 &rustc_peek::SanityCheck, // Just a lint
238 &marker::PhaseChange(MirPhase::Const),
241 tcx.alloc_steal_mir(body)
244 /// Compute the main MIR body and the list of MIR bodies of the promoteds.
245 fn mir_promoted<'tcx>(
247 def: ty::WithOptConstParam<LocalDefId>,
248 ) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
// Const arguments with a known parent share the parent-aware query result.
249 if let Some(def) = def.try_upgrade(tcx) {
250 return tcx.mir_promoted(def);
253 // Ensure that we compute the `mir_const_qualif` for constants at
254 // this point, before we steal the mir-const result.
255 // Also this means promotion can rely on all const checks having been done.
256 let _ = tcx.mir_const_qualif_opt_const_arg(def);
257 let mut body = tcx.mir_const(def).steal();
// Collect every constant mentioned in the body so they can be evaluated
// eagerly (and errors surfaced) even if a code path is never codegenned.
259 let mut required_consts = Vec::new();
260 let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
261 for (bb, bb_data) in traversal::reverse_postorder(&body) {
262 required_consts_visitor.visit_basic_block_data(bb, bb_data);
264 body.required_consts = required_consts;
266 // What we need to run borrowck etc.
267 let promote_pass = promote_consts::PromoteTemps::default();
// NOTE(review): the `pm::run_passes(` call wrapping the promote pass and the
// two passes below is elided in this view.
273 &simplify::SimplifyCfg::new("promote-consts"),
274 &coverage::InstrumentCoverage,
// The promote pass stashed the promoted fragments in a cell; extract them.
278 let promoted = promote_pass.promoted_fragments.into_inner();
279 (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
282 /// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
283 fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
284 let did = def_id.expect_local();
// Const arguments with a known parent go through the parent-aware variant;
// the (elided) else branch computes and arena-allocates the body directly.
285 if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
286 tcx.mir_for_ctfe_of_const_arg(def)
288 tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
292 /// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
293 /// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
294 /// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
295 /// the const parameter while type checking the main body, which in turn would try
296 /// to type check the main body again.
297 fn mir_for_ctfe_of_const_arg<'tcx>(
299 (did, param_did): (LocalDefId, DefId),
300 ) -> &'tcx Body<'tcx> {
// Delegate to the shared implementation with the const-param parent recorded.
301 tcx.arena.alloc(inner_mir_for_ctfe(
303 ty::WithOptConstParam { did, const_param_did: Some(param_did) },
// Shared implementation behind `mir_for_ctfe` / `mir_for_ctfe_of_const_arg`:
// clones the drops-elaborated body and, depending on the const context, runs
// const-prop before stamping the `Optimization` phase marker.
307 fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
308 // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
309 if tcx.is_constructor(def.did.to_def_id()) {
310 // There's no reason to run all of the MIR passes on constructors when
311 // we can just output the MIR we want directly. This also saves const
312 // qualification and borrow checking the trouble of special casing
314 return shim::build_adt_ctor(tcx, def.did.to_def_id());
// NOTE(review): the `let context = tcx.hir()` opener of this method chain is
// elided in this view.
319 .body_const_context(def.did)
320 .expect("mir_for_ctfe should not be used for runtime functions");
// Clone (not steal): `optimized_mir` also consumes this query's result.
322 let mut body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();
// NOTE(review): the `match context {` opener for the arms below is elided.
325 // Do not const prop functions, either they get executed at runtime or exported to metadata,
326 // so we run const prop on them, or they don't, in which case we const evaluate some control
327 // flow paths of the function and any errors in those paths will get emitted as const eval
329 hir::ConstContext::ConstFn => {}
330 // Static items always get evaluated, so we can just let const eval see if any erroneous
331 // control flow paths get executed.
332 hir::ConstContext::Static(_) => {}
333 // Associated constants get const prop run so we detect common failure situations in the
334 // crate that defined the constant.
335 // Technically we want to not run on regular const items, but oli-obk doesn't know how to
336 // conveniently detect that at this point without looking at the HIR.
337 hir::ConstContext::Const => {
341 &[&const_prop::ConstProp, &marker::PhaseChange(MirPhase::Optimization)],
// Regions should have been erased by this point; CTFE MIR must be region-free.
346 debug_assert!(!body.has_free_regions(tcx), "Free regions in MIR for CTFE");
351 /// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
352 /// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
353 /// end up missing the source MIR due to stealing happening.
354 fn mir_drops_elaborated_and_const_checked<'tcx>(
356 def: ty::WithOptConstParam<LocalDefId>,
357 ) -> &'tcx Steal<Body<'tcx>> {
// Const arguments with a known parent share the parent-aware query result.
358 if let Some(def) = def.try_upgrade(tcx) {
359 return tcx.mir_drops_elaborated_and_const_checked(def);
362 // (Mir-)Borrowck uses `mir_promoted`, so we have to force it to
363 // execute before we can steal.
364 if let Some(param_did) = def.const_param_did {
365 tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
// (else branch, elided above) non-const-arg items use plain borrowck.
367 tcx.ensure().mir_borrowck(def.did);
370 let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
// `is_fn_like` distinguishes functions/closures from consts/statics; its use
// site is elided in this view.
371 let is_fn_like = tcx.hir().get(hir_id).fn_kind().is_some();
373 let did = def.did.to_def_id();
374 let def = ty::WithOptConstParam::unknown(did);
376 // Do not compute the mir call graph without said call graph actually being used.
377 if inline::Inline.is_enabled(&tcx.sess) {
378 let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
// Steal the main body; promoteds stay behind for the `promoted_mir` query.
382 let (body, _) = tcx.mir_promoted(def);
383 let mut body = body.steal();
386 pm::run_passes(tcx, &mut body, &[&remove_false_edges::RemoveFalseEdges]);
388 // Do a little drop elaboration before const-checking if `const_precise_live_drops` is enabled.
389 if check_consts::post_drop_elaboration::checking_enabled(&ConstCx::new(tcx, &body)) {
// NOTE(review): the `pm::run_passes(` opener and an `elaborate_drops` entry
// for this list appear to be elided in this view.
394 &simplify::SimplifyCfg::new("remove-false-edges"),
395 &remove_uninit_drops::RemoveUninitDrops,
398 check_consts::post_drop_elaboration::check_live_drops(tcx, &body); // FIXME: make this a MIR lint
401 run_post_borrowck_cleanup_passes(tcx, &mut body);
// The cleanup passes must leave the body in the `DropLowering` phase.
402 assert!(body.phase == MirPhase::DropLowering);
403 tcx.alloc_steal_mir(body)
406 /// After this series of passes, no lifetime analysis based on borrowing can be done.
407 fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
408 debug!("post_borrowck_cleanup({:?})", body.source.def_id());
// Fixed pipeline of cleanup passes; ordering constraints are documented inline.
410 let post_borrowck_cleanup: &[&dyn MirPass<'tcx>] = &[
411 // Remove all things only needed by analysis
412 &simplify_branches::SimplifyConstCondition::new("initial"),
413 &remove_noop_landing_pads::RemoveNoopLandingPads,
414 &cleanup_post_borrowck::CleanupNonCodegenStatements,
415 &simplify::SimplifyCfg::new("early-opt"),
416 // These next passes must be executed together
417 &add_call_guards::CriticalCallEdges,
418 &elaborate_drops::ElaborateDrops,
419 // This will remove extraneous landing pads which are no longer
420 // necessary as well as well as forcing any call in a non-unwinding
421 // function calling a possibly-unwinding function to abort the process.
422 &abort_unwinding_calls::AbortUnwindingCalls,
423 // AddMovesForPackedDrops needs to run after drop
425 &add_moves_for_packed_drops::AddMovesForPackedDrops,
426 // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
427 // but before optimizations begin.
428 &add_retag::AddRetag,
429 &lower_intrinsics::LowerIntrinsics,
430 &simplify::SimplifyCfg::new("elaborate-drops"),
431 // `Deaggregator` is conceptually part of MIR building, some backends rely on it happening
432 // and it can help optimizations.
433 &deaggregator::Deaggregator,
436 pm::run_passes(tcx, body, post_borrowck_cleanup);
// Runs the full optimization pipeline on `body`, finishing with the
// `Optimization` phase marker and a final MIR dump for testing.
439 fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
// Small helper: wrap a pass so it only runs at `-O1` or higher.
440 fn o1<T>(x: T) -> WithMinOptLevel<T> {
441 WithMinOptLevel(1, x)
444 // Lowering generator control-flow and variables has to happen before we do anything else
445 // to them. We run some optimizations before that, because they may be harder to do on the state
446 // machine than on MIR with async primitives.
// NOTE(review): the `pm::run_passes(` opener for this pre-generator list is
// elided in this view.
451 &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
452 &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first
453 &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
454 &unreachable_prop::UnreachablePropagation,
455 &uninhabited_enum_branching::UninhabitedEnumBranching,
456 &o1(simplify::SimplifyCfg::new("after-uninhabited-enum-branching")),
458 &generator::StateTransform,
// After `StateTransform` the body must be in the `GeneratorLowering` phase.
462 assert!(body.phase == MirPhase::GeneratorLowering);
464 // The main optimizations that we do on MIR.
// NOTE(review): the `pm::run_passes(` opener (and an inlining entry) for
// this main list are elided in this view.
469 &remove_storage_markers::RemoveStorageMarkers,
470 &remove_zsts::RemoveZsts,
471 &const_goto::ConstGoto,
472 &remove_unneeded_drops::RemoveUnneededDrops,
473 &match_branches::MatchBranchSimplification,
474 // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
475 &multiple_return_terminators::MultipleReturnTerminators,
476 &instcombine::InstCombine,
477 &separate_const_switch::SeparateConstSwitch,
479 // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
480 &const_prop::ConstProp,
482 // Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
483 &o1(simplify_branches::SimplifyConstCondition::new("after-const-prop")),
484 &early_otherwise_branch::EarlyOtherwiseBranch,
485 &simplify_comparison_integral::SimplifyComparisonIntegral,
486 &simplify_try::SimplifyArmIdentity,
487 &simplify_try::SimplifyBranchSame,
488 &dest_prop::DestinationPropagation,
489 &o1(simplify_branches::SimplifyConstCondition::new("final")),
490 &o1(remove_noop_landing_pads::RemoveNoopLandingPads),
491 &o1(simplify::SimplifyCfg::new("final")),
492 &nrvo::RenameReturnPlace,
493 &const_debuginfo::ConstDebugInfo,
494 &simplify::SimplifyLocals,
495 &multiple_return_terminators::MultipleReturnTerminators,
496 &deduplicate_blocks::DeduplicateBlocks,
497 // Some cleanup necessary at least for LLVM and potentially other codegen backends.
498 &add_call_guards::CriticalCallEdges,
499 &marker::PhaseChange(MirPhase::Optimization),
500 // Dump the end result for testing and debugging purposes.
501 &dump_mir::Marker("PreCodegen"),
506 /// Optimize the MIR and prepare it for codegen.
507 fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
508 let did = did.expect_local();
// `optimized_mir` must never be called for const arguments; those go through
// the CTFE queries instead.
509 assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
510 tcx.arena.alloc(inner_optimized_mir(tcx, did))
// Implementation behind `optimized_mir`: steals the drops-elaborated body and
// runs the optimization pipeline on it. Constructors short-circuit to a shim.
513 fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
514 if tcx.is_constructor(did.to_def_id()) {
515 // There's no reason to run all of the MIR passes on constructors when
516 // we can just output the MIR we want directly. This also saves const
517 // qualification and borrow checking the trouble of special casing
519 return shim::build_adt_ctor(tcx, did.to_def_id());
522 match tcx.hir().body_const_context(did) {
523 // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
524 // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
525 // computes and caches its result.
526 Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
// NOTE(review): the `None => {}` arm appears to be elided in this view.
528 Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
// NOTE(review): the `let mut body =` opener of this statement is elided.
531 tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
532 run_optimization_passes(tcx, &mut body);
// Regions must be fully erased in optimized MIR.
534 debug_assert!(!body.has_free_regions(tcx), "Free regions in optimized MIR");
539 /// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
540 /// constant evaluation once all substitutions become known.
541 fn promoted_mir<'tcx>(
543 def: ty::WithOptConstParam<LocalDefId>,
544 ) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
// Constructors never have promoteds; return an empty (arena-allocated) vec.
545 if tcx.is_constructor(def.did.to_def_id()) {
546 return tcx.arena.alloc(IndexVec::new());
// Borrowck consumes `mir_promoted`, so force it before stealing below.
549 if let Some(param_did) = def.const_param_did {
550 tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
// (else branch, elided above) non-const-arg items use plain borrowck.
552 tcx.ensure().mir_borrowck(def.did);
554 let (_, promoted) = tcx.mir_promoted(def);
555 let mut promoted = promoted.steal();
// Each promoted body gets the same post-borrowck cleanup as the main body.
557 for body in &mut promoted {
558 run_post_borrowck_cleanup_passes(tcx, body);
561 debug_assert!(!promoted.has_free_regions(tcx), "Free regions in promoted MIR");
563 tcx.arena.alloc(promoted)