1 #![allow(rustc::potential_query_instability)]
2 #![feature(box_patterns)]
3 #![feature(drain_filter)]
4 #![feature(let_chains)]
5 #![feature(map_try_insert)]
6 #![feature(min_specialization)]
7 #![feature(never_type)]
9 #![feature(option_get_or_insert_default)]
10 #![feature(trusted_step)]
11 #![feature(try_blocks)]
12 #![feature(yeet_expr)]
13 #![feature(if_let_guard)]
14 #![recursion_limit = "256"]
19 extern crate rustc_middle;
21 use required_consts::RequiredConstsVisitor;
22 use rustc_const_eval::util;
23 use rustc_data_structures::fx::FxIndexSet;
24 use rustc_data_structures::steal::Steal;
26 use rustc_hir::def_id::{DefId, LocalDefId};
27 use rustc_hir::intravisit::{self, Visitor};
28 use rustc_index::vec::IndexVec;
29 use rustc_middle::mir::visit::Visitor as _;
30 use rustc_middle::mir::{
31 traversal, AnalysisPhase, Body, ConstQualifs, Constant, LocalDecl, MirPass, MirPhase, Operand,
32 Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, SourceInfo, Statement, StatementKind,
35 use rustc_middle::ty::query::Providers;
36 use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
42 use pass_manager::{self as pm, Lint, MirLint, WithMinOptLevel};
44 mod abort_unwinding_calls;
46 mod add_moves_for_packed_drops;
48 mod check_const_item_mutation;
50 pub mod check_unsafety;
51 // This pass is public to allow external drivers to perform MIR cleanup
52 pub mod cleanup_post_borrowck;
60 mod dataflow_const_prop;
61 mod dead_store_elimination;
63 mod deduce_param_attrs;
64 mod deduplicate_blocks;
68 mod early_otherwise_branch;
69 mod elaborate_box_derefs;
72 mod function_item_references;
79 mod multiple_return_terminators;
80 mod normalize_array_len;
82 mod remove_noop_landing_pads;
83 mod remove_storage_markers;
84 mod remove_uninit_drops;
85 mod remove_unneeded_drops;
89 mod separate_const_switch;
92 // This pass is public to allow external drivers to perform MIR cleanup
94 mod simplify_branches;
95 mod simplify_comparison_integral;
97 mod uninhabited_enum_branching;
100 use rustc_const_eval::transform::check_consts::{self, ConstCx};
101 use rustc_const_eval::transform::promote_consts;
102 use rustc_const_eval::transform::validate;
103 use rustc_mir_dataflow::rustc_peek;
// Crate entry point: installs all MIR-transform query providers.
// The sub-modules register their own queries first; the `Providers`
// struct-update below then fills in the providers implemented in this file.
105 pub fn provide(providers: &mut Providers) {
106 check_unsafety::provide(providers);
107 check_packed_ref::provide(providers);
108 coverage::query::provide(providers);
109 ffi_unwind_calls::provide(providers);
110 shim::provide(providers);
111 *providers = Providers {
// `mir_const_qualif` for a plain item; anonymous const arguments are
// redirected to the `_const_arg` variant so they are keyed with the
// correct `WithOptConstParam`.
114 mir_const_qualif: |tcx, def_id| {
115 let def_id = def_id.expect_local();
116 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
117 tcx.mir_const_qualif_const_arg(def)
119 mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
122 mir_const_qualif_const_arg: |tcx, (did, param_did)| {
123 mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
126 mir_drops_elaborated_and_const_checked,
128 mir_for_ctfe_of_const_arg,
129 mir_generator_witnesses: generator::mir_generator_witnesses,
// CTFE availability is the same check as plain MIR availability.
132 is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
133 mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable,
134 mir_inliner_callees: inline::cycle::mir_inliner_callees,
// Same const-arg split as `mir_const_qualif` above.
135 promoted_mir: |tcx, def_id| {
136 let def_id = def_id.expect_local();
137 if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
138 tcx.promoted_mir_of_const_arg(def)
140 promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
143 promoted_mir_of_const_arg: |tcx, (did, param_did)| {
144 promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
146 deduced_param_attrs: deduce_param_attrs::deduced_param_attrs,
// Rewrites every call to the `const_eval_select` intrinsic into a direct
// call to one of its two function arguments: the compile-time one when
// `context` is `Const`, the runtime one otherwise. The tupled argument is
// exploded into per-field operands so the chosen function is called with
// its ordinary (untupled) signature.
151 fn remap_mir_for_const_eval_select<'tcx>(
153 mut body: Body<'tcx>,
154 context: hir::Constness,
156 for bb in body.basic_blocks.as_mut().iter_mut() {
157 let terminator = bb.terminator.as_mut().expect("invalid terminator");
158 match terminator.kind {
// Only rewrite calls whose callee is literally the
// `const_eval_select` intrinsic.
159 TerminatorKind::Call {
160 func: Operand::Constant(box Constant { ref literal, .. }),
167 } if let ty::FnDef(def_id, _) = *literal.ty().kind()
168 && tcx.item_name(def_id) == sym::const_eval_select
169 && tcx.is_intrinsic(def_id) =>
// The intrinsic takes exactly three arguments: the tupled call
// arguments and the two candidate functions.
171 let [tupled_args, called_in_const, called_at_rt]: [_; 3] = std::mem::take(args).try_into().unwrap();
172 let ty = tupled_args.ty(&body.local_decls, tcx);
173 let fields = ty.tuple_fields();
174 let num_args = fields.len();
// Pick which of the two candidates actually gets called in this context.
175 let func = if context == hir::Constness::Const { called_in_const } else { called_at_rt };
// Determine a place holding the tuple, and whether reads of its
// fields should be `Move`s or `Copy`s.
176 let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) = match tupled_args {
177 Operand::Constant(_) => {
178 // there is no good way of extracting a tuple arg from a constant (const generic stuff)
179 // so we just create a temporary and deconstruct that.
180 let local = body.local_decls.push(LocalDecl::new(ty, fn_span));
181 bb.statements.push(Statement {
182 source_info: SourceInfo::outermost(fn_span),
183 kind: StatementKind::Assign(Box::new((local.into(), Rvalue::Use(tupled_args.clone())))),
185 (Operand::Move, local.into())
187 Operand::Move(place) => (Operand::Move, place),
188 Operand::Copy(place) => (Operand::Copy, place),
// Project out each tuple field to build the untupled argument list.
190 let place_elems = place.projection;
191 let arguments = (0..num_args).map(|x| {
192 let mut place_elems = place_elems.to_vec();
193 place_elems.push(ProjectionElem::Field(x.into(), fields[x]));
194 let projection = tcx.intern_place_elems(&place_elems);
// Replace the intrinsic call with a direct call to the selected
// function. NOTE(review): `from_hir_call: false` — this call no longer
// corresponds to a user-written call expression.
201 terminator.kind = TerminatorKind::Call { func, args: arguments, destination, target, cleanup, from_hir_call: false, fn_span };
// Returns whether `def_id` (which must belong to the local crate) has MIR
// associated with it, by membership in the `mir_keys` set.
209 fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
210 let def_id = def_id.expect_local();
211 tcx.mir_keys(()).contains(&def_id)
214 /// Finds the full set of `DefId`s within the current crate that have
215 /// MIR associated with them.
216 fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet<LocalDefId> {
217 let mut set = FxIndexSet::default();
219 // All body-owners have MIR associated with them.
220 set.extend(tcx.hir().body_owners());
222 // Additionally, tuple struct/variant constructors have MIR, but
223 // they don't have a BodyId, so we need to build them separately.
224 struct GatherCtors<'a> {
225 set: &'a mut FxIndexSet<LocalDefId>,
227 impl<'tcx> Visitor<'tcx> for GatherCtors<'_> {
// Record the synthesized constructor `DefId` of every tuple
// struct/variant encountered while walking the crate.
228 fn visit_variant_data(&mut self, v: &'tcx hir::VariantData<'tcx>) {
229 if let hir::VariantData::Tuple(_, _, def_id) = *v {
230 self.set.insert(def_id);
232 intravisit::walk_struct_def(self, v)
235 tcx.hir().visit_all_item_likes_in_crate(&mut GatherCtors { set: &mut set });
// Computes the `ConstQualifs` of a body by running the const-checker over
// the `mir_const` MIR. Non-const bodies and bodies whose return type
// already references an error produce the default (empty) qualifs.
240 fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
241 let const_kind = tcx.hir().body_const_context(def.did);
243 // No need to const-check a non-const `fn`.
244 if const_kind.is_none() {
245 return Default::default();
248 // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
249 // cannot yet be stolen), because `mir_promoted()`, which steals
250 // from `mir_const()`, forces this query to execute before
251 // performing the steal.
252 let body = &tcx.mir_const(def).borrow();
254 if body.return_ty().references_error() {
255 tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
256 return Default::default();
259 let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };
261 let mut validator = check_consts::check::Checker::new(&ccx);
262 validator.check_body();
264 // We return the qualifs in the return place for every MIR body, even though it is only used
265 // when deciding to promote a reference to a `const` for now.
266 validator.qualifs_in_return_place()
269 /// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
270 /// FIXME(oli-obk): it's unclear whether we still need this phase (and its corresponding query).
271 /// We used to have this for pre-miri MIR based const eval.
272 fn mir_const(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> &Steal<Body<'_>> {
// If the key can be upgraded to carry its const-param, re-run the query
// with the upgraded key instead.
273 if let Some(def) = def.try_upgrade(tcx) {
274 return tcx.mir_const(def);
277 // Unsafety check uses the raw mir, so make sure it is run.
278 if !tcx.sess.opts.unstable_opts.thir_unsafeck {
279 if let Some(param_did) = def.const_param_did {
280 tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
282 tcx.ensure().unsafety_check_result(def.did);
286 // has_ffi_unwind_calls query uses the raw mir, so make sure it is run.
287 tcx.ensure().has_ffi_unwind_calls(def.did);
// Steal the freshly-built MIR and run the early lints / initial cleanup on it.
289 let mut body = tcx.mir_built(def).steal();
291 pass_manager::dump_mir_for_phase_change(tcx, &body);
298 &Lint(check_packed_ref::CheckPackedRef),
299 &Lint(check_const_item_mutation::CheckConstItemMutation),
300 &Lint(function_item_references::FunctionItemReferences),
301 // What we need to do constant evaluation.
302 &simplify::SimplifyCfg::new("initial"),
303 &rustc_peek::SanityCheck, // Just a lint
307 tcx.alloc_steal_mir(body)
310 /// Compute the main MIR body and the list of MIR bodies of the promoteds.
313 def: ty::WithOptConstParam<LocalDefId>,
314 ) -> (&Steal<Body<'_>>, &Steal<IndexVec<Promoted, Body<'_>>>) {
// Re-run with the upgraded key if this is really a const argument.
315 if let Some(def) = def.try_upgrade(tcx) {
316 return tcx.mir_promoted(def);
319 // Ensure that we compute the `mir_const_qualif` for constants at
320 // this point, before we steal the mir-const result.
321 // Also this means promotion can rely on all const checks having been done.
322 let const_qualifs = tcx.mir_const_qualif_opt_const_arg(def);
323 let mut body = tcx.mir_const(def).steal();
// Propagate taint from const-checking errors into the body.
324 if let Some(error_reported) = const_qualifs.tainted_by_errors {
325 body.tainted_by_errors = Some(error_reported);
// Collect the constants mentioned in the body (in reverse postorder)
// into `body.required_consts`.
328 let mut required_consts = Vec::new();
329 let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
330 for (bb, bb_data) in traversal::reverse_postorder(&body) {
331 required_consts_visitor.visit_basic_block_data(bb, bb_data);
333 body.required_consts = required_consts;
335 // What we need to run borrowck etc.
336 let promote_pass = promote_consts::PromoteTemps::default();
342 &simplify::SimplifyCfg::new("promote-consts"),
343 &coverage::InstrumentCoverage,
345 Some(MirPhase::Analysis(AnalysisPhase::Initial)),
// The promotion pass accumulated the promoted fragments; take them out
// and allocate both results as stealable query values.
348 let promoted = promote_pass.promoted_fragments.into_inner();
349 (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
352 /// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
353 fn mir_for_ctfe(tcx: TyCtxt<'_>, def_id: DefId) -> &Body<'_> {
354 let did = def_id.expect_local();
// Const arguments go through a dedicated query (see
// `mir_for_ctfe_of_const_arg` for the cycle-avoidance rationale).
355 if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
356 tcx.mir_for_ctfe_of_const_arg(def)
358 tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
362 /// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
363 /// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
364 /// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
365 /// the const parameter while type checking the main body, which in turn would try
366 /// to type check the main body again.
367 fn mir_for_ctfe_of_const_arg(tcx: TyCtxt<'_>, (did, param_did): (LocalDefId, DefId)) -> &Body<'_> {
// Arena-allocate the computed body so the query can hand out a reference.
368 tcx.arena.alloc(inner_mir_for_ctfe(
370 ty::WithOptConstParam { did, const_param_did: Some(param_did) },
// Shared implementation of `mir_for_ctfe` / `mir_for_ctfe_of_const_arg`:
// produces the body the const evaluator runs (no optimizations, except
// const-prop for associated constants — see the match below).
374 fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
375 // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
376 if tcx.is_constructor(def.did.to_def_id()) {
377 // There's no reason to run all of the MIR passes on constructors when
378 // we can just output the MIR we want directly. This also saves const
379 // qualification and borrow checking the trouble of special casing
381 return shim::build_adt_ctor(tcx, def.did.to_def_id());
386 .body_const_context(def.did)
387 .expect("mir_for_ctfe should not be used for runtime functions");
// Clone rather than steal the drops-elaborated MIR — `optimized_mir`
// steals it later (see `inner_optimized_mir`).
389 let body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();
// CTFE always takes the compile-time side of `const_eval_select`.
391 let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::Const);
394 // Do not const prop functions, either they get executed at runtime or exported to metadata,
395 // so we run const prop on them, or they don't, in which case we const evaluate some control
396 // flow paths of the function and any errors in those paths will get emitted as const eval
398 hir::ConstContext::ConstFn => {}
399 // Static items always get evaluated, so we can just let const eval see if any erroneous
400 // control flow paths get executed.
401 hir::ConstContext::Static(_) => {}
402 // Associated constants get const prop run so we detect common failure situations in the
403 // crate that defined the constant.
404 // Technically we want to not run on regular const items, but oli-obk doesn't know how to
405 // conveniently detect that at this point without looking at the HIR.
406 hir::ConstContext::Const => {
410 &[&const_prop::ConstProp],
411 Some(MirPhase::Runtime(RuntimePhase::Optimized)),
// NOTE(review): `CtfeLimit` presumably instruments the body for the CTFE
// step limit — confirm in the `ctfe_limit` module.
416 pm::run_passes(tcx, &mut body, &[&ctfe_limit::CtfeLimit], None);
418 debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");
423 /// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
424 /// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
425 /// end up missing the source MIR due to stealing happening.
426 fn mir_drops_elaborated_and_const_checked(
428 def: ty::WithOptConstParam<LocalDefId>,
429 ) -> &Steal<Body<'_>> {
430 if let Some(def) = def.try_upgrade(tcx) {
431 return tcx.mir_drops_elaborated_and_const_checked(def);
// When MIR-based generator drop tracking is enabled, force the generator
// witness computation for generators first.
434 if tcx.generator_kind(def.did).is_some() && tcx.sess.opts.unstable_opts.drop_tracking_mir {
435 tcx.ensure().mir_generator_witnesses(def.did);
// Run borrowck now — before `mir_promoted` is stolen below.
437 let mir_borrowck = tcx.mir_borrowck_opt_const_arg(def);
439 let is_fn_like = tcx.def_kind(def.did).is_fn_like();
441 let did = def.did.to_def_id();
442 let def = ty::WithOptConstParam::unknown(did);
444 // Do not compute the mir call graph without said call graph actually being used.
445 if inline::Inline.is_enabled(&tcx.sess) {
446 let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
450 let (body, _) = tcx.mir_promoted(def);
451 let mut body = body.steal();
// Propagate borrowck taint so downstream consumers know the body had errors.
452 if let Some(error_reported) = mir_borrowck.tainted_by_errors {
453 body.tainted_by_errors = Some(error_reported);
456 run_analysis_to_runtime_passes(tcx, &mut body);
458 tcx.alloc_steal_mir(body)
// Lowers a body from `Analysis(Initial)` all the way to
// `Runtime(PostCleanup)`, asserting the expected `MirPhase` after each stage.
461 fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
462 assert!(body.phase == MirPhase::Analysis(AnalysisPhase::Initial));
463 let did = body.source.def_id();
465 debug!("analysis_mir_cleanup({:?})", did);
466 run_analysis_cleanup_passes(tcx, body);
467 assert!(body.phase == MirPhase::Analysis(AnalysisPhase::PostCleanup));
469 // Do a little drop elaboration before const-checking if `const_precise_live_drops` is enabled.
470 if check_consts::post_drop_elaboration::checking_enabled(&ConstCx::new(tcx, &body)) {
475 &remove_uninit_drops::RemoveUninitDrops,
476 &simplify::SimplifyCfg::new("remove-false-edges"),
480 check_consts::post_drop_elaboration::check_live_drops(tcx, &body); // FIXME: make this a MIR lint
483 debug!("runtime_mir_lowering({:?})", did);
484 run_runtime_lowering_passes(tcx, body);
485 assert!(body.phase == MirPhase::Runtime(RuntimePhase::Initial));
487 debug!("runtime_mir_cleanup({:?})", did);
488 run_runtime_cleanup_passes(tcx, body);
489 assert!(body.phase == MirPhase::Runtime(RuntimePhase::PostCleanup));
492 // FIXME(JakobDegen): Can we make these lists of passes consts?
494 /// After this series of passes, no lifetime analysis based on borrowing can be done.
495 fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
496 let passes: &[&dyn MirPass<'tcx>] = &[
497 &cleanup_post_borrowck::CleanupPostBorrowck,
498 &remove_noop_landing_pads::RemoveNoopLandingPads,
499 &simplify::SimplifyCfg::new("early-opt"),
500 &deref_separator::Derefer,
// Advance the body to `Analysis(PostCleanup)` once the passes have run.
503 pm::run_passes(tcx, body, passes, Some(MirPhase::Analysis(AnalysisPhase::PostCleanup)));
506 /// Runs the sequence of passes that lowers analysis to runtime MIR.
507 fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
508 let passes: &[&dyn MirPass<'tcx>] = &[
509 // These next passes must be executed together
510 &add_call_guards::CriticalCallEdges,
511 &elaborate_drops::ElaborateDrops,
512 // This will remove extraneous landing pads which are no longer
513 // necessary, as well as forcing any call in a non-unwinding
514 // function calling a possibly-unwinding function to abort the process.
515 &abort_unwinding_calls::AbortUnwindingCalls,
516 // AddMovesForPackedDrops needs to run after drop
518 &add_moves_for_packed_drops::AddMovesForPackedDrops,
519 // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
520 // but before optimizations begin.
521 &elaborate_box_derefs::ElaborateBoxDerefs,
522 &generator::StateTransform,
523 &add_retag::AddRetag,
524 // Deaggregator is necessary for const prop. We may want to consider implementing
525 // CTFE support for aggregates.
526 &deaggregator::Deaggregator,
527 &Lint(const_prop_lint::ConstProp),
// NOTE(review): validation is deliberately skipped here
// (`run_passes_no_validate`) — presumably the body is not yet valid
// runtime MIR until the subsequent cleanup passes have also run; confirm.
529 pm::run_passes_no_validate(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::Initial)));
532 /// Runs the sequence of passes that do the initial cleanup of runtime MIR.
533 fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
534 let passes: &[&dyn MirPass<'tcx>] =
535 &[&lower_intrinsics::LowerIntrinsics, &simplify::SimplifyCfg::new("elaborate-drops")];
// Advance the body to `Runtime(PostCleanup)` once the passes have run.
537 pm::run_passes(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::PostCleanup)));
// Runs the main MIR optimization pipeline, leaving the body in the
// `Runtime(Optimized)` phase. Pass ordering below is load-bearing — see
// the per-pass comments.
540 fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
// Helper: wrap a pass so it only runs at mir-opt-level >= 1.
541 fn o1<T>(x: T) -> WithMinOptLevel<T> {
542 WithMinOptLevel(1, x)
545 // The main optimizations that we do on MIR.
550 &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
551 &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first
552 &unreachable_prop::UnreachablePropagation,
553 &uninhabited_enum_branching::UninhabitedEnumBranching,
554 &o1(simplify::SimplifyCfg::new("after-uninhabited-enum-branching")),
556 &remove_storage_markers::RemoveStorageMarkers,
557 &remove_zsts::RemoveZsts,
558 &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
559 &const_goto::ConstGoto,
560 &remove_unneeded_drops::RemoveUnneededDrops,
561 &sroa::ScalarReplacementOfAggregates,
562 &match_branches::MatchBranchSimplification,
563 // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
564 &multiple_return_terminators::MultipleReturnTerminators,
565 &instcombine::InstCombine,
566 &separate_const_switch::SeparateConstSwitch,
567 &simplify::SimplifyLocals::new("before-const-prop"),
// NOTE(review): the line below is character-corrupted — `©_prop`
// appears to be an HTML-entity mangling of `&copy_prop`; restore it.
568 ©_prop::CopyProp,
570 // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
571 &const_prop::ConstProp,
572 &dataflow_const_prop::DataflowConstProp,
574 // Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
575 &const_debuginfo::ConstDebugInfo,
576 &o1(simplify_branches::SimplifyConstCondition::new("after-const-prop")),
577 &early_otherwise_branch::EarlyOtherwiseBranch,
578 &simplify_comparison_integral::SimplifyComparisonIntegral,
579 &dead_store_elimination::DeadStoreElimination,
580 &dest_prop::DestinationPropagation,
581 &o1(simplify_branches::SimplifyConstCondition::new("final")),
582 &o1(remove_noop_landing_pads::RemoveNoopLandingPads),
583 &o1(simplify::SimplifyCfg::new("final")),
584 &nrvo::RenameReturnPlace,
585 &simplify::SimplifyLocals::new("final"),
586 &multiple_return_terminators::MultipleReturnTerminators,
587 &deduplicate_blocks::DeduplicateBlocks,
588 // Some cleanup necessary at least for LLVM and potentially other codegen backends.
589 &add_call_guards::CriticalCallEdges,
590 // Dump the end result for testing and debugging purposes.
591 &dump_mir::Marker("PreCodegen"),
593 Some(MirPhase::Runtime(RuntimePhase::Optimized)),
597 /// Optimize the MIR and prepare it for codegen.
598 fn optimized_mir(tcx: TyCtxt<'_>, did: DefId) -> &Body<'_> {
599 let did = did.expect_local();
// This query must never be used for const arguments (those would have a
// `WithOptConstParam` lookup result); assert that invariant here.
600 assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
601 tcx.arena.alloc(inner_optimized_mir(tcx, did))
// Implementation of `optimized_mir`: steals the drops-elaborated MIR and
// runs the full optimization pipeline on it.
604 fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
605 if tcx.is_constructor(did.to_def_id()) {
606 // There's no reason to run all of the MIR passes on constructors when
607 // we can just output the MIR we want directly. This also saves const
608 // qualification and borrow checking the trouble of special casing
610 return shim::build_adt_ctor(tcx, did.to_def_id());
613 match tcx.hir().body_const_context(did) {
614 // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
615 // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
616 // computes and caches its result.
617 Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
619 Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
621 debug!("about to call mir_drops_elaborated...");
623 tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
// Runtime code takes the runtime side of `const_eval_select`.
624 let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::NotConst);
625 debug!("body: {:#?}", body);
626 run_optimization_passes(tcx, &mut body);
628 debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");
633 /// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
634 /// constant evaluation once all substitutions become known.
637 def: ty::WithOptConstParam<LocalDefId>,
638 ) -> &IndexVec<Promoted, Body<'_>> {
// Constructors have no promoteds.
639 if tcx.is_constructor(def.did.to_def_id()) {
640 return tcx.arena.alloc(IndexVec::new());
// Run borrowck first (to learn whether the item is tainted by errors),
// then steal the promoted bodies out of `mir_promoted`.
643 let tainted_by_errors = tcx.mir_borrowck_opt_const_arg(def).tainted_by_errors;
644 let mut promoted = tcx.mir_promoted(def).1.steal();
646 for body in &mut promoted {
647 if let Some(error_reported) = tainted_by_errors {
648 body.tainted_by_errors = Some(error_reported);
// Each promoted body goes through the same analysis-to-runtime lowering
// as the main body.
650 run_analysis_to_runtime_passes(tcx, body);
653 debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");
655 tcx.arena.alloc(promoted)