1 //! Propagates constants for early reporting of statically known
4 use crate::const_prop::CanConstProp;
5 use crate::const_prop::ConstPropMachine;
6 use crate::const_prop::ConstPropMode;
8 use rustc_const_eval::const_eval::ConstEvalErr;
9 use rustc_const_eval::interpret::{
10 self, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, Scalar,
11 ScalarMaybeUninit, StackPopCleanup,
13 use rustc_hir::def::DefKind;
15 use rustc_index::bit_set::BitSet;
16 use rustc_index::vec::IndexVec;
17 use rustc_middle::mir::visit::Visitor;
18 use rustc_middle::mir::{
19 AssertKind, BinOp, Body, Constant, ConstantKind, Local, LocalDecl, Location, Operand, Place,
20 Rvalue, SourceInfo, SourceScope, SourceScopeData, Statement, StatementKind, Terminator,
21 TerminatorKind, UnOp, RETURN_PLACE,
23 use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
24 use rustc_middle::ty::subst::{InternalSubsts, Subst};
25 use rustc_middle::ty::{
26 self, ConstInt, ConstKind, EarlyBinder, Instance, ParamEnv, ScalarInt, Ty, TyCtxt,
29 use rustc_session::lint;
31 use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
32 use rustc_trait_selection::traits;
35 /// The maximum number of bytes that we'll allocate space for a local or the return value.
36 /// Needed for #66397, because otherwise we eval into large places and that can cause OOM or just
37 /// severely regress performance.
38 const MAX_ALLOC_LIMIT: u64 = 1024;
41 impl<'tcx> MirLint<'tcx> for ConstProp {
// Lint entry point: decides whether const-prop linting is applicable to `body`
// (skipping promoteds, non-fn-like items, generators, and bodies with
// unsatisfiable bounds), then walks the body with a `ConstPropagator` visitor.
// This pass only reports lints; it does not mutate the MIR.
42 fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
43 // will be evaluated by miri and produce its errors there
44 if body.source.promoted.is_some() {
48 let def_id = body.source.def_id().expect_local();
49 let is_fn_like = tcx.def_kind(def_id).is_fn_like();
50 let is_assoc_const = tcx.def_kind(def_id) == DefKind::AssocConst;
52 // Only run const prop on functions, methods, closures and associated constants
53 if !is_fn_like && !is_assoc_const {
54 // skip anon_const/statics/consts because they'll be evaluated by miri anyway
55 trace!("ConstProp skipped for {:?}", def_id);
59 let is_generator = tcx.type_of(def_id.to_def_id()).is_generator();
60 // FIXME(welseywiser) const prop doesn't work on generators because of query cycles
61 // computing their layout.
63 trace!("ConstProp skipped for generator {:?}", def_id);
67 // Check if it's even possible to satisfy the 'where' clauses
69 // This branch will never be taken for any normal function.
70 // However, it's possible to use `#![feature(trivial_bounds)]` to write
71 // a function with impossible to satisfy clauses, e.g.:
72 // `fn foo() where String: Copy {}`
74 // We don't usually need to worry about this kind of case,
75 // since we would get a compilation error if the user tried
76 // to call it. However, since we can do const propagation
77 // even without any calls to the function, we need to make
78 // sure that it even makes sense to try to evaluate the body.
79 // If there are unsatisfiable where clauses, then all bets are
80 // off, and we just give up.
82 // We manually filter the predicates, skipping anything that's not
83 // "global". We are in a potentially generic context
84 // (e.g. we are evaluating a function without substituting generic
85 // parameters), so this filtering serves two purposes:
87 // 1. We skip evaluating any predicates that we would
88 // never be able to prove are unsatisfiable (e.g. `<T as Foo>`
89 // 2. We avoid trying to normalize predicates involving generic
90 // parameters (e.g. `<T as Foo>::MyItem`). This can confuse
91 // the normalization code (leading to cycle errors), since
92 // it's usually never invoked in this way.
94 .predicates_of(def_id.to_def_id())
97 .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None });
98 if traits::impossible_predicates(
100 traits::elaborate_predicates(tcx, predicates).map(|o| o.predicate).collect(),
102 trace!("ConstProp skipped for {:?}: found unsatisfiable predicates", def_id);
106 trace!("ConstProp starting for {:?}", def_id);
// A throw-away clone of the body: the propagator borrows its scopes/decls
// while the real `body` is walked by the visitor.
108 let dummy_body = &Body::new(
110 body.basic_blocks().clone(),
111 body.source_scopes.clone(),
112 body.local_decls.clone(),
117 body.generator_kind(),
118 body.tainted_by_errors,
121 // FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
122 // constants, instead of just checking for const-folding succeeding.
123 // That would require a uniform one-def no-mutation analysis
124 // and RPO (or recursing when needing the value of a local).
125 let mut optimization_finder = ConstPropagator::new(body, dummy_body, tcx);
126 optimization_finder.visit_body(body);
128 trace!("ConstProp done for {:?}", def_id);
132 /// Finds optimization opportunities on the MIR.
133 struct ConstPropagator<'mir, 'tcx> {
// The const-eval interpreter instance used to actually evaluate MIR fragments.
134 ecx: InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>,
136 param_env: ParamEnv<'tcx>,
// Borrowed from the dummy body (see `new`); used to resolve lint roots.
137 source_scopes: &'mir IndexVec<SourceScope, SourceScopeData<'tcx>>,
138 local_decls: &'mir IndexVec<Local, LocalDecl<'tcx>>,
139 // Because we have `MutVisitor` we can't obtain the `SourceInfo` from a `Location`. So we store
140 // the last known `SourceInfo` here and just keep revisiting it.
141 source_info: Option<SourceInfo>,
// Layout queries on the propagator return the error instead of aborting:
// const-prop must silently give up on layout failures, not ICE.
144 impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> {
145 type LayoutOfResult = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>;
148 fn handle_layout_err(&self, err: LayoutError<'tcx>, _: Span, _: Ty<'tcx>) -> LayoutError<'tcx> {
// Forward the target data layout from the `TyCtxt`.
153 impl HasDataLayout for ConstPropagator<'_, '_> {
155 fn data_layout(&self) -> &TargetDataLayout {
156 &self.tcx.data_layout
// Expose the `TyCtxt` so layout helper traits can use it.
160 impl<'tcx> ty::layout::HasTyCtxt<'tcx> for ConstPropagator<'_, 'tcx> {
162 fn tcx(&self) -> TyCtxt<'tcx> {
// Expose the stored `ParamEnv` for layout/trait queries.
167 impl<'tcx> ty::layout::HasParamEnv<'tcx> for ConstPropagator<'_, 'tcx> {
169 fn param_env(&self) -> ty::ParamEnv<'tcx> {
174 impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// Builds a propagator for `body`: computes per-local propagation modes,
// creates an interpreter with a `ConstPropMachine`, allocates a (small)
// return place, and pushes an initial stack frame for the body's instance.
// `dummy_body` lends out its scopes/decls for the propagator's lifetime.
177 dummy_body: &'mir Body<'tcx>,
179 ) -> ConstPropagator<'mir, 'tcx> {
180 let def_id = body.source.def_id();
181 let substs = &InternalSubsts::identity_for_item(tcx, def_id);
182 let param_env = tcx.param_env_reveal_all_normalized(def_id);
// Dataflow-free analysis deciding, per local, how far propagation may go.
184 let can_const_prop = CanConstProp::check(tcx, param_env, body);
185 let mut only_propagate_inside_block_locals = BitSet::new_empty(can_const_prop.len());
186 for (l, mode) in can_const_prop.iter_enumerated() {
187 if *mode == ConstPropMode::OnlyInsideOwnBlock {
188 only_propagate_inside_block_locals.insert(l);
191 let mut ecx = InterpCx::new(
193 tcx.def_span(def_id),
195 ConstPropMachine::new(only_propagate_inside_block_locals, can_const_prop),
199 .layout_of(EarlyBinder(body.return_ty()).subst(tcx, substs))
201 // Don't bother allocating memory for large values.
202 // I don't know how return types can seem to be unsized but this happens in the
203 // `type/type-unsatisfiable.rs` test.
204 .filter(|ret_layout| {
// Cap the return place at MAX_ALLOC_LIMIT bytes (see #66397).
205 !ret_layout.is_unsized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
207 .unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
210 .allocate(ret_layout, MemoryKind::Stack)
211 .expect("couldn't perform small allocation")
214 ecx.push_stack_frame(
215 Instance::new(def_id, substs),
218 StackPopCleanup::Root { cleanup: false },
220 .expect("failed to push initial stack frame");
226 source_scopes: &dummy_body.source_scopes,
227 local_decls: &dummy_body.local_decls,
// Returns the currently-known constant value of `place`, if the interpreter
// can evaluate it; `None` (after tracing the error) otherwise.
232 fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> {
233 let op = match self.ecx.eval_place_to_op(place, None) {
236 trace!("get_const failed: {}", e);
241 // Try to read the local as an immediate so that if it is representable as a scalar, we can
242 // handle it as such, but otherwise, just return the value as is.
243 Some(match self.ecx.read_immediate_raw(&op, /*force*/ false) {
244 Ok(Ok(imm)) => imm.into(),
249 /// Remove `local` from the pool of `Locals`. Allows writing to them,
250 /// but not reading from them anymore.
251 fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
// Resetting to an uninit immediate "forgets" any previously propagated value.
252 ecx.frame_mut().locals[local] = LocalState {
253 value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)),
254 layout: Cell::new(None),
// Resolves the HIR node whose lint attributes govern this source location.
258 fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> {
259 source_info.scope.lint_root(self.source_scopes)
// Runs an interpreter operation `f`, converting interpreter errors into
// `None` (with tracing) so callers can simply bail out of propagation.
262 fn use_ecx<F, T>(&mut self, source_info: SourceInfo, f: F) -> Option<T>
264 F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
266 // Overwrite the PC -- whatever the interpreter does to it does not make any sense anyway.
267 self.ecx.frame_mut().loc = Err(source_info.span);
269 Ok(val) => Some(val),
271 trace!("InterpCx operation failed: {:?}", error);
272 // Some errors shouldn't come up because creating them causes
273 // an allocation, which we should avoid. When that happens,
274 // dedicated error variants should be introduced instead.
276 !error.kind().formatted_string(),
277 "const-prop encountered formatting error: {}",
285 /// Returns the value, if any, of evaluating `c`.
286 fn eval_constant(&mut self, c: &Constant<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
287 // FIXME we need to revisit this for #67176
292 match self.ecx.mir_const_to_op(&c.literal, None) {
// On evaluation failure, decide whether to report a hard error or only a
// lint: promoteds and still-generic constants must lint for back-compat.
295 let tcx = self.ecx.tcx.at(c.span);
296 let err = ConstEvalErr::new(&self.ecx, error, Some(c.span));
297 if let Some(lint_root) = self.lint_root(source_info) {
298 let lint_only = match c.literal {
299 ConstantKind::Ty(ct) => match ct.kind() {
300 // Promoteds must lint and not error as the user didn't ask for them
301 ConstKind::Unevaluated(ty::Unevaluated {
306 // Out of backwards compatibility we cannot report hard errors in unused
307 // generic functions using associated constants of the generic parameters.
308 _ => c.literal.needs_subst(),
310 ConstantKind::Val(_, ty) => ty.needs_subst(),
313 // Out of backwards compatibility we cannot report hard errors in unused
314 // generic functions using associated constants of the generic parameters.
315 err.report_as_lint(tcx, "erroneous constant used", lint_root, Some(c.span));
317 err.report_as_error(tcx, "erroneous constant used");
// No lint root available: fall back to a hard error.
320 err.report_as_error(tcx, "erroneous constant used");
327 /// Returns the value, if any, of evaluating `place`.
328 fn eval_place(&mut self, place: Place<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
329 trace!("eval_place(place={:?})", place);
330 self.use_ecx(source_info, |this| this.ecx.eval_place_to_op(place, None))
333 /// Returns the value, if any, of evaluating `op`. Calls upon `eval_constant`
334 /// or `eval_place`, depending on the variant of `Operand` used.
335 fn eval_operand(&mut self, op: &Operand<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
337 Operand::Constant(ref c) => self.eval_constant(c, source_info),
338 Operand::Move(place) | Operand::Copy(place) => self.eval_place(place, source_info),
// Emits `lint` at the lint root of `source_info`, labelling the span with the
// Debug rendering of the would-be panic `AssertKind`. Does nothing if no lint
// root can be resolved.
342 fn report_assert_as_lint(
344 lint: &'static lint::Lint,
345 source_info: SourceInfo,
346 message: &'static str,
347 panic: AssertKind<impl std::fmt::Debug>,
349 if let Some(lint_root) = self.lint_root(source_info) {
350 self.tcx.struct_span_lint_hir(lint, lint_root, source_info.span, |lint| {
351 let mut err = lint.build(message);
352 err.span_label(source_info.span, format!("{:?}", panic));
// NOTE(review): these lines belong to the unary-op overflow check
// (presumably `check_unary_op`); the signature line is outside this excerpt.
// Evaluates the operand, and if the unary op would overflow, lints
// `arithmetic_overflow` with an `OverflowNeg` assert kind.
362 source_info: SourceInfo,
364 if let (val, true) = self.use_ecx(source_info, |this| {
365 let val = this.ecx.read_immediate(&this.ecx.eval_operand(arg, None)?)?;
366 let (_res, overflow, _ty) = this.ecx.overflowing_unary_op(op, &val)?;
369 // `AssertKind` only has an `OverflowNeg` variant, so make sure that is
370 // appropriate to use.
371 assert_eq!(op, UnOp::Neg, "Neg is the only UnOp that can overflow");
372 self.report_assert_as_lint(
373 lint::builtin::ARITHMETIC_OVERFLOW,
375 "this arithmetic operation will overflow",
376 AssertKind::OverflowNeg(val.to_const_int()),
// NOTE(review): these lines belong to the binary-op overflow check
// (`check_binary_op` per the debug! below); the signature line is outside
// this excerpt. Lints over-wide shift amounts even when the LHS is unknown,
// and general arithmetic overflow when both operands are known.
387 left: &Operand<'tcx>,
388 right: &Operand<'tcx>,
389 source_info: SourceInfo,
391 let r = self.use_ecx(source_info, |this| {
392 this.ecx.read_immediate(&this.ecx.eval_operand(right, None)?)
394 let l = self.use_ecx(source_info, |this| {
395 this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?)
397 // Check for exceeding shifts *even if* we cannot evaluate the LHS.
398 if op == BinOp::Shr || op == BinOp::Shl {
400 // We need the type of the LHS. We cannot use `place_layout` as that is the type
401 // of the result, which for checked binops is not the same!
402 let left_ty = left.ty(self.local_decls, self.tcx);
403 let left_size = self.ecx.layout_of(left_ty).ok()?.size;
404 let right_size = r.layout.size;
405 let r_bits = r.to_scalar().ok();
406 let r_bits = r_bits.and_then(|r| r.to_bits(right_size).ok());
// A shift amount >= the bit-width of the LHS type always overflows.
407 if r_bits.map_or(false, |b| b >= left_size.bits() as u128) {
408 debug!("check_binary_op: reporting assert for {:?}", source_info);
409 self.report_assert_as_lint(
410 lint::builtin::ARITHMETIC_OVERFLOW,
412 "this arithmetic operation will overflow",
413 AssertKind::Overflow(
416 Some(l) => l.to_const_int(),
417 // Invent a dummy value, the diagnostic ignores it anyway
418 None => ConstInt::new(
419 ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
421 left_ty.is_ptr_sized_integral(),
431 if let (Some(l), Some(r)) = (l, r) {
432 // The remaining operators are handled through `overflowing_binary_op`.
433 if self.use_ecx(source_info, |this| {
434 let (_res, overflow, _ty) = this.ecx.overflowing_binary_op(op, &l, &r)?;
437 self.report_assert_as_lint(
438 lint::builtin::ARITHMETIC_OVERFLOW,
440 "this arithmetic operation will overflow",
441 AssertKind::Overflow(op, l.to_const_int(), r.to_const_int()),
// NOTE(review): these lines belong to the rvalue-evaluation driver
// (presumably `const_prop`); the signature line is outside this excerpt.
// Pre-checks the rvalue (overflow lints, reference-taking bailouts), then
// attempts to evaluate the assignment inside the interpreter.
451 rvalue: &Rvalue<'tcx>,
452 source_info: SourceInfo,
455 // Perform any special handling for specific Rvalue types.
456 // Generally, checks here fall into one of two categories:
457 // 1. Additional checking to provide useful lints to the user
458 // - In this case, we will do some validation and then fall through to the
459 // end of the function which evals the assignment.
460 // 2. Working around bugs in other parts of the compiler
461 // - In this case, we'll return `None` from this function to stop evaluation.
463 // Additional checking: give lints to the user if an overflow would occur.
464 // We do this here and not in the `Assert` terminator as that terminator is
465 // only sometimes emitted (overflow checks can be disabled), but we want to always
467 Rvalue::UnaryOp(op, arg) => {
468 trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg);
469 self.check_unary_op(*op, arg, source_info)?;
471 Rvalue::BinaryOp(op, box (left, right)) => {
472 trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
473 self.check_binary_op(*op, left, right, source_info)?;
475 Rvalue::CheckedBinaryOp(op, box (left, right)) => {
477 "checking CheckedBinaryOp(op = {:?}, left = {:?}, right = {:?})",
482 self.check_binary_op(*op, left, right, source_info)?;
485 // Do not try creating references (#67862)
486 Rvalue::AddressOf(_, place) | Rvalue::Ref(_, _, place) => {
487 trace!("skipping AddressOf | Ref for {:?}", place);
489 // This may be creating mutable references or immutable references to cells.
490 // If that happens, the pointed to value could be mutated via that reference.
491 // Since we aren't tracking references, the const propagator loses track of what
492 // value the local has right now.
493 // Thus, all locals that have their reference taken
494 // must not take part in propagation.
495 Self::remove_const(&mut self.ecx, place.local);
499 Rvalue::ThreadLocalRef(def_id) => {
500 trace!("skipping ThreadLocalRef({:?})", def_id);
505 // There's no other checking to do at this time.
506 Rvalue::Aggregate(..)
508 | Rvalue::CopyForDeref(..)
512 | Rvalue::ShallowInitBox(..)
513 | Rvalue::Discriminant(..)
514 | Rvalue::NullaryOp(..) => {}
517 // FIXME we need to revisit this for #67176
518 if rvalue.needs_subst() {
// Finally, evaluate the rvalue into the destination via the interpreter.
522 self.use_ecx(source_info, |this| this.ecx.eval_rvalue_into_place(rvalue, place))
526 impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
// Walk every basic block in index order, propagating constants as we go.
527 fn visit_body(&mut self, body: &Body<'tcx>) {
528 for (bb, data) in body.basic_blocks().iter_enumerated() {
529 self.visit_basic_block_data(bb, data);
// Default recursion into the operand's components (constants, places).
533 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
534 self.super_operand(operand, location);
// Eagerly evaluates every constant encountered, so evaluation errors get
// reported/linted (via eval_constant) even if the value is never used.
537 fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
538 trace!("visit_constant: {:?}", constant);
539 self.super_constant(constant, location);
540 self.eval_constant(constant, self.source_info.unwrap());
// Per-statement driver: tries to const-prop assignments, tracks discriminant
// writes and storage liveness, and erases locals whose values can no longer
// be trusted.
543 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
544 trace!("visit_statement: {:?}", statement);
545 let source_info = statement.source_info;
546 self.source_info = Some(source_info);
547 if let StatementKind::Assign(box (place, ref rval)) = statement.kind {
548 let can_const_prop = self.ecx.machine.can_const_prop[place.local];
549 if let Some(()) = self.const_prop(rval, source_info, place) {
550 match can_const_prop {
551 ConstPropMode::OnlyInsideOwnBlock => {
553 "found local restricted to its block. \
554 Will remove it from const-prop after block is finished. Local: {:?}",
558 ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => {
559 trace!("can't propagate into {:?}", place);
560 if place.local != RETURN_PLACE {
561 Self::remove_const(&mut self.ecx, place.local);
564 ConstPropMode::FullConstProp => {}
567 // Const prop failed, so erase the destination, ensuring that whatever happens
568 // from here on, does not know about the previous value.
569 // This is important in case we have
572 // x = SOME_MUTABLE_STATIC;
573 // // x must now be uninit
575 // FIXME: we overzealously erase the entire local, because that's easier to
578 "propagation into {:?} failed.
579 Nuking the entire site from orbit, it's the only way to be sure",
582 Self::remove_const(&mut self.ecx, place.local);
585 match statement.kind {
586 StatementKind::SetDiscriminant { ref place, .. } => {
587 match self.ecx.machine.can_const_prop[place.local] {
588 ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => {
590 .use_ecx(source_info, |this| this.ecx.statement(statement))
593 trace!("propped discriminant into {:?}", place);
595 Self::remove_const(&mut self.ecx, place.local);
598 ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => {
599 Self::remove_const(&mut self.ecx, place.local);
603 StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
// Mirror storage liveness inside the interpreter frame: a StorageLive
// resets the local to a fresh (uninit) live value.
604 let frame = self.ecx.frame_mut();
605 frame.locals[local].value =
606 if let StatementKind::StorageLive(_) = statement.kind {
607 LocalValue::Live(interpret::Operand::Immediate(
608 interpret::Immediate::Uninit,
618 self.super_statement(statement, location);
// Per-terminator driver: for `Assert` terminators whose condition is known to
// fail, lints `unconditional_panic` with a best-effort rendering of the panic
// message; afterwards erases all block-restricted locals written in this block.
621 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
622 let source_info = terminator.source_info;
623 self.source_info = Some(source_info);
624 self.super_terminator(terminator, location);
625 match &terminator.kind {
626 TerminatorKind::Assert { expected, ref msg, ref cond, .. } => {
627 if let Some(ref value) = self.eval_operand(&cond, source_info) {
628 trace!("assertion on {:?} should be {:?}", value, expected);
629 let expected = ScalarMaybeUninit::from(Scalar::from_bool(*expected));
630 let value_const = self.ecx.read_scalar(&value).unwrap();
631 if expected != value_const {
// Helper Debug impl: renders a known value, or `_` for one we
// could not const-propagate.
636 impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> {
637 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
639 Self::Val(val) => val.fmt(fmt),
640 Self::Underscore => fmt.write_str("_"),
644 let mut eval_to_int = |op| {
645 // This can be `None` if the lhs wasn't const propagated and we just
646 // triggered the assert on the value of the rhs.
647 self.eval_operand(op, source_info).map_or(DbgVal::Underscore, |op| {
648 DbgVal::Val(self.ecx.read_immediate(&op).unwrap().to_const_int())
651 let msg = match msg {
652 AssertKind::DivisionByZero(op) => {
653 Some(AssertKind::DivisionByZero(eval_to_int(op)))
655 AssertKind::RemainderByZero(op) => {
656 Some(AssertKind::RemainderByZero(eval_to_int(op)))
658 AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => {
659 // Division overflow is *UB* in the MIR, and different than the
660 // other overflow checks.
661 Some(AssertKind::Overflow(
667 AssertKind::BoundsCheck { ref len, ref index } => {
668 let len = eval_to_int(len);
669 let index = eval_to_int(index);
670 Some(AssertKind::BoundsCheck { len, index })
672 // Remaining overflow errors are already covered by checks on the binary operators.
673 AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => None,
674 // Need proper const propagator for these.
677 // Poison all places this operand references so that further code
678 // doesn't use the invalid value
680 Operand::Move(ref place) | Operand::Copy(ref place) => {
681 Self::remove_const(&mut self.ecx, place.local);
683 Operand::Constant(_) => {}
685 if let Some(msg) = msg {
686 self.report_assert_as_lint(
687 lint::builtin::UNCONDITIONAL_PANIC,
689 "this operation will panic at runtime",
696 // None of these have Operands to const-propagate.
697 TerminatorKind::Goto { .. }
698 | TerminatorKind::Resume
699 | TerminatorKind::Abort
700 | TerminatorKind::Return
701 | TerminatorKind::Unreachable
702 | TerminatorKind::Drop { .. }
703 | TerminatorKind::DropAndReplace { .. }
704 | TerminatorKind::Yield { .. }
705 | TerminatorKind::GeneratorDrop
706 | TerminatorKind::FalseEdge { .. }
707 | TerminatorKind::FalseUnwind { .. }
708 | TerminatorKind::SwitchInt { .. }
709 | TerminatorKind::Call { .. }
710 | TerminatorKind::InlineAsm { .. } => {}
713 // We remove all Locals which are restricted in propagation to their containing blocks and
714 // which were modified in the current block.
715 // Take it out of the ecx so we can get a mutable reference to the ecx for `remove_const`.
716 let mut locals = std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals);
717 for &local in locals.iter() {
718 Self::remove_const(&mut self.ecx, local);
721 // Put it back so we reuse the heap of the storage
722 self.ecx.machine.written_only_inside_own_block_locals = locals;
723 if cfg!(debug_assertions) {
724 // Ensure we are correctly erasing locals with the non-debug-assert logic.
725 for local in self.ecx.machine.only_propagate_inside_block_locals.iter() {
727 self.get_const(local.into()).is_none()
729 .layout_of(self.local_decls[local].ty)
730 .map_or(true, |layout| layout.is_zst())