1 //! Propagates constants for early reporting of statically known assertion failures.
4 use crate::const_prop::CanConstProp;
5 use crate::const_prop::ConstPropMachine;
6 use crate::const_prop::ConstPropMode;
8 use rustc_const_eval::const_eval::ConstEvalErr;
9 use rustc_const_eval::interpret::{
10 self, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, Scalar, StackPopCleanup,
12 use rustc_hir::def::DefKind;
14 use rustc_index::bit_set::BitSet;
15 use rustc_index::vec::IndexVec;
16 use rustc_middle::mir::visit::Visitor;
17 use rustc_middle::mir::{
18 AssertKind, BinOp, Body, Constant, ConstantKind, Local, LocalDecl, Location, Operand, Place,
19 Rvalue, SourceInfo, SourceScope, SourceScopeData, Statement, StatementKind, Terminator,
20 TerminatorKind, UnOp, RETURN_PLACE,
22 use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
23 use rustc_middle::ty::subst::{InternalSubsts, Subst};
24 use rustc_middle::ty::{
25 self, ConstInt, ConstKind, Instance, ParamEnv, ScalarInt, Ty, TyCtxt, TypeVisitable,
27 use rustc_session::lint;
29 use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
30 use rustc_trait_selection::traits;
33 /// The maximum number of bytes that we'll allocate space for a local or the return value.
34 /// Needed for #66397, because otherwise we eval into large places and that can cause OOM or just
35 /// severely regress performance.
36 const MAX_ALLOC_LIMIT: u64 = 1024;
// Lint-pass entry point: runs const propagation over `body` purely to surface
// diagnostics (see the `trace!` calls and the predicate checks below); it does
// not transform the MIR it visits.
// NOTE(review): this excerpt is elided — several lines (early `return`s,
// closing braces, and parts of expressions) are not visible here.
39 impl<'tcx> MirLint<'tcx> for ConstProp {
40 fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
// Promoteds are skipped here:
41 // will be evaluated by miri and produce its errors there
42 if body.source.promoted.is_some() {
46 let def_id = body.source.def_id().expect_local();
47 let is_fn_like = tcx.def_kind(def_id).is_fn_like();
48 let is_assoc_const = tcx.def_kind(def_id) == DefKind::AssocConst;
50 // Only run const prop on functions, methods, closures and associated constants
51 if !is_fn_like && !is_assoc_const {
52 // skip anon_const/statics/consts because they'll be evaluated by miri anyway
53 trace!("ConstProp skipped for {:?}", def_id);
57 let is_generator = tcx.type_of(def_id.to_def_id()).is_generator();
58 // FIXME(welseywiser) const prop doesn't work on generators because of query cycles
59 // computing their layout.
61 trace!("ConstProp skipped for generator {:?}", def_id);
65 // Check if it's even possible to satisfy the 'where' clauses
67 // This branch will never be taken for any normal function.
68 // However, it's possible to use `#![feature(trivial_bounds)]` to write
69 // a function with impossible to satisfy clauses, e.g.:
70 // `fn foo() where String: Copy {}`
72 // We don't usually need to worry about this kind of case,
73 // since we would get a compilation error if the user tried
74 // to call it. However, since we can do const propagation
75 // even without any calls to the function, we need to make
76 // sure that it even makes sense to try to evaluate the body.
77 // If there are unsatisfiable where clauses, then all bets are
78 // off, and we just give up.
80 // We manually filter the predicates, skipping anything that's not
81 // "global". We are in a potentially generic context
82 // (e.g. we are evaluating a function without substituting generic
83 // parameters), so this filtering serves two purposes:
85 // 1. We skip evaluating any predicates that we would
86 // never be able to prove are unsatisfiable (e.g. `<T as Foo>`)
87 // 2. We avoid trying to normalize predicates involving generic
88 // parameters (e.g. `<T as Foo>::MyItem`). This can confuse
89 // the normalization code (leading to cycle errors), since
90 // it's usually never invoked in this way.
92 .predicates_of(def_id.to_def_id())
95 .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None });
96 if traits::impossible_predicates(
98 traits::elaborate_predicates(tcx, predicates).map(|o| o.predicate).collect(),
100 trace!("ConstProp skipped for {:?}: found unsatisfiable predicates", def_id);
104 trace!("ConstProp starting for {:?}", def_id);
// A cloned "dummy" body is built so the propagator can borrow scopes/decls
// from it while the real `body` is being visited below.
106 let dummy_body = &Body::new(
108 body.basic_blocks().clone(),
109 body.source_scopes.clone(),
110 body.local_decls.clone(),
115 body.generator_kind(),
116 body.tainted_by_errors,
119 // FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
120 // constants, instead of just checking for const-folding succeeding.
121 // That would require a uniform one-def no-mutation analysis
122 // and RPO (or recursing when needing the value of a local).
123 let mut optimization_finder = ConstPropagator::new(body, dummy_body, tcx);
124 optimization_finder.visit_body(body);
126 trace!("ConstProp done for {:?}", def_id);
130 /// Finds optimization opportunities on the MIR.
131 struct ConstPropagator<'mir, 'tcx> {
// Interpreter instance (with the const-prop machine) that actually evaluates
// statements and rvalues.
132 ecx: InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>,
134 param_env: ParamEnv<'tcx>,
// Borrowed from the dummy body built in `run_lint` (see `new`), so they can
// outlive the visit of the real body.
135 source_scopes: &'mir IndexVec<SourceScope, SourceScopeData<'tcx>>,
136 local_decls: &'mir IndexVec<Local, LocalDecl<'tcx>>,
137 // Because we have `MutVisitor` we can't obtain the `SourceInfo` from a `Location`. So we store
138 // the last known `SourceInfo` here and just keep revisiting it.
139 source_info: Option<SourceInfo>,
// Boilerplate trait impls that let `ConstPropagator` use `self.layout_of(..)`
// and related layout queries directly. Method bodies are partially elided in
// this excerpt.
142 impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> {
143 type LayoutOfResult = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>;
// Layout errors are passed through unchanged rather than reported here.
146 fn handle_layout_err(&self, err: LayoutError<'tcx>, _: Span, _: Ty<'tcx>) -> LayoutError<'tcx> {
151 impl HasDataLayout for ConstPropagator<'_, '_> {
153 fn data_layout(&self) -> &TargetDataLayout {
154 &self.tcx.data_layout
158 impl<'tcx> ty::layout::HasTyCtxt<'tcx> for ConstPropagator<'_, 'tcx> {
160 fn tcx(&self) -> TyCtxt<'tcx> {
165 impl<'tcx> ty::layout::HasParamEnv<'tcx> for ConstPropagator<'_, 'tcx> {
167 fn param_env(&self) -> ty::ParamEnv<'tcx> {
172 impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// Constructor (the `fn new(` header line and some parameters are not visible
// in this excerpt). Sets up an interpreter frame for `body` so statements can
// be evaluated without actually calling the function.
175 dummy_body: &'mir Body<'tcx>,
177 ) -> ConstPropagator<'mir, 'tcx> {
178 let def_id = body.source.def_id();
179 let substs = &InternalSubsts::identity_for_item(tcx, def_id);
180 let param_env = tcx.param_env_reveal_all_normalized(def_id);
// Classify each local's propagation mode up front; locals restricted to
// their own block are collected into a bitset handed to the machine.
182 let can_const_prop = CanConstProp::check(tcx, param_env, body);
183 let mut only_propagate_inside_block_locals = BitSet::new_empty(can_const_prop.len());
184 for (l, mode) in can_const_prop.iter_enumerated() {
185 if *mode == ConstPropMode::OnlyInsideOwnBlock {
186 only_propagate_inside_block_locals.insert(l);
189 let mut ecx = InterpCx::new(
191 tcx.def_span(def_id),
193 ConstPropMachine::new(only_propagate_inside_block_locals, can_const_prop),
// Compute a layout for the return place, falling back to unit when the
// return type is unsized or too large (see MAX_ALLOC_LIMIT).
197 .layout_of(body.bound_return_ty().subst(tcx, substs))
199 // Don't bother allocating memory for large values.
200 // I don't know how return types can seem to be unsized but this happens in the
201 // `type/type-unsatisfiable.rs` test.
202 .filter(|ret_layout| {
203 !ret_layout.is_unsized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
205 .unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
208 .allocate(ret_layout, MemoryKind::Stack)
209 .expect("couldn't perform small allocation")
// Push the (single) root frame; `cleanup: false` because nothing is ever
// popped back out of this interpreter.
212 ecx.push_stack_frame(
213 Instance::new(def_id, substs),
216 StackPopCleanup::Root { cleanup: false },
218 .expect("failed to push initial stack frame");
224 source_scopes: &dummy_body.source_scopes,
225 local_decls: &dummy_body.local_decls,
// Reads the current known value of `place` from the interpreter, if any.
// Returns `None` when evaluation fails (the error is only traced).
230 fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> {
231 let op = match self.ecx.eval_place_to_op(place, None) {
234 trace!("get_const failed: {}", e);
239 // Try to read the local as an immediate so that if it is representable as a scalar, we can
240 // handle it as such, but otherwise, just return the value as is.
241 Some(match self.ecx.read_immediate_raw(&op) {
242 Ok(Ok(imm)) => imm.into(),
247 /// Remove `local` from the pool of `Locals`. Allows writing to them,
248 /// but not reading from them anymore.
// Associated fn (takes `ecx` explicitly) so it can be called while `self`
// is otherwise borrowed. Resets the local to an uninitialized immediate and
// clears its cached layout.
249 fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
250 ecx.frame_mut().locals[local] = LocalState {
251 value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)),
252 layout: Cell::new(None),
// Resolves the HIR node to attach lints to for a given source scope.
256 fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> {
257 source_info.scope.lint_root(self.source_scopes)
// Runs an interpreter operation `f`, converting interpreter errors into
// `None` (with tracing) instead of propagating them. Also asserts that no
// allocation-requiring "formatted string" errors are produced.
260 fn use_ecx<F, T>(&mut self, source_info: SourceInfo, f: F) -> Option<T>
262 F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
264 // Overwrite the PC -- whatever the interpreter does to it does not make any sense anyway.
265 self.ecx.frame_mut().loc = Err(source_info.span);
267 Ok(val) => Some(val),
269 trace!("InterpCx operation failed: {:?}", error);
270 // Some errors shouldn't come up because creating them causes
271 // an allocation, which we should avoid. When that happens,
272 // dedicated error variants should be introduced instead.
274 !error.kind().formatted_string(),
275 "const-prop encountered formatting error: {}",
283 /// Returns the value, if any, of evaluating `c`.
// On evaluation failure this reports "erroneous constant used" — as a lint
// when the constant is a promoted or still needs substitution (for backwards
// compatibility in unused generic code), otherwise as a hard error.
284 fn eval_constant(&mut self, c: &Constant<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
285 // FIXME we need to revisit this for #67176
290 match self.ecx.mir_const_to_op(&c.literal, None) {
293 let tcx = self.ecx.tcx.at(c.span);
294 let err = ConstEvalErr::new(&self.ecx, error, Some(c.span));
295 if let Some(lint_root) = self.lint_root(source_info) {
296 let lint_only = match c.literal {
297 ConstantKind::Ty(ct) => match ct.kind() {
298 // Promoteds must lint and not error as the user didn't ask for them
299 ConstKind::Unevaluated(ty::Unevaluated {
304 // Out of backwards compatibility we cannot report hard errors in unused
305 // generic functions using associated constants of the generic parameters.
306 _ => c.literal.needs_subst(),
308 ConstantKind::Val(_, ty) => ty.needs_subst(),
311 // Out of backwards compatibility we cannot report hard errors in unused
312 // generic functions using associated constants of the generic parameters.
313 err.report_as_lint(tcx, "erroneous constant used", lint_root, Some(c.span));
315 err.report_as_error(tcx, "erroneous constant used");
// No lint root available: fall back to a hard error.
318 err.report_as_error(tcx, "erroneous constant used");
325 /// Returns the value, if any, of evaluating `place`.
326 fn eval_place(&mut self, place: Place<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
327 trace!("eval_place(place={:?})", place);
// Errors are swallowed (traced) by `use_ecx`.
328 self.use_ecx(source_info, |this| this.ecx.eval_place_to_op(place, None))
331 /// Returns the value, if any, of evaluating `op`. Calls upon `eval_constant`
332 /// or `eval_place`, depending on the variant of `Operand` used.
333 fn eval_operand(&mut self, op: &Operand<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
334 Operand::Constant(ref c) => self.eval_constant(c, source_info),
335 Operand::Move(place) | Operand::Copy(place) => self.eval_place(place, source_info),
// Emits `message` as the given lint at `source_info.span`, labelling the span
// with the Debug rendering of `panic`. Silently does nothing when no lint
// root can be found for the scope.
340 fn report_assert_as_lint(
342 lint: &'static lint::Lint,
343 source_info: SourceInfo,
344 message: &'static str,
345 panic: AssertKind<impl std::fmt::Debug>,
347 if let Some(lint_root) = self.lint_root(source_info) {
348 self.tcx.struct_span_lint_hir(lint, lint_root, source_info.span, |lint| {
349 let mut err = lint.build(message);
350 err.span_label(source_info.span, format!("{:?}", panic));
// NOTE(review): the `fn` header for this helper is not visible in this
// excerpt. From the body: it evaluates a unary op on `arg` and, if the
// operation overflows, lints ARITHMETIC_OVERFLOW. Only `UnOp::Neg` can
// overflow (asserted below).
360 source_info: SourceInfo,
362 if let (val, true) = self.use_ecx(source_info, |this| {
363 let val = this.ecx.read_immediate(&this.ecx.eval_operand(arg, None)?)?;
364 let (_res, overflow, _ty) = this.ecx.overflowing_unary_op(op, &val)?;
367 // `AssertKind` only has an `OverflowNeg` variant, so make sure that is
368 // appropriate to use.
369 assert_eq!(op, UnOp::Neg, "Neg is the only UnOp that can overflow");
370 self.report_assert_as_lint(
371 lint::builtin::ARITHMETIC_OVERFLOW,
373 "this arithmetic operation will overflow",
374 AssertKind::OverflowNeg(val.to_const_int()),
// NOTE(review): the `fn` header for this helper is not visible in this
// excerpt. From the body: evaluates both operands of a binary op and lints
// ARITHMETIC_OVERFLOW for (a) shift amounts >= the LHS bit width (checked
// even when the LHS itself can't be evaluated), and (b) any op whose
// `overflowing_binary_op` reports overflow.
385 left: &Operand<'tcx>,
386 right: &Operand<'tcx>,
387 source_info: SourceInfo,
389 let r = self.use_ecx(source_info, |this| {
390 this.ecx.read_immediate(&this.ecx.eval_operand(right, None)?)
392 let l = self.use_ecx(source_info, |this| {
393 this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?)
395 // Check for exceeding shifts *even if* we cannot evaluate the LHS.
396 if op == BinOp::Shr || op == BinOp::Shl {
398 // We need the type of the LHS. We cannot use `place_layout` as that is the type
399 // of the result, which for checked binops is not the same!
400 let left_ty = left.ty(self.local_decls, self.tcx);
401 let left_size = self.ecx.layout_of(left_ty).ok()?.size;
402 let right_size = r.layout.size;
403 let r_bits = r.to_scalar().to_bits(right_size).ok();
404 if r_bits.map_or(false, |b| b >= left_size.bits() as u128) {
405 debug!("check_binary_op: reporting assert for {:?}", source_info);
406 self.report_assert_as_lint(
407 lint::builtin::ARITHMETIC_OVERFLOW,
409 "this arithmetic operation will overflow",
410 AssertKind::Overflow(
413 Some(l) => l.to_const_int(),
414 // Invent a dummy value, the diagnostic ignores it anyway
415 None => ConstInt::new(
416 ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
418 left_ty.is_ptr_sized_integral(),
428 if let (Some(l), Some(r)) = (l, r) {
429 // The remaining operators are handled through `overflowing_binary_op`.
430 if self.use_ecx(source_info, |this| {
431 let (_res, overflow, _ty) = this.ecx.overflowing_binary_op(op, &l, &r)?;
434 self.report_assert_as_lint(
435 lint::builtin::ARITHMETIC_OVERFLOW,
437 "this arithmetic operation will overflow",
438 AssertKind::Overflow(op, l.to_const_int(), r.to_const_int()),
// NOTE(review): the `fn const_prop(` header is not visible in this excerpt.
// From the body: performs rvalue-specific checks/lints, then attempts to
// evaluate the rvalue into `place` via the interpreter.
448 rvalue: &Rvalue<'tcx>,
449 source_info: SourceInfo,
452 // Perform any special handling for specific Rvalue types.
453 // Generally, checks here fall into one of two categories:
454 // 1. Additional checking to provide useful lints to the user
455 // - In this case, we will do some validation and then fall through to the
456 // end of the function which evals the assignment.
457 // 2. Working around bugs in other parts of the compiler
458 // - In this case, we'll return `None` from this function to stop evaluation.
460 // Additional checking: give lints to the user if an overflow would occur.
461 // We do this here and not in the `Assert` terminator as that terminator is
462 // only sometimes emitted (overflow checks can be disabled), but we want to always
464 Rvalue::UnaryOp(op, arg) => {
465 trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg);
466 self.check_unary_op(*op, arg, source_info)?;
468 Rvalue::BinaryOp(op, box (left, right)) => {
469 trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
470 self.check_binary_op(*op, left, right, source_info)?;
472 Rvalue::CheckedBinaryOp(op, box (left, right)) => {
474 "checking CheckedBinaryOp(op = {:?}, left = {:?}, right = {:?})",
479 self.check_binary_op(*op, left, right, source_info)?;
482 // Do not try creating references (#67862)
483 Rvalue::AddressOf(_, place) | Rvalue::Ref(_, _, place) => {
484 trace!("skipping AddressOf | Ref for {:?}", place);
486 // This may be creating mutable references or immutable references to cells.
487 // If that happens, the pointed to value could be mutated via that reference.
488 // Since we aren't tracking references, the const propagator loses track of what
489 // value the local has right now.
490 // Thus, all locals that have their reference taken
491 // must not take part in propagation.
492 Self::remove_const(&mut self.ecx, place.local);
496 Rvalue::ThreadLocalRef(def_id) => {
497 trace!("skipping ThreadLocalRef({:?})", def_id);
502 // There's no other checking to do at this time.
503 Rvalue::Aggregate(..)
505 | Rvalue::CopyForDeref(..)
509 | Rvalue::ShallowInitBox(..)
510 | Rvalue::Discriminant(..)
511 | Rvalue::NullaryOp(..) => {}
514 // FIXME we need to revisit this for #67176
515 if rvalue.needs_subst() {
// Finally, actually evaluate the assignment (errors swallowed by use_ecx).
519 self.use_ecx(source_info, |this| this.ecx.eval_rvalue_into_place(rvalue, place))
523 impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
// Visits blocks in index order (not MIR's default traversal).
524 fn visit_body(&mut self, body: &Body<'tcx>) {
525 for (bb, data) in body.basic_blocks().iter_enumerated() {
526 self.visit_basic_block_data(bb, data);
// No extra work beyond the default traversal for operands.
530 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
531 self.super_operand(operand, location);
// Evaluates every constant encountered so erroneous constants get reported.
// Relies on `self.source_info` having been set by the enclosing
// statement/terminator visit — hence the `unwrap()`.
534 fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
535 trace!("visit_constant: {:?}", constant);
536 self.super_constant(constant, location);
537 self.eval_constant(constant, self.source_info.unwrap());
// For assignments: tries to const-prop the rvalue, then erases the
// destination local whenever its propagation mode forbids keeping the value
// (or when propagation failed). Also handles SetDiscriminant and
// StorageLive/StorageDead bookkeeping on the interpreter frame.
540 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
541 trace!("visit_statement: {:?}", statement);
542 let source_info = statement.source_info;
543 self.source_info = Some(source_info);
544 if let StatementKind::Assign(box (place, ref rval)) = statement.kind {
545 let can_const_prop = self.ecx.machine.can_const_prop[place.local];
546 if let Some(()) = self.const_prop(rval, source_info, place) {
547 match can_const_prop {
548 ConstPropMode::OnlyInsideOwnBlock => {
550 "found local restricted to its block. \
551 Will remove it from const-prop after block is finished. Local: {:?}",
555 ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => {
556 trace!("can't propagate into {:?}", place);
557 if place.local != RETURN_PLACE {
558 Self::remove_const(&mut self.ecx, place.local);
561 ConstPropMode::FullConstProp => {}
564 // Const prop failed, so erase the destination, ensuring that whatever happens
565 // from here on, does not know about the previous value.
566 // This is important in case we have
569 // x = SOME_MUTABLE_STATIC;
570 // // x must now be uninit
572 // FIXME: we overzealously erase the entire local, because that's easier to
575 "propagation into {:?} failed.
576 Nuking the entire site from orbit, it's the only way to be sure",
579 Self::remove_const(&mut self.ecx, place.local);
582 match statement.kind {
583 StatementKind::SetDiscriminant { ref place, .. } => {
584 match self.ecx.machine.can_const_prop[place.local] {
585 ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => {
587 .use_ecx(source_info, |this| this.ecx.statement(statement))
590 trace!("propped discriminant into {:?}", place);
592 Self::remove_const(&mut self.ecx, place.local);
595 ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => {
596 Self::remove_const(&mut self.ecx, place.local);
// Mirror storage markers onto the interpreter's frame state:
// StorageLive resets the local to uninit; StorageDead (elided branch here)
// presumably marks it dead.
600 StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
601 let frame = self.ecx.frame_mut();
602 frame.locals[local].value =
603 if let StatementKind::StorageLive(_) = statement.kind {
604 LocalValue::Live(interpret::Operand::Immediate(
605 interpret::Immediate::Uninit,
615 self.super_statement(statement, location);
// For `Assert` terminators whose condition is statically known to fail:
// reconstructs the panic message with const-propagated operands and lints
// UNCONDITIONAL_PANIC. All other terminator kinds need no operand handling.
// NOTE(review): the tail of this excerpt (from "We remove all Locals..."
// onward) is the end-of-basic-block cleanup that erases block-local values;
// the enclosing `fn` header for that part is not visible here.
618 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
619 let source_info = terminator.source_info;
620 self.source_info = Some(source_info);
621 self.super_terminator(terminator, location);
622 match &terminator.kind {
623 TerminatorKind::Assert { expected, ref msg, ref cond, .. } => {
624 if let Some(ref value) = self.eval_operand(&cond, source_info) {
625 trace!("assertion on {:?} should be {:?}", value, expected);
626 let expected = Scalar::from_bool(*expected);
627 let value_const = self.ecx.read_scalar(&value).unwrap();
628 if expected != value_const {
// Helper type local to this arm: prints either a concrete value or `_`
// for operands that could not be const-propagated.
633 impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> {
634 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
636 Self::Val(val) => val.fmt(fmt),
637 Self::Underscore => fmt.write_str("_"),
641 let mut eval_to_int = |op| {
642 // This can be `None` if the lhs wasn't const propagated and we just
643 // triggered the assert on the value of the rhs.
644 self.eval_operand(op, source_info).map_or(DbgVal::Underscore, |op| {
645 DbgVal::Val(self.ecx.read_immediate(&op).unwrap().to_const_int())
648 let msg = match msg {
649 AssertKind::DivisionByZero(op) => {
650 Some(AssertKind::DivisionByZero(eval_to_int(op)))
652 AssertKind::RemainderByZero(op) => {
653 Some(AssertKind::RemainderByZero(eval_to_int(op)))
655 AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => {
656 // Division overflow is *UB* in the MIR, and different than the
657 // other overflow checks.
658 Some(AssertKind::Overflow(
664 AssertKind::BoundsCheck { ref len, ref index } => {
665 let len = eval_to_int(len);
666 let index = eval_to_int(index);
667 Some(AssertKind::BoundsCheck { len, index })
669 // Remaining overflow errors are already covered by checks on the binary operators.
670 AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => None,
671 // Need proper const propagator for these.
674 // Poison all places this operand references so that further code
675 // doesn't use the invalid value
677 Operand::Move(ref place) | Operand::Copy(ref place) => {
678 Self::remove_const(&mut self.ecx, place.local);
680 Operand::Constant(_) => {}
682 if let Some(msg) = msg {
683 self.report_assert_as_lint(
684 lint::builtin::UNCONDITIONAL_PANIC,
686 "this operation will panic at runtime",
693 // None of these have Operands to const-propagate.
694 TerminatorKind::Goto { .. }
695 | TerminatorKind::Resume
696 | TerminatorKind::Abort
697 | TerminatorKind::Return
698 | TerminatorKind::Unreachable
699 | TerminatorKind::Drop { .. }
700 | TerminatorKind::DropAndReplace { .. }
701 | TerminatorKind::Yield { .. }
702 | TerminatorKind::GeneratorDrop
703 | TerminatorKind::FalseEdge { .. }
704 | TerminatorKind::FalseUnwind { .. }
705 | TerminatorKind::SwitchInt { .. }
706 | TerminatorKind::Call { .. }
707 | TerminatorKind::InlineAsm { .. } => {}
710 // We remove all Locals which are restricted in propagation to their containing blocks and
711 // which were modified in the current block.
712 // Take it out of the ecx so we can get a mutable reference to the ecx for `remove_const`.
713 let mut locals = std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals);
714 for &local in locals.iter() {
715 Self::remove_const(&mut self.ecx, local);
718 // Put it back so we reuse the heap of the storage
719 self.ecx.machine.written_only_inside_own_block_locals = locals;
720 if cfg!(debug_assertions) {
721 // Ensure we are correctly erasing locals with the non-debug-assert logic.
722 for local in self.ecx.machine.only_propagate_inside_block_locals.iter() {
724 self.get_const(local.into()).is_none()
726 .layout_of(self.local_decls[local].ty)
727 .map_or(true, |layout| layout.is_zst())