1 //! A constant propagation optimization pass based on dataflow analysis.
3 //! Currently, this pass only propagates scalar values.
5 use rustc_const_eval::const_eval::CheckAlignment;
6 use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
7 use rustc_data_structures::fx::FxHashMap;
8 use rustc_middle::mir::visit::{MutVisitor, Visitor};
9 use rustc_middle::mir::*;
10 use rustc_middle::ty::{self, Ty, TyCtxt};
11 use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace};
12 use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
13 use rustc_span::DUMMY_SP;
14 use rustc_target::abi::Align;
18 // These constants are somewhat random guesses and have not been optimized.
19 // If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
// Maximum number of basic blocks a body may have before the pass bails out.
20 const BLOCK_LIMIT: usize = 100;
// Maximum number of tracked places before the pass bails out; keeps the
// overall runtime roughly linear in the number of statements (see run_pass).
21 const PLACE_LIMIT: usize = 100;
/// MIR optimization pass that propagates known scalar constants through the
/// body using the `rustc_mir_dataflow` value-analysis framework.
23 pub struct DataflowConstProp;
25 impl<'tcx> MirPass<'tcx> for DataflowConstProp {
// The pass is comparatively expensive, so it only runs at mir-opt-level >= 3.
26 fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
27 sess.mir_opt_level() >= 3
30 #[instrument(skip_all level = "debug")]
31 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
// Bail out on oversized bodies unless the user opted into expensive
// optimizations (mir-opt-level >= 4 ignores both limits).
32 if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
// NOTE(review): message has a typo — should read "due to too many basic
// blocks" (cannot be fixed here without a code change; string is runtime text).
33 debug!("aborted dataflow const prop due too many basic blocks");
37 // Decide which places to track during the analysis.
// Only scalar-typed places are tracked; this pass does not propagate aggregates.
38 let map = Map::from_filter(tcx, body, Ty::is_scalar);
40 // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
41 // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
42 // applications, where `h` is the height of the lattice. Because the height of our lattice
43 // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
44 // because every transfer function application could traverse the whole map, this becomes
45 // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
46 // map nodes is strongly correlated to the number of tracked places, this becomes more or
47 // less `O(n)` if we place a constant limit on the number of tracked places.
48 if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT {
49 debug!("aborted dataflow const prop due to too many tracked places");
53 // Perform the actual dataflow analysis.
54 let analysis = ConstAnalysis::new(tcx, body, map);
55 let results = debug_span!("analyze")
56 .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
58 // Collect results and patch the body afterwards.
// First walk the fixpoint results to record constant operands/assignments,
// then mutate the body in place using the collected information.
59 let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
60 debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
61 debug_span!("patch").in_scope(|| visitor.visit_body(body));
// The dataflow analysis itself: computes a `FlatSet<ScalarTy>` lattice value
// for every tracked place. (Some fields are elided in this view — presumably
// the `Map` of tracked places and a `TyCtxt`; confirm against the full file.)
65 struct ConstAnalysis<'tcx> {
// Interpreter context used to evaluate casts and binary/unary operators on
// known constants, backed by the no-op `DummyMachine` defined below.
68 ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
69 param_env: ty::ParamEnv<'tcx>,
72 impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
// Abstract values form a flat lattice: Bottom (unreachable/uninit),
// a single known scalar-with-type, or Top (unknown).
73 type Value = FlatSet<ScalarTy<'tcx>>;
75 const NAME: &'static str = "ConstAnalysis";
// Accessor required by the framework; body elided in this view.
77 fn map(&self) -> &Map {
// (signature partially elided in this view) Transfer function for
// `target = rvalue` assignments. `CheckedBinaryOp` is special-cased so that
// both fields of the resulting (value, overflow) pair can be tracked.
84 rvalue: &Rvalue<'tcx>,
85 state: &mut State<Self::Value>,
88 Rvalue::CheckedBinaryOp(op, box (left, right)) => {
89 let target = self.map().find(target.as_ref());
90 if let Some(target) = target {
91 // We should not track any projections other than
92 // what is overwritten below, but just in case...
93 state.flood_idx(target, self.map());
// Field 0 of the pair holds the arithmetic result...
96 let value_target = target
97 .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
// ...field 1 holds the overflow flag.
98 let overflow_target = target
99 .and_then(|target| self.map().apply(target, TrackElem::Field(1_u32.into())));
// Only evaluate the operation if at least one of the two fields is tracked.
101 if value_target.is_some() || overflow_target.is_some() {
102 let (val, overflow) = self.binary_op(state, *op, left, right);
104 if let Some(value_target) = value_target {
105 state.assign_idx(value_target, ValueOrPlace::Value(val), self.map());
107 if let Some(overflow_target) = overflow_target {
108 let overflow = match overflow {
109 FlatSet::Top => FlatSet::Top,
110 FlatSet::Elem(overflow) => {
112 // Overflow cannot be reliably propagated. See: https://github.com/rust-lang/rust/pull/101168#issuecomment-1288091446
// A known non-overflowing result is still safe to record as `false`
// (lines around this arm are elided in this view — confirm against the full file).
115 self.wrap_scalar(Scalar::from_bool(false), self.tcx.types.bool)
118 FlatSet::Bottom => FlatSet::Bottom,
122 ValueOrPlace::Value(overflow),
// Everything else falls back to the framework's generic assignment handling.
128 _ => self.super_assign(target, rvalue, state),
// (signature partially elided in this view) Computes the abstract value of an
// rvalue. Numeric casts and binary/unary operators on known constants are
// evaluated through the interpreter; anything else defers to the framework.
134 rvalue: &Rvalue<'tcx>,
135 state: &mut State<Self::Value>,
136 ) -> ValueOrPlace<Self::Value> {
// Only numeric-to-numeric casts are evaluated; pointer and unsizing casts
// are not handled here.
139 kind @ (CastKind::IntToInt
140 | CastKind::FloatToInt
141 | CastKind::FloatToFloat
142 | CastKind::IntToFloat),
145 ) => match self.eval_operand(operand, state) {
146 FlatSet::Elem(op) => match kind {
147 CastKind::IntToInt | CastKind::IntToFloat => {
148 self.ecx.int_to_int_or_float(&op, *ty)
150 CastKind::FloatToInt | CastKind::FloatToFloat => {
151 self.ecx.float_to_float_or_int(&op, *ty)
// Interpreter failure degrades gracefully to Top (unknown).
155 .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
156 .unwrap_or(ValueOrPlace::top()),
157 _ => ValueOrPlace::top(),
159 Rvalue::BinaryOp(op, box (left, right)) => {
160 // Overflows must be ignored here.
161 let (val, _overflow) = self.binary_op(state, *op, left, right);
162 ValueOrPlace::Value(val)
164 Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
165 FlatSet::Elem(value) => self
167 .unary_op(*op, &value)
168 .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
169 .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
// Bottom and Top propagate unchanged through unary operators.
170 FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
171 FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
173 _ => self.super_rvalue(rvalue, state),
// (signature partially elided in this view) Evaluates a MIR constant to an
// abstract value; evaluation failure degrades to Top rather than erroring.
179 constant: &Constant<'tcx>,
180 _state: &mut State<Self::Value>,
184 .eval(self.tcx, self.param_env)
186 .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
187 .unwrap_or(FlatSet::Top)
// Refines the state along the outgoing edges of a `SwitchInt` terminator:
// when the discriminant is a known constant, all non-matching edges are
// marked unreachable.
190 fn handle_switch_int(
192 discr: &Operand<'tcx>,
193 apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
195 // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
196 // this more inefficient than it has to be.
// The discriminant is evaluated lazily on the first edge and memoized in
// `discr_value` so it is not recomputed for every outgoing edge.
197 let mut discr_value = None;
198 let mut handled = false;
199 apply_edge_effects.apply(|state, target| {
200 let discr_value = match discr_value {
201 Some(value) => value,
202 None => {
203 let value = match self.handle_operand(discr, state) {
204 ValueOrPlace::Value(value) => value,
205 ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
// Reduce the scalar to its raw bits for comparison against branch values.
207 let result = match value {
208 FlatSet::Top => FlatSet::Top,
209 FlatSet::Elem(ScalarTy(scalar, _)) => {
210 let int = scalar.assert_int();
211 FlatSet::Elem(int.assert_bits(int.size()))
213 FlatSet::Bottom => FlatSet::Bottom,
215 discr_value = Some(result);
220 let FlatSet::Elem(choice) = discr_value else {
221 // Do nothing if we don't know which branch will be taken.
// `target.value` is None for the "otherwise" edge; it is considered taken
// only when no explicit branch matched (tracked via `handled` — the lines
// updating `handled` are elided in this view; confirm against the full file).
225 if target.value.map(|n| n == choice).unwrap_or(!handled) {
226 // Branch is taken. Has no effect on state.
229 // Branch is not taken.
230 state.mark_unreachable();
// A scalar constant paired with its type; the lattice element tracked per place.
236 #[derive(Clone, PartialEq, Eq)]
237 struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
239 impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
240 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
241 // This is used for dataflow visualization, so we return something more concise.
// Delegates to the Display impl of an equivalent `ConstantKind` value.
242 std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
246 impl<'tcx> ConstAnalysis<'tcx> {
// Builds the analysis for `body`, creating an interpreter context with the
// body's param env. (Some constructor lines are elided in this view.)
247 pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self {
248 let param_env = tcx.param_env(body.source.def_id());
252 ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
// NOTE(review): clippy `redundant_field_names` — could be just `param_env`.
253 param_env: param_env,
// (signature partially elided in this view) Evaluates a binary operation on
// two abstract operands, returning the abstract result plus an abstract
// overflow flag. Interpreter failure degrades to (Top, Top).
259 state: &mut State<FlatSet<ScalarTy<'tcx>>>,
261 left: &Operand<'tcx>,
262 right: &Operand<'tcx>,
263 ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
264 let left = self.eval_operand(left, state);
265 let right = self.eval_operand(right, state);
266 match (left, right) {
267 (FlatSet::Elem(left), FlatSet::Elem(right)) => {
268 match self.ecx.overflowing_binary_op(op, &left, &right) {
269 Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
270 _ => (FlatSet::Top, FlatSet::Top),
// Bottom (unreachable) on either side makes the whole result Bottom.
273 (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
275 // Could attempt some algebraic simplifications here.
276 (FlatSet::Top, FlatSet::Top)
// (signature partially elided in this view) Resolves an operand to an
// interpreter immediate (`ImmTy`) when its abstract value is a known scalar;
// a layout failure degrades to Top.
284 state: &mut State<FlatSet<ScalarTy<'tcx>>>,
285 ) -> FlatSet<ImmTy<'tcx>> {
286 let value = match self.handle_operand(op, state) {
287 ValueOrPlace::Value(value) => value,
288 ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
291 FlatSet::Top => FlatSet::Top,
292 FlatSet::Elem(ScalarTy(scalar, ty)) => self
294 .layout_of(self.param_env.and(ty))
295 .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
296 .unwrap_or(FlatSet::Top),
297 FlatSet::Bottom => FlatSet::Bottom,
// Wraps a raw scalar + type as a known lattice element.
301 fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
302 FlatSet::Elem(ScalarTy(scalar, ty))
// Wraps an interpreter immediate; only the `Scalar` variant is representable
// (the non-scalar arms are elided in this view — presumably they yield Top).
305 fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
307 Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
// Convenience wrapper that takes the type from the immediate's layout.
312 fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
313 self.wrap_immediate(*val, val.layout.ty)
// Two-phase visitor: first collects constant operands/assignments from the
// dataflow results (ResultsVisitor impl), then patches the body in place
// (MutVisitor impl). (Some fields are elided in this view — presumably
// `tcx` and the `'map` reference to the place map.)
317 struct CollectAndPatch<'tcx, 'map> {
321 /// For a given MIR location, this stores the values of the operands used by that location. In
322 /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
323 /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
324 before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,
326 /// Stores the assigned values for assignments where the Rvalue is constant.
327 assignments: FxHashMap<Location, ScalarTy<'tcx>>,
330 impl<'tcx, 'map> CollectAndPatch<'tcx, 'map> {
// Creates an empty collector over the given place map.
331 fn new(tcx: TyCtxt<'tcx>, map: &'map Map) -> Self {
332 Self { tcx, map, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
// Materializes a collected scalar as a MIR constant operand. (Some Constant
// fields, e.g. span/user_ty, are elided in this view.)
335 fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
336 Operand::Constant(Box::new(Constant {
339 literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
// Collection phase: walks the reachable statements/terminators with the
// fixpoint dataflow state and records every constant the patch phase can use.
344 impl<'mir, 'tcx, 'map> ResultsVisitor<'mir, 'tcx> for CollectAndPatch<'tcx, 'map> {
345 type FlowState = State<FlatSet<ScalarTy<'tcx>>>;
// Before an assignment's effect, record constant *operands* of its rvalue
// (so `_1 = _1 + _2` captures the pre-assignment value of `_1`).
347 fn visit_statement_before_primary_effect(
349 state: &Self::FlowState,
350 statement: &'mir Statement<'tcx>,
353 match &statement.kind {
354 StatementKind::Assign(box (_, rvalue)) => {
355 OperandCollector { state, visitor: self }.visit_rvalue(rvalue, location);
// After the effect, record whole assignments whose result is a known constant.
361 fn visit_statement_after_primary_effect(
363 state: &Self::FlowState,
364 statement: &'mir Statement<'tcx>,
367 match statement.kind {
368 StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
369 // Don't overwrite the assignment if it already uses a constant (to keep the span).
371 StatementKind::Assign(box (place, _)) => match state.get(place.as_ref(), self.map) {
373 FlatSet::Elem(value) => {
374 self.assignments.insert(location, value);
377 // This assignment is either unreachable, or an uninitialized value is assigned.
// Terminator operands (e.g. SwitchInt discriminants, call args) are collected
// the same way as statement operands.
384 fn visit_terminator_before_primary_effect(
386 state: &Self::FlowState,
387 terminator: &'mir Terminator<'tcx>,
390 OperandCollector { state, visitor: self }.visit_terminator(terminator, location);
// Patch phase: rewrites the body using the information collected above.
394 impl<'tcx, 'map> MutVisitor<'tcx> for CollectAndPatch<'tcx, 'map> {
395 fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
// Replace the rvalue of an assignment whose result was proven constant.
399 fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
400 if let Some(value) = self.assignments.get(&location) {
401 match &mut statement.kind {
402 StatementKind::Assign(box (_, rvalue)) => {
403 *rvalue = Rvalue::Use(self.make_operand(value.clone()));
405 _ => bug!("found assignment info for non-assign statement"),
// Still recurse so operands inside the statement get patched too.
408 self.super_statement(statement, location);
// Replace copy/move operands whose pre-effect value was proven constant.
412 fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
414 Operand::Copy(place) | Operand::Move(place) => {
415 if let Some(value) = self.before_effect.get(&(location, *place)) {
416 *operand = self.make_operand(value.clone());
// Helper visitor that records constant operand values into the parent
// `CollectAndPatch` while holding a snapshot of the dataflow state.
424 struct OperandCollector<'tcx, 'map, 'a> {
425 state: &'a State<FlatSet<ScalarTy<'tcx>>>,
426 visitor: &'a mut CollectAndPatch<'tcx, 'map>,
429 impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
// For each copy/move operand, stash its known constant value (if any) into
// `before_effect`, keyed by (location, place).
430 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
432 Operand::Copy(place) | Operand::Move(place) => {
433 match self.state.get(place.as_ref(), self.visitor.map) {
435 FlatSet::Elem(value) => {
436 self.visitor.before_effect.insert((location, *place), value);
// Bottom (unreachable) yields nothing to record; Top arm elided in this view.
438 FlatSet::Bottom => (),
// Minimal interpreter machine used only so `InterpCx` can evaluate pure
// operators and casts on already-known scalars. Hooks that would require real
// execution (calls, intrinsics, asserts) are stubbed out; their bodies are
// largely elided in this view, but from the visible `throw_unsup!` they
// presumably refuse the operation rather than performing it.
448 impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
449 rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>);
451 const PANIC_ON_ALLOC_FAIL: bool = true;
453 fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment {
457 fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
460 fn alignment_check_failed(
461 _ecx: &InterpCx<'mir, 'tcx, Self>,
464 _check: CheckAlignment,
465 ) -> interpret::InterpResult<'tcx, ()> {
// Function calls are never interpreted by this pass.
469 fn find_mir_or_eval_fn(
470 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
471 _instance: ty::Instance<'tcx>,
472 _abi: rustc_target::spec::abi::Abi,
473 _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
474 _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
475 _target: Option<BasicBlock>,
476 _unwind: rustc_const_eval::interpret::StackPopUnwind,
477 ) -> interpret::InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
// Intrinsics are likewise not evaluated (function name elided in this view).
482 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
483 _instance: ty::Instance<'tcx>,
484 _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
485 _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
486 _target: Option<BasicBlock>,
487 _unwind: rustc_const_eval::interpret::StackPopUnwind,
488 ) -> interpret::InterpResult<'tcx> {
// Assert terminators are not interpreted (function name elided in this view).
493 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
494 _msg: &rustc_middle::mir::AssertMessage<'tcx>,
495 _unwind: Option<BasicBlock>,
496 ) -> interpret::InterpResult<'tcx> {
// Pointer comparisons are explicitly unsupported (empty message).
501 _ecx: &InterpCx<'mir, 'tcx, Self>,
503 _left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
504 _right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
505 ) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
506 throw_unsup!(Unsupported("".into()))
510 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
511 _ptr: interpret::Pointer<Self::Provenance>,
512 ) -> interpret::InterpResult<'tcx> {
// Stack-frame hooks: no interpreter stack is ever pushed by this pass.
517 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
518 _frame: rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance>,
519 ) -> interpret::InterpResult<
521 rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
527 _ecx: &'a InterpCx<'mir, 'tcx, Self>,
528 ) -> &'a [rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]
534 _ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
536 rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,