//! A constant propagation optimization pass based on dataflow analysis.
//!
//! Currently, this pass only propagates scalar values.
use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::mir::visit::{MutVisitor, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace};
use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
use rustc_span::DUMMY_SP;

use crate::MirPass;
// These constants are somewhat random guesses and have not been optimized.
// If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
const BLOCK_LIMIT: usize = 100;
const PLACE_LIMIT: usize = 100;
/// Constant propagation pass driven by dataflow analysis; see the module docs.
pub struct DataflowConstProp;
impl<'tcx> MirPass<'tcx> for DataflowConstProp {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
- sess.mir_opt_level() >= 1
+ sess.mir_opt_level() >= 3
}
+ #[instrument(skip_all level = "debug")]
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
+ debug!("aborted dataflow const prop due too many basic blocks");
+ return;
+ }
+
// Decide which places to track during the analysis.
let map = Map::from_filter(tcx, body, Ty::is_scalar);
+ // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
+ // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
+ // applications, where `h` is the height of the lattice. Because the height of our lattice
+ // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
+ // because every transfer function application could traverse the whole map, this becomes
+ // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
+ // map nodes is strongly correlated to the number of tracked places, this becomes more or
+ // less `O(n)` if we place a constant limit on the number of tracked places.
+ if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT {
+ debug!("aborted dataflow const prop due to too many tracked places");
+ return;
+ }
+
// Perform the actual dataflow analysis.
let analysis = ConstAnalysis::new(tcx, body, map);
- let results = analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint();
+ let results = debug_span!("analyze")
+ .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
// Collect results and patch the body afterwards.
let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
- results.visit_reachable_with(body, &mut visitor);
- visitor.visit_body(body);
+ debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
+ debug_span!("patch").in_scope(|| visitor.visit_body(body));
}
}
tcx: TyCtxt<'tcx>,
ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
param_env: ty::ParamEnv<'tcx>,
- propagate_overflow: bool,
}
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
let (val, overflow) = self.binary_op(state, *op, left, right);
if let Some(value_target) = value_target {
- state.assign_idx(value_target, ValueOrPlaceOrRef::Value(val), self.map());
+ state.assign_idx(value_target, ValueOrPlace::Value(val), self.map());
}
if let Some(overflow_target) = overflow_target {
let overflow = match overflow {
FlatSet::Top => FlatSet::Top,
FlatSet::Elem(overflow) => {
- if overflow && !self.propagate_overflow {
+ if overflow {
+ // Overflow cannot be reliably propagated. See: https://github.com/rust-lang/rust/pull/101168#issuecomment-1288091446
FlatSet::Top
} else {
- self.wrap_scalar(
- Scalar::from_bool(overflow),
- self.tcx.types.bool,
- )
+ self.wrap_scalar(Scalar::from_bool(false), self.tcx.types.bool)
}
}
FlatSet::Bottom => FlatSet::Bottom,
};
state.assign_idx(
overflow_target,
- ValueOrPlaceOrRef::Value(overflow),
+ ValueOrPlace::Value(overflow),
self.map(),
);
}
&self,
rvalue: &Rvalue<'tcx>,
state: &mut State<Self::Value>,
- ) -> ValueOrPlaceOrRef<Self::Value> {
+ ) -> ValueOrPlace<Self::Value> {
match rvalue {
Rvalue::Cast(
- CastKind::IntToInt
+ kind @ (CastKind::IntToInt
| CastKind::FloatToInt
| CastKind::FloatToFloat
- | CastKind::IntToFloat,
+ | CastKind::IntToFloat),
operand,
ty,
- ) => {
- let operand = self.eval_operand(operand, state);
- match operand {
- FlatSet::Elem(operand) => self
- .ecx
- .misc_cast(&operand, *ty)
- .map(|result| ValueOrPlaceOrRef::Value(self.wrap_immediate(result, *ty)))
- .unwrap_or(ValueOrPlaceOrRef::top()),
- _ => ValueOrPlaceOrRef::top(),
+ ) => match self.eval_operand(operand, state) {
+ FlatSet::Elem(op) => match kind {
+ CastKind::IntToInt | CastKind::IntToFloat => {
+ self.ecx.int_to_int_or_float(&op, *ty)
+ }
+ CastKind::FloatToInt | CastKind::FloatToFloat => {
+ self.ecx.float_to_float_or_int(&op, *ty)
+ }
+ _ => unreachable!(),
}
- }
+ .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
+ .unwrap_or(ValueOrPlace::top()),
+ _ => ValueOrPlace::top(),
+ },
Rvalue::BinaryOp(op, box (left, right)) => {
// Overflows must be ignored here.
let (val, _overflow) = self.binary_op(state, *op, left, right);
- ValueOrPlaceOrRef::Value(val)
+ ValueOrPlace::Value(val)
}
Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
FlatSet::Elem(value) => self
.ecx
.unary_op(*op, &value)
- .map(|val| ValueOrPlaceOrRef::Value(self.wrap_immty(val)))
- .unwrap_or(ValueOrPlaceOrRef::Value(FlatSet::Top)),
- FlatSet::Bottom => ValueOrPlaceOrRef::Value(FlatSet::Bottom),
- FlatSet::Top => ValueOrPlaceOrRef::Value(FlatSet::Top),
+ .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
+ .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
+ FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
+ FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
},
_ => self.super_rvalue(rvalue, state),
}
impl<'tcx> ConstAnalysis<'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self {
- // It can happen that overflow will be detected even though overflow checks are disabled.
- // This is caused by inlining functions that have #[rustc_inherit_overflow_checks]. Such
- // overflows must not be propagated if `-C overflow-checks=off`. Also, if the function we
- // are optimizing here has #[rustc_inherit_overflow_checks], the overflow checks may
- // actually not be triggered by the consuming crate, so we have to ignore them too.
- // Related to https://github.com/rust-lang/rust/issues/35310.
- let propagate_overflow = tcx.sess.overflow_checks()
- && !tcx.has_attr(body.source.def_id(), sym::rustc_inherit_overflow_checks);
+ let param_env = tcx.param_env(body.source.def_id());
Self {
map,
tcx,
- ecx: InterpCx::new(tcx, DUMMY_SP, ty::ParamEnv::empty(), DummyMachine),
- param_env: tcx.param_env(body.source.def_id()),
- propagate_overflow,
+ ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
+ param_env: param_env,
}
}
};
match value {
FlatSet::Top => FlatSet::Top,
- FlatSet::Elem(ScalarTy(scalar, ty)) => {
- let layout = self
- .tcx
- .layout_of(ty::ParamEnv::empty().and(ty))
- .expect("this should not happen"); // FIXME
- FlatSet::Elem(ImmTy::from_scalar(scalar, layout))
- }
+ FlatSet::Elem(ScalarTy(scalar, ty)) => self
+ .tcx
+ .layout_of(self.param_env.and(ty))
+ .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
+ .unwrap_or(FlatSet::Top),
FlatSet::Bottom => FlatSet::Bottom,
}
}
struct CollectAndPatch<'tcx, 'map> {
tcx: TyCtxt<'tcx>,
map: &'map Map,
+
+ /// For a given MIR location, this stores the values of the operands used by that location. In
+ /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
+ /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,
+
+ /// Stores the assigned values for assignments where the Rvalue is constant.
assignments: FxHashMap<Location, ScalarTy<'tcx>>,
}
_left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
_right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
- unimplemented!()
+ throw_unsup!(Unsupported("".into()))
}
fn expose_ptr(