1 //! This module provides a framework on top of the normal MIR dataflow framework to simplify the
2 //! implementation of analyses that track the values stored in places of interest.
4 //! The default methods of [`ValueAnalysis`] (prefixed with `super_` instead of `handle_`)
5 //! provide some behavior that should be valid for all abstract domains that are based only on the
6 //! value stored in a certain place. On top of these default rules, an implementation should
7 //! override some of the `handle_` methods. For an example, see `ConstAnalysis`.
9 //! An implementation must also provide a [`Map`]. Before the analysis begins, all places that
10 //! should be tracked during the analysis must be registered. Currently, the projections of these
11 //! places may only contain derefs, fields and downcasts (otherwise registration fails). During the
12 //! analysis, no new places can be registered.
14 //! Note that if you want to track values behind references, you have to register the dereferenced
15 //! place. For example: Assume `let x = (0, 0)` and that we want to propagate values from `x.0` and
16 //! `x.1` also through the assignment `let y = &x`. In this case, we should register `x.0`, `x.1`,
17 //! `(*y).0` and `(*y).1`.
22 //! Warning: This is a semi-formal attempt to argue for the correctness of this analysis. If you
23 //! find any weak spots, let me know! Recommended reading: Abstract Interpretation. We will use the
24 //! term "place" to refer to a place expression (like `mir::Place`), and we will call the
25 //! underlying entity "object". For instance, `*_1` and `*_2` are not the same place, but depending
26 //! on the value of `_1` and `_2`, they could refer to the same object. Also, the same place can
27 //! refer to different objects during execution. If `_1` is reassigned, then `*_1` may refer to
28 //! different objects before and after assignment. Additionally, when saying "access to a place",
29 //! what we really mean is "access to an object denoted by arbitrary projections of that place".
31 //! In the following, we will assume a constant propagation analysis. Our analysis is correct if
32 //! every transfer function is correct. This is the case if for every pair (f, f#) and abstract
33 //! state s, we have f(y(s)) <= y(f#(s)), where s is a mapping from tracked place to top, bottom or
34 //! a constant. Since pointers (and mutable references) are not tracked, but can be used to change
35 //! values in the concrete domain, f# must assume that all places that can be affected in this way
36 //! for a given program point are already marked with top in s (otherwise many assignments and
37 //! function calls would have no choice but to mark all tracked places with top). This leads us to
38 //! an invariant: For all possible program points where there could possibly exist means of mutable
39 //! access to a tracked place (in the concrete domain), this place must be assigned to top (in the
40 //! abstract domain). The concretization function y can be defined as expected for the constant
41 //! propagation analysis, although the concrete state of course contains all kinds of non-tracked
42 //! data. However, by the invariant above, no mutable access to tracked places that are not marked
43 //! with top may be introduced.
45 //! Note that we (at least currently) do not differentiate between "this place may assume different
46 //! values" and "a pointer to this place escaped the analysis". However, we still want to handle
47 //! assignments to constants as usual for f#. This adds an assumption: Whenever we have an
48 //! assignment that is captured by the analysis, all mutable access to the underlying place (which
49 //! is not observable by the analysis) must be invalidated. This is (hopefully) covered by Stacked
52 //! To be continued...
54 use std::fmt::{Debug, Formatter};
56 use rustc_data_structures::fx::FxHashMap;
57 use rustc_index::vec::IndexVec;
58 use rustc_middle::mir::tcx::PlaceTy;
59 use rustc_middle::mir::*;
60 use rustc_middle::ty::{self, Ty, TyCtxt};
61 use rustc_target::abi::VariantIdx;
64 fmt::DebugWithContext, lattice::FlatSet, Analysis, AnalysisDomain, CallReturnPlaces,
65 JoinSemiLattice, SwitchIntEdgeEffects,
// Core extension point of the framework: implementors override `handle_*` hooks,
// each of which defaults to the corresponding `super_*` method that encodes the
// behavior valid for any value-based abstract domain (see module docs).
68 pub trait ValueAnalysis<'tcx> {
69 /// For each place of interest, the analysis tracks a value of the given type.
70 type Value: Clone + JoinSemiLattice + HasBottom + HasTop;
72 const NAME: &'static str;
// NOTE(review): the return type below looks like a mangled `&Map` — confirm against upstream.
74 fn map(&self) -> ⤅
76 fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
77 self.super_statement(statement, state)
80 fn super_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
81 match &statement.kind {
82 StatementKind::Assign(box (place, rvalue)) => {
83 self.handle_assign(*place, rvalue, state);
85 StatementKind::SetDiscriminant { .. } => {
86 // Could treat this as writing a constant to a pseudo-place.
87 // But discriminants are currently not tracked, so we do nothing.
88 // Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84
90 StatementKind::Intrinsic(box intrinsic) => {
91 self.handle_intrinsic(intrinsic, state);
93 StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
94 // We can flood with bottom here, because `StorageLive` makes the local
95 // uninitialized, and `StorageDead` makes it UB to access.
96 state.flood_with(Place::from(*local).as_ref(), self.map(), Self::Value::bottom());
98 StatementKind::Deinit(box place) => {
99 // The bottom state denotes uninitialized values.
100 state.flood_with(place.as_ref(), self.map(), Self::Value::bottom());
103 | StatementKind::Retag(..)
104 | StatementKind::FakeRead(..)
105 | StatementKind::Coverage(..)
106 | StatementKind::AscribeUserType(..) => (),
112 intrinsic: &NonDivergingIntrinsic<'tcx>,
113 state: &mut State<Self::Value>,
115 self.super_intrinsic(intrinsic, state);
120 intrinsic: &NonDivergingIntrinsic<'tcx>,
121 state: &mut State<Self::Value>,
124 NonDivergingIntrinsic::Assume(..) => {
125 // Could use this, but ignoring it is sound.
127 NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { dst, .. }) => {
// The copied bytes are opaque to the analysis, so the destination loses all information.
128 if let Some(place) = dst.place() {
129 state.flood(place.as_ref(), self.map());
138 rvalue: &Rvalue<'tcx>,
139 state: &mut State<Self::Value>,
141 self.super_assign(target, rvalue, state)
147 rvalue: &Rvalue<'tcx>,
148 state: &mut State<Self::Value>,
// Default assignment: evaluate the rvalue abstractly, then write the result into the state.
150 let result = self.handle_rvalue(rvalue, state);
151 state.assign(target.as_ref(), result, self.map());
156 rvalue: &Rvalue<'tcx>,
157 state: &mut State<Self::Value>,
158 ) -> ValueOrPlaceOrRef<Self::Value> {
159 self.super_rvalue(rvalue, state)
164 rvalue: &Rvalue<'tcx>,
165 state: &mut State<Self::Value>,
166 ) -> ValueOrPlaceOrRef<Self::Value> {
168 Rvalue::Use(operand) => self.handle_operand(operand, state).into(),
// A shared borrow does not permit mutation, so the borrowed place's values stay valid
// and we can return a `Ref` to the tracked source place (if it is tracked at all).
169 Rvalue::Ref(_, BorrowKind::Shared, place) => self
171 .find(place.as_ref())
172 .map(ValueOrPlaceOrRef::Ref)
173 .unwrap_or(ValueOrPlaceOrRef::Unknown),
// Mutable borrows / raw pointers create untracked mutable access: flood the place
// to uphold the module-level invariant (mutably reachable places must be top).
174 Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) => {
175 state.flood(place.as_ref(), self.map());
176 ValueOrPlaceOrRef::Unknown
178 Rvalue::CopyForDeref(place) => {
179 self.handle_operand(&Operand::Copy(*place), state).into()
181 _ => ValueOrPlaceOrRef::Unknown,
187 operand: &Operand<'tcx>,
188 state: &mut State<Self::Value>,
189 ) -> ValueOrPlace<Self::Value> {
190 self.super_operand(operand, state)
195 operand: &Operand<'tcx>,
196 state: &mut State<Self::Value>,
197 ) -> ValueOrPlace<Self::Value> {
199 Operand::Constant(box constant) => {
200 ValueOrPlace::Value(self.handle_constant(constant, state))
202 Operand::Copy(place) | Operand::Move(place) => {
203 // Do we want to handle moves differently? Could flood place with bottom.
205 .find(place.as_ref())
206 .map(ValueOrPlace::Place)
207 .unwrap_or(ValueOrPlace::Unknown)
214 constant: &Constant<'tcx>,
215 state: &mut State<Self::Value>,
217 self.super_constant(constant, state)
222 _constant: &Constant<'tcx>,
223 _state: &mut State<Self::Value>,
228 /// The effect of a successful function call return should not be
229 /// applied here, see [`Analysis::apply_terminator_effect`].
230 fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
231 self.super_terminator(terminator, state)
234 fn super_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
235 match &terminator.kind {
236 TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
237 // Effect is applied by `handle_call_return`.
239 TerminatorKind::Drop { place, .. } => {
240 // Place can still be accessed after drop, and drop has mutable access to it.
241 state.flood(place.as_ref(), self.map());
243 TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } => {
244 // They would have an effect, but are not allowed in this phase.
245 bug!("encountered disallowed terminator");
248 // The other terminators can be ignored.
253 fn handle_call_return(
255 return_places: CallReturnPlaces<'_, 'tcx>,
256 state: &mut State<Self::Value>,
258 self.super_call_return(return_places, state)
261 fn super_call_return(
263 return_places: CallReturnPlaces<'_, 'tcx>,
264 state: &mut State<Self::Value>,
// Default: the callee may have written anything into the return place(s), so flood them.
266 return_places.for_each(|place| {
267 state.flood(place.as_ref(), self.map());
271 fn handle_switch_int(
273 discr: &Operand<'tcx>,
274 apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
276 self.super_switch_int(discr, apply_edge_effects)
281 _discr: &Operand<'tcx>,
282 _apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
// Wraps this analysis so it can be run by the generic MIR dataflow engine.
286 fn wrap(self) -> ValueAnalysisWrapper<Self>
290 ValueAnalysisWrapper(self)
/// Newtype adapter that lets a [`ValueAnalysis`] implement the dataflow
/// framework's `AnalysisDomain`/`Analysis` traits (see the impls below).
294 pub struct ValueAnalysisWrapper<T>(pub T);
166 impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper<T> {
// Bridges the wrapped `ValueAnalysis` to the dataflow engine. Every effect is a
// no-op on unreachable states, which keeps the "unreachable is bottom" invariant.
319 impl<'tcx, T> Analysis<'tcx> for ValueAnalysisWrapper<T>
321 T: ValueAnalysis<'tcx>,
323 fn apply_statement_effect(
325 state: &mut Self::Domain,
326 statement: &Statement<'tcx>,
329 if state.is_reachable() {
330 self.0.handle_statement(statement, state);
334 fn apply_terminator_effect(
336 state: &mut Self::Domain,
337 terminator: &Terminator<'tcx>,
340 if state.is_reachable() {
341 self.0.handle_terminator(terminator, state);
345 fn apply_call_return_effect(
347 state: &mut Self::Domain,
349 return_places: crate::CallReturnPlaces<'_, 'tcx>,
351 if state.is_reachable() {
352 self.0.handle_call_return(return_places, state)
356 fn apply_switch_int_edge_effects(
359 discr: &Operand<'tcx>,
360 apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
362 // FIXME: Dataflow framework provides no access to current state here.
363 self.0.handle_switch_int(discr, apply_edge_effects)
367 rustc_index::newtype_index!(
368 /// This index uniquely identifies a place.
370 /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
371 /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
372 pub struct PlaceIndex {}
375 rustc_index::newtype_index!(
376 /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
378 /// It is an implementation detail of this module.
// NOTE(review): the `enum StateData<V>` header and its `Unreachable` variant are
// not visible in this dump; only the `Reachable` variant line survived.
383 #[derive(PartialEq, Eq, Clone, Debug)]
385 Reachable(IndexVec<ValueIndex, V>),
389 /// The dataflow state for an instance of [`ValueAnalysis`].
391 /// Every instance specifies a lattice that represents the possible values of a single tracked
392 /// place. If we call this lattice `V` and the set of tracked places `P`, then a [`State`] is an
393 /// element of `{unreachable} ∪ (P -> V)`. This again forms a lattice, where the bottom element is
394 /// `unreachable` and the top element is the mapping `p ↦ ⊤`. Note that the mapping `p ↦ ⊥` is not
395 /// the bottom element (because joining an unreachable and any other reachable state yields a
396 /// reachable state). All operations on unreachable states are ignored.
398 /// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
399 #[derive(PartialEq, Eq, Clone, Debug)]
400 pub struct State<V>(StateData<V>);
402 impl<V: Clone + HasTop> State<V> {
403 pub fn is_reachable(&self) -> bool {
404 matches!(&self.0, StateData::Reachable(_))
407 pub fn mark_unreachable(&mut self) {
408 self.0 = StateData::Unreachable;
// Assigns `⊤` (resp. `value`) to every tracked slot at once.
411 pub fn flood_all(&mut self) {
412 self.flood_all_with(V::top())
415 pub fn flood_all_with(&mut self, value: V) {
416 let StateData::Reachable(values) = &mut self.0 else { return };
417 values.raw.fill(value);
420 pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
421 if let Some(root) = map.find(place) {
422 self.flood_idx_with(root, map, value);
426 pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
427 self.flood_with(place, map, V::top())
// Floods the given place index and all of its tracked descendants (preorder walk).
430 pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) {
431 let StateData::Reachable(values) = &mut self.0 else { return };
432 map.preorder_invoke(place, &mut |place| {
433 if let Some(vi) = map.places[place].value_index {
434 values[vi] = value.clone();
439 pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) {
440 self.flood_idx_with(place, map, V::top())
443 /// This method assumes that the given places are not overlapping, and that we can therefore
444 /// copy all entries one after another.
445 pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
446 let StateData::Reachable(values) = &mut self.0 else { return };
447 // If both places are tracked, we copy the value to the target. If the target is tracked,
448 // but the source is not, we have to invalidate the value in target. If the target is not
449 // tracked, then we don't have to do anything.
450 if let Some(target_value) = map.places[target].value_index {
451 if let Some(source_value) = map.places[source].value_index {
452 values[target_value] = values[source_value].clone();
454 values[target_value] = V::top();
457 for target_child in map.children(target) {
458 // Try to find corresponding child and recurse. Reasoning is similar as above.
459 let projection = map.places[target_child].proj_elem.unwrap();
460 if let Some(source_child) = map.projections.get(&(source, projection)) {
461 self.assign_place_idx(target_child, *source_child, map);
463 self.flood_idx(target_child, map);
468 pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlaceOrRef<V>, map: &Map) {
469 if let Some(target) = map.find(target) {
470 self.assign_idx(target, result, map);
472 // We don't track this place nor any projections, assignment can be ignored.
476 pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlaceOrRef<V>, map: &Map) {
478 ValueOrPlaceOrRef::Value(value) => {
479 // First flood the target place in case we also track any projections (although
480 // this scenario is currently not well-supported by the API).
481 self.flood_idx(target, map);
482 let StateData::Reachable(values) = &mut self.0 else { return };
483 if let Some(value_index) = map.places[target].value_index {
484 values[value_index] = value;
487 ValueOrPlaceOrRef::Place(source) => self.assign_place_idx(target, source, map),
488 ValueOrPlaceOrRef::Ref(source) => {
// The reference value itself is not tracked (set to top), but what it points to is:
// copy the source's values into the target's `Deref` subtree, if that is tracked.
489 let StateData::Reachable(values) = &mut self.0 else { return };
490 if let Some(value_index) = map.places[target].value_index {
491 values[value_index] = V::top();
493 if let Some(target_deref) = map.apply_elem(target, ProjElem::Deref) {
494 self.assign_place_idx(target_deref, source, map);
497 ValueOrPlaceOrRef::Unknown => {
498 self.flood_idx(target, map);
// Lookups conservatively return `⊤` for untracked places and unreachable states.
503 pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V {
504 map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top())
507 pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V {
509 StateData::Reachable(values) => {
510 map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::top())
512 StateData::Unreachable => V::top(),
// `unreachable` acts as the identity of `join`: joining with it changes nothing,
// and joining anything into an unreachable state clones the other state in.
517 impl<V: JoinSemiLattice + Clone> JoinSemiLattice for State<V> {
518 fn join(&mut self, other: &Self) -> bool {
519 match (&mut self.0, &other.0) {
520 (_, StateData::Unreachable) => false,
521 (StateData::Unreachable, _) => {
522 *self = other.clone();
525 (StateData::Reachable(this), StateData::Reachable(other)) => this.join(other),
// Fields of `Map` (the `pub struct Map {` header and the `value_count` field used by
// `register_with_ty`/`initialize_start_block` are outside the visible lines).
// Places form a forest: roots per local, children linked via `PlaceInfo`.
532 locals: IndexVec<Local, Option<PlaceIndex>>,
// Edge lookup: (parent place, projection element) -> child place.
533 projections: FxHashMap<(PlaceIndex, ProjElem), PlaceIndex>,
534 places: IndexVec<PlaceIndex, PlaceInfo>,
// Methods of `impl Map` (the `impl Map {` header is outside the visible lines).
539 pub fn new() -> Self {
541 locals: IndexVec::new(),
542 projections: FxHashMap::default(),
543 places: IndexVec::new(),
548 /// Register all places with suitable types up to a certain dereference depth (to prevent cycles).
549 pub fn register_with_filter<'tcx>(
552 source: &impl HasLocalDecls<'tcx>,
554 mut filter: impl FnMut(Ty<'tcx>) -> bool,
556 let mut projection = Vec::new();
557 for (local, decl) in source.local_decls().iter_enumerated() {
558 self.register_with_filter_rec(
// Recursive worker: registers the current (local, projection) if the filter accepts
// its type, then descends through derefs (bounded by `max_derefs`) and fields.
569 fn register_with_filter_rec<'tcx>(
574 projection: &mut Vec<PlaceElem<'tcx>>,
576 filter: &mut impl FnMut(Ty<'tcx>) -> bool,
579 // This might fail if `ty` is not scalar.
580 let _ = self.register_with_ty(local, projection, ty);
583 if let Some(ty::TypeAndMut { ty, .. }) = ty.builtin_deref(false) {
584 projection.push(PlaceElem::Deref);
585 self.register_with_filter_rec(tcx, max_derefs - 1, local, projection, ty, filter);
589 iter_fields(ty, tcx, |variant, field, ty| {
590 if variant.is_some() {
591 // Downcasts are currently not supported.
594 projection.push(PlaceElem::Field(field, ty));
595 self.register_with_filter_rec(tcx, max_derefs, local, projection, ty, filter);
603 projection: &[PlaceElem<'tcx>],
604 ) -> Result<PlaceIndex, ()> {
605 // Get the base index of the local.
607 *self.locals.get_or_insert_with(local, || self.places.push(PlaceInfo::new(None)));
609 // Apply the projection.
610 for &elem in projection {
612 PlaceElem::Downcast(..) => return Err(()),
615 let elem = elem.try_into()?;
616 index = *self.projections.entry((index, elem)).or_insert_with(|| {
617 // Prepend new child to the linked list.
618 let next = self.places.push(PlaceInfo::new(Some(elem)));
619 self.places[next].next_sibling = self.places[index].first_child;
620 self.places[index].first_child = Some(next);
628 pub fn register<'tcx>(
631 projection: &[PlaceElem<'tcx>],
632 decls: &impl HasLocalDecls<'tcx>,
634 ) -> Result<(), ()> {
637 .fold(PlaceTy::from_ty(decls.local_decls()[local].ty), |place_ty, &elem| {
638 place_ty.projection_ty(tcx, elem)
641 let place_ty = Place::ty_from(local, projection, decls, tcx);
642 if place_ty.variant_index.is_some() {
645 self.register_with_ty(local, projection, place_ty.ty)
648 fn register_with_ty<'tcx>(
651 projection: &[PlaceElem<'tcx>],
653 ) -> Result<(), ()> {
655 // Currently, only scalar types are allowed, because they are atomic
656 // and therefore do not require invalidation of parent places.
660 let place = self.make_place(local, projection)?;
662 // Allocate a value slot if it doesn't have one.
663 if self.places[place].value_index.is_none() {
664 self.places[place].value_index = Some(self.value_count.into());
665 self.value_count += 1;
// Follows one projection edge; `None` if the child place was never registered.
671 pub fn apply_elem(&self, place: PlaceIndex, elem: ProjElem) -> Option<PlaceIndex> {
672 self.projections.get(&(place, elem)).copied()
// Resolves a MIR place to its `PlaceIndex`, if the whole projection chain is tracked.
675 pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
676 let mut index = *self.locals.get(place.local)?.as_ref()?;
678 for &elem in place.projection {
679 index = self.apply_elem(index, elem.try_into().ok()?)?;
685 pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
686 Children::new(self, parent)
// Calls `f` on `root` and all of its descendants in preorder.
689 pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) {
691 for child in self.children(root) {
692 self.preorder_invoke(child, f);
// Per-node data of the place forest (the `struct PlaceInfo {` header is outside
// the visible lines). Children are stored as an intrusive singly-linked list.
699 next_sibling: Option<PlaceIndex>,
700 first_child: Option<PlaceIndex>,
701 /// The projection used to go from parent to this node (only None for root).
702 proj_elem: Option<ProjElem>,
// `Some` iff this place is tracked, i.e. it owns a slot in `State`.
703 value_index: Option<ValueIndex>,
707 fn new(proj_elem: Option<ProjElem>) -> Self {
708 Self { next_sibling: None, first_child: None, proj_elem, value_index: None }
// Iterator over a place's direct children, walking the `first_child`/`next_sibling`
// linked list stored in `PlaceInfo`.
712 struct Children<'a> {
714 next: Option<PlaceIndex>,
717 impl<'a> Children<'a> {
718 fn new(map: &'a Map, parent: PlaceIndex) -> Self {
719 Self { map, next: map.places[parent].first_child }
723 impl<'a> Iterator for Children<'a> {
724 type Item = PlaceIndex;
726 fn next(&mut self) -> Option<Self::Item> {
729 self.next = self.map.places[child].next_sibling;
// Result of abstractly evaluating an operand/rvalue: either a computed value,
// a tracked place the value can be read from, (for rvalues) a reference to a
// tracked place, or `Unknown`.
737 // FIXME: See if we can get rid of `Unknown`.
738 pub enum ValueOrPlace<V> {
744 pub enum ValueOrPlaceOrRef<V> {
// `ValueOrPlace` embeds into the larger enum; only `Ref` has no counterpart.
751 impl<V> From<ValueOrPlace<V>> for ValueOrPlaceOrRef<V> {
752 fn from(x: ValueOrPlace<V>) -> Self {
754 ValueOrPlace::Value(value) => ValueOrPlaceOrRef::Value(value),
755 ValueOrPlace::Place(place) => ValueOrPlaceOrRef::Place(place),
756 ValueOrPlace::Unknown => ValueOrPlaceOrRef::Unknown,
// Lattice extremal elements required of `ValueAnalysis::Value`; implemented here
// for the framework's `FlatSet` lattice.
761 pub trait HasBottom {
769 impl<V> HasBottom for FlatSet<V> {
770 fn bottom() -> Self {
775 impl<V> HasTop for FlatSet<V> {
781 /// Currently, we only track places through deref and field projections.
783 /// For now, downcast is not allowed due to aliasing between variants (see #101168).
784 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
// Lossy conversion from MIR projection elements; anything other than
// `Deref`/`Field` is rejected so such places simply cannot be registered.
790 impl<V, T> TryFrom<ProjectionElem<V, T>> for ProjElem {
793 fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
795 ProjectionElem::Deref => Ok(ProjElem::Deref),
796 ProjectionElem::Field(field, _) => Ok(ProjElem::Field(field)),
// Invokes `f` for each (variant, field index, field type) of `ty`. Visible arms
// handle tuples, ADTs (with normalized field types) and closures (via upvars).
802 fn iter_fields<'tcx>(
805 mut f: impl FnMut(Option<VariantIdx>, Field, Ty<'tcx>),
809 for (field, ty) in list.iter().enumerate() {
810 f(None, field.into(), ty);
813 ty::Adt(def, substs) => {
814 for (v_index, v_def) in def.variants().iter_enumerated() {
815 for (f_index, f_def) in v_def.fields.iter().enumerate() {
816 let field_ty = f_def.ty(tcx, substs);
818 .try_normalize_erasing_regions(ty::ParamEnv::reveal_all(), field_ty)
819 .unwrap_or(field_ty);
820 f(Some(v_index), f_index.into(), field_ty);
824 ty::Closure(_, substs) => {
825 iter_fields(substs.as_closure().tupled_upvars_ty(), tcx, f);
// Pretty-printer worker: prints this place's value (or, when `old` is given, a
// +/- diff prefixed with U+001F markers consumed by the dataflow graphviz output),
// then recurses into tracked children with a readable place string.
831 fn debug_with_context_rec<V: Debug + Eq>(
834 new: &IndexVec<ValueIndex, V>,
835 old: Option<&IndexVec<ValueIndex, V>>,
837 f: &mut Formatter<'_>,
838 ) -> std::fmt::Result {
839 if let Some(value) = map.places[place].value_index {
841 None => writeln!(f, "{}: {:?}", place_str, new[value])?,
843 if new[value] != old[value] {
844 writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
845 writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
851 for child in map.children(place) {
852 let info_elem = map.places[child].proj_elem.unwrap();
853 let child_place_str = match info_elem {
854 ProjElem::Deref => format!("*{}", place_str),
855 ProjElem::Field(field) => {
// Parenthesize `*p` before appending `.field` so the string parses as intended.
856 if place_str.starts_with("*") {
857 format!("({}).{}", place_str, field.index())
859 format!("{}.{}", place_str, field.index())
863 debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
// Entry point of the pretty-printer: walks every registered local's place tree.
869 fn debug_with_context<V: Debug + Eq>(
870 new: &IndexVec<ValueIndex, V>,
871 old: Option<&IndexVec<ValueIndex, V>>,
873 f: &mut Formatter<'_>,
874 ) -> std::fmt::Result {
875 for (local, place) in map.locals.iter_enumerated() {
876 if let Some(place) = place {
877 debug_with_context_rec(*place, &format!("{:?}", local), new, old, map, f)?;
// Hooks the pretty-printer into the dataflow framework's debug output
// (`fmt_with` for full states, the second method for diffs between states).
883 impl<'tcx, T> DebugWithContext<ValueAnalysisWrapper<T>> for State<T::Value>
885 T: ValueAnalysis<'tcx>,
888 fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
890 StateData::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
891 StateData::Unreachable => write!(f, "unreachable"),
898 ctxt: &ValueAnalysisWrapper<T>,
899 f: &mut Formatter<'_>,
900 ) -> std::fmt::Result {
901 match (&self.0, &old.0) {
902 (StateData::Reachable(this), StateData::Reachable(old)) => {
903 debug_with_context(this, Some(old), ctxt.0.map(), f)
905 _ => Ok(()), // Consider printing something here.