1 //! This module provides a framework on top of the normal MIR dataflow framework to simplify the
2 //! implementation of analyses that track the values stored in places of interest.
4 //! The default methods of [`ValueAnalysis`] (prefixed with `super_` instead of `handle_`)
5 //! provide some behavior that should be valid for all abstract domains that are based only on the
6 //! value stored in a certain place. On top of these default rules, an implementation should
7 //! override some of the `handle_` methods. For an example, see `ConstAnalysis`.
//! An implementation must also provide a [`Map`]. Before the analysis begins, all places that
10 //! should be tracked during the analysis must be registered. Currently, the projections of these
11 //! places may only contain derefs, fields and downcasts (otherwise registration fails). During the
12 //! analysis, no new places can be registered.
14 //! Note that if you want to track values behind references, you have to register the dereferenced
15 //! place. For example: Assume `let x = (0, 0)` and that we want to propagate values from `x.0` and
16 //! `x.1` also through the assignment `let y = &x`. In this case, we should register `x.0`, `x.1`,
17 //! `(*y).0` and `(*y).1`.
22 //! Warning: This is a semi-formal attempt to argue for the correctness of this analysis. If you
23 //! find any weak spots, let me know! Recommended reading: Abstract Interpretation. We will use the
24 //! term "place" to refer to a place expression (like `mir::Place`), and we will call the
25 //! underlying entity "object". For instance, `*_1` and `*_2` are not the same place, but depending
26 //! on the value of `_1` and `_2`, they could refer to the same object. Also, the same place can
27 //! refer to different objects during execution. If `_1` is reassigned, then `*_1` may refer to
28 //! different objects before and after assignment. Additionally, when saying "access to a place",
29 //! what we really mean is "access to an object denoted by arbitrary projections of that place".
31 //! In the following, we will assume a constant propagation analysis. Our analysis is correct if
32 //! every transfer function is correct. This is the case if for every pair (f, f#) and abstract
33 //! state s, we have f(y(s)) <= y(f#(s)), where s is a mapping from tracked place to top, bottom or
34 //! a constant. Since pointers (and mutable references) are not tracked, but can be used to change
35 //! values in the concrete domain, f# must assume that all places that can be affected in this way
36 //! for a given program point are already marked with top in s (otherwise many assignments and
37 //! function calls would have no choice but to mark all tracked places with top). This leads us to
38 //! an invariant: For all possible program points where there could possibly exist means of mutable
39 //! access to a tracked place (in the concrete domain), this place must be assigned to top (in the
40 //! abstract domain). The concretization function y can be defined as expected for the constant
41 //! propagation analysis, although the concrete state of course contains all kinds of non-tracked
42 //! data. However, by the invariant above, no mutable access to tracked places that are not marked
43 //! with top may be introduced.
45 //! Note that we (at least currently) do not differentiate between "this place may assume different
46 //! values" and "a pointer to this place escaped the analysis". However, we still want to handle
47 //! assignments to constants as usual for f#. This adds an assumption: Whenever we have an
48 //! assignment that is captured by the analysis, all mutable access to the underlying place (which
49 //! is not observable by the analysis) must be invalidated. This is (hopefully) covered by Stacked
52 //! To be continued...
54 use std::fmt::{Debug, Formatter};
56 use rustc_data_structures::fx::FxHashMap;
57 use rustc_index::vec::IndexVec;
58 use rustc_middle::mir::tcx::PlaceTy;
59 use rustc_middle::mir::*;
60 use rustc_middle::ty::{self, Ty, TyCtxt};
61 use rustc_target::abi::VariantIdx;
64 fmt::DebugWithContext, lattice::FlatSet, Analysis, AnalysisDomain, CallReturnPlaces,
65 JoinSemiLattice, SwitchIntEdgeEffects,
/// A framework trait for dataflow analyses that track the values stored in places.
///
/// Each `handle_*` method is an override point; the corresponding `super_*` method supplies the
/// default behavior (analogous to the MIR visitor pattern). Overrides should usually delegate to
/// `super_*` for the cases they do not handle themselves.
pub trait ValueAnalysis<'tcx> {
    /// For each place of interest, the analysis tracks a value of the given type.
    type Value: Clone + JoinSemiLattice + HasBottom + HasTop;

    /// Name of the analysis, used for debug output.
    const NAME: &'static str;

    /// The map of tracked places. It must be fully constructed before the analysis starts.
    fn map(&self) -> &Map;

    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
        self.super_statement(statement, state)
    }

    fn super_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
        match &statement.kind {
            StatementKind::Assign(box (place, rvalue)) => {
                self.handle_assign(*place, rvalue, state);
            }
            StatementKind::SetDiscriminant { .. } => {
                // Could treat this as writing a constant to a pseudo-place.
                // But discriminants are currently not tracked, so we do nothing.
                // Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84
            }
            StatementKind::CopyNonOverlapping(..) => {
                // FIXME: What to do here?
            }
            StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
                // It is UB to read from an uninitialized or unallocated local.
                state.flood(Place::from(*local).as_ref(), self.map());
            }
            StatementKind::Deinit(box place) => {
                // It is UB to read `uninit` bytes.
                state.flood(place.as_ref(), self.map());
            }
            StatementKind::Nop
            | StatementKind::Retag(..)
            | StatementKind::FakeRead(..)
            | StatementKind::Coverage(..)
            | StatementKind::AscribeUserType(..) => (),
        }
    }

    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        self.super_assign(target, rvalue, state)
    }

    fn super_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        // Evaluate the right-hand side first, then write the result into the target place.
        let result = self.handle_rvalue(rvalue, state);
        state.assign(target.as_ref(), result, self.map());
    }

    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlaceOrRef<Self::Value> {
        self.super_rvalue(rvalue, state)
    }

    fn super_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlaceOrRef<Self::Value> {
        match rvalue {
            Rvalue::Use(operand) => self.handle_operand(operand, state).into(),
            // A shared borrow does not invalidate anything: remember which place it refers to,
            // so a later `Deref` of the result can be resolved.
            Rvalue::Ref(_, BorrowKind::Shared, place) => self
                .map()
                .find(place.as_ref())
                .map(ValueOrPlaceOrRef::Ref)
                .unwrap_or(ValueOrPlaceOrRef::Unknown),
            Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) => {
                // A mutable borrow or raw pointer creates untracked means of mutation, so the
                // borrowed place must be invalidated (see the module-level correctness argument).
                state.flood(place.as_ref(), self.map());
                ValueOrPlaceOrRef::Unknown
            }
            Rvalue::CopyForDeref(place) => {
                self.handle_operand(&Operand::Copy(*place), state).into()
            }
            _ => ValueOrPlaceOrRef::Unknown,
        }
    }

    fn handle_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        self.super_operand(operand, state)
    }

    fn super_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        match operand {
            Operand::Constant(box constant) => {
                ValueOrPlace::Value(self.handle_constant(constant, state))
            }
            Operand::Copy(place) | Operand::Move(place) => {
                // Do we want to handle moves differently? Could flood place with bottom.
                self.map()
                    .find(place.as_ref())
                    .map(ValueOrPlace::Place)
                    .unwrap_or(ValueOrPlace::Unknown)
            }
        }
    }

    fn handle_constant(
        &self,
        constant: &Constant<'tcx>,
        state: &mut State<Self::Value>,
    ) -> Self::Value {
        self.super_constant(constant, state)
    }

    fn super_constant(
        &self,
        _constant: &Constant<'tcx>,
        _state: &mut State<Self::Value>,
    ) -> Self::Value {
        // By default, constants are not interpreted at all.
        Self::Value::top()
    }

    /// The effect of a successful function call return should not be
    /// applied here, see [`Analysis::apply_terminator_effect`].
    fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
        self.super_terminator(terminator, state)
    }

    fn super_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
        match &terminator.kind {
            TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                // Effect is applied by `handle_call_return`.
            }
            TerminatorKind::Drop { place, .. } => {
                // Place can still be accessed after drop, and drop has mutable access to it.
                state.flood(place.as_ref(), self.map());
            }
            TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } => {
                // They would have an effect, but are not allowed in this phase.
                bug!("encountered disallowed terminator");
            }
            _ => {
                // The other terminators can be ignored.
            }
        }
    }

    fn handle_call_return(
        &self,
        return_places: CallReturnPlaces<'_, 'tcx>,
        state: &mut State<Self::Value>,
    ) {
        self.super_call_return(return_places, state)
    }

    fn super_call_return(
        &self,
        return_places: CallReturnPlaces<'_, 'tcx>,
        state: &mut State<Self::Value>,
    ) {
        // The callee has (had) mutable access to the return places, so invalidate them.
        return_places.for_each(|place| {
            state.flood(place.as_ref(), self.map());
        })
    }

    fn handle_switch_int(
        &self,
        discr: &Operand<'tcx>,
        apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
    ) {
        self.super_switch_int(discr, apply_edge_effects)
    }

    fn super_switch_int(
        &self,
        _discr: &Operand<'tcx>,
        _apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
    ) {
        // By default, branching on a value has no effect on the state.
    }

    /// Wrap this analysis so it can be run by the generic dataflow framework.
    fn wrap(self) -> ValueAnalysisWrapper<Self>
    where
        Self: Sized,
    {
        ValueAnalysisWrapper(self)
    }
}
/// Adapter that makes a [`ValueAnalysis`] usable with the generic dataflow framework
/// (see the [`AnalysisDomain`] and [`Analysis`] implementations below).
pub struct ValueAnalysisWrapper<T>(pub T);
impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper<T> {
    type Domain = State<T::Value>;

    type Direction = crate::Forward;

    const NAME: &'static str = T::NAME;

    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
        // The lattice bottom is the `unreachable` state, not the all-⊥ mapping (see `State`).
        State(StateData::Unreachable)
    }

    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
        // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
        // This utilizes that reading from an uninitialized place is UB.
        assert!(matches!(state.0, StateData::Unreachable));
        let values = IndexVec::from_elem_n(T::Value::bottom(), self.0.map().value_count);
        *state = State(StateData::Reachable(values));
        for arg in body.args_iter() {
            // Arguments have caller-provided values, so they must start at ⊤.
            state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map());
        }
    }
}
impl<'tcx, T> Analysis<'tcx> for ValueAnalysisWrapper<T>
where
    T: ValueAnalysis<'tcx>,
{
    // Every transfer function below is a no-op on unreachable states: all operations on
    // `StateData::Unreachable` are ignored by design (see the `State` docs).

    fn apply_statement_effect(
        &self,
        state: &mut Self::Domain,
        statement: &Statement<'tcx>,
        _location: Location,
    ) {
        if state.is_reachable() {
            self.0.handle_statement(statement, state);
        }
    }

    fn apply_terminator_effect(
        &self,
        state: &mut Self::Domain,
        terminator: &Terminator<'tcx>,
        _location: Location,
    ) {
        if state.is_reachable() {
            self.0.handle_terminator(terminator, state);
        }
    }

    fn apply_call_return_effect(
        &self,
        state: &mut Self::Domain,
        _block: BasicBlock,
        return_places: crate::CallReturnPlaces<'_, 'tcx>,
    ) {
        if state.is_reachable() {
            self.0.handle_call_return(return_places, state)
        }
    }

    fn apply_switch_int_edge_effects(
        &self,
        _block: BasicBlock,
        discr: &Operand<'tcx>,
        apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
    ) {
        // FIXME: Dataflow framework provides no access to current state here.
        self.0.handle_switch_int(discr, apply_edge_effects)
    }
}
rustc_index::newtype_index!(
    /// This index uniquely identifies a place.
    ///
    /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
    /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
    pub struct PlaceIndex {}
);
rustc_index::newtype_index!(
    /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
    ///
    /// It is an implementation detail of this module.
    struct ValueIndex {}
);
/// The storage behind [`State`]: either a value slot per tracked place, or `Unreachable`.
#[derive(PartialEq, Eq, Clone, Debug)]
enum StateData<V> {
    Reachable(IndexVec<ValueIndex, V>),
    Unreachable,
}
363 /// The dataflow state for an instance of [`ValueAnalysis`].
365 /// Every instance specifies a lattice that represents the possible values of a single tracked
/// place. If we call this lattice `V` and the set of tracked places `P`, then a [`State`] is an
367 /// element of `{unreachable} ∪ (P -> V)`. This again forms a lattice, where the bottom element is
368 /// `unreachable` and the top element is the mapping `p ↦ ⊤`. Note that the mapping `p ↦ ⊥` is not
369 /// the bottom element (because joining an unreachable and any other reachable state yields a
370 /// reachable state). All operations on unreachable states are ignored.
372 /// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
// Invariant: the inner `StateData` is `Reachable` with exactly `Map::value_count` slots,
// or `Unreachable`.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct State<V>(StateData<V>);
impl<V: Clone + HasTop> State<V> {
    pub fn is_reachable(&self) -> bool {
        matches!(&self.0, StateData::Reachable(_))
    }

    pub fn mark_unreachable(&mut self) {
        self.0 = StateData::Unreachable;
    }

    /// Assign ⊤ to every tracked place.
    pub fn flood_all(&mut self) {
        self.flood_all_with(V::top())
    }

    /// Assign the given value to every tracked place.
    pub fn flood_all_with(&mut self, value: V) {
        let StateData::Reachable(values) = &mut self.0 else { return };
        values.raw.fill(value);
    }

    /// Assign the given value to all tracked projections of `place` (no-op if untracked).
    pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
        if let Some(root) = map.find(place) {
            self.flood_idx_with(root, map, value);
        }
    }

    /// Assign ⊤ to all tracked projections of `place` (no-op if untracked).
    pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
        self.flood_with(place, map, V::top())
    }

    /// Assign the given value to `place` and every tracked place below it in the place tree.
    pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) {
        let StateData::Reachable(values) = &mut self.0 else { return };
        map.preorder_invoke(place, &mut |place| {
            if let Some(vi) = map.places[place].value_index {
                values[vi] = value.clone();
            }
        });
    }

    pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) {
        self.flood_idx_with(place, map, V::top())
    }

    /// Copy the value (and all tracked child values) of `source` into `target`.
    ///
    /// This method assumes that the given places are not overlapping, and that we can therefore
    /// copy all entries one after another.
    pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
        let StateData::Reachable(values) = &mut self.0 else { return };

        // If both places are tracked, we copy the value to the target. If the target is tracked,
        // but the source is not, we have to invalidate the value in target. If the target is not
        // tracked, then we don't have to do anything.
        if let Some(target_value) = map.places[target].value_index {
            if let Some(source_value) = map.places[source].value_index {
                values[target_value] = values[source_value].clone();
            } else {
                values[target_value] = V::top();
            }
        }
        for target_child in map.children(target) {
            // Try to find corresponding child and recurse. Reasoning is similar as above.
            let projection = map.places[target_child].proj_elem.unwrap();
            if let Some(source_child) = map.projections.get(&(source, projection)) {
                self.assign_place_idx(target_child, *source_child, map);
            } else {
                self.flood_idx(target_child, map);
            }
        }
    }

    /// Record the result of an assignment to `target` (no-op if `target` is untracked).
    pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlaceOrRef<V>, map: &Map) {
        if let Some(target) = map.find(target) {
            self.assign_idx(target, result, map);
        } else {
            // We don't track this place nor any projections, assignment can be ignored.
        }
    }

    pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlaceOrRef<V>, map: &Map) {
        match result {
            ValueOrPlaceOrRef::Value(value) => {
                // First flood the target place in case we also track any projections (although
                // this scenario is currently not well-supported by the API).
                self.flood_idx(target, map);
                let StateData::Reachable(values) = &mut self.0 else { return };
                if let Some(value_index) = map.places[target].value_index {
                    values[value_index] = value;
                }
            }
            ValueOrPlaceOrRef::Place(source) => self.assign_place_idx(target, source, map),
            ValueOrPlaceOrRef::Ref(source) => {
                // The reference value itself (a pointer) is not tracked, so it becomes ⊤ ...
                let StateData::Reachable(values) = &mut self.0 else { return };
                if let Some(value_index) = map.places[target].value_index {
                    values[value_index] = V::top();
                }
                // ... but the dereferenced target, if tracked, now aliases `source`.
                if let Some(target_deref) = map.apply_elem(target, ProjElem::Deref) {
                    self.assign_place_idx(target_deref, source, map);
                }
            }
            ValueOrPlaceOrRef::Unknown => {
                self.flood_idx(target, map);
            }
        }
    }

    /// Retrieve the value of `place`, or ⊤ if it is not tracked.
    pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V {
        map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top())
    }

    /// Retrieve the value stored for `place`, or ⊤ if it has no value slot or the state is
    /// unreachable.
    pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V {
        match &self.0 {
            StateData::Reachable(values) => {
                map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::top())
            }
            StateData::Unreachable => V::top(),
        }
    }
}
impl<V: JoinSemiLattice + Clone> JoinSemiLattice for State<V> {
    fn join(&mut self, other: &Self) -> bool {
        // `Unreachable` is the bottom element: joining with it changes nothing, and joining
        // an unreachable state with a reachable one yields the reachable state.
        match (&mut self.0, &other.0) {
            (_, StateData::Unreachable) => false,
            (StateData::Unreachable, _) => {
                *self = other.clone();
                true
            }
            (StateData::Reachable(this), StateData::Reachable(other)) => this.join(other),
        }
    }
}
/// Partial mapping from [`Place`] to [`PlaceIndex`], where some places also have a [`ValueIndex`].
///
/// Tracked places form a forest (one tree per local); the trees are stored as
/// first-child/next-sibling links in `places`, with `projections` providing O(1) child lookup.
pub struct Map {
    locals: IndexVec<Local, Option<PlaceIndex>>,
    projections: FxHashMap<(PlaceIndex, ProjElem), PlaceIndex>,
    places: IndexVec<PlaceIndex, PlaceInfo>,
    // Number of tracked places, i.e. allocated `ValueIndex`es.
    value_count: usize,
}
impl Map {
    pub fn new() -> Self {
        Self {
            locals: IndexVec::new(),
            projections: FxHashMap::default(),
            places: IndexVec::new(),
            value_count: 0,
        }
    }

    /// Register all places with suitable types up to a certain dereference depth (to prevent cycles).
    pub fn register_with_filter<'tcx>(
        &mut self,
        tcx: TyCtxt<'tcx>,
        source: &impl HasLocalDecls<'tcx>,
        max_derefs: u32,
        mut filter: impl FnMut(Ty<'tcx>) -> bool,
    ) {
        // `projection` is reused as a scratch path while walking each local's type tree.
        let mut projection = Vec::new();
        for (local, decl) in source.local_decls().iter_enumerated() {
            self.register_with_filter_rec(
                tcx,
                max_derefs,
                local,
                &mut projection,
                decl.ty,
                &mut filter,
            );
        }
    }

    fn register_with_filter_rec<'tcx>(
        &mut self,
        tcx: TyCtxt<'tcx>,
        max_derefs: u32,
        local: Local,
        projection: &mut Vec<PlaceElem<'tcx>>,
        ty: Ty<'tcx>,
        filter: &mut impl FnMut(Ty<'tcx>) -> bool,
    ) {
        if filter(ty) {
            // This might fail if `ty` is not scalar.
            let _ = self.register_with_ty(local, projection, ty);
        }
        if max_derefs > 0 {
            if let Some(ty::TypeAndMut { ty, .. }) = ty.builtin_deref(false) {
                projection.push(PlaceElem::Deref);
                self.register_with_filter_rec(tcx, max_derefs - 1, local, projection, ty, filter);
                projection.pop();
            }
        }
        iter_fields(ty, tcx, |variant, field, ty| {
            if variant.is_some() {
                // Downcasts are currently not supported.
                return;
            }
            projection.push(PlaceElem::Field(field, ty));
            self.register_with_filter_rec(tcx, max_derefs, local, projection, ty, filter);
            projection.pop();
        });
    }

    /// Find or create the `PlaceIndex` for the given place; fails on unsupported projections.
    fn make_place<'tcx>(
        &mut self,
        local: Local,
        projection: &[PlaceElem<'tcx>],
    ) -> Result<PlaceIndex, ()> {
        // Get the base index of the local.
        let mut index =
            *self.locals.get_or_insert_with(local, || self.places.push(PlaceInfo::new(None)));

        // Apply the projection.
        for &elem in projection {
            match elem {
                PlaceElem::Downcast(..) => return Err(()),
                _ => (),
            }
            let elem = elem.try_into()?;
            index = *self.projections.entry((index, elem)).or_insert_with(|| {
                // Prepend new child to the linked list.
                let next = self.places.push(PlaceInfo::new(Some(elem)));
                self.places[next].next_sibling = self.places[index].first_child;
                self.places[index].first_child = Some(next);
                next
            });
        }

        Ok(index)
    }

    pub fn register<'tcx>(
        &mut self,
        local: Local,
        projection: &[PlaceElem<'tcx>],
        decls: &impl HasLocalDecls<'tcx>,
        tcx: TyCtxt<'tcx>,
    ) -> Result<(), ()> {
        // NOTE(review): the result of this fold is discarded — it only walks the projection
        // types; presumably a validity check. Confirm whether it is still needed.
        projection
            .iter()
            .fold(PlaceTy::from_ty(decls.local_decls()[local].ty), |place_ty, &elem| {
                place_ty.projection_ty(tcx, elem)
            });

        let place_ty = Place::ty_from(local, projection, decls, tcx);
        if place_ty.variant_index.is_some() {
            // Downcast projections are rejected (aliasing between variants).
            return Err(());
        }
        self.register_with_ty(local, projection, place_ty.ty)
    }

    /// Register the given place with type `ty`, allocating a value slot for it.
    fn register_with_ty<'tcx>(
        &mut self,
        local: Local,
        projection: &[PlaceElem<'tcx>],
        ty: Ty<'tcx>,
    ) -> Result<(), ()> {
        if !ty.is_scalar() {
            // Currently, only scalar types are allowed, because they are atomic
            // and therefore do not require invalidation of parent places.
            return Err(());
        }

        let place = self.make_place(local, projection)?;

        // Allocate a value slot if it doesn't have one.
        if self.places[place].value_index.is_none() {
            self.places[place].value_index = Some(self.value_count.into());
            self.value_count += 1;
        }

        Ok(())
    }

    /// Apply a single projection element, yielding the corresponding child, if any.
    pub fn apply_elem(&self, place: PlaceIndex, elem: ProjElem) -> Option<PlaceIndex> {
        self.projections.get(&(place, elem)).copied()
    }

    /// Locate the given place, if it exists in the tree.
    pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
        let mut index = *self.locals.get(place.local)?.as_ref()?;

        for &elem in place.projection {
            index = self.apply_elem(index, elem.try_into().ok()?)?;
        }

        Some(index)
    }

    /// Iterate over all direct children of the given place.
    pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
        Children::new(self, parent)
    }

    /// Invoke `f` on the given place and all places below it, in pre-order.
    pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) {
        f(root);
        for child in self.children(root) {
            self.preorder_invoke(child, f);
        }
    }
}
/// Node of the place tree stored in [`Map`], linked via first-child/next-sibling.
struct PlaceInfo {
    next_sibling: Option<PlaceIndex>,
    first_child: Option<PlaceIndex>,
    /// The projection used to go from parent to this node (only None for root).
    proj_elem: Option<ProjElem>,
    /// Value slot, present only if this place is tracked.
    value_index: Option<ValueIndex>,
}

impl PlaceInfo {
    fn new(proj_elem: Option<ProjElem>) -> Self {
        Self { next_sibling: None, first_child: None, proj_elem, value_index: None }
    }
}
/// Iterator over the direct children of a place, following the sibling links in [`Map`].
struct Children<'a> {
    map: &'a Map,
    next: Option<PlaceIndex>,
}

impl<'a> Children<'a> {
    fn new(map: &'a Map, parent: PlaceIndex) -> Self {
        Self { map, next: map.places[parent].first_child }
    }
}

impl<'a> Iterator for Children<'a> {
    type Item = PlaceIndex;

    fn next(&mut self) -> Option<Self::Item> {
        match self.next {
            Some(child) => {
                self.next = self.map.places[child].next_sibling;
                Some(child)
            }
            None => None,
        }
    }
}
/// Result of evaluating an operand: an abstract value, a tracked place holding it, or unknown.
// FIXME: See if we can get rid of `Unknown`.
pub enum ValueOrPlace<V> {
    Value(V),
    Place(PlaceIndex),
    Unknown,
}

/// Like [`ValueOrPlace`], but additionally allows the result of a shared borrow:
/// `Ref(p)` records that the result points to the tracked place `p`.
pub enum ValueOrPlaceOrRef<V> {
    Value(V),
    Place(PlaceIndex),
    Ref(PlaceIndex),
    Unknown,
}

impl<V> From<ValueOrPlace<V>> for ValueOrPlaceOrRef<V> {
    fn from(x: ValueOrPlace<V>) -> Self {
        match x {
            ValueOrPlace::Value(value) => ValueOrPlaceOrRef::Value(value),
            ValueOrPlace::Place(place) => ValueOrPlaceOrRef::Place(place),
            ValueOrPlace::Unknown => ValueOrPlaceOrRef::Unknown,
        }
    }
}
/// Lattices with a distinguished least element (required by [`ValueAnalysis::Value`]).
pub trait HasBottom {
    fn bottom() -> Self;
}

/// Lattices with a distinguished greatest element (required by [`ValueAnalysis::Value`]).
pub trait HasTop {
    fn top() -> Self;
}

impl<V> HasBottom for FlatSet<V> {
    fn bottom() -> Self {
        Self::Bottom
    }
}

impl<V> HasTop for FlatSet<V> {
    fn top() -> Self {
        Self::Top
    }
}
/// Currently, we only track places through deref and field projections.
///
/// For now, downcast is not allowed due to aliasing between variants (see #101168).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum ProjElem {
    Deref,
    Field(Field),
}

impl<V, T> TryFrom<ProjectionElem<V, T>> for ProjElem {
    type Error = ();

    fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
        match value {
            ProjectionElem::Deref => Ok(ProjElem::Deref),
            ProjectionElem::Field(field, _) => Ok(ProjElem::Field(field)),
            // Index, slice, and downcast projections are not supported.
            _ => Err(()),
        }
    }
}
/// Invoke `f` once per field of `ty` (tuples, ADTs, and closure upvars), passing the
/// variant (for enums), field index, and field type.
fn iter_fields<'tcx>(
    ty: Ty<'tcx>,
    tcx: TyCtxt<'tcx>,
    mut f: impl FnMut(Option<VariantIdx>, Field, Ty<'tcx>),
) {
    match ty.kind() {
        ty::Tuple(list) => {
            for (field, ty) in list.iter().enumerate() {
                f(None, field.into(), ty);
            }
        }
        ty::Adt(def, substs) => {
            for (v_index, v_def) in def.variants().iter_enumerated() {
                for (f_index, f_def) in v_def.fields.iter().enumerate() {
                    let field_ty = f_def.ty(tcx, substs);
                    // Normalize so that, e.g., associated types are resolved; fall back to the
                    // unnormalized type if normalization fails.
                    let field_ty = tcx
                        .try_normalize_erasing_regions(ty::ParamEnv::reveal_all(), field_ty)
                        .unwrap_or(field_ty);
                    f(Some(v_index), f_index.into(), field_ty);
                }
            }
        }
        ty::Closure(_, substs) => {
            // Closure upvars are stored as a tuple; recurse into it.
            iter_fields(substs.as_closure().tupled_upvars_ty(), tcx, f);
        }
        _ => (),
    }
}
805 fn debug_with_context_rec<V: Debug + Eq>(
808 new: &IndexVec<ValueIndex, V>,
809 old: Option<&IndexVec<ValueIndex, V>>,
811 f: &mut Formatter<'_>,
812 ) -> std::fmt::Result {
813 if let Some(value) = map.places[place].value_index {
815 None => writeln!(f, "{}: {:?}", place_str, new[value])?,
817 if new[value] != old[value] {
818 writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
819 writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
825 for child in map.children(place) {
826 let info_elem = map.places[child].proj_elem.unwrap();
827 let child_place_str = match info_elem {
828 ProjElem::Deref => format!("*{}", place_str),
829 ProjElem::Field(field) => {
830 if place_str.starts_with("*") {
831 format!("({}).{}", place_str, field.index())
833 format!("{}.{}", place_str, field.index())
837 debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
/// Print (or, if `old` is provided, diff) the values of all tracked places, rooted at each local.
fn debug_with_context<V: Debug + Eq>(
    new: &IndexVec<ValueIndex, V>,
    old: Option<&IndexVec<ValueIndex, V>>,
    map: &Map,
    f: &mut Formatter<'_>,
) -> std::fmt::Result {
    for (local, place) in map.locals.iter_enumerated() {
        if let Some(place) = place {
            debug_with_context_rec(*place, &format!("{:?}", local), new, old, map, f)?;
        }
    }
    Ok(())
}
857 impl<'tcx, T> DebugWithContext<ValueAnalysisWrapper<T>> for State<T::Value>
859 T: ValueAnalysis<'tcx>,
862 fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
864 StateData::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
865 StateData::Unreachable => write!(f, "unreachable"),
872 ctxt: &ValueAnalysisWrapper<T>,
873 f: &mut Formatter<'_>,
874 ) -> std::fmt::Result {
875 match (&self.0, &old.0) {
876 (StateData::Reachable(this), StateData::Reachable(old)) => {
877 debug_with_context(this, Some(old), ctxt.0.map(), f)
879 _ => Ok(()), // Consider printing something here.