1 //! This module provides a framework on top of the normal MIR dataflow framework to simplify the
2 //! implementation of analyses that track information about the values stored in certain places.
3 //! We are using the term "place" here to refer to a `mir::Place` (a place expression) instead of
4 //! an `interpret::Place` (a memory location).
6 //! The default methods of [`ValueAnalysis`] (prefixed with `super_` instead of `handle_`)
7 //! provide some behavior that should be valid for all abstract domains that are based only on the
8 //! value stored in a certain place. On top of these default rules, an implementation should
9 //! override some of the `handle_` methods. For an example, see `ConstAnalysis`.
11 //! An implementation must also provide a [`Map`]. Before the analysis begins, all places that
12 //! should be tracked during the analysis must be registered. During the analysis, no new places
13 //! can be registered. The [`State`] can be queried to retrieve the abstract value stored for a
14 //! certain place by passing the map.
16 //! This framework is currently experimental. Originally, it supported shared references and enum
17 //! variants. However, it was discovered that both of these were unsound, and especially references
18 //! had subtle but serious issues. In the future, they could be added back in, but we should clarify
19 //! the rules for optimizations that rely on the aliasing model first.
24 //! - The bottom state denotes uninitialized memory. Because we are only doing a sound approximation
25 //! of the actual execution, we can also use this state for places where access would be UB.
27 //! - The assignment logic in `State::assign_place_idx` assumes that the places are non-overlapping,
28 //! or identical. Note that this refers to place expressions, not memory locations.
30 //! - Currently, places that have their reference taken cannot be tracked. Although this would be
31 //! possible, it has to rely on some aliasing model, which we are not ready to commit to yet.
32 //! Because of that, we can assume that the only way to change the value behind a tracked place is
33 //! by direct assignment.
35 use std::fmt::{Debug, Formatter};
37 use rustc_data_structures::fx::FxHashMap;
38 use rustc_index::vec::IndexVec;
39 use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
40 use rustc_middle::mir::*;
41 use rustc_middle::ty::{self, Ty, TyCtxt};
42 use rustc_target::abi::VariantIdx;
44 use crate::lattice::{HasBottom, HasTop};
46 fmt::DebugWithContext, Analysis, AnalysisDomain, CallReturnPlaces, JoinSemiLattice,
// Core trait of the framework. Implementors override the `handle_*` hooks; the
// matching `super_*` methods supply the default transfer behavior described in the
// module docs. NOTE(review): this sample has elided lines (gapped inner numbering,
// missing braces/signatures) — comments below describe only the visible code.
50 pub trait ValueAnalysis<'tcx> {
51 /// For each place of interest, the analysis tracks a value of the given type.
52 type Value: Clone + JoinSemiLattice + HasBottom + HasTop;
// Human-readable analysis name, forwarded to the dataflow framework by the
// `AnalysisDomain` impl of `ValueAnalysisWrapper` below.
54 const NAME: &'static str;
// Accessor for the place map built before the analysis starts.
// NOTE(review): `⤅` looks like a mangled `&Map;` return type — confirm against upstream.
56 fn map(&self) -> ⤅
// Entry hook for statements; by default delegates to `super_statement`.
58 fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
59 self.super_statement(statement, state)
// Default per-statement transfer function: dispatches on the statement kind.
62 fn super_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
63 match &statement.kind {
64 StatementKind::Assign(box (place, rvalue)) => {
65 self.handle_assign(*place, rvalue, state);
67 StatementKind::SetDiscriminant { .. } => {
68 // Could treat this as writing a constant to a pseudo-place.
69 // But discriminants are currently not tracked, so we do nothing.
70 // Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84
72 StatementKind::Intrinsic(box intrinsic) => {
73 self.handle_intrinsic(intrinsic, state);
75 StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
76 // StorageLive leaves the local in an uninitialized state.
77 // StorageDead makes it UB to access the local afterwards.
78 state.flood_with(Place::from(*local).as_ref(), self.map(), Self::Value::bottom());
80 StatementKind::Deinit(box place) => {
81 // Deinit makes the place uninitialized.
82 state.flood_with(place.as_ref(), self.map(), Self::Value::bottom());
84 StatementKind::Retag(..) => {
85 // We don't track references.
87 StatementKind::ConstEvalCounter
89 | StatementKind::FakeRead(..)
90 | StatementKind::Coverage(..)
91 | StatementKind::AscribeUserType(..) => (),
// Hook for non-diverging intrinsics; default delegates to `super_intrinsic`.
// (Signature line partially elided in this sample.)
97 intrinsic: &NonDivergingIntrinsic<'tcx>,
98 state: &mut State<Self::Value>,
100 self.super_intrinsic(intrinsic, state);
// Default intrinsic handling: `assume` is soundly ignored; `copy_nonoverlapping`
// floods the destination (its new value is unknown to the analysis).
105 intrinsic: &NonDivergingIntrinsic<'tcx>,
106 state: &mut State<Self::Value>,
109 NonDivergingIntrinsic::Assume(..) => {
110 // Could use this, but ignoring it is sound.
112 NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { dst, .. }) => {
113 if let Some(place) = dst.place() {
114 state.flood(place.as_ref(), self.map());
// Hook for assignments; default delegates to `super_assign`.
123 rvalue: &Rvalue<'tcx>,
124 state: &mut State<Self::Value>,
126 self.super_assign(target, rvalue, state)
// Default assignment: evaluate the rvalue, then store the result into the target.
132 rvalue: &Rvalue<'tcx>,
133 state: &mut State<Self::Value>,
135 let result = self.handle_rvalue(rvalue, state);
136 state.assign(target.as_ref(), result, self.map());
// Hook for rvalue evaluation; default delegates to `super_rvalue`.
141 rvalue: &Rvalue<'tcx>,
142 state: &mut State<Self::Value>,
143 ) -> ValueOrPlace<Self::Value> {
144 self.super_rvalue(rvalue, state)
// Default rvalue evaluation. `Use`/`CopyForDeref` forward to operand handling;
// everything else yields an unknown value (the fall-through arm, partially elided
// here, returns top per the comment on line 170).
149 rvalue: &Rvalue<'tcx>,
150 state: &mut State<Self::Value>,
151 ) -> ValueOrPlace<Self::Value> {
153 Rvalue::Use(operand) => self.handle_operand(operand, state),
154 Rvalue::CopyForDeref(place) => self.handle_operand(&Operand::Copy(*place), state),
155 Rvalue::Ref(..) | Rvalue::AddressOf(..) => {
156 // We don't track such places.
160 | Rvalue::ThreadLocalRef(..)
163 | Rvalue::BinaryOp(..)
164 | Rvalue::CheckedBinaryOp(..)
165 | Rvalue::NullaryOp(..)
166 | Rvalue::UnaryOp(..)
167 | Rvalue::Discriminant(..)
168 | Rvalue::Aggregate(..)
169 | Rvalue::ShallowInitBox(..) => {
170 // No modification is possible through these r-values.
// Hook for operands; default delegates to `super_operand`.
178 operand: &Operand<'tcx>,
179 state: &mut State<Self::Value>,
180 ) -> ValueOrPlace<Self::Value> {
181 self.super_operand(operand, state)
// Default operand evaluation: constants go through `handle_constant`; copies/moves
// resolve the place in the map, falling back to top if it is not tracked.
186 operand: &Operand<'tcx>,
187 state: &mut State<Self::Value>,
188 ) -> ValueOrPlace<Self::Value> {
190 Operand::Constant(box constant) => {
191 ValueOrPlace::Value(self.handle_constant(constant, state))
193 Operand::Copy(place) | Operand::Move(place) => {
194 // On move, we would ideally flood the place with bottom. But with the current
195 // framework this is not possible (similar to `InterpCx::eval_operand`).
197 .find(place.as_ref())
198 .map(ValueOrPlace::Place)
199 .unwrap_or(ValueOrPlace::top())
// Hook for constants; default delegates to `super_constant`.
206 constant: &Constant<'tcx>,
207 state: &mut State<Self::Value>,
209 self.super_constant(constant, state)
// Default constant handling (body elided in this sample; parameters unused here,
// presumably returning top — TODO confirm against upstream).
214 _constant: &Constant<'tcx>,
215 _state: &mut State<Self::Value>,
220 /// The effect of a successful function call return should not be
221 /// applied here, see [`Analysis::apply_terminator_effect`].
222 fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
223 self.super_terminator(terminator, state)
// Default per-terminator transfer function.
226 fn super_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
227 match &terminator.kind {
228 TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
229 // Effect is applied by `handle_call_return`.
231 TerminatorKind::Drop { place, .. } => {
// Dropped places become uninitialized, hence the flood with bottom.
232 state.flood_with(place.as_ref(), self.map(), Self::Value::bottom());
234 TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } => {
235 // They would have an effect, but are not allowed in this phase.
236 bug!("encountered disallowed terminator");
238 TerminatorKind::Goto { .. }
239 | TerminatorKind::SwitchInt { .. }
240 | TerminatorKind::Resume
241 | TerminatorKind::Abort
242 | TerminatorKind::Return
243 | TerminatorKind::Unreachable
244 | TerminatorKind::Assert { .. }
245 | TerminatorKind::GeneratorDrop
246 | TerminatorKind::FalseEdge { .. }
247 | TerminatorKind::FalseUnwind { .. } => {
248 // These terminators have no effect on the analysis.
// Hook for the effect of returning from a call; default delegates to
// `super_call_return`.
253 fn handle_call_return(
255 return_places: CallReturnPlaces<'_, 'tcx>,
256 state: &mut State<Self::Value>,
258 self.super_call_return(return_places, state)
// Default call-return effect: the return value is unknown, so flood each
// destination place with top.
261 fn super_call_return(
263 return_places: CallReturnPlaces<'_, 'tcx>,
264 state: &mut State<Self::Value>,
266 return_places.for_each(|place| {
267 state.flood(place.as_ref(), self.map());
// Hook for `SwitchInt` edge effects; default (`super_switch_int`, elided below)
// does nothing.
271 fn handle_switch_int(
273 discr: &Operand<'tcx>,
274 apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
276 self.super_switch_int(discr, apply_edge_effects)
// Default no-op implementation — parameters intentionally unused.
281 _discr: &Operand<'tcx>,
282 _apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
// Wraps `self` so it can be run by the generic dataflow engine (see the
// `Analysis`/`AnalysisDomain` impls on `ValueAnalysisWrapper` below).
286 fn wrap(self) -> ValueAnalysisWrapper<Self>
290 ValueAnalysisWrapper(self)
// Newtype adapter that turns any `ValueAnalysis` into a dataflow `Analysis`
// (see the `AnalysisDomain`/`Analysis` impls below); created via `ValueAnalysis::wrap`.
294 pub struct ValueAnalysisWrapper<T>(pub T);
// Domain plumbing for the wrapper: the domain is `State<T::Value>`, the analysis
// runs forward, and the bottom element is the unreachable state.
296 impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper<T> {
297 type Domain = State<T::Value>;
299 type Direction = crate::Forward;
301 const NAME: &'static str = T::NAME;
// Bottom is `Unreachable`: joining it with any reachable state yields that state.
303 fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
304 State(StateData::Unreachable)
307 fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
308 // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
309 assert!(matches!(state.0, StateData::Unreachable));
310 let values = IndexVec::from_elem_n(T::Value::bottom(), self.0.map().value_count);
311 *state = State(StateData::Reachable(values));
// Function arguments have unknown values on entry, so flood them with top.
312 for arg in body.args_iter() {
313 state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map());
// Transfer functions for the wrapper: each effect is forwarded to the inner
// `ValueAnalysis`, but only while the state is reachable — unreachable states
// must stay unreachable (all operations on them are ignored, see `State` docs).
318 impl<'tcx, T> Analysis<'tcx> for ValueAnalysisWrapper<T>
320 T: ValueAnalysis<'tcx>,
322 fn apply_statement_effect(
324 state: &mut Self::Domain,
325 statement: &Statement<'tcx>,
328 if state.is_reachable() {
329 self.0.handle_statement(statement, state);
333 fn apply_terminator_effect(
335 state: &mut Self::Domain,
336 terminator: &Terminator<'tcx>,
339 if state.is_reachable() {
340 self.0.handle_terminator(terminator, state);
344 fn apply_call_return_effect(
346 state: &mut Self::Domain,
348 return_places: crate::CallReturnPlaces<'_, 'tcx>,
350 if state.is_reachable() {
351 self.0.handle_call_return(return_places, state)
// Note: no reachability guard here — the framework gives us no state to inspect.
355 fn apply_switch_int_edge_effects(
358 discr: &Operand<'tcx>,
359 apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
361 // FIXME: Dataflow framework provides no access to current state here.
362 self.0.handle_switch_int(discr, apply_edge_effects)
366 rustc_index::newtype_index!(
367 /// This index uniquely identifies a place.
369 /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
370 /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
371 pub struct PlaceIndex {}
// Second index type (the `ValueIndex` struct line is elided in this sample).
374 rustc_index::newtype_index!(
375 /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
377 /// It is an implementation detail of this module.
// Internal representation of a `State`: either a value per tracked place, or
// unreachable. NOTE(review): the `enum StateData<V>` header line and the
// `Unreachable` variant line appear elided in this sample (both are referenced
// throughout the file) — confirm against upstream.
382 #[derive(PartialEq, Eq, Debug)]
384 Reachable(IndexVec<ValueIndex, V>),
// Manual `Clone` so that `clone_from` can reuse the existing allocation.
388 impl<V: Clone> Clone for StateData<V> {
389 fn clone(&self) -> Self {
391 Self::Reachable(x) => Self::Reachable(x.clone()),
392 Self::Unreachable => Self::Unreachable,
396 fn clone_from(&mut self, source: &Self) {
397 match (&mut *self, source) {
398 (Self::Reachable(x), Self::Reachable(y)) => {
399 // We go through `raw` here, because `IndexVec` currently has a naive `clone_from`.
400 x.raw.clone_from(&y.raw);
// Reachability differs: fall back to a full clone.
402 _ => *self = source.clone(),
407 /// The dataflow state for an instance of [`ValueAnalysis`].
409 /// Every instance specifies a lattice that represents the possible values of a single tracked
410 /// place. If we call this lattice `V` and set of tracked places `P`, then a [`State`] is an
411 /// element of `{unreachable} ∪ (P -> V)`. This again forms a lattice, where the bottom element is
412 /// `unreachable` and the top element is the mapping `p ↦ ⊤`. Note that the mapping `p ↦ ⊥` is not
413 /// the bottom element (because joining an unreachable and any other reachable state yields a
414 /// reachable state). All operations on unreachable states are ignored.
416 /// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
417 #[derive(PartialEq, Eq, Debug)]
418 pub struct State<V>(StateData<V>)
// Manual `Clone` delegating to `StateData`, so `clone_from` can reuse allocations.
420 impl<V: Clone> Clone for State<V> {
421 fn clone(&self) -> Self {
425 fn clone_from(&mut self, source: &Self) {
426 self.0.clone_from(&source.0);
// State operations. Note the recurring pattern: every mutating method first
// `let … else { return }`s on `StateData::Reachable`, making all operations
// no-ops on unreachable states (as documented on `State`).
430 impl<V: Clone + HasTop + HasBottom> State<V> {
431 pub fn is_reachable(&self) -> bool {
432 matches!(&self.0, StateData::Reachable(_))
435 pub fn mark_unreachable(&mut self) {
436 self.0 = StateData::Unreachable;
// Flood every tracked place with top.
439 pub fn flood_all(&mut self) {
440 self.flood_all_with(V::top())
// Flood every tracked place with the given value.
443 pub fn flood_all_with(&mut self, value: V) {
444 let StateData::Reachable(values) = &mut self.0 else { return };
445 values.raw.fill(value);
// Flood the given place expression (and everything beneath it) with `value`.
// Silently does nothing if the place is not in the map.
448 pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
449 if let Some(root) = map.find(place) {
450 self.flood_idx_with(root, map, value);
454 pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
455 self.flood_with(place, map, V::top())
// Flood a place (by index) and all its descendants in the place tree.
458 pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) {
459 let StateData::Reachable(values) = &mut self.0 else { return };
460 map.preorder_invoke(place, &mut |place| {
461 if let Some(vi) = map.places[place].value_index {
462 values[vi] = value.clone();
467 pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) {
468 self.flood_idx_with(place, map, V::top())
471 /// Copies `source` to `target`, including all tracked places beneath.
473 /// If `target` contains a place that is not contained in `source`, it will be overwritten with
474 /// Top. Also, because this will copy all entries one after another, it may only be used for
475 /// places that are non-overlapping or identical.
476 pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
477 let StateData::Reachable(values) = &mut self.0 else { return };
479 // If both places are tracked, we copy the value to the target. If the target is tracked,
480 // but the source is not, we have to invalidate the value in target. If the target is not
481 // tracked, then we don't have to do anything.
482 if let Some(target_value) = map.places[target].value_index {
483 if let Some(source_value) = map.places[source].value_index {
484 values[target_value] = values[source_value].clone();
486 values[target_value] = V::top();
// Recurse into the tracked children of `target`.
489 for target_child in map.children(target) {
490 // Try to find corresponding child and recurse. Reasoning is similar as above.
491 let projection = map.places[target_child].proj_elem.unwrap();
492 if let Some(source_child) = map.projections.get(&(source, projection)) {
493 self.assign_place_idx(target_child, *source_child, map);
// No matching source child: its value is unknown, flood with top.
495 self.flood_idx(target_child, map);
// Record the result of an assignment into `target` (a place expression).
500 pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map) {
501 if let Some(target) = map.find(target) {
502 self.assign_idx(target, result, map);
504 // We don't track this place nor any projections, assignment can be ignored.
// Record the result of an assignment into `target` (a place index).
508 pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlace<V>, map: &Map) {
510 ValueOrPlace::Value(value) => {
511 // First flood the target place in case we also track any projections (although
512 // this scenario is currently not well-supported by the API).
513 self.flood_idx(target, map);
514 let StateData::Reachable(values) = &mut self.0 else { return };
515 if let Some(value_index) = map.places[target].value_index {
516 values[value_index] = value;
// Place-to-place assignment: copy the whole subtree.
519 ValueOrPlace::Place(source) => self.assign_place_idx(target, source, map),
523 /// Retrieve the value stored for a place, or ⊤ if it is not tracked.
524 pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V {
525 map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top())
528 /// Retrieve the value stored for a place index, or ⊤ if it is not tracked.
529 pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V {
531 StateData::Reachable(values) => {
532 map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::top())
534 StateData::Unreachable => {
535 // Because this is unreachable, we can return any value we want.
// Lattice join on states. Returns `true` iff `self` changed, as required by the
// dataflow framework: unreachable is the identity element of the join.
542 impl<V: JoinSemiLattice + Clone> JoinSemiLattice for State<V> {
543 fn join(&mut self, other: &Self) -> bool {
544 match (&mut self.0, &other.0) {
// Joining with unreachable never changes `self`.
545 (_, StateData::Unreachable) => false,
// Unreachable ⊔ x = x, and `self` definitely changed (other is reachable here).
546 (StateData::Unreachable, _) => {
547 *self = other.clone();
// Both reachable: pointwise join over the value vectors.
550 (StateData::Reachable(this), StateData::Reachable(other)) => this.join(other),
555 /// Partial mapping from [`Place`] to [`PlaceIndex`], where some places also have a [`ValueIndex`].
557 /// This data structure essentially maintains a tree of places and their projections. Some
558 /// additional bookkeeping is done, to speed up traversal over this tree:
559 /// - For iteration, every [`PlaceInfo`] contains an intrusive linked list of its children.
560 /// - To directly get the child for a specific projection, there is a `projections` map.
// NOTE(review): the `pub struct Map {` header and a `value_count` field appear
// elided in this sample (`value_count` is read/written in `from_filter` and
// `register_with_filter_rec` below) — confirm against upstream.
// Root node per local, if the local has any registered places.
563 locals: IndexVec<Local, Option<PlaceIndex>>,
// Fast lookup: (parent node, projection element) -> child node.
564 projections: FxHashMap<(PlaceIndex, TrackElem), PlaceIndex>,
// All nodes of the place tree, addressed by `PlaceIndex`.
565 places: IndexVec<PlaceIndex, PlaceInfo>,
// Constructor body of `Map::new` (the `fn new()` header is elided in this sample):
// all collections start empty; places are registered afterwards via `from_filter`.
572 locals: IndexVec::new(),
573 projections: FxHashMap::default(),
574 places: IndexVec::new(),
579 /// Returns a map that only tracks places whose type passes the filter.
581 /// This is currently the only way to create a [`Map`]. The way in which the tracked places are
582 /// chosen is an implementation detail and may not be relied upon (other than that their type
583 /// passes the filter).
584 #[instrument(skip_all, level = "debug")]
585 pub fn from_filter<'tcx>(
588 filter: impl FnMut(Ty<'tcx>) -> bool,
590 let mut map = Self::new();
// Locals whose address is taken cannot be tracked soundly (see `excluded_locals`).
591 let exclude = excluded_locals(body);
592 map.register_with_filter(tcx, body, filter, &exclude);
593 debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len());
597 /// Register all non-excluded places that pass the filter.
598 fn register_with_filter<'tcx>(
602 mut filter: impl FnMut(Ty<'tcx>) -> bool,
603 exclude: &IndexVec<Local, bool>,
605 // We use this vector as stack, pushing and popping projections.
606 let mut projection = Vec::new();
607 for (local, decl) in body.local_decls.iter_enumerated() {
609 self.register_with_filter_rec(tcx, local, &mut projection, decl.ty, &mut filter);
614 /// Potentially register the (local, projection) place and its fields, recursively.
616 /// Invariant: The projection must only contain fields.
617 fn register_with_filter_rec<'tcx>(
621 projection: &mut Vec<PlaceElem<'tcx>>,
623 filter: &mut impl FnMut(Ty<'tcx>) -> bool,
625 // Note: The framework supports only scalars for now.
626 if filter(ty) && ty.is_scalar() {
627 // We know that the projection only contains trackable elements.
628 let place = self.make_place(local, projection).unwrap();
630 // Allocate a value slot if it doesn't have one.
631 if self.places[place].value_index.is_none() {
632 self.places[place].value_index = Some(self.value_count.into());
633 self.value_count += 1;
637 // Recurse with all fields of this place.
638 iter_fields(ty, tcx, |variant, field, ty| {
639 if variant.is_some() {
640 // Downcasts are currently not supported.
// Push the field projection, recurse, then (pop elided in this sample) restore
// the stack invariant.
643 projection.push(PlaceElem::Field(field, ty));
644 self.register_with_filter_rec(tcx, local, projection, ty, filter);
649 /// Tries to add the place to the map, without allocating a value slot.
651 /// Can fail if the projection contains non-trackable elements.
// (The `fn make_place` header line is elided in this sample.)
655 projection: &[PlaceElem<'tcx>],
656 ) -> Result<PlaceIndex, ()> {
657 // Get the base index of the local.
// Lazily create the root node for this local on first use.
659 *self.locals.get_or_insert_with(local, || self.places.push(PlaceInfo::new(None)));
661 // Apply the projection.
662 for &elem in projection {
// `try_into` rejects non-trackable projection elements (see `TrackElem`).
663 let elem = elem.try_into()?;
664 index = *self.projections.entry((index, elem)).or_insert_with(|| {
665 // Prepend new child to the linked list.
666 let next = self.places.push(PlaceInfo::new(Some(elem)));
667 self.places[next].next_sibling = self.places[index].first_child;
668 self.places[index].first_child = Some(next);
676 /// Returns the number of tracked places, i.e., those for which a value can be stored.
677 pub fn tracked_places(&self) -> usize {
681 /// Applies a single projection element, yielding the corresponding child.
682 pub fn apply(&self, place: PlaceIndex, elem: TrackElem) -> Option<PlaceIndex> {
683 self.projections.get(&(place, elem)).copied()
686 /// Locates the given place, if it exists in the tree.
687 pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
688 let mut index = *self.locals.get(place.local)?.as_ref()?;
// Walk down the tree, bailing out (None) on any untracked projection element.
690 for &elem in place.projection {
691 index = self.apply(index, elem.try_into().ok()?)?;
697 /// Iterate over all direct children.
698 pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
699 Children::new(self, parent)
702 /// Invoke a function on the given place and all descendants.
703 pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) {
// Pre-order: the call `f(root)` (elided in this sample) precedes the recursion.
705 for child in self.children(root) {
706 self.preorder_invoke(child, f);
711 /// This is the information tracked for every [`PlaceIndex`] and is stored by [`Map`].
713 /// Together, `first_child` and `next_sibling` form an intrusive linked list, which is used to
714 /// model a tree structure (a replacement for a member like `children: Vec<PlaceIndex>`).
// (The `struct PlaceInfo {` header line is elided in this sample.)
717 /// We store a [`ValueIndex`] if and only if the place is tracked by the analysis.
718 value_index: Option<ValueIndex>,
720 /// The projection used to go from parent to this node (only None for root).
721 proj_elem: Option<TrackElem>,
723 /// The left-most child.
724 first_child: Option<PlaceIndex>,
726 /// Index of the sibling to the right of this node.
727 next_sibling: Option<PlaceIndex>,
// Fresh node: no children, no sibling, no value slot yet.
731 fn new(proj_elem: Option<TrackElem>) -> Self {
732 Self { next_sibling: None, first_child: None, proj_elem, value_index: None }
// Iterator over the direct children of a place node, walking the intrusive
// `first_child`/`next_sibling` linked list stored in `PlaceInfo`.
736 struct Children<'a> {
// Cursor: the next child to yield, or None when exhausted.
738 next: Option<PlaceIndex>,
741 impl<'a> Children<'a> {
742 fn new(map: &'a Map, parent: PlaceIndex) -> Self {
743 Self { map, next: map.places[parent].first_child }
747 impl<'a> Iterator for Children<'a> {
748 type Item = PlaceIndex;
750 fn next(&mut self) -> Option<Self::Item> {
// Advance the cursor to the current child's right sibling.
753 self.next = self.map.places[child].next_sibling;
761 /// Used as the result of an operand or r-value.
// Variants elided in this sample; usages show `Value(V)` and `Place(PlaceIndex)`.
762 pub enum ValueOrPlace<V> {
767 impl<V: HasTop> ValueOrPlace<V> {
// Convenience constructor for "unknown value" results.
768 pub fn top() -> Self {
769 ValueOrPlace::Value(V::top())
773 /// The set of projection elements that can be used by a tracked place.
775 /// Although only field projections are currently allowed, this could change in the future.
776 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
// Fallible conversion from a MIR projection element; only `Field` succeeds,
// which is what makes non-field projections untrackable (see `Map::make_place`).
781 impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem {
784 fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
786 ProjectionElem::Field(field, _) => Ok(TrackElem::Field(field)),
792 /// Invokes `f` on all direct fields of `ty`.
// `f` receives (variant, field index, field type); `variant` is None for
// tuples/structs. The dispatching `match ty.kind()` line is elided in this sample.
793 pub fn iter_fields<'tcx>(
796 mut f: impl FnMut(Option<VariantIdx>, Field, Ty<'tcx>),
// Tuple arm: fields are the tuple elements, with no variant.
800 for (field, ty) in list.iter().enumerate() {
801 f(None, field.into(), ty);
804 ty::Adt(def, substs) => {
// Iterate every variant; structs report their single variant as None.
808 for (v_index, v_def) in def.variants().iter_enumerated() {
809 let variant = if def.is_struct() { None } else { Some(v_index) };
810 for (f_index, f_def) in v_def.fields.iter().enumerate() {
811 let field_ty = f_def.ty(tcx, substs);
// Best-effort normalization; fall back to the unnormalized type on failure.
813 .try_normalize_erasing_regions(ty::ParamEnv::reveal_all(), field_ty)
814 .unwrap_or(field_ty);
815 f(variant, f_index.into(), field_ty);
// Closure upvars are modeled as a tuple, so recurse on it.
819 ty::Closure(_, substs) => {
820 iter_fields(substs.as_closure().tupled_upvars_ty(), tcx, f);
826 /// Returns all locals with projections that have their reference or address taken.
// Returns a per-local flag vector: `true` means the local must not be tracked,
// because a pointer into it could mutate it outside of direct assignments.
827 pub fn excluded_locals(body: &Body<'_>) -> IndexVec<Local, bool> {
// Local visitor that records offending locals (the `struct Collector` header
// line is elided in this sample).
829 result: IndexVec<Local, bool>,
832 impl<'tcx> Visitor<'tcx> for Collector {
833 fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
834 if context.is_borrow()
835 || context.is_address_of()
// Inline-asm outputs can also write through the place.
837 || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput)
839 // A pointer to a place could be used to access other places with the same local,
840 // hence we have to exclude the local completely.
841 self.result[place.local] = true;
// No local is excluded until the visitor proves otherwise.
846 let mut collector = Collector { result: IndexVec::from_elem(false, &body.local_decls) };
847 collector.visit_body(body);
851 /// This is used to visualize the dataflow analysis.
852 impl<'tcx, T> DebugWithContext<ValueAnalysisWrapper<T>> for State<T::Value>
854 T: ValueAnalysis<'tcx>,
// Full-state printing: one line per tracked place (see `debug_with_context`).
857 fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
859 StateData::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
860 StateData::Unreachable => write!(f, "unreachable"),
// Diff printing against `old` (the `fn fmt_diff_with` header line is elided in
// this sample): only changed values are emitted.
867 ctxt: &ValueAnalysisWrapper<T>,
868 f: &mut Formatter<'_>,
869 ) -> std::fmt::Result {
870 match (&self.0, &old.0) {
871 (StateData::Reachable(this), StateData::Reachable(old)) => {
872 debug_with_context(this, Some(old), ctxt.0.map(), f)
874 _ => Ok(()), // Consider printing something here.
// Recursive pretty-printer for one place and its children. With `old == None`
// it prints the value; with `old == Some(..)` it prints a -/+ diff line pair
// for values that changed. The `\u{001f}` unit-separator prefix marks diff
// lines for the dataflow graphviz renderer — TODO confirm consumer.
879 fn debug_with_context_rec<V: Debug + Eq>(
882 new: &IndexVec<ValueIndex, V>,
883 old: Option<&IndexVec<ValueIndex, V>>,
885 f: &mut Formatter<'_>,
886 ) -> std::fmt::Result {
// Only places with a value slot produce output lines.
887 if let Some(value) = map.places[place].value_index {
889 None => writeln!(f, "{}: {:?}", place_str, new[value])?,
891 if new[value] != old[value] {
892 writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
893 writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
// Recurse into children, extending the textual place path.
899 for child in map.children(place) {
900 let info_elem = map.places[child].proj_elem.unwrap();
901 let child_place_str = match info_elem {
902 TrackElem::Field(field) => {
// Parenthesize deref paths so `*x.0` is not misread as `*(x.0)`.
903 if place_str.starts_with('*') {
904 format!("({}).{}", place_str, field.index())
906 format!("{}.{}", place_str, field.index())
910 debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
// Entry point for state printing: walks each local's place tree via
// `debug_with_context_rec`, labeling roots with the local (e.g. `_1`).
916 fn debug_with_context<V: Debug + Eq>(
917 new: &IndexVec<ValueIndex, V>,
918 old: Option<&IndexVec<ValueIndex, V>>,
920 f: &mut Formatter<'_>,
921 ) -> std::fmt::Result {
922 for (local, place) in map.locals.iter_enumerated() {
// Skip locals that never had a place registered.
923 if let Some(place) = place {
924 debug_with_context_rec(*place, &format!("{local:?}"), new, old, map, f)?;