1 use rustc_data_structures::fx::FxHashMap;
2 use rustc_index::bit_set::BitSet;
3 use rustc_middle::mir::{self, BasicBlock, Body, Location, Place};
4 use rustc_middle::ty::RegionVid;
5 use rustc_middle::ty::TyCtxt;
6 use rustc_mir_dataflow::impls::{EverInitializedPlaces, MaybeUninitializedPlaces};
7 use rustc_mir_dataflow::ResultsVisitable;
8 use rustc_mir_dataflow::{self, fmt::DebugWithContext, CallReturnPlaces, GenKill};
9 use rustc_mir_dataflow::{Analysis, Direction, Results};
14 places_conflict, BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, ToRegionVid,
17 /// A tuple with named fields that can hold either the results or the transient state of the
18 /// dataflow analyses used by the borrow checker.
// NOTE(review): the field declarations are elided from this excerpt; presumably
// `borrows: B`, `uninits: U`, `ever_inits: E` (see the `impl_visitable!`
// invocation below) — confirm against the full file.
20 pub struct BorrowckAnalyses<B, U, E> {
26 /// The results of the dataflow analyses used by the borrow checker.
// Instantiates `BorrowckAnalyses` with the final `Results` of the three
// analyses. The closing `>;` of this alias is elided from this excerpt.
27 pub type BorrowckResults<'mir, 'tcx> = BorrowckAnalyses<
28 Results<'tcx, Borrows<'mir, 'tcx>>,
29 Results<'tcx, MaybeUninitializedPlaces<'mir, 'tcx>>,
30 Results<'tcx, EverInitializedPlaces<'mir, 'tcx>>,
33 /// The transient state of the dataflow analyses used by the borrow checker.
// Projects the `FlowState` associated type out of the `ResultsVisitable` impl
// that the `impl_visitable!` macro below generates for `BorrowckResults`, i.e.
// a `BorrowckAnalyses` holding each analysis's domain value.
34 pub type BorrowckFlowState<'mir, 'tcx> =
35 <BorrowckResults<'mir, 'tcx> as ResultsVisitable<'tcx>>::FlowState;
37 macro_rules! impl_visitable {
// Generates a `ResultsVisitable` impl for a named-tuple-of-`Results` struct,
// so the borrow checker can step all wrapped analyses in lockstep with a
// single cursor. Several matcher/expansion lines (braces, `&self` receivers,
// remaining parameters) are elided from this excerpt.
39 $T:ident { $( $field:ident : $A:ident ),* $(,)? }
41 impl<'tcx, $($A),*, D: Direction> ResultsVisitable<'tcx> for $T<$( Results<'tcx, $A> ),*>
// Every wrapped analysis must share the same dataflow direction `D`.
43 $( $A: Analysis<'tcx, Direction = D>, )*
// The combined flow state has the same named-tuple shape, holding each
// analysis's domain value.
46 type FlowState = $T<$( $A::Domain ),*>;
// Fresh state: each field starts at its analysis's bottom value for `body`.
48 fn new_flow_state(&self, body: &mir::Body<'tcx>) -> Self::FlowState {
50 $( $field: self.$field.analysis.bottom_value(body) ),*
// Overwrites `state` with each analysis's stored entry set for the block;
// `clone_from` reuses the existing allocations of the state's bitsets.
54 fn reset_to_block_entry(
56 state: &mut Self::FlowState,
59 $( state.$field.clone_from(&self.$field.entry_set_for_block(block)); )*
// The four `reconstruct_*` hooks below each forward to the corresponding
// `apply_*` method of every wrapped analysis, in field order.
62 fn reconstruct_before_statement_effect(
64 state: &mut Self::FlowState,
65 stmt: &mir::Statement<'tcx>,
68 $( self.$field.analysis
69 .apply_before_statement_effect(&mut state.$field, stmt, loc); )*
72 fn reconstruct_statement_effect(
74 state: &mut Self::FlowState,
75 stmt: &mir::Statement<'tcx>,
78 $( self.$field.analysis
79 .apply_statement_effect(&mut state.$field, stmt, loc); )*
82 fn reconstruct_before_terminator_effect(
84 state: &mut Self::FlowState,
85 term: &mir::Terminator<'tcx>,
88 $( self.$field.analysis
89 .apply_before_terminator_effect(&mut state.$field, term, loc); )*
92 fn reconstruct_terminator_effect(
94 state: &mut Self::FlowState,
95 term: &mir::Terminator<'tcx>,
98 $( self.$field.analysis
99 .apply_terminator_effect(&mut state.$field, term, loc); )*
// Macro invocation (header elided): instantiates the impl above for
106 BorrowckAnalyses { borrows: B, uninits: U, ever_inits: E }
// Dense index identifying a single borrow; rendered as `bw{}` in debug output.
// (Closing braces of the macro invocation are elided from this excerpt.)
109 rustc_index::newtype_index! {
110 pub struct BorrowIndex {
111 DEBUG_FORMAT = "bw{}"
115 /// `Borrows` stores the data used in the analyses that track the flow
118 /// It uniquely identifies every borrow (`Rvalue::Ref`) by a
119 /// `BorrowIndex`, and maps each such index to a `BorrowData`
120 /// describing the borrow. These indexes are used for representing the
121 /// borrows in compact bitvectors.
122 pub struct Borrows<'a, 'tcx> {
// The MIR body being analyzed. (Other fields, e.g. a `tcx`, may be elided
// from this excerpt.)
124 body: &'a Body<'tcx>,
// All borrows of `body`, indexable by `BorrowIndex`.
126 borrow_set: &'a BorrowSet<'tcx>,
// Precomputed by `OutOfScopePrecomputer`: for each location, the borrows
// whose region has ended there. Consulted by the kill functions below.
127 borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
// Helper that walks the CFG once per borrow to find the locations where that
// borrow's region has ended. `visited` is cleared after each borrow so the
// same instance is reused for all borrows of a body.
136 struct OutOfScopePrecomputer<'a, 'tcx> {
// Blocks already pushed onto `visit_stack` for the current borrow.
137 visited: BitSet<mir::BasicBlock>,
// Explicit DFS worklist; `StackEntry` is declared outside this excerpt
// (fields `bb`, `lo`, `hi` are used below).
138 visit_stack: Vec<StackEntry>,
139 body: &'a Body<'tcx>,
// NLL region-inference results, queried via `region_contains`.
140 regioncx: &'a RegionInferenceContext<'tcx>,
// Accumulated output; eventually moved into `Borrows`.
141 borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
144 impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> {
// Builds a precomputer with a `visited` set sized to the number of basic
// blocks and an empty result map. (The `visit_stack` and remaining field
// initializers are elided from this excerpt.)
145 fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
146 OutOfScopePrecomputer {
147 visited: BitSet::new_empty(body.basic_blocks().len()),
151 borrows_out_of_scope_at_location: FxHashMap::default(),
156 impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
// Records, into `borrows_out_of_scope_at_location`, every CFG location at
// which `borrow_region` stops containing the current point — i.e. where the
// borrow `borrow_index` dies. Starts from the borrow's reserve `location`
// (that parameter and `&mut self` are elided from this excerpt) and performs
// an explicit-stack DFS over basic blocks.
157 fn precompute_borrows_out_of_scope(
159 borrow_index: BorrowIndex,
160 borrow_region: RegionVid,
163 // We visit one BB at a time. The complication is that we may start in the
164 // middle of the first BB visited (the one containing `location`), in which
165 // case we may have to later on process the first part of that BB if there
166 // is a path back to its start.
168 // For visited BBs, we record the index of the first statement processed.
169 // (In fully processed BBs this index is 0.) Note also that we add BBs to
170 // `visited` once they are added to `stack`, before they are actually
171 // processed, because this avoids the need to look them up again on
173 self.visited.insert(location.block);
// `first_lo..first_hi` is the suffix of the starting block that is processed
// on the first visit; a later revisit may process the `0..first_lo` prefix.
175 let mut first_lo = location.statement_index;
176 let first_hi = self.body[location.block].statements.len();
178 self.visit_stack.push(StackEntry { bb: location.block, lo: first_lo, hi: first_hi });
180 while let Some(StackEntry { bb, lo, hi }) = self.visit_stack.pop() {
181 // If we process the first part of the first basic block (i.e. we encounter that block
182 // for the second time), we no longer have to visit its successors again.
183 let mut finished_early = bb == location.block && hi != first_hi;
// The statement-index loop header (`for i in lo..=hi`, presumably) is elided
// from this excerpt; each iteration inspects one location of `bb`.
185 let location = Location { block: bb, statement_index: i };
186 // If region does not contain a point at the location, then add to list and skip
187 // successor locations.
188 if !self.regioncx.region_contains(borrow_region, location) {
189 debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
190 self.borrows_out_of_scope_at_location
// Region has ended within this block, so successors need not be scanned.
194 finished_early = true;
// Successor handling below presumably runs only when `!finished_early`; the
// guard line is elided from this excerpt.
200 // Add successor BBs to the work list, if necessary.
201 let bb_data = &self.body[bb];
202 debug_assert!(hi == bb_data.statements.len());
203 for &succ_bb in bb_data.terminator().successors() {
204 if !self.visited.insert(succ_bb) {
205 if succ_bb == location.block && first_lo > 0 {
206 // `succ_bb` has been seen before. If it wasn't
207 // fully processed, add its first part to `stack`
209 self.visit_stack.push(StackEntry {
215 // And update this entry with 0, to represent the
216 // whole BB being processed.
220 // succ_bb hasn't been seen before. Add it to
221 // `stack` for processing.
222 self.visit_stack.push(StackEntry {
225 hi: self.body[succ_bb].statements.len(),
// Reset the visited set so the precomputer can be reused for the next borrow.
232 self.visited.clear();
236 impl<'a, 'tcx> Borrows<'a, 'tcx> {
// Constructor (the `pub fn new(` header and some parameters are elided from
// this excerpt): runs the out-of-scope precomputation for every borrow in
// `borrow_set`, then assembles the `Borrows` analysis from the results.
239 body: &'a Body<'tcx>,
240 nonlexical_regioncx: &'a RegionInferenceContext<'tcx>,
241 borrow_set: &'a BorrowSet<'tcx>,
243 let mut prec = OutOfScopePrecomputer::new(body, nonlexical_regioncx);
// One precomputation pass per borrow, keyed by the borrow's inferred region
// and the location where the borrow is reserved.
244 for (borrow_index, borrow_data) in borrow_set.iter_enumerated() {
245 let borrow_region = borrow_data.region.to_region_vid();
246 let location = borrow_data.reserve_location;
248 prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location);
// Struct construction (other fields elided): the precomputed map is moved in.
255 borrows_out_of_scope_at_location: prec.borrows_out_of_scope_at_location,
// Returns the location at which borrow `idx` was reserved (introduced).
259 pub fn location(&self, idx: BorrowIndex) -> &Location {
260 &self.borrow_set[idx].reserve_location
263 /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
264 /// That means they went out of a nonlexical scope.
265 fn kill_loans_out_of_scope_at_location(
267 trans: &mut impl GenKill<BorrowIndex>,
270 // NOTE: The state associated with a given `location`
271 // reflects the dataflow on entry to the statement.
272 // Iterate over each of the borrows that we've precomputed
273 // to have gone out of scope at this location and kill them.
275 // We are careful always to call this function *before* we
276 // set up the gen-bits for the statement or
277 // terminator. That way, if the effect of the statement or
278 // terminator *does* introduce a new loan of the same
279 // region, then setting that gen-bit will override any
280 // potential kill introduced here.
281 if let Some(indices) = self.borrows_out_of_scope_at_location.get(&location) {
282 trans.kill_all(indices.iter().copied());
286 /// Kill any borrows that conflict with `place`.
287 fn kill_borrows_on_place(&self, trans: &mut impl GenKill<BorrowIndex>, place: Place<'tcx>) {
288 debug!("kill_borrows_on_place: place={:?}", place);
// Iterator over all borrows of `place.local` (the lookup chain through
// `borrow_set` is partially elided from this excerpt).
290 let other_borrows_of_local = self
295 .flat_map(|bs| bs.iter())
298 // If the borrowed place is a local with no projections, all other borrows of this
299 // local must conflict. This is purely an optimization so we don't have to call
300 // `places_conflict` for every borrow.
301 if place.projection.is_empty() {
// NOTE(review): locals that are refs to statics are excluded from the
// fast path here — presumably because overwriting such a local does not
// invalidate borrows of the static's data; confirm against the full file.
302 if !self.body.local_decls[place.local].is_ref_to_static() {
303 trans.kill_all(other_borrows_of_local);
// Slow path: precise conflict check per borrow of the same local.
308 // By passing `PlaceConflictBias::NoOverlap`, we conservatively assume that any given
309 // pair of array indices are unequal, so that when `places_conflict` returns true, we
310 // will be assured that two places being compared definitely denotes the same sets of
312 let definitely_conflicting_borrows = other_borrows_of_local.filter(|&i| {
316 self.borrow_set[i].borrowed_place,
318 PlaceConflictBias::NoOverlap,
322 trans.kill_all(definitely_conflicting_borrows);
326 impl<'tcx> rustc_mir_dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
// Domain: a bitset indexed by `BorrowIndex`.
327 type Domain = BitSet<BorrowIndex>;
329 const NAME: &'static str = "borrows";
331 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
332 // bottom = nothing is reserved or activated yet;
// NOTE(review): sized at `len() * 2` — presumably one bit each for the
// reserved and activated phase of every borrow; confirm against the full
// file before relying on this.
333 BitSet::new_empty(self.borrow_set.len() * 2)
336 fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
337 // no borrows of code region_scopes have been taken prior to
338 // function execution, so this method has no effect.
342 impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
343 type Idx = BorrowIndex;
// Before each statement: kill loans whose region has already ended at this
// location (must run before any gen-bits are set — see
// `kill_loans_out_of_scope_at_location`).
345 fn before_statement_effect(
347 trans: &mut impl GenKill<Self::Idx>,
348 _statement: &mir::Statement<'tcx>,
351 self.kill_loans_out_of_scope_at_location(trans, location);
// Statement effect (the `fn statement_effect(` header and the `match` on
// `stmt.kind` are elided from this excerpt): gen a bit for each new
// `Rvalue::Ref`, and kill borrows invalidated by assignments, storage death,
// and LLVM inline-asm outputs.
356 trans: &mut impl GenKill<Self::Idx>,
357 stmt: &mir::Statement<'tcx>,
361 mir::StatementKind::Assign(box (lhs, ref rhs)) => {
362 if let mir::Rvalue::Ref(_, _, place) = *rhs {
// Borrows of ignorable places are not tracked (the early-return for that
// case is elided from this excerpt).
363 if place.ignore_borrow(
366 &self.borrow_set.locals_state_at_exit,
// Every tracked `Ref` rvalue must have been registered in `borrow_set`
// during borrow-set construction; a miss here is a compiler bug, hence
// the panic rather than a recoverable error.
370 let index = self.borrow_set.get_index_of(&location).unwrap_or_else(|| {
371 panic!("could not find BorrowIndex for location {:?}", location);
377 // Make sure there are no remaining borrows for variables
378 // that are assigned over.
379 self.kill_borrows_on_place(trans, lhs);
382 mir::StatementKind::StorageDead(local) => {
383 // Make sure there are no remaining borrows for locals that
384 // have gone out of scope.
385 self.kill_borrows_on_place(trans, Place::from(local));
388 mir::StatementKind::LlvmInlineAsm(ref asm) => {
// Direct (non-indirect), write-only asm outputs overwrite their place,
// so conflicting borrows are killed; read-write outputs are left alone.
389 for (output, kind) in iter::zip(&*asm.outputs, &asm.asm.outputs) {
390 if !kind.is_indirect && !kind.is_rw {
391 self.kill_borrows_on_place(trans, *output);
// The remaining statement kinds neither create nor invalidate borrows.
396 mir::StatementKind::FakeRead(..)
397 | mir::StatementKind::SetDiscriminant { .. }
398 | mir::StatementKind::StorageLive(..)
399 | mir::StatementKind::Retag { .. }
400 | mir::StatementKind::AscribeUserType(..)
401 | mir::StatementKind::Coverage(..)
402 | mir::StatementKind::CopyNonOverlapping(..)
403 | mir::StatementKind::Nop => {}
// Before each terminator: same out-of-scope kill as before each statement.
407 fn before_terminator_effect(
409 trans: &mut impl GenKill<Self::Idx>,
410 _terminator: &mir::Terminator<'tcx>,
413 self.kill_loans_out_of_scope_at_location(trans, location);
416 fn terminator_effect(
418 trans: &mut impl GenKill<Self::Idx>,
419 teminator: &mir::Terminator<'tcx>,
422 if let mir::TerminatorKind::InlineAsm { operands, .. } = &teminator.kind {
424 if let mir::InlineAsmOperand::Out { place: Some(place), .. }
425 | mir::InlineAsmOperand::InOut { out_place: Some(place), .. } = *op
427 self.kill_borrows_on_place(trans, place);
// Call-return effect: all parameters are intentionally unused — the body
// (elided/empty in this excerpt) performs no gen or kill at call return.
433 fn call_return_effect(
435 _trans: &mut impl GenKill<Self::Idx>,
436 _block: mir::BasicBlock,
437 _return_places: CallReturnPlaces<'_, 'tcx>,
// Debug-print a `BorrowIndex` as the location where the borrow is reserved
// (via `Borrows::location`), which is more informative in dataflow dumps than
// the raw `bw{}` index. (Closing braces are elided from this excerpt.)
442 impl DebugWithContext<Borrows<'_, '_>> for BorrowIndex {
443 fn fmt_with(&self, ctxt: &Borrows<'_, '_>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
444 write!(f, "{:?}", ctxt.location(*self))