1 use rustc_middle::mir::{self, Body, Location, Place};
2 use rustc_middle::ty::RegionVid;
3 use rustc_middle::ty::TyCtxt;
5 use rustc_data_structures::fx::FxHashMap;
6 use rustc_index::bit_set::BitSet;
8 use crate::borrow_check::{
9 places_conflict, BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, ToRegionVid,
11 use crate::dataflow::{self, fmt::DebugWithContext, GenKill};
// A dense index identifying one borrow (`Rvalue::Ref`) in the body; used
// as the bit index in the dataflow bitsets below.
16 rustc_index::newtype_index! {
17 pub struct BorrowIndex {
22 /// `Borrows` stores the data used in the analyses that track the flow
25 /// It uniquely identifies every borrow (`Rvalue::Ref`) by a
26 /// `BorrowIndex`, and maps each such index to a `BorrowData`
27 /// describing the borrow. These indexes are used for representing the
28 /// borrows in compact bitvectors.
29 pub struct Borrows<'a, 'tcx> {
// All borrows of the body, indexable by `BorrowIndex`.
33 borrow_set: &'a BorrowSet<'tcx>,
// For each location, the borrows whose region does not contain that
// location — i.e. the borrows to kill on entry to it. Precomputed once
// by `OutOfScopePrecomputer` below.
34 borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
// Worklist-based CFG walk that records, for each borrow, the first
// location(s) at which the borrow's region no longer contains the
// program point — i.e. where the borrow goes out of scope.
43 struct OutOfScopePrecomputer<'a, 'tcx> {
// Blocks already pushed onto `visit_stack`; cleared after each borrow
// (see `self.visited.clear()` at the end of the walk).
44 visited: BitSet<mir::BasicBlock>,
// Worklist of (block, statement-index range) spans still to process.
45 visit_stack: Vec<StackEntry>,
47 regioncx: &'a RegionInferenceContext<'tcx>,
// Output map, moved into `Borrows` by `Borrows::new`.
48 borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
51 impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> {
/// Creates an empty precomputer whose `visited` set is sized for
/// `body`'s basic blocks.
52 fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
53 OutOfScopePrecomputer {
// One bit per basic block in `body`.
54 visited: BitSet::new_empty(body.basic_blocks().len()),
58 borrows_out_of_scope_at_location: FxHashMap::default(),
63 impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
/// Walks the CFG forward from `location` (the borrow's reserve location)
/// and records, in `borrows_out_of_scope_at_location`, each location at
/// which `borrow_region` first stops containing the program point.
64 fn precompute_borrows_out_of_scope(
66 borrow_index: BorrowIndex,
67 borrow_region: RegionVid,
70 // We visit one BB at a time. The complication is that we may start in the
71 // middle of the first BB visited (the one containing `location`), in which
72 // case we may have to later on process the first part of that BB if there
73 // is a path back to its start.
75 // For visited BBs, we record the index of the first statement processed.
76 // (In fully processed BBs this index is 0.) Note also that we add BBs to
77 // `visited` once they are added to `stack`, before they are actually
78 // processed, because this avoids the need to look them up again on
80 self.visited.insert(location.block);
82 let mut first_lo = location.statement_index;
83 let first_hi = self.body[location.block].statements.len();
85 self.visit_stack.push(StackEntry { bb: location.block, lo: first_lo, hi: first_hi });
87 while let Some(StackEntry { bb, lo, hi }) = self.visit_stack.pop() {
88 // If we process the first part of the first basic block (i.e. we encounter that block
89 // for the second time), we no longer have to visit its successors again.
90 let mut finished_early = bb == location.block && hi != first_hi;
92 let location = Location { block: bb, statement_index: i };
93 // If region does not contain a point at the location, then add to list and skip
94 // successor locations.
95 if !self.regioncx.region_contains(borrow_region, location) {
96 debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
97 self.borrows_out_of_scope_at_location
101 finished_early = true;
107 // Add successor BBs to the work list, if necessary.
108 let bb_data = &self.body[bb];
109 debug_assert!(hi == bb_data.statements.len());
110 for &succ_bb in bb_data.terminator().successors() {
// `BitSet::insert` returns `false` when the bit was already set; use `!`
// instead of the non-idiomatic `== false` comparison (clippy::bool_comparison).
111 if !self.visited.insert(succ_bb) {
112 if succ_bb == location.block && first_lo > 0 {
113 // `succ_bb` has been seen before. If it wasn't
114 // fully processed, add its first part to `stack`
116 self.visit_stack.push(StackEntry {
122 // And update this entry with 0, to represent the
123 // whole BB being processed.
127 // succ_bb hasn't been seen before. Add it to
128 // `stack` for processing.
129 self.visit_stack.push(StackEntry {
132 hi: self.body[succ_bb].statements.len(),
// Reset per-borrow state so the next call to this method starts fresh.
139 self.visited.clear();
143 impl<'a, 'tcx> Borrows<'a, 'tcx> {
146 body: &'a Body<'tcx>,
147 nonlexical_regioncx: &'a RegionInferenceContext<'tcx>,
148 borrow_set: &'a BorrowSet<'tcx>,
// Precompute, once per borrow, the locations where it goes out of scope.
150 let mut prec = OutOfScopePrecomputer::new(body, nonlexical_regioncx);
151 for (borrow_index, borrow_data) in borrow_set.iter_enumerated() {
152 let borrow_region = borrow_data.region.to_region_vid();
153 let location = borrow_data.reserve_location;
155 prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location);
162 borrows_out_of_scope_at_location: prec.borrows_out_of_scope_at_location,
/// Returns the location at which the borrow `idx` was reserved.
166 pub fn location(&self, idx: BorrowIndex) -> &Location {
167 &self.borrow_set[idx].reserve_location
170 /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
171 /// That means they have gone out of a nonlexical scope.
172 fn kill_loans_out_of_scope_at_location(
174 trans: &mut impl GenKill<BorrowIndex>,
177 // NOTE: The state associated with a given `location`
178 // reflects the dataflow on entry to the statement.
179 // Iterate over each of the borrows that we've precomputed
180 // to have gone out of scope at this location and kill them.
182 // We are careful always to call this function *before* we
183 // set up the gen-bits for the statement or
184 // terminator. That way, if the effect of the statement or
185 // terminator *does* introduce a new loan of the same
186 // region, then setting that gen-bit will override any
187 // potential kill introduced here.
188 if let Some(indices) = self.borrows_out_of_scope_at_location.get(&location) {
189 trans.kill_all(indices.iter().copied());
193 /// Kill any borrows that conflict with `place`.
194 fn kill_borrows_on_place(&self, trans: &mut impl GenKill<BorrowIndex>, place: Place<'tcx>) {
195 debug!("kill_borrows_on_place: place={:?}", place);
197 let other_borrows_of_local = self
202 .flat_map(|bs| bs.iter())
205 // If the borrowed place is a local with no projections, all other borrows of this
206 // local must conflict. This is purely an optimization so we don't have to call
207 // `places_conflict` for every borrow.
208 if place.projection.is_empty() {
209 if !self.body.local_decls[place.local].is_ref_to_static() {
210 trans.kill_all(other_borrows_of_local);
215 // By passing `PlaceConflictBias::NoOverlap`, we conservatively assume that any given
216 // pair of array indices are unequal, so that when `places_conflict` returns true, we
217 // will be assured that two places being compared definitely denotes the same sets of
219 let definitely_conflicting_borrows = other_borrows_of_local.filter(|&i| {
223 self.borrow_set[i].borrowed_place,
225 PlaceConflictBias::NoOverlap,
229 trans.kill_all(definitely_conflicting_borrows);
233 impl<'tcx> dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
// The dataflow state: one bit per tracked borrow.
234 type Domain = BitSet<BorrowIndex>;
236 const NAME: &'static str = "borrows";
238 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
239 // bottom = nothing is reserved or activated yet;
// NOTE(review): the domain is sized at twice the borrow count —
// presumably room for paired reserve/activation bits, but the reason is
// not visible in this file; confirm before changing the `* 2`.
240 BitSet::new_empty(self.borrow_set.len() * 2)
243 fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
244 // no borrows of code region_scopes have been taken prior to
245 // function execution, so this method has no effect.
249 impl<'tcx> dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
250 type Idx = BorrowIndex;
/// Borrows precomputed to go out of scope at this statement's location
/// are killed *before* the statement's own gen/kill effects apply.
252 fn before_statement_effect(
254 trans: &mut impl GenKill<Self::Idx>,
255 _statement: &mir::Statement<'tcx>,
258 self.kill_loans_out_of_scope_at_location(trans, location);
263 trans: &mut impl GenKill<Self::Idx>,
264 stmt: &mir::Statement<'tcx>,
268 mir::StatementKind::Assign(box (lhs, ref rhs)) => {
// A `Ref` rvalue creates a borrow: gen its bit (unless ignored).
269 if let mir::Rvalue::Ref(_, _, place) = *rhs {
270 if place.ignore_borrow(
273 &self.borrow_set.locals_state_at_exit,
277 let index = self.borrow_set.get_index_of(&location).unwrap_or_else(|| {
278 panic!("could not find BorrowIndex for location {:?}", location);
284 // Make sure there are no remaining borrows for variables
285 // that are assigned over.
286 self.kill_borrows_on_place(trans, lhs);
289 mir::StatementKind::StorageDead(local) => {
290 // Make sure there are no remaining borrows for locals that
291 // have gone out of scope.
292 self.kill_borrows_on_place(trans, Place::from(local));
295 mir::StatementKind::LlvmInlineAsm(ref asm) => {
// Direct (non-indirect), write-only asm outputs overwrite their place,
// so conflicting borrows of that place are killed.
296 for (output, kind) in iter::zip(&*asm.outputs, &asm.asm.outputs) {
297 if !kind.is_indirect && !kind.is_rw {
298 self.kill_borrows_on_place(trans, *output);
303 mir::StatementKind::FakeRead(..)
304 | mir::StatementKind::SetDiscriminant { .. }
305 | mir::StatementKind::StorageLive(..)
306 | mir::StatementKind::Retag { .. }
307 | mir::StatementKind::AscribeUserType(..)
308 | mir::StatementKind::Coverage(..)
309 | mir::StatementKind::CopyNonOverlapping(..)
310 | mir::StatementKind::Nop => {}
314 fn before_terminator_effect(
316 trans: &mut impl GenKill<Self::Idx>,
317 _terminator: &mir::Terminator<'tcx>,
320 self.kill_loans_out_of_scope_at_location(trans, location);
323 fn terminator_effect(
325 trans: &mut impl GenKill<Self::Idx>,
// Fixed misspelled parameter name `teminator` -> `terminator`, matching
// `before_terminator_effect` above. Local rename only; no caller impact.
326 terminator: &mir::Terminator<'tcx>,
// `InlineAsm` output operands overwrite their places, killing borrows
// that conflict with them.
329 if let mir::TerminatorKind::InlineAsm { operands, .. } = &terminator.kind {
331 if let mir::InlineAsmOperand::Out { place: Some(place), .. }
332 | mir::InlineAsmOperand::InOut { out_place: Some(place), .. } = *op
334 self.kill_borrows_on_place(trans, place);
// All parameters are underscore-prefixed: no gen/kill effects on call
// return in the visible source.
340 fn call_return_effect(
342 _trans: &mut impl GenKill<Self::Idx>,
343 _block: mir::BasicBlock,
344 _func: &mir::Operand<'tcx>,
345 _args: &[mir::Operand<'tcx>],
346 _dest_place: mir::Place<'tcx>,
351 impl DebugWithContext<Borrows<'_, '_>> for BorrowIndex {
352 fn fmt_with(&self, ctxt: &Borrows<'_, '_>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
353 write!(f, "{:?}", ctxt.location(*self))