1 use crate::deref_separator::deref_finder;
3 use rustc_data_structures::fx::FxHashMap;
4 use rustc_index::bit_set::BitSet;
5 use rustc_middle::mir::patch::MirPatch;
6 use rustc_middle::mir::*;
7 use rustc_middle::ty::{self, TyCtxt};
8 use rustc_mir_dataflow::elaborate_drops::{elaborate_drop, DropFlagState, Unwind};
9 use rustc_mir_dataflow::elaborate_drops::{DropElaborator, DropFlagMode, DropStyle};
10 use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
11 use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
12 use rustc_mir_dataflow::on_lookup_result_bits;
13 use rustc_mir_dataflow::un_derefer::UnDerefer;
14 use rustc_mir_dataflow::MoveDataParamEnv;
15 use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits};
16 use rustc_mir_dataflow::{Analysis, ResultsCursor};
18 use rustc_target::abi::VariantIdx;
21 /// During MIR building, Drop and DropAndReplace terminators are inserted in every place where a drop may occur.
22 /// However, in this phase, the presence of these terminators does not guarantee that a destructor will run,
23 /// as the target of the drop may be uninitialized.
24 /// In general, the compiler cannot determine at compile time whether a destructor will run or not.
26 /// At a high level, this pass refines Drop and DropAndReplace to only run the destructor if the
/// target is initialized. The way this is achieved is by inserting drop flags for every variable
28 /// that may be dropped, and then using those flags to determine whether a destructor should run.
29 /// This pass also removes DropAndReplace, replacing it with a Drop paired with an assign statement.
30 /// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
31 /// "drop shim" for the type of the dropped place.
33 /// This pass relies on dropped places having an associated move path, which is then used to determine
34 /// the initialization status of the place and its descendants.
/// It's worth noting that a MIR containing a Drop without an associated move path is probably ill-formed,
36 /// as it would allow running a destructor on a place behind a reference:
39 // fn drop_term<T>(t: &mut T) {
/// The pass object itself is stateless; all per-body working state lives in
/// `ElaborateDropsCtxt`, constructed inside `run_pass`.
pub struct ElaborateDrops;
impl<'tcx> MirPass<'tcx> for ElaborateDrops {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);
        let def_id = body.source.def_id();
        // This pass runs after borrowck, so the fully-normalized
        // (reveal-all) param env may be used.
        let param_env = tcx.param_env_reveal_all_normalized(def_id);
        // Gather move paths for the body. Move errors were already reported
        // during borrowck, so encountering one here is a compiler bug.
        let (side_table, move_data) = match MoveData::gather_moves(body, tcx, param_env) {
            Ok(move_data) => move_data,
            Err((move_data, _)) => {
                tcx.sess.delay_span_bug(
                    "No `move_errors` should be allowed in MIR borrowck",
                    (Default::default(), move_data)
        // Maps derefer-created temporaries back to the places they stand for.
        let un_derefer = UnDerefer { tcx: tcx, derefer_sidetable: side_table };
        let elaborate_patch = {
            let env = MoveDataParamEnv { move_data, param_env };
            // Unwind edges proven unreachable; excluding them sharpens the
            // dataflow results below.
            let dead_unwinds = find_dead_unwinds(tcx, body, &env, &un_derefer);
            // Forward dataflow: which move paths are maybe-initialized at
            // each program point.
            let inits = MaybeInitializedPlaces::new(tcx, body, &env)
                .into_engine(tcx, body)
                .dead_unwinds(&dead_unwinds)
                .pass_name("elaborate_drops")
                .iterate_to_fixpoint()
                .into_results_cursor(body);
            // Dual dataflow: maybe-uninitialized move paths. Inactive enum
            // variants count as uninitialized.
            let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
                .mark_inactive_variants_as_uninit()
                .into_engine(tcx, body)
                .dead_unwinds(&dead_unwinds)
                .pass_name("elaborate_drops")
                .iterate_to_fixpoint()
                .into_results_cursor(body);
                init_data: InitializationData { inits, uninits },
                drop_flags: Default::default(),
                patch: MirPatch::new(body),
                un_derefer: un_derefer,
        // Apply all buffered edits in one shot, then re-run the deref
        // separator over the body (new blocks may contain multi-deref
        // places — NOTE(review): presumed; confirm against deref_finder).
        elaborate_patch.apply(body);
        deref_finder(tcx, body);
105 /// Returns the set of basic blocks whose unwind edges are known
106 /// to not be reachable, because they are `drop` terminators
107 /// that can't drop anything.
fn find_dead_unwinds<'tcx>(
    env: &MoveDataParamEnv<'tcx>,
    und: &UnDerefer<'tcx>,
) -> BitSet<BasicBlock> {
    debug!("find_dead_unwinds({:?})", body.span);
    // We only need to do this pass once, because unwind edges can only
    // reach cleanup blocks, which can't have unwind edges themselves.
    let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
    // A private maybe-init analysis, run *without* the dead-unwind
    // refinement (that refinement is exactly what we are computing here).
    let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
        .into_engine(tcx, body)
        .pass_name("find_dead_unwinds")
        .iterate_to_fixpoint()
        .into_results_cursor(body);
    for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
        // Only drop terminators that carry an unwind edge are of interest;
        // derefer temporaries are resolved back to the underlying place.
        let place = match bb_data.terminator().kind {
            TerminatorKind::Drop { ref place, unwind: Some(_), .. }
            | TerminatorKind::DropAndReplace { ref place, unwind: Some(_), .. } => {
                und.derefer(place.as_ref(), body).unwrap_or(*place)
        debug!("find_dead_unwinds @ {:?}: {:?}", bb, bb_data);
        // A place without an exact move path is conservatively treated as
        // possibly live (its unwind edge is kept).
        let LookupResult::Exact(path) = env.move_data.rev_lookup.find(place.as_ref()) else {
            debug!("find_dead_unwinds: has parent; skipping");
        flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
            "find_dead_unwinds @ {:?}: path({:?})={:?}; init_data={:?}",
        // If no droppable child of `path` can be initialized here, the drop
        // is a no-op and its unwind edge can never be taken.
        let mut maybe_live = false;
        on_all_drop_children_bits(tcx, body, &env, path, |child| {
            maybe_live |= flow_inits.contains(child);
        debug!("find_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live);
        dead_unwinds.insert(bb);
/// Paired dataflow cursors that answer, for any move path at the currently
/// sought location, "is it maybe-initialized?" and "is it maybe-uninitialized?".
struct InitializationData<'mir, 'tcx> {
    inits: ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
    uninits: ResultsCursor<'mir, 'tcx, MaybeUninitializedPlaces<'mir, 'tcx>>,
impl InitializationData<'_, '_> {
    /// Positions both cursors just before the primary effect of `loc`.
    fn seek_before(&mut self, loc: Location) {
        self.inits.seek_before_primary_effect(loc);
        self.uninits.seek_before_primary_effect(loc);

    /// Returns `(maybe_live, maybe_dead)` for `path` at the sought location.
    /// `(true, true)` means initialization is not statically known, and a
    /// runtime drop flag is required.
    fn maybe_live_dead(&self, path: MovePathIndex) -> (bool, bool) {
        (self.inits.contains(path), self.uninits.contains(path))
/// Adapter that implements `DropElaborator` on top of `ElaborateDropsCtxt`;
/// handed to `elaborate_drop` to do the actual terminator rewriting.
struct Elaborator<'a, 'b, 'tcx> {
    ctxt: &'a mut ElaborateDropsCtxt<'b, 'tcx>,
// `DropElaborator` requires `Debug`; there is nothing useful to print, so
// the formatter argument is intentionally unused.
impl fmt::Debug for Elaborator<'_, '_, '_> {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl<'a, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, '_, 'tcx> {
    // Drop elaboration identifies places by their move-path index.
    type Path = MovePathIndex;

    fn patch(&mut self) -> &mut MirPatch<'tcx> {
    fn body(&self) -> &'a Body<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.ctxt.param_env()

    /// Classifies how the drop of `path` must be lowered, based on the
    /// may-init/may-uninit dataflow facts at the drop location.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
        let ((maybe_live, maybe_dead), multipart) = match mode {
            // Shallow: only the path itself is consulted.
            DropFlagMode::Shallow => (self.ctxt.init_data.maybe_live_dead(path), false),
            // Deep: aggregate the state over all droppable children.
            DropFlagMode::Deep => {
                let mut some_live = false;
                let mut some_dead = false;
                let mut children_count = 0;
                on_all_drop_children_bits(self.tcx(), self.body(), self.ctxt.env, path, |child| {
                    let (live, dead) = self.ctxt.init_data.maybe_live_dead(child);
                    debug!("elaborate_drop: state({:?}) = {:?}", child, (live, dead));
                ((some_live, some_dead), children_count != 1)
        match (maybe_live, maybe_dead, multipart) {
            // Definitely uninitialized: the drop is statically dead.
            (false, _, _) => DropStyle::Dead,
            // Definitely initialized: drop unconditionally.
            (true, false, _) => DropStyle::Static,
            // Statically unknown, single part: guard with one runtime flag.
            (true, true, false) => DropStyle::Conditional,
            // Statically unknown, multiple parts: open-code per-field drops.
            (true, true, true) => DropStyle::Open,

    /// Clears the drop flag(s) for `path` at `loc`: just the path itself for
    /// `Shallow`, the entire subtree of children for `Deep`.
    fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
        DropFlagMode::Shallow => {
            self.ctxt.set_drop_flag(loc, path, DropFlagState::Absent);
        DropFlagMode::Deep => {
            on_all_children_bits(
                self.ctxt.move_data(),
                |child| self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent),

    /// Move path of `path.field`, if that projection is tracked.
    fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::Field(idx, _) => idx == field,

    /// Move path of the array element at `index` (array length `size`),
    /// if that projection is tracked.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                debug_assert!(size == min_length, "min_length should be exact for arrays");
                assert!(!from_end, "from_end should not be used for array element ConstantIndex");

    /// Move path of `*path`, if that projection is tracked.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| {
            e == ProjectionElem::Deref

    /// Move path of `path` downcast to `variant`, if tracked.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::Downcast(_, idx) => idx == variant,

    /// The drop flag for `path`, read by copy (flags are plain `bool` locals).
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
        self.ctxt.drop_flag(path).map(Operand::Copy)
/// Working state for elaborating one body: the dataflow cursors, the drop
/// flags allocated so far, and the `MirPatch` accumulating all edits.
struct ElaborateDropsCtxt<'a, 'tcx> {
    body: &'a Body<'tcx>,
    env: &'a MoveDataParamEnv<'tcx>,
    // Cursors into the maybe-init / maybe-uninit dataflow results.
    init_data: InitializationData<'a, 'tcx>,
    // One boolean local per move path whose initialization is not
    // statically known at some drop site.
    drop_flags: FxHashMap<MovePathIndex, Local>,
    // Edits are buffered here and applied once at the end of the pass.
    patch: MirPatch<'tcx>,
    // Resolves derefer temporaries back to the places they stand for.
    un_derefer: UnDerefer<'tcx>,
impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
    /// Shorthand for the move data computed for this body.
    fn move_data(&self) -> &'b MoveData<'tcx> {

    /// The (reveal-all) param env the pass was started with.
    fn param_env(&self) -> ty::ParamEnv<'tcx> {

    /// Ensures a boolean drop-flag local exists for `index`, allocating a
    /// fresh internal local through the patch on first request.
    fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
        let patch = &mut self.patch;
        debug!("create_drop_flag({:?})", self.body.span);
        // The entry API guarantees at most one flag per move path.
        self.drop_flags.entry(index).or_insert_with(|| patch.new_internal(tcx.types.bool, span));

    /// The place holding the drop flag for `index`, if one was created.
    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
        self.drop_flags.get(&index).map(|t| Place::from(*t))
    /// Create a patch that elaborates all drops in the input.
    ///
    /// The ordering below matters: flags must be collected before drops are
    /// elaborated, and the flag-maintenance assignments are added last so
    /// they see the final set of flags.
    fn elaborate(mut self) -> MirPatch<'tcx> {
        // Discover which move paths need a runtime drop flag.
        self.collect_drop_flags();

        // Rewrite the Drop/DropAndReplace terminators using those flags.
        self.elaborate_drops();

        // Insert the statements that keep the flags up to date.
        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();
    /// Walks every `Drop`/`DropAndReplace` terminator and creates a drop
    /// flag for each move path that is both maybe-live and maybe-dead at
    /// the drop site, i.e. whose initialization is not statically known.
    fn collect_drop_flags(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let place = match terminator.kind {
                TerminatorKind::Drop { ref place, .. }
                | TerminatorKind::DropAndReplace { ref place, .. } => {
                    // Resolve derefer temporaries so the move-path lookup
                    // sees the real place.
                    self.un_derefer.derefer(place.as_ref(), self.body).unwrap_or(*place)
            // Query the dataflow state just before the terminator executes.
            self.init_data.seek_before(self.body.terminator_loc(bb));
            let path = self.move_data().rev_lookup.find(place.as_ref());
            debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path);
            let path = match path {
                LookupResult::Exact(e) => e,
                LookupResult::Parent(None) => continue,
                LookupResult::Parent(Some(parent)) => {
                    let (_maybe_live, maybe_dead) = self.init_data.maybe_live_dead(parent);
                    // Derefer temporaries legitimately lack an exact path.
                    if self.body.local_decls[place.local].is_deref_temp() {
                    // A drop of an untracked, possibly-uninitialized value
                    // indicates a bug in an earlier pass; report it without
                    // aborting the compilation session immediately.
                    self.tcx.sess.delay_span_bug(
                        terminator.source_info.span,
                        "drop of untracked, uninitialized value {:?}, place {:?} ({:?})",
            on_all_drop_children_bits(self.tcx, self.body, self.env, path, |child| {
                let (maybe_live, maybe_dead) = self.init_data.maybe_live_dead(child);
                    "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                    (maybe_live, maybe_dead)
                // Only statically-unknown initialization needs a flag.
                if maybe_live && maybe_dead {
                    self.create_drop_flag(child, terminator.source_info.span)
    /// Rewrites each `Drop`/`DropAndReplace` terminator according to the
    /// initialization state at that point; the heavy lifting for `Drop` is
    /// done by `elaborate_drop` from `rustc_mir_dataflow`.
    fn elaborate_drops(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            // The terminator's location is one past the last statement.
            let loc = Location { block: bb, statement_index: data.statements.len() };
            let terminator = data.terminator();
            let resume_block = self.patch.resume_block();
            match terminator.kind {
                TerminatorKind::Drop { mut place, target, unwind } => {
                    // Resolve derefer temporaries back to the real place.
                    if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
                    self.init_data.seek_before(loc);
                    match self.move_data().rev_lookup.find(place.as_ref()) {
                        LookupResult::Exact(path) => elaborate_drop(
                            &mut Elaborator { ctxt: self },
                            terminator.source_info,
                            // An absent unwind edge resolves to the resume block.
                            Unwind::To(Option::unwrap_or(unwind, resume_block))
                        LookupResult::Parent(..) => {
                            // Dropping a place without an exact move path is
                            // ill-formed MIR; report a delayed compiler bug.
                            self.tcx.sess.delay_span_bug(
                                terminator.source_info.span,
                                &format!("drop of untracked value {:?}", bb),
                TerminatorKind::DropAndReplace { mut place, ref value, target, unwind } => {
                    // DropAndReplace never appears on the unwind path.
                    assert!(!data.is_cleanup);

                    if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
                    self.elaborate_replace(loc, place, value, target, unwind);
    /// Elaborate a MIR `replace` terminator. This instruction
    /// is not directly handled by codegen, and therefore
    /// must be desugared.
    ///
    /// The desugaring drops the location if needed, and then writes
    /// the value (including setting the drop flag) over it in *both* arms.
    ///
    /// The `replace` terminator can also be called on places that
    /// are not tracked by elaboration (for example,
    /// `replace x[i] <- tmp0`). The borrow checker requires that
    /// these locations are initialized before the assignment,
    /// so we just generate an unconditional drop.
    fn elaborate_replace(
        value: &Operand<'tcx>,
        unwind: Option<BasicBlock>,
        let data = &self.body[bb];
        let terminator = data.terminator();
        assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");

        // The assignment writing the replacement value; it is duplicated
        // into both the success block and the unwind block below.
        let assign = Statement {
            kind: StatementKind::Assign(Box::new((place, Rvalue::Use(value.clone())))),
            source_info: terminator.source_info,

        // Fresh block on the unwind path: perform the assignment, then
        // continue unwinding towards the original unwind target.
        let unwind = unwind.unwrap_or_else(|| self.patch.resume_block());
        let unwind = self.patch.new_block(BasicBlockData {
            statements: vec![assign.clone()],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: unwind },

        // Fresh block on the success path: the assignment, then a jump to
        // the original target.
        let target = self.patch.new_block(BasicBlockData {
            statements: vec![assign],
            terminator: Some(Terminator { kind: TerminatorKind::Goto { target }, ..*terminator }),

        match self.move_data().rev_lookup.find(place.as_ref()) {
            LookupResult::Exact(path) => {
                debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
                self.init_data.seek_before(loc);
                    &mut Elaborator { ctxt: self },
                    terminator.source_info,
                // After the write, every child path of `place` is
                // initialized again: raise the drop flags in both arms.
                on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| {
                        Location { block: target, statement_index: 0 },
                        DropFlagState::Present,
                        Location { block: unwind, statement_index: 0 },
                        DropFlagState::Present,
            LookupResult::Parent(parent) => {
                // drop and replace behind a pointer/array/whatever. The location
                // must be initialized.
                debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
                self.patch.patch_terminator(
                    TerminatorKind::Drop { place, target, unwind: Some(unwind) },
    /// Builds the `Rvalue` for a boolean constant; used as drop-flag values.
    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Box::new(Constant {
            literal: ConstantKind::from_bool(self.tcx, val),
    /// Emits `flag = val` at `loc` if `path` has a drop flag; paths without
    /// a flag (statically-known initialization) are silently skipped.
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(&flag) = self.drop_flags.get(&path) {
            let span = self.patch.source_info_for_location(self.body, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Place::from(flag), val);
    /// Every drop flag starts out `false` ("not initialized"): assign that
    /// to each flag at the very start of the function.
    fn drop_flags_on_init(&mut self) {
        let loc = Location::START;
        let span = self.patch.source_info_for_location(self.body, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.values() {
            self.patch.add_assign(loc, Place::from(*flag), false_.clone());
    /// For calls that have a cleanup edge, the destination only becomes
    /// initialized on the *success* edge, so the flags are raised at the
    /// start of the normal-return block rather than after the terminator.
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            if let TerminatorKind::Call {
                destination, target: Some(tgt), cleanup: Some(_), ..
            } = data.terminator().kind
                // The call block itself must not have been edited yet.
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
    /// Function arguments are initialized on entry: raise their drop flags
    /// at the start of the body.
    fn drop_flags_for_args(&mut self) {
        let loc = Location::START;
        rustc_mir_dataflow::drop_flag_effects_for_function_entry(
            self.set_drop_flag(loc, path, ds);
571 fn drop_flags_for_locs(&mut self) {
572 // We intentionally iterate only over the *old* basic blocks.
574 // Basic blocks created by drop elaboration update their
575 // drop flags by themselves, to avoid the drop flags being
576 // clobbered before they are read.
578 for (bb, data) in self.body.basic_blocks.iter_enumerated() {
579 debug!("drop_flags_for_locs({:?})", data);
580 for i in 0..(data.statements.len() + 1) {
581 debug!("drop_flag_for_locs: stmt {}", i);
582 let mut allow_initializations = true;
583 if i == data.statements.len() {
584 match data.terminator().kind {
585 TerminatorKind::Drop { .. } => {
586 // drop elaboration should handle that by itself
589 TerminatorKind::DropAndReplace { .. } => {
590 // this contains the move of the source and
591 // the initialization of the destination. We
592 // only want the former - the latter is handled
593 // by the elaboration code and must be done
594 // *after* the destination is dropped.
595 assert!(self.patch.is_patched(bb));
596 allow_initializations = false;
598 TerminatorKind::Resume => {
599 // It is possible for `Resume` to be patched
600 // (in particular it can be patched to be replaced with
601 // a Goto; see `MirPatch::new`).
604 assert!(!self.patch.is_patched(bb));
608 let loc = Location { block: bb, statement_index: i };
609 rustc_mir_dataflow::drop_flag_effects_for_location(
615 if ds == DropFlagState::Absent || allow_initializations {
616 self.set_drop_flag(loc, path, ds)
622 // There may be a critical edge after this call,
623 // so mark the return as initialized *before* the
625 if let TerminatorKind::Call { destination, target: Some(_), cleanup: None, .. } =
626 data.terminator().kind
628 assert!(!self.patch.is_patched(bb));
630 let loc = Location { block: bb, statement_index: data.statements.len() };
631 let path = self.move_data().rev_lookup.find(destination.as_ref());
632 on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
633 self.set_drop_flag(loc, child, DropFlagState::Present)