2 use crate::dataflow::generic::{Analysis, ResultsCursor};
3 use crate::dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
4 use crate::dataflow::on_lookup_result_bits;
5 use crate::dataflow::MoveDataParamEnv;
6 use crate::dataflow::{on_all_children_bits, on_all_drop_children_bits};
7 use crate::dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
8 use crate::transform::{MirPass, MirSource};
9 use crate::util::elaborate_drops::{elaborate_drop, DropFlagState, Unwind};
10 use crate::util::elaborate_drops::{DropElaborator, DropFlagMode, DropStyle};
11 use crate::util::patch::MirPatch;
13 use rustc::ty::layout::VariantIdx;
14 use rustc::ty::{self, TyCtxt};
15 use rustc_data_structures::fx::FxHashMap;
17 use rustc_index::bit_set::BitSet;
// Zero-sized marker type for the drop-elaboration MIR pass; all behavior
// lives in the `MirPass` impl below.
21 pub struct ElaborateDrops;
// Entry point of the pass: gathers move data, runs the maybe-init /
// maybe-uninit dataflow analyses, builds an elaboration patch, and applies
// it to the body.
// NOTE(review): this excerpt is missing several original lines (the tail of
// the `delay_span_bug` call, the construction of `ElaborateDropsCtxt`, and
// closing braces) — comments below describe only what is visible.
23 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
24 fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut BodyAndCache<'tcx>) {
25 debug!("elaborate_drops({:?} @ {:?})", src, body.span);
27 let def_id = src.def_id();
// `with_reveal_all`: drop elaboration runs post-borrowck, so opaque types
// may be revealed when computing the param env.
28 let param_env = tcx.param_env(src.def_id()).with_reveal_all();
29 let move_data = match MoveData::gather_moves(body, tcx, param_env) {
30 Ok(move_data) => move_data,
// Move errors should already have been reported by borrowck; recover with
// the partial move data rather than aborting.
31 Err((move_data, _)) => {
32 tcx.sess.delay_span_bug(
34 "No `move_errors` should be allowed in MIR borrowck",
39 let elaborate_patch = {
41 let env = MoveDataParamEnv { move_data, param_env };
// Blocks whose unwind edges are provably dead; fed to both analyses below
// so unreachable cleanup paths don't pollute the fixpoint.
42 let dead_unwinds = find_dead_unwinds(tcx, body, def_id, &env);
// Forward dataflow: which move paths are maybe-initialized at each point.
44 let inits = MaybeInitializedPlaces::new(tcx, body, &env)
45 .into_engine(tcx, body, def_id)
46 .dead_unwinds(&dead_unwinds)
47 .iterate_to_fixpoint()
48 .into_results_cursor(body);
// Dual analysis: which move paths are maybe-uninitialized.
50 let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
51 .into_engine(tcx, body, def_id)
52 .dead_unwinds(&dead_unwinds)
53 .iterate_to_fixpoint()
54 .into_results_cursor(body);
// (review: the `ElaborateDropsCtxt { ... }.elaborate()` construction that
// these fields belong to is missing from the excerpt.)
60 init_data: InitializationData { inits, uninits },
61 drop_flags: Default::default(),
62 patch: MirPatch::new(body),
66 elaborate_patch.apply(body);
70 /// Returns the set of basic blocks whose unwind edges are known
71 /// to not be reachable, because they are `drop` terminators
72 /// that can't drop anything.
// NOTE(review): parameter lines for `tcx` and `body` (original lines 74-75),
// the non-drop match arm, and the guard around `dead_unwinds.insert(bb)`
// are missing from this excerpt.
73 fn find_dead_unwinds<'tcx>(
76 def_id: hir::def_id::DefId,
77 env: &MoveDataParamEnv<'tcx>,
78 ) -> BitSet<BasicBlock> {
79 debug!("find_dead_unwinds({:?})", body.span);
80 // We only need to do this pass once, because unwind edges can only
81 // reach cleanup blocks, which can't have unwind edges themselves.
82 let mut dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
// A dedicated maybe-init analysis (without dead-unwind pruning, since that
// is exactly what we are computing here).
83 let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
84 .into_engine(tcx, body, def_id)
85 .iterate_to_fixpoint()
86 .into_results_cursor(body);
87 for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
// Only drop-like terminators that actually have an unwind edge matter.
88 let location = match bb_data.terminator().kind {
89 TerminatorKind::Drop { ref location, unwind: Some(_), .. }
90 | TerminatorKind::DropAndReplace { ref location, unwind: Some(_), .. } => location,
94 debug!("find_dead_unwinds @ {:?}: {:?}", bb, bb_data);
96 let path = match env.move_data.rev_lookup.find(location.as_ref()) {
97 LookupResult::Exact(e) => e,
// Dropping behind a projection not tracked by move analysis: we can't
// reason about it, so conservatively keep the unwind edge.
98 LookupResult::Parent(..) => {
99 debug!("find_dead_unwinds: has parent; skipping");
// Position the cursor on the state just before the terminator.
104 flow_inits.seek_before(body.terminator_loc(bb));
106 "find_dead_unwinds @ {:?}: path({:?})={:?}; init_data={:?}",
// The drop can only do work if some drop-relevant child path may still be
// initialized at this point.
113 let mut maybe_live = false;
114 on_all_drop_children_bits(tcx, body, &env, path, |child| {
115 maybe_live |= flow_inits.contains(child);
118 debug!("find_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live);
// (review: presumably guarded by `if !maybe_live` — the guard line is
// missing from the excerpt; confirm against the full source.)
120 dead_unwinds.insert(bb);
// Paired dataflow cursors used during elaboration to query, at any location,
// whether a move path is maybe-initialized and/or maybe-uninitialized.
127 struct InitializationData<'mir, 'tcx> {
128 inits: ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
129 uninits: ResultsCursor<'mir, 'tcx, MaybeUninitializedPlaces<'mir, 'tcx>>,
132 impl InitializationData<'_, '_> {
// Advance both cursors to the state just before `loc`.
133 fn seek_before(&mut self, loc: Location) {
134 self.inits.seek_before(loc);
135 self.uninits.seek_before(loc);
// Returns `(maybe_live, maybe_dead)` for `path` at the currently-sought
// location: membership in the maybe-init and maybe-uninit sets respectively.
138 fn maybe_live_dead(&self, path: MovePathIndex) -> (bool, bool) {
139 (self.inits.contains(path), self.uninits.contains(path))
// Thin adapter that exposes `ElaborateDropsCtxt` through the
// `DropElaborator` trait interface (implemented below).
143 struct Elaborator<'a, 'b, 'tcx> {
144 ctxt: &'a mut ElaborateDropsCtxt<'b, 'tcx>,
// `Debug` is required by the `DropElaborator` trait bound; the formatter
// writes nothing (body missing from this excerpt, but `_f` is unused).
147 impl<'a, 'b, 'tcx> fmt::Debug for Elaborator<'a, 'b, 'tcx> {
148 fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
// The `DropElaborator` implementation: answers the drop-elaboration
// machinery's queries (drop style, flag clearing, subpath lookup) in terms
// of the move-path dataflow results held by `ElaborateDropsCtxt`.
// NOTE(review): several method bodies in this excerpt are missing lines
// (accessor bodies, match fallthrough arms, closing braces).
153 impl<'a, 'b, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, 'b, 'tcx> {
// Drops are tracked per move path.
154 type Path = MovePathIndex;
156 fn patch(&mut self) -> &mut MirPatch<'tcx> {
160 fn body(&self) -> &'a Body<'tcx> {
164 fn tcx(&self) -> TyCtxt<'tcx> {
168 fn param_env(&self) -> ty::ParamEnv<'tcx> {
169 self.ctxt.param_env()
// Classify how a drop of `path` must be emitted, from the liveness state
// at the currently-sought location.
172 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
173 let ((maybe_live, maybe_dead), multipart) = match mode {
// Shallow: only the path itself matters; never multipart.
174 DropFlagMode::Shallow => (self.ctxt.init_data.maybe_live_dead(path), false),
// Deep: aggregate liveness over all drop-relevant children.
175 DropFlagMode::Deep => {
176 let mut some_live = false;
177 let mut some_dead = false;
178 let mut children_count = 0;
179 on_all_drop_children_bits(self.tcx(), self.body(), self.ctxt.env, path, |child| {
180 let (live, dead) = self.ctxt.init_data.maybe_live_dead(child);
181 debug!("elaborate_drop: state({:?}) = {:?}", child, (live, dead));
// Multipart when more than one child contributes (review: the
// accumulation lines updating some_live/some_dead/children_count are
// missing from the excerpt).
186 ((some_live, some_dead), children_count != 1)
// definitely-dead / definitely-live / single conditional flag / open drop.
189 match (maybe_live, maybe_dead, multipart) {
190 (false, _, _) => DropStyle::Dead,
191 (true, false, _) => DropStyle::Static,
192 (true, true, false) => DropStyle::Conditional,
193 (true, true, true) => DropStyle::Open,
// Clear the drop flag(s) for `path` at `loc`: just the path for Shallow,
// every child path for Deep.
197 fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
199 DropFlagMode::Shallow => {
200 self.ctxt.set_drop_flag(loc, path, DropFlagState::Absent);
202 DropFlagMode::Deep => {
203 on_all_children_bits(
206 self.ctxt.move_data(),
208 |child| self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent),
// Find the child move path for a struct/tuple field, if tracked.
214 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> {
215 dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
216 ProjectionElem::Field(idx, _) => *idx == field,
// Child move path for a constant array index, if tracked.
221 fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path> {
222 dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
223 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
224 debug_assert!(size == *min_length, "min_length should be exact for arrays");
225 assert!(!from_end, "from_end should not be used for array element ConstantIndex");
// Child move path behind a deref (e.g. box contents), if tracked.
232 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
233 dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| {
234 *e == ProjectionElem::Deref
// Child move path for an enum-variant downcast, if tracked.
238 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
239 dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
240 ProjectionElem::Downcast(_, idx) => *idx == variant,
// The drop-flag local for `path`, as a copyable operand, if one was created.
245 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
246 self.ctxt.drop_flag(path).map(Operand::Copy)
// Shared state for the elaboration: the body being rewritten, move/dataflow
// results, the drop-flag locals allocated so far, and the pending MIR patch.
// NOTE(review): one field line (original line 251, presumably `tcx`) is
// missing from this excerpt — `self.tcx` is used by the methods below.
250 struct ElaborateDropsCtxt<'a, 'tcx> {
252 body: &'a Body<'tcx>,
253 env: &'a MoveDataParamEnv<'tcx>,
254 init_data: InitializationData<'a, 'tcx>,
// Maps each conditionally-dropped move path to its boolean flag local.
255 drop_flags: FxHashMap<MovePathIndex, Local>,
256 patch: MirPatch<'tcx>,
259 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// Accessor bodies (original lines 261, 265) are missing from this excerpt.
260 fn move_data(&self) -> &'b MoveData<'tcx> {
264 fn param_env(&self) -> ty::ParamEnv<'tcx> {
// Lazily allocate one internal `bool` local per conditionally-dropped path.
268 fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
// Split borrow: `patch` is borrowed separately so the entry closure below
// can use it while `self.drop_flags` is borrowed mutably.
270 let patch = &mut self.patch;
271 debug!("create_drop_flag({:?})", self.body.span);
272 self.drop_flags.entry(index).or_insert_with(|| patch.new_internal(tcx.types.bool, span));
// The flag local for `index` as a place, if one was allocated.
275 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
276 self.drop_flags.get(&index).map(|t| Place::from(*t))
279 /// create a patch that elaborates all drops in the input
280 /// MIR body
// Driver: first decide which paths need flags, then rewrite the drop
// terminators, then insert all flag assignments; returns the accumulated
// patch (the `self.patch` return line is missing from this excerpt).
281 fn elaborate(mut self) -> MirPatch<'tcx> {
282 self.collect_drop_flags();
284 self.elaborate_drops();
286 self.drop_flags_on_init();
287 self.drop_flags_for_fn_rets();
288 self.drop_flags_for_args();
289 self.drop_flags_for_locs();
// For every Drop/DropAndReplace terminator, create a drop flag for each
// drop-relevant child path that is both maybe-live and maybe-dead there
// (i.e. whose initialization state is only known dynamically).
294 fn collect_drop_flags(&mut self) {
295 for (bb, data) in self.body.basic_blocks().iter_enumerated() {
296 let terminator = data.terminator();
297 let location = match terminator.kind {
298 TerminatorKind::Drop { ref location, .. }
299 | TerminatorKind::DropAndReplace { ref location, .. } => location,
// Query dataflow state just before the drop terminator.
303 self.init_data.seek_before(self.body.terminator_loc(bb));
305 let path = self.move_data().rev_lookup.find(location.as_ref());
306 debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, location, path);
308 let path = match path {
309 LookupResult::Exact(e) => e,
310 LookupResult::Parent(None) => continue,
// Untracked place behind a tracked parent: only legal if the parent is
// definitely initialized (review: the span_bug emission guarded by
// `maybe_dead` appears truncated here).
311 LookupResult::Parent(Some(parent)) => {
312 let (_maybe_live, maybe_dead) = self.init_data.maybe_live_dead(parent);
315 terminator.source_info.span,
316 "drop of untracked, uninitialized value {:?}, place {:?} ({:?})",
326 on_all_drop_children_bits(self.tcx, self.body, self.env, path, |child| {
327 let (maybe_live, maybe_dead) = self.init_data.maybe_live_dead(child);
329 "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
333 (maybe_live, maybe_dead)
// Both states possible => the drop must be guarded by a runtime flag.
335 if maybe_live && maybe_dead {
336 self.create_drop_flag(child, terminator.source_info.span)
// Rewrite each Drop terminator via `elaborate_drop`, and each
// DropAndReplace via `elaborate_replace`.
342 fn elaborate_drops(&mut self) {
343 for (bb, data) in self.body.basic_blocks().iter_enumerated() {
// The terminator's location (statement index one past the last statement).
344 let loc = Location { block: bb, statement_index: data.statements.len() };
345 let terminator = data.terminator();
347 let resume_block = self.patch.resume_block();
348 match terminator.kind {
349 TerminatorKind::Drop { ref location, target, unwind } => {
350 self.init_data.seek_before(loc);
351 match self.move_data().rev_lookup.find(location.as_ref()) {
352 LookupResult::Exact(path) => elaborate_drop(
353 &mut Elaborator { ctxt: self },
354 terminator.source_info,
// Missing unwind edge defaults to the shared resume block.
361 Unwind::To(Option::unwrap_or(unwind, resume_block))
// A Drop of an untracked place should be impossible here (review: the
// surrounding span_bug call is truncated in this excerpt).
365 LookupResult::Parent(..) => {
367 terminator.source_info.span,
368 "drop of untracked value {:?}",
374 TerminatorKind::DropAndReplace { ref location, ref value, target, unwind } => {
// DropAndReplace never appears on the unwind path.
375 assert!(!data.is_cleanup);
377 self.elaborate_replace(loc, location, value, target, unwind);
384 /// Elaborate a MIR `replace` terminator. This instruction
385 /// is not directly handled by codegen, and therefore
386 /// must be desugared.
387 ///
388 /// The desugaring drops the location if needed, and then writes
389 /// the value (including setting the drop flag) over it in *both* arms.
390 ///
391 /// The `replace` terminator can also be called on places that
392 /// are not tracked by elaboration (for example,
393 /// `replace x[i] <- tmp0`). The borrow checker requires that
394 /// these locations are initialized before the assignment,
395 /// so we just generate an unconditional drop.
// NOTE(review): the `&mut self`, `loc`, and `target` parameter lines are
// missing from this excerpt, as are parts of the `elaborate_drop` call and
// the set_drop_flag argument lists.
396 fn elaborate_replace(
399 location: &Place<'tcx>,
400 value: &Operand<'tcx>,
402 unwind: Option<BasicBlock>,
405 let data = &self.body[bb];
406 let terminator = data.terminator();
407 assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");
// The assignment that performs the "replace" half; cloned into both the
// normal and unwind continuations below.
409 let assign = Statement {
410 kind: StatementKind::Assign(box (*location, Rvalue::Use(value.clone()))),
411 source_info: terminator.source_info,
// Fresh unwind block: do the assignment, then continue unwinding.
414 let unwind = unwind.unwrap_or_else(|| self.patch.resume_block());
415 let unwind = self.patch.new_block(BasicBlockData {
416 statements: vec![assign.clone()],
417 terminator: Some(Terminator {
418 kind: TerminatorKind::Goto { target: unwind },
// Fresh target block: do the assignment, then jump to the original target.
424 let target = self.patch.new_block(BasicBlockData {
425 statements: vec![assign],
426 terminator: Some(Terminator { kind: TerminatorKind::Goto { target }, ..*terminator }),
430 match self.move_data().rev_lookup.find(location.as_ref()) {
// Tracked place: elaborate a real (possibly conditional) drop, and mark
// every child path as re-initialized at the start of both new blocks.
431 LookupResult::Exact(path) => {
432 debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
433 self.init_data.seek_before(loc);
435 &mut Elaborator { ctxt: self },
436 terminator.source_info,
443 on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| {
445 Location { block: target, statement_index: 0 },
447 DropFlagState::Present,
450 Location { block: unwind, statement_index: 0 },
452 DropFlagState::Present,
456 LookupResult::Parent(parent) => {
457 // drop and replace behind a pointer/array/whatever. The location
458 // must be initialized.
459 debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
// Untracked: emit a plain unconditional Drop into the two new blocks.
460 self.patch.patch_terminator(
462 TerminatorKind::Drop { location: *location, target, unwind: Some(unwind) },
// Build a `Rvalue` holding the boolean constant `val` (the span/ty fields
// of the Constant are missing from this excerpt).
468 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
469 Rvalue::Use(Operand::Constant(Box::new(Constant {
472 literal: ty::Const::from_bool(self.tcx, val),
// Queue an assignment of `val` into the drop flag for `path` at `loc` —
// a no-op if the path never got a flag.
476 fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
477 if let Some(&flag) = self.drop_flags.get(&path) {
478 let span = self.patch.source_info_for_location(self.body, loc).span;
479 let val = self.constant_bool(span, val.value());
480 self.patch.add_assign(loc, Place::from(flag), val);
// Initialize every drop flag to `false` at function entry.
484 fn drop_flags_on_init(&mut self) {
485 let loc = Location::START;
486 let span = self.patch.source_info_for_location(self.body, loc).span;
487 let false_ = self.constant_bool(span, false);
488 for flag in self.drop_flags.values() {
489 self.patch.add_assign(loc, Place::from(*flag), false_.clone());
// After each call that returns along an unwind-capable edge (review: the
// `cleanup: Some(_)` pattern line appears to be missing from this excerpt),
// mark the destination place's paths as initialized at the start of the
// return target block.
493 fn drop_flags_for_fn_rets(&mut self) {
494 for (bb, data) in self.body.basic_blocks().iter_enumerated() {
495 if let TerminatorKind::Call {
496 destination: Some((ref place, tgt)),
499 } = data.terminator().kind
// This block must not have been rewritten, or `tgt` could be stale.
501 assert!(!self.patch.is_patched(bb));
503 let loc = Location { block: tgt, statement_index: 0 };
504 let path = self.move_data().rev_lookup.find(place.as_ref());
505 on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
506 self.set_drop_flag(loc, child, DropFlagState::Present)
// Apply the drop-flag effects of function-entry argument initialization
// at the very first location.
512 fn drop_flags_for_args(&mut self) {
513 let loc = Location::START;
514 dataflow::drop_flag_effects_for_function_entry(self.tcx, self.body, self.env, |path, ds| {
515 self.set_drop_flag(loc, path, ds);
// Propagate drop-flag updates for every statement and terminator of the
// original (pre-patch) basic blocks.
// NOTE(review): this method is truncated at the end of the excerpt and has
// internal lines missing (match fallthrough arms, closure argument lists).
519 fn drop_flags_for_locs(&mut self) {
520 // We intentionally iterate only over the *old* basic blocks.
521 //
522 // Basic blocks created by drop elaboration update their
523 // drop flags by themselves, to avoid the drop flags being
524 // clobbered before they are read.
526 for (bb, data) in self.body.basic_blocks().iter_enumerated() {
527 debug!("drop_flags_for_locs({:?})", data);
// `+ 1` so the final iteration covers the terminator itself.
528 for i in 0..(data.statements.len() + 1) {
529 debug!("drop_flag_for_locs: stmt {}", i);
530 let mut allow_initializations = true;
531 if i == data.statements.len() {
532 match data.terminator().kind {
533 TerminatorKind::Drop { .. } => {
534 // drop elaboration should handle that by itself
537 TerminatorKind::DropAndReplace { .. } => {
538 // this contains the move of the source and
539 // the initialization of the destination. We
540 // only want the former - the latter is handled
541 // by the elaboration code and must be done
542 // *after* the destination is dropped.
543 assert!(self.patch.is_patched(bb));
544 allow_initializations = false;
546 TerminatorKind::Resume => {
547 // It is possible for `Resume` to be patched
548 // (in particular it can be patched to be replaced with
549 // a Goto; see `MirPatch::new`).
552 assert!(!self.patch.is_patched(bb));
556 let loc = Location { block: bb, statement_index: i };
557 dataflow::drop_flag_effects_for_location(
// Skip Present-effects when the terminator's initialization half is
// handled by the elaboration code (see DropAndReplace arm above).
563 if ds == DropFlagState::Absent || allow_initializations {
564 self.set_drop_flag(loc, path, ds)
570 // There may be a critical edge after this call,
571 // so mark the return as initialized *before* the
572 // call.
573 if let TerminatorKind::Call {
574 destination: Some((ref place, _)), cleanup: None, ..
575 } = data.terminator().kind
577 assert!(!self.patch.is_patched(bb));
579 let loc = Location { block: bb, statement_index: data.statements.len() };
580 let path = self.move_data().rev_lookup.find(place.as_ref());
581 on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
582 self.set_drop_flag(loc, child, DropFlagState::Present)