1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use super::gather_moves::{HasMoveData, MoveData, MovePathIndex, LookupResult};
12 use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
13 use super::dataflow::{DataflowResults};
14 use super::{drop_flag_effects_for_location, on_all_children_bits};
15 use super::on_lookup_result_bits;
16 use super::{DropFlagState, MoveDataParamEnv};
17 use super::patch::MirPatch;
18 use rustc::ty::{self, Ty, TyCtxt};
19 use rustc::ty::subst::{Kind, Subst, Substs};
21 use rustc::mir::transform::{Pass, MirPass, MirSource};
22 use rustc::middle::const_val::ConstVal;
23 use rustc::middle::lang_items;
24 use rustc::util::nodemap::FxHashMap;
25 use rustc_data_structures::indexed_set::IdxSetBuf;
26 use rustc_data_structures::indexed_vec::Idx;
/// MIR pass that rewrites `Drop`/`DropAndReplace` terminators into
/// drop-flag-guarded conditional drops (see `run_pass` below).
pub struct ElaborateDrops;
35 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
36 fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
37 src: MirSource, mir: &mut Mir<'tcx>)
39 debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
41 MirSource::Fn(..) => {},
44 let id = src.item_id();
45 let param_env = ty::ParameterEnvironment::for_item(tcx, id);
46 let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
47 let elaborate_patch = {
49 let env = MoveDataParamEnv {
54 super::do_dataflow(tcx, mir, id, &[],
55 MaybeInitializedLvals::new(tcx, mir, &env),
56 |bd, p| &bd.move_data().move_paths[p]);
58 super::do_dataflow(tcx, mir, id, &[],
59 MaybeUninitializedLvals::new(tcx, mir, &env),
60 |bd, p| &bd.move_data().move_paths[p]);
66 flow_inits: flow_inits,
67 flow_uninits: flow_uninits,
68 drop_flags: FxHashMap(),
69 patch: MirPatch::new(mir),
72 elaborate_patch.apply(mir);
// Marker impl: registers `ElaborateDrops` as a MIR `Pass`.
impl Pass for ElaborateDrops {}
78 struct InitializationData {
79 live: IdxSetBuf<MovePathIndex>,
80 dead: IdxSetBuf<MovePathIndex>
// NOTE(review): this view is truncated — several lines (parameters,
// set-insertions, closing braces) are missing per the embedded original
// numbering; code reproduced as-is.
impl InitializationData {
    // Advances the live/dead sets over the drop-flag effects of the
    // statement at `loc`, mirroring what the dataflow would compute.
    fn apply_location<'a,'tcx>(&mut self,
                               tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               env: &MoveDataParamEnv<'tcx>,
        drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
            debug!("at location {:?}: setting {:?} to {:?}",
                DropFlagState::Present => {
                    // becoming initialized: no longer maybe-dead
                    self.dead.remove(&path);
                DropFlagState::Absent => {
                    // becoming uninitialized: no longer maybe-live
                    self.live.remove(&path);

    // Returns `(maybe_live, maybe_dead)` for `path` at the current point.
    fn state(&self, path: MovePathIndex) -> (bool, bool) {
        (self.live.contains(&path), self.dead.contains(&path))
// Manual Debug impl that deliberately prints nothing (the bitsets are
// too noisy to dump wholesale).
// NOTE(review): truncated — the body (presumably `Ok(())`) and closing
// braces are missing from this view.
impl fmt::Debug for InitializationData {
    fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
117 struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
118 tcx: TyCtxt<'a, 'tcx, 'tcx>,
120 env: &'a MoveDataParamEnv<'tcx>,
121 flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
122 flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
123 drop_flags: FxHashMap<MovePathIndex, Local>,
124 patch: MirPatch<'tcx>,
127 #[derive(Copy, Clone, Debug)]
128 struct DropCtxt<'a, 'tcx: 'a> {
129 source_info: SourceInfo,
132 init_data: &'a InitializationData,
134 lvalue: &'a Lvalue<'tcx>,
137 unwind: Option<BasicBlock>
// NOTE(review): this view is truncated — bodies and closing braces are
// missing per the embedded original numbering; code reproduced as-is.
impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
    // Accessor for the move data computed by `gather_moves`.
    fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }

    // Accessor for the parameter environment (body missing in this view).
    fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {

    /// Reconstructs the initialization state on entry to `loc`: start
    /// from the block-entry dataflow sets, then replay the drop-flag
    /// effects of the statements before `loc.statement_index`.
    fn initialization_data_at(&self, loc: Location) -> InitializationData {
        let mut data = InitializationData {
            live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
            dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
        for stmt in 0..loc.statement_index {
            data.apply_location(self.tcx, self.mir, self.env,
                                Location { block: loc.block, statement_index: stmt });

    /// Allocates (at most once per path) a boolean temporary to serve
    /// as the drop flag for move path `index`.
    fn create_drop_flag(&mut self, index: MovePathIndex) {
        let patch = &mut self.patch;
        self.drop_flags.entry(index).or_insert_with(|| {
            patch.new_temp(tcx.types.bool)

    /// Looks up the drop-flag lvalue for `index`, if one was created.
    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
        self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))

    /// create a patch that elaborates all drops in the input
    /// MIR; the caller applies the returned patch.
    fn elaborate(mut self) -> MirPatch<'tcx>
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();
    // NOTE(review): truncated view — several lines/braces missing;
    // code reproduced as-is.

    /// Whether the type at move path `path` needs any drop at all in
    /// the current parameter environment.
    fn path_needs_drop(&self, path: MovePathIndex) -> bool
        let lvalue = &self.move_data().move_paths[path].lvalue;
        let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
        debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
        self.tcx.type_needs_drop_given_env(ty, self.param_env())

    /// First pass: for every `Drop`/`DropAndReplace` terminator, create
    /// a drop flag for each child path that is *both* maybe-live and
    /// maybe-dead at the drop site — only those need a runtime check.
    fn collect_drop_flags(&mut self)
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            let terminator = data.terminator();
            let location = match terminator.kind {
                TerminatorKind::Drop { ref location, .. } |
                TerminatorKind::DropAndReplace { ref location, .. } => location,
            // Initialization state just before the terminator.
            let init_data = self.initialization_data_at(Location {
                statement_index: data.statements.len()
            let path = self.move_data().rev_lookup.find(location);
            debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
            let path = match path {
                LookupResult::Exact(e) => e,
                LookupResult::Parent(None) => continue,
                LookupResult::Parent(Some(parent)) => {
                    // An untracked drop is only legal if its parent is
                    // known-initialized; anything else is a dataflow bug.
                    let (_maybe_live, maybe_dead) = init_data.state(parent);
                        span_bug!(terminator.source_info.span,
                                  "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
            on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                if self.path_needs_drop(child) {
                    let (maybe_live, maybe_dead) = init_data.state(child);
                    debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                           child, location, path, (maybe_live, maybe_dead));
                    // Conditionally initialized → needs a runtime flag.
                    if maybe_live && maybe_dead {
                        self.create_drop_flag(child)
    // NOTE(review): truncated view — DropCtxt fields, match arms and
    // braces missing; code reproduced as-is.

    /// Second pass: rewrite each `Drop`/`DropAndReplace` terminator into
    /// its elaborated (flag-driven) form.
    fn elaborate_drops(&mut self)
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            let loc = Location { block: bb, statement_index: data.statements.len() };
            let terminator = data.terminator();

            let resume_block = self.patch.resume_block();
            match terminator.kind {
                TerminatorKind::Drop { ref location, target, unwind } => {
                    let init_data = self.initialization_data_at(loc);
                    match self.move_data().rev_lookup.find(location) {
                        LookupResult::Exact(path) => {
                            self.elaborate_drop(&DropCtxt {
                                source_info: terminator.source_info,
                                is_cleanup: data.is_cleanup,
                                init_data: &init_data,
                            unwind: if data.is_cleanup {
                                // in a cleanup block the unwind target
                                // defaults to the resume block
                                Some(Option::unwrap_or(unwind, resume_block))
                        LookupResult::Parent(..) => {
                            // a whole-value drop must resolve to an exact path
                            span_bug!(terminator.source_info.span,
                                      "drop of untracked value {:?}", bb);
                TerminatorKind::DropAndReplace { ref location, ref value,
                    // DropAndReplace is never emitted in cleanup blocks.
                    assert!(!data.is_cleanup);

                    self.elaborate_replace(
    // NOTE(review): truncated view — the signature's `loc`/`target`
    // parameters, several struct fields and braces are missing; code
    // reproduced as-is.

    /// Elaborate a MIR `replace` terminator. This instruction
    /// is not directly handled by translation, and therefore
    /// must be desugared.
    ///
    /// The desugaring drops the location if needed, and then writes
    /// the value (including setting the drop flag) over it in *both* arms.
    ///
    /// The `replace` terminator can also be called on lvalues that
    /// are not tracked by elaboration (for example,
    /// `replace x[i] <- tmp0`). The borrow checker requires that
    /// these locations are initialized before the assignment,
    /// so we just generate an unconditional drop.
    fn elaborate_replace(
        location: &Lvalue<'tcx>,
        value: &Operand<'tcx>,
        unwind: Option<BasicBlock>)
        let data = &self.mir[bb];
        let terminator = data.terminator();

        // The assignment writing `value` over `location`; emitted on
        // both the success and the unwind arm.
        let assign = Statement {
            kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
            source_info: terminator.source_info

        // Unwind arm: do the assignment, then continue unwinding.
        let unwind = unwind.unwrap_or(self.patch.resume_block());
        let unwind = self.patch.new_block(BasicBlockData {
            statements: vec![assign.clone()],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: unwind },

        // Success arm: do the assignment, then continue normally.
        let target = self.patch.new_block(BasicBlockData {
            statements: vec![assign],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: target },
            is_cleanup: data.is_cleanup,

        match self.move_data().rev_lookup.find(location) {
            LookupResult::Exact(path) => {
                debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
                let init_data = self.initialization_data_at(loc);

                self.elaborate_drop(&DropCtxt {
                    source_info: terminator.source_info,
                    is_cleanup: data.is_cleanup,
                    init_data: &init_data,
                // The value has just been written over the location, so
                // mark all children initialized in both arms.
                on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                    self.set_drop_flag(Location { block: target, statement_index: 0 },
                                       child, DropFlagState::Present);
                    self.set_drop_flag(Location { block: unwind, statement_index: 0 },
                                       child, DropFlagState::Present);
            LookupResult::Parent(parent) => {
                // drop and replace behind a pointer/array/whatever. The location
                // must be initialized.
                debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
                self.patch.patch_terminator(bb, TerminatorKind::Drop {
                    location: location.clone(),
    // NOTE(review): truncated view — closure parameters, match arms and
    // braces missing; code reproduced as-is.

    /// This elaborates a single drop instruction, located at `bb`, and
    /// patches over it.
    ///
    /// The elaborated drop checks the drop flags to only drop what
    /// is initialized.
    ///
    /// In addition, the relevant drop flags also need to be cleared
    /// to avoid double-drops. However, in the middle of a complex
    /// drop, one must avoid clearing some of the flags before they
    /// are read, as that would cause a memory leak.
    ///
    /// In particular, when dropping an ADT, multiple fields may be
    /// joined together under the `rest` subpath. They are all controlled
    /// by the primary drop flag, but only the last rest-field dropped
    /// should clear it (and it must also not clear anything else).
    ///
    /// FIXME: I think we should just control the flags externally
    /// and then we do not need this machinery.
    fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
        debug!("elaborate_drop({:?})", c);

        // Classify the drop by the liveness of the children that
        // actually need dropping.
        let mut some_live = false;
        let mut some_dead = false;
        let mut children_count = 0;
        on_all_children_bits(
            self.tcx, self.mir, self.move_data(),
            if self.path_needs_drop(child) {
                let (live, dead) = c.init_data.state(child);
                debug!("elaborate_drop: state({:?}) = {:?}",
                       child, (live, dead));
        debug!("elaborate_drop({:?}): live - {:?}", c,
               (some_live, some_dead));
        match (some_live, some_dead) {
            (false, false) | (false, true) => {
                // dead drop - patch it out
                self.patch.patch_terminator(bb, TerminatorKind::Goto {
                // static drop - just set the flag
                self.patch.patch_terminator(bb, TerminatorKind::Drop {
                    location: c.lvalue.clone(),
                self.drop_flags_for_drop(c, bb);
            // a single conditionally-live child (or a must-complete
            // type) gets a simple flag test; otherwise an open drop
            let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
                self.conditional_drop(c)
            self.patch.patch_terminator(bb, TerminatorKind::Goto {
    // NOTE(review): truncated view — the match scrutinee, `field_ty`
    // binding and braces are missing; code reproduced as-is.

    /// Return the lvalue and move path for each field of `variant`,
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(&self,
                             base_lv: &Lvalue<'tcx>,
                             variant_path: MovePathIndex,
                             variant: &'tcx ty::VariantDef,
                             substs: &'tcx Substs<'tcx>)
                             -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
        variant.fields.iter().enumerate().map(|(i, f)| {
            // Find the move-path child matching field `i`, if tracked.
            super::move_path_children_matching(self.move_data(), variant_path, |p| {
                elem: ProjectionElem::Field(idx, _), ..
            } => idx.index() == i,
            // Normalize the field type so associated types resolve.
            self.tcx.normalize_associated_type_in_env(
                &f.ty(self.tcx, substs),
            (base_lv.clone().field(Field::new(i), field_ty), subpath)
    // NOTE(review): truncated view — the `succ`/`is_cleanup` parameters,
    // several DropCtxt fields and braces are missing; code as-is.

    /// Create one-half of the drop ladder for a list of fields, and return
    /// the list of steps in it in reverse order.
    ///
    /// `unwind_ladder` is such a list of steps in reverse order,
    /// which is called instead of the next step if the drop unwinds
    /// (the first field is never reached). If it is `None`, all
    /// unwind targets are left blank.
    fn drop_halfladder<'a>(&mut self,
                           c: &DropCtxt<'a, 'tcx>,
                           unwind_ladder: Option<Vec<BasicBlock>>,
                           fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
        let mut unwind_succ = if is_cleanup {

        let mut succ = self.new_block(
            c, c.is_cleanup, TerminatorKind::Goto { target: succ }

        // Always clear the "master" drop flag at the bottom of the
        // ladder. This is needed because the "master" drop flag
        // protects the ADT's discriminant, which is invalidated
        // after the ADT is dropped.
            Location { block: succ, statement_index: 0 },
            DropFlagState::Absent

        // Build the ladder bottom-up (fields reversed); each step's
        // success target is the step built just before it.
        fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
            succ = if let Some(path) = path {
                // tracked field: fully elaborated drop
                debug!("drop_ladder: for std field {} ({:?})", i, lv);

                self.elaborated_drop_block(&DropCtxt {
                    source_info: c.source_info,
                    is_cleanup: is_cleanup,
                    init_data: c.init_data,
                // untracked "rest" field: unconditional complete drop
                debug!("drop_ladder: for rest field {} ({:?})", i, lv);

                self.complete_drop(&DropCtxt {
                    source_info: c.source_info,
                    is_cleanup: is_cleanup,
                    init_data: c.init_data,
            unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);

    /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
    /// (the normal one and, unless already in cleanup, the unwind one).
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    ///
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    ///
    ///     ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
    ///
    ///     ELAB(drop location.1 [target=.c2])
    ///
    ///     ELAB(drop location.2 [target=`c.unwind])
    fn drop_ladder<'a>(&mut self,
                       c: &DropCtxt<'a, 'tcx>,
                       fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
        debug!("drop_ladder({:?}, {:?})", c, fields);

        // Fields whose type needs no drop are skipped entirely.
        let mut fields = fields;
        fields.retain(|&(ref lvalue, _)| {
            let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
            self.tcx.type_needs_drop_given_env(ty, self.param_env())

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        // Build the cleanup half first so the normal half can unwind
        // into it.
        let unwind_ladder = if c.is_cleanup {
            Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
        self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
            .last().cloned().unwrap_or(c.succ)
    // NOTE(review): truncated view — match scrutinees, DropCtxt fields
    // and braces are missing; code reproduced as-is.

    /// Open drop for a tuple: drop each element via a drop ladder.
    fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
        debug!("open_drop_for_tuple({:?}, {:?})", c, tys);

        // Pair each element lvalue with its move-path child (if tracked).
        let fields = tys.iter().enumerate().map(|(i, &ty)| {
            (c.lvalue.clone().field(Field::new(i), ty),
             super::move_path_children_matching(
                 self.move_data(), c.path, |proj| match proj {
                     elem: ProjectionElem::Field(f, _), ..

        self.drop_ladder(c, fields)

    /// Open drop for a `Box<T>`: drop the interior value, then free the
    /// allocation (both on the success and the unwind path).
    fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
        debug!("open_drop_for_box({:?}, {:?})", c, ty);

        // Move path of `*box`, if it is tracked.
        let interior_path = super::move_path_children_matching(
            self.move_data(), c.path, |proj| match proj {
                &Projection { elem: ProjectionElem::Deref, .. } => true,

        let interior = c.lvalue.clone().deref();
        let inner_c = DropCtxt {
            // after the interior drop, both arms fall into `box_free`
            unwind: c.unwind.map(|u| {
                self.box_free_block(c, ty, u, true)
            succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),

        self.elaborated_drop_block(&inner_c)
    // NOTE(review): truncated view — match scrutinees/arms and braces
    // are missing; code reproduced as-is.

    /// Open drop for one enum variant: drop its fields via a ladder if
    /// the variant's downcast is tracked; otherwise fall back to a
    /// memoized complete drop of the whole enum.
    fn open_drop_for_variant<'a>(&mut self,
                                 c: &DropCtxt<'a, 'tcx>,
                                 drop_block: &mut Option<BasicBlock>,
                                 adt: &'tcx ty::AdtDef,
                                 substs: &'tcx Substs<'tcx>,
                                 variant_index: usize)
        let subpath = super::move_path_children_matching(
            self.move_data(), c.path, |proj| match proj {
                elem: ProjectionElem::Downcast(_, idx), ..
            } => idx == variant_index,

        if let Some(variant_path) = subpath {
            let base_lv = c.lvalue.clone().elem(
                ProjectionElem::Downcast(adt, variant_index)
            let fields = self.move_paths_for_fields(
                &adt.variants[variant_index],
            self.drop_ladder(c, fields)
            // variant not found - drop the entire enum
            if let None = *drop_block {
                // memoized so every untracked variant shares one block
                *drop_block = Some(self.complete_drop(c, true));
            return drop_block.unwrap();

    /// Open drop for an ADT: structs drop their fields directly; multi-
    /// variant enums switch on the discriminant first.
    fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
                             adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
        debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);

        let mut drop_block = None;

        match adt.variants.len() {
            let fields = self.move_paths_for_fields(
            self.drop_ladder(c, fields)
            let variant_drops : Vec<BasicBlock> =
                (0..adt.variants.len()).map(|i| {
                    self.open_drop_for_variant(c, &mut drop_block,

            // If there are multiple variants, then if something
            // is present within the enum the discriminant, tracked
            // by the rest path, must be initialized.
            //
            // Additionally, we do not want to switch on the
            // discriminant after it is free-ed, because that
            // way lies only trouble.
            let switch_block = self.new_block(
                c, c.is_cleanup, TerminatorKind::Switch {
                    discr: c.lvalue.clone(),
                    targets: variant_drops

            self.drop_flag_test_block(c, switch_block)
    // NOTE(review): truncated view — the `match ty.sty` scrutinee,
    // `&mut self` lines and braces are missing; code reproduced as-is.

    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// handled specially.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case or if one of the destructors fail.
    fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
            // closures drop their upvars like a tuple
            ty::TyClosure(def_id, substs) => {
                let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
                self.open_drop_for_tuple(c, &tys)
            ty::TyTuple(tys, _) => {
                self.open_drop_for_tuple(c, tys)
            ty::TyAdt(def, _) if def.is_box() => {
                self.open_drop_for_box(c, ty.boxed_ty())
            ty::TyAdt(def, substs) => {
                self.open_drop_for_adt(c, def, substs)
            _ => bug!("open drop from non-ADT `{:?}`", ty)

    /// Return a basic block that drop an lvalue using the context
    /// and path in `c`. If `update_drop_flag` is true, also
    /// clear the drop flag first:
    ///
    /// if(update_drop_flag) FLAG(c.path) = false
    /// drop(c.lv)
    fn complete_drop<'a>(
        c: &DropCtxt<'a, 'tcx>,
        update_drop_flag: bool)
        debug!("complete_drop({:?},{:?})", c, update_drop_flag);

        let drop_block = self.drop_block(c);
        if update_drop_flag {
            // clear the flag at the head of the drop block
            Location { block: drop_block, statement_index: 0 },
            DropFlagState::Absent

        // only enter the drop if the flag says maybe-live
        self.drop_flag_test_block(c, drop_block)

    /// Create a simple conditional drop:
    ///
    /// if FLAG(c.lv):
    ///     FLAGS(c.lv) = false
    ///     drop(c.lv)
    fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
        debug!("conditional_drop({:?})", c);
        let drop_bb = self.drop_block(c);
        // clear all child flags at the drop site
        self.drop_flags_for_drop(c, drop_bb);

        self.drop_flag_test_block(c, drop_bb)
    // NOTE(review): truncated view — parameters (`is_cleanup`, `on_set`,
    // `ty`, `target`), match arms and braces are missing; code as-is.

    /// Create a new basic block containing only terminator `k`.
    fn new_block<'a>(&mut self,
                     c: &DropCtxt<'a, 'tcx>,
                     k: TerminatorKind<'tcx>)
        self.patch.new_block(BasicBlockData {
            terminator: Some(Terminator {
                source_info: c.source_info, kind: k
            is_cleanup: is_cleanup

    /// Create a drop block for `c` and elaborate the drop inside it.
    fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", c);
        let blk = self.drop_block(c);
        self.elaborate_drop(c, blk);

    /// Branch to `on_set` iff the drop flag for `c.path` may be set;
    /// `c.succ` is the not-set target.
    fn drop_flag_test_block<'a>(&mut self,
                                c: &DropCtxt<'a, 'tcx>,
        self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)

    /// Like `drop_flag_test_block` but with an explicit not-set target.
    /// When dataflow already knows the answer, the test is resolved
    /// statically instead of emitting a runtime branch.
    fn drop_flag_test_block_with_succ<'a>(&mut self,
                                          c: &DropCtxt<'a, 'tcx>,
                                          on_unset: BasicBlock)
        let (maybe_live, maybe_dead) = c.init_data.state(c.path);
        debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
               c, is_cleanup, on_set, (maybe_live, maybe_dead));
        match (maybe_live, maybe_dead) {
            // definitely uninitialized: skip the drop
            (false, _) => on_unset,
            // definitely initialized: drop unconditionally
            (true, false) => on_set,
            // unknown at compile time: test the runtime flag
                let flag = self.drop_flag(c.path).unwrap();
                self.new_block(c, is_cleanup, TerminatorKind::If {
                    cond: Operand::Consume(flag),
                    targets: (on_set, on_unset)

    /// Create a block that performs a plain `Drop` of `c.lvalue`.
    fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
            location: c.lvalue.clone(),

    /// Create a flag-tested block that frees a box's allocation.
    fn box_free_block<'a>(
        c: &DropCtxt<'a, 'tcx>,
        let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
        self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
    // NOTE(review): truncated view — parameters, the `match ty.sty`
    // scrutinee and braces are missing; code reproduced as-is.

    /// Build the block that calls the `box_free` lang item for `ty`,
    /// clearing the box's drop flag first (the box is consumed either way).
    fn unelaborated_free_block<'a>(
        c: &DropCtxt<'a, 'tcx>,
        let mut statements = vec![];
        if let Some(&flag) = self.drop_flags.get(&c.path) {
            statements.push(Statement {
                source_info: c.source_info,
                kind: StatementKind::Assign(
                    self.constant_bool(c.source_info.span, false)

        // Call `box_free::<ty>(c.lvalue)`, discarding the unit result.
        let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
        let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
        let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
        let fty = tcx.item_type(free_func).subst(tcx, substs);

        self.patch.new_block(BasicBlockData {
            statements: statements,
            terminator: Some(Terminator {
                source_info: c.source_info, kind: TerminatorKind::Call {
                    func: Operand::Constant(Constant {
                        span: c.source_info.span,
                        literal: Literal::Item {
                    args: vec![Operand::Consume(c.lvalue.clone())],
                    destination: Some((unit_temp, target)),
            is_cleanup: is_cleanup

    /// Whether `c` must be dropped as a single unit (has a destructor).
    fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
        // if we have a destuctor, we must *not* split the drop.
        //
        // dataflow can create unneeded children in some cases
        // - be sure to ignore them.

        let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
            ty::TyAdt(def, _) => {
                if def.has_dtor() && !def.is_box() {
                    // partially-moved value with a dtor indicates a
                    // dataflow bug — warn rather than ICE
                    self.tcx.sess.span_warn(
                        &format!("dataflow bug??? moving out of type with dtor {:?}",

    /// Build an `Rvalue` holding the boolean constant `val`.
    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Constant {
            ty: self.tcx.types.bool,
            literal: Literal::Value { value: ConstVal::Bool(val) }

    /// Emit an assignment of `val` to `path`'s drop flag at `loc`;
    /// a no-op when the path has no flag.
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(&flag) = self.drop_flags.get(&path) {
            let span = self.patch.source_info_for_location(self.mir, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Lvalue::Local(flag), val);
    // NOTE(review): truncated view — closing braces missing; code as-is.

    /// Initialize every drop flag to `false` at function entry.
    fn drop_flags_on_init(&mut self) {
        let loc = Location { block: START_BLOCK, statement_index: 0 };
        let span = self.patch.source_info_for_location(self.mir, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.values() {
            self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());

    /// For calls that have a cleanup edge, mark the return lvalue as
    /// initialized at the start of the success block (the terminator
    /// itself sits on a critical edge, so it can't be done there).
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            if let TerminatorKind::Call {
                destination: Some((ref lv, tgt)), cleanup: Some(_), ..
            } = data.terminator().kind {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(lv);
                on_lookup_result_bits(
                    self.tcx, self.mir, self.move_data(), path,
                    |child| self.set_drop_flag(loc, child, DropFlagState::Present)

    /// Mark function arguments as initialized on entry.
    fn drop_flags_for_args(&mut self) {
        let loc = Location { block: START_BLOCK, statement_index: 0 };
        super::drop_flag_effects_for_function_entry(
            self.tcx, self.mir, self.env, |path, ds| {
                self.set_drop_flag(loc, path, ds);
    // NOTE(review): truncated view — match arms/braces missing, and the
    // final method is cut off at the end of the visible source; code
    // reproduced as-is.

    /// Emit drop-flag updates for every statement and terminator
    /// location of the pre-elaboration MIR.
    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            for i in 0..(data.statements.len()+1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                let mut allow_initializations = true;
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                        TerminatorKind::DropAndReplace { .. } => {
                            // this contains the move of the source and
                            // the initialization of the destination. We
                            // only want the former - the latter is handled
                            // by the elaboration code and must be done
                            // *after* the destination is dropped.
                            assert!(self.patch.is_patched(bb));
                            allow_initializations = false;
                            assert!(!self.patch.is_patched(bb));
                let loc = Location { block: bb, statement_index: i };
                super::drop_flag_effects_for_location(
                    self.tcx, self.mir, self.env, loc, |path, ds| {
                        if ds == DropFlagState::Absent || allow_initializations {
                            self.set_drop_flag(loc, path, ds)

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call (only possible when there is no cleanup edge).
            if let TerminatorKind::Call {
                destination: Some((ref lv, _)), cleanup: None, ..
            } = data.terminator().kind {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(lv);
                on_lookup_result_bits(
                    self.tcx, self.mir, self.move_data(), path,
                    |child| self.set_drop_flag(loc, child, DropFlagState::Present)

    /// Clear the drop flags of all children of `c.path` right before
    /// the drop terminator in `bb` runs.
    fn drop_flags_for_drop<'a>(&mut self,
                               c: &DropCtxt<'a, 'tcx>,
        let loc = self.patch.terminator_loc(self.mir, bb);
        on_all_children_bits(
            self.tcx, self.mir, self.move_data(), c.path,
            |child| self.set_drop_flag(loc, child, DropFlagState::Absent)