1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use super::gather_moves::{HasMoveData, MoveData, MovePathIndex, LookupResult};
12 use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
13 use super::dataflow::{DataflowResults};
14 use super::{drop_flag_effects_for_location, on_all_children_bits};
15 use super::on_lookup_result_bits;
16 use super::{DropFlagState, MoveDataParamEnv};
17 use super::patch::MirPatch;
18 use rustc::ty::{self, Ty, TyCtxt};
19 use rustc::ty::subst::{Kind, Subst, Substs};
20 use rustc::ty::util::IntTypeExt;
22 use rustc::mir::transform::{Pass, MirPass, MirSource};
23 use rustc::middle::const_val::{ConstVal, ConstInt};
24 use rustc::middle::lang_items;
25 use rustc::util::nodemap::FxHashMap;
26 use rustc_data_structures::indexed_set::IdxSetBuf;
27 use rustc_data_structures::indexed_vec::Idx;
// Zero-sized marker type for the drop-elaboration MIR pass; all working
// state lives in `ElaborateDropsCtxt` below.
34 pub struct ElaborateDrops;
36 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
37 fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
38 src: MirSource, mir: &mut Mir<'tcx>)
40 debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
42 MirSource::Fn(..) => {},
45 let id = src.item_id();
46 let param_env = ty::ParameterEnvironment::for_item(tcx, id);
47 let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
48 let elaborate_patch = {
50 let env = MoveDataParamEnv {
55 super::do_dataflow(tcx, mir, id, &[],
56 MaybeInitializedLvals::new(tcx, mir, &env),
57 |bd, p| &bd.move_data().move_paths[p]);
59 super::do_dataflow(tcx, mir, id, &[],
60 MaybeUninitializedLvals::new(tcx, mir, &env),
61 |bd, p| &bd.move_data().move_paths[p]);
67 flow_inits: flow_inits,
68 flow_uninits: flow_uninits,
69 drop_flags: FxHashMap(),
70 patch: MirPatch::new(mir),
73 elaborate_patch.apply(mir);
// Marker impl: registers `ElaborateDrops` with the generic pass machinery.
77 impl Pass for ElaborateDrops {}
// Snapshot of drop-flag dataflow state at one program location: one bit
// per move path in each set. `live` = maybe-initialized, `dead` =
// maybe-uninitialized. A path in both sets is conditionally initialized.
// NOTE(review): the closing brace line appears elided in this extraction.
79 struct InitializationData {
80 live: IdxSetBuf<MovePathIndex>,
81 dead: IdxSetBuf<MovePathIndex>
84 impl InitializationData {
// Replay the drop-flag effects of the statement at `loc` into the
// live/dead bit sets (Present => becomes live, Absent => becomes dead).
85 fn apply_location<'a,'tcx>(&mut self,
86 tcx: TyCtxt<'a, 'tcx, 'tcx>,
88 env: &MoveDataParamEnv<'tcx>,
91 drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
92 debug!("at location {:?}: setting {:?} to {:?}",
95 DropFlagState::Present => {
// NOTE(review): the matching `self.live.add(&path)` line appears to be
// elided from this extraction — confirm against upstream.
97 self.dead.remove(&path);
99 DropFlagState::Absent => {
100 self.dead.add(&path);
101 self.live.remove(&path);
// Returns (maybe-live, maybe-dead) for `path` at the snapshot location.
107 fn state(&self, path: MovePathIndex) -> (bool, bool) {
108 (self.live.contains(&path), self.dead.contains(&path))
// Manual Debug impl (the bit-set fields don't derive it usefully);
// the formatter body is elided in this extraction.
112 impl fmt::Debug for InitializationData {
113 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// Working state for the pass: the dataflow results, the map from move
// path to its materialized boolean drop-flag local, and the `MirPatch`
// accumulating new blocks/statements to splice into the body.
118 struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
119 tcx: TyCtxt<'a, 'tcx, 'tcx>,
121 env: &'a MoveDataParamEnv<'tcx>,
122 flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
123 flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
124 drop_flags: FxHashMap<MovePathIndex, Local>,
125 patch: MirPatch<'tcx>,
128 #[derive(Copy, Clone, Debug)]
// Per-drop-site context threaded through elaboration: where the drop
// occurs (`source_info`), what is dropped (`lvalue` and, per the elided
// fields, presumably its move path), the init-state snapshot, and the
// optional unwind continuation.
129 struct DropCtxt<'a, 'tcx: 'a> {
130 source_info: SourceInfo,
133 init_data: &'a InitializationData,
135 lvalue: &'a Lvalue<'tcx>,
138 unwind: Option<BasicBlock>
141 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// Accessors into the shared move-data / param-env bundle.
142 fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
143 fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
// Compute the init/uninit snapshot at `loc`: start from the block-entry
// dataflow sets, then replay each statement preceding `loc` in the block.
147 fn initialization_data_at(&self, loc: Location) -> InitializationData {
148 let mut data = InitializationData {
149 live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
151 dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
154 for stmt in 0..loc.statement_index {
155 data.apply_location(self.tcx, self.mir, self.env,
156 Location { block: loc.block, statement_index: stmt });
// Lazily allocate a `bool` temp as the drop flag for `index`.
161 fn create_drop_flag(&mut self, index: MovePathIndex) {
163 let patch = &mut self.patch;
164 self.drop_flags.entry(index).or_insert_with(|| {
165 patch.new_temp(tcx.types.bool)
// Look up the drop flag for `index`, if one was created.
169 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
170 self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))
173 /// create a patch that elaborates all drops in the input
// Driver: collect the needed flags, rewrite the drop terminators, then
// emit all flag-maintenance assignments; consumes self into the patch.
175 fn elaborate(mut self) -> MirPatch<'tcx>
177 self.collect_drop_flags();
179 self.elaborate_drops();
181 self.drop_flags_on_init();
182 self.drop_flags_for_fn_rets();
183 self.drop_flags_for_args();
184 self.drop_flags_for_locs();
// True if the type of the lvalue at `path` needs drop in this param env.
189 fn path_needs_drop(&self, path: MovePathIndex) -> bool
191 let lvalue = &self.move_data().move_paths[path].lvalue;
192 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
193 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
195 self.tcx.type_needs_drop_given_env(ty, self.param_env())
// Walk every Drop/DropAndReplace terminator and create a drop flag for
// each droppable child path that is conditionally initialized at the
// drop site (maybe-live AND maybe-dead per the dataflow snapshot).
198 fn collect_drop_flags(&mut self)
200 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
201 let terminator = data.terminator();
202 let location = match terminator.kind {
203 TerminatorKind::Drop { ref location, .. } |
204 TerminatorKind::DropAndReplace { ref location, .. } => location,
208 let init_data = self.initialization_data_at(Location {
210 statement_index: data.statements.len()
213 let path = self.move_data().rev_lookup.find(location);
214 debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
// A parent-only (untracked) path is tolerable only if it cannot be
// uninitialized here; otherwise this is a dataflow bug.
217 let path = match path {
218 LookupResult::Exact(e) => e,
219 LookupResult::Parent(None) => continue,
220 LookupResult::Parent(Some(parent)) => {
221 let (_maybe_live, maybe_dead) = init_data.state(parent);
223 span_bug!(terminator.source_info.span,
224 "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
231 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
232 if self.path_needs_drop(child) {
233 let (maybe_live, maybe_dead) = init_data.state(child);
234 debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
235 child, location, path, (maybe_live, maybe_dead));
236 if maybe_live && maybe_dead {
237 self.create_drop_flag(child)
// Rewrite each Drop terminator in place using the init snapshot at that
// point; DropAndReplace is desugared separately via `elaborate_replace`.
244 fn elaborate_drops(&mut self)
246 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
247 let loc = Location { block: bb, statement_index: data.statements.len() };
248 let terminator = data.terminator();
250 let resume_block = self.patch.resume_block();
251 match terminator.kind {
252 TerminatorKind::Drop { ref location, target, unwind } => {
253 let init_data = self.initialization_data_at(loc);
254 match self.move_data().rev_lookup.find(location) {
255 LookupResult::Exact(path) => {
256 self.elaborate_drop(&DropCtxt {
257 source_info: terminator.source_info,
258 is_cleanup: data.is_cleanup,
259 init_data: &init_data,
// Cleanup blocks take no unwind edge; otherwise default the unwind
// target to the shared resume block.
263 unwind: if data.is_cleanup {
266 Some(Option::unwrap_or(unwind, resume_block))
270 LookupResult::Parent(..) => {
271 span_bug!(terminator.source_info.span,
272 "drop of untracked value {:?}", bb);
276 TerminatorKind::DropAndReplace { ref location, ref value,
279 assert!(!data.is_cleanup);
281 self.elaborate_replace(
292 /// Elaborate a MIR `replace` terminator. This instruction
293 /// is not directly handled by translation, and therefore
294 /// must be desugared.
296 /// The desugaring drops the location if needed, and then writes
297 /// the value (including setting the drop flag) over it in *both* arms.
299 /// The `replace` terminator can also be called on lvalues that
300 /// are not tracked by elaboration (for example,
301 /// `replace x[i] <- tmp0`). The borrow checker requires that
302 /// these locations are initialized before the assignment,
303 /// so we just generate an unconditional drop.
304 fn elaborate_replace(
307 location: &Lvalue<'tcx>,
308 value: &Operand<'tcx>,
310 unwind: Option<BasicBlock>)
313 let data = &self.mir[bb];
314 let terminator = data.terminator();
// The assignment of `value` into `location`; emitted on both the
// normal and the unwind continuation (see doc comment above).
316 let assign = Statement {
317 kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
318 source_info: terminator.source_info
// Fresh block on the unwind path: do the assignment, then continue to
// the (defaulted-to-resume) unwind target.
321 let unwind = unwind.unwrap_or(self.patch.resume_block());
322 let unwind = self.patch.new_block(BasicBlockData {
323 statements: vec![assign.clone()],
324 terminator: Some(Terminator {
325 kind: TerminatorKind::Goto { target: unwind },
// Same for the normal (non-unwind) continuation.
331 let target = self.patch.new_block(BasicBlockData {
332 statements: vec![assign],
333 terminator: Some(Terminator {
334 kind: TerminatorKind::Goto { target: target },
337 is_cleanup: data.is_cleanup,
340 match self.move_data().rev_lookup.find(location) {
341 LookupResult::Exact(path) => {
// Tracked lvalue: elaborate the drop, then mark the whole subtree
// as re-initialized at the head of both continuation blocks.
342 debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
343 let init_data = self.initialization_data_at(loc);
345 self.elaborate_drop(&DropCtxt {
346 source_info: terminator.source_info,
347 is_cleanup: data.is_cleanup,
348 init_data: &init_data,
354 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
355 self.set_drop_flag(Location { block: target, statement_index: 0 },
356 child, DropFlagState::Present);
357 self.set_drop_flag(Location { block: unwind, statement_index: 0 },
358 child, DropFlagState::Present);
361 LookupResult::Parent(parent) => {
362 // drop and replace behind a pointer/array/whatever. The location
363 // must be initialized.
364 debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
365 self.patch.patch_terminator(bb, TerminatorKind::Drop {
366 location: location.clone(),
374 /// This elaborates a single drop instruction, located at `bb`, and
377 /// The elaborated drop checks the drop flags to only drop what
380 /// In addition, the relevant drop flags also need to be cleared
381 /// to avoid double-drops. However, in the middle of a complex
382 /// drop, one must avoid clearing some of the flags before they
383 /// are read, as that would cause a memory leak.
385 /// In particular, when dropping an ADT, multiple fields may be
386 /// joined together under the `rest` subpath. They are all controlled
387 /// by the primary drop flag, but only the last rest-field dropped
388 /// should clear it (and it must also not clear anything else).
390 /// FIXME: I think we should just control the flags externally
391 /// and then we do not need this machinery.
392 fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
393 debug!("elaborate_drop({:?})", c);
// Classify the drop by scanning its droppable children: is anything
// possibly initialized (live) and/or possibly uninitialized (dead)?
395 let mut some_live = false;
396 let mut some_dead = false;
397 let mut children_count = 0;
398 on_all_children_bits(
399 self.tcx, self.mir, self.move_data(),
401 if self.path_needs_drop(child) {
402 let (live, dead) = c.init_data.state(child);
403 debug!("elaborate_drop: state({:?}) = {:?}",
404 child, (live, dead));
411 debug!("elaborate_drop({:?}): live - {:?}", c,
412 (some_live, some_dead));
413 match (some_live, some_dead) {
414 (false, false) | (false, true) => {
415 // dead drop - patch it out
416 self.patch.patch_terminator(bb, TerminatorKind::Goto {
421 // static drop - just set the flag
422 self.patch.patch_terminator(bb, TerminatorKind::Drop {
423 location: c.lvalue.clone(),
427 self.drop_flags_for_drop(c, bb);
// Conditionally initialized: either a single flag-guarded drop, or a
// full "open drop" ladder over the individual fields.
431 let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
432 self.conditional_drop(c)
436 self.patch.patch_terminator(bb, TerminatorKind::Goto {
443 /// Return the lvalue and move path for each field of `variant`,
444 /// (the move path is `None` if the field is a rest field).
445 fn move_paths_for_fields(&self,
446 base_lv: &Lvalue<'tcx>,
447 variant_path: MovePathIndex,
448 variant: &'tcx ty::VariantDef,
449 substs: &'tcx Substs<'tcx>)
450 -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
452 variant.fields.iter().enumerate().map(|(i, f)| {
// Find the child move path whose projection is field `i`, if any.
454 super::move_path_children_matching(self.move_data(), variant_path, |p| {
457 elem: ProjectionElem::Field(idx, _), ..
458 } => idx.index() == i,
// Normalize the declared field type in the current environment before
// projecting the lvalue.
464 self.tcx.normalize_associated_type_in_env(
465 &f.ty(self.tcx, substs),
468 (base_lv.clone().field(Field::new(i), field_ty), subpath)
472 /// Create one-half of the drop ladder for a list of fields, and return
473 /// the list of steps in it in reverse order.
475 /// `unwind_ladder` is such a list of steps in reverse order,
476 /// which is called instead of the next step if the drop unwinds
477 /// (the first field is never reached). If it is `None`, all
478 /// unwind targets are left blank.
479 fn drop_halfladder<'a>(&mut self,
480 c: &DropCtxt<'a, 'tcx>,
481 unwind_ladder: Option<Vec<BasicBlock>>,
483 fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
487 let mut unwind_succ = if is_cleanup {
493 let mut succ = self.new_block(
494 c, c.is_cleanup, TerminatorKind::Goto { target: succ }
497 // Always clear the "master" drop flag at the bottom of the
498 // ladder. This is needed because the "master" drop flag
499 // protects the ADT's discriminant, which is invalidated
500 // after the ADT is dropped.
502 Location { block: succ, statement_index: 0 },
504 DropFlagState::Absent
// Build the ladder bottom-up (fields iterated in reverse): tracked
// fields get a fully elaborated drop step; untracked rest-fields get a
// complete drop guarded by the shared primary flag.
507 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
508 succ = if let Some(path) = path {
509 debug!("drop_ladder: for std field {} ({:?})", i, lv);
511 self.elaborated_drop_block(&DropCtxt {
512 source_info: c.source_info,
513 is_cleanup: is_cleanup,
514 init_data: c.init_data,
521 debug!("drop_ladder: for rest field {} ({:?})", i, lv);
523 self.complete_drop(&DropCtxt {
524 source_info: c.source_info,
525 is_cleanup: is_cleanup,
526 init_data: c.init_data,
534 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
539 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
541 /// For example, with 3 fields, the drop ladder is
544 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
546 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
548 /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
550 /// ELAB(drop location.1 [target=.c2])
552 /// ELAB(drop location.2 [target=`c.unwind])
553 fn drop_ladder<'a>(&mut self,
554 c: &DropCtxt<'a, 'tcx>,
555 fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
558 debug!("drop_ladder({:?}, {:?})", c, fields);
// Only fields whose types actually need drop participate in the ladder.
560 let mut fields = fields;
561 fields.retain(|&(ref lvalue, _)| {
562 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
563 self.tcx.type_needs_drop_given_env(ty, self.param_env())
566 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind half first (unless already in a cleanup block), then
// the normal half, whose steps unwind into the matching unwind steps.
568 let unwind_ladder = if c.is_cleanup {
571 Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
574 self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
575 .last().cloned().unwrap_or(c.succ)
// Open drop for a tuple (or closure upvars): ladder over the elements,
// pairing each element lvalue with its child move path, if tracked.
578 fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
581 debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
583 let fields = tys.iter().enumerate().map(|(i, &ty)| {
584 (c.lvalue.clone().field(Field::new(i), ty),
585 super::move_path_children_matching(
586 self.move_data(), c.path, |proj| match proj {
588 elem: ProjectionElem::Field(f, _), ..
595 self.drop_ladder(c, fields)
// Open drop for Box<T>: drop the interior (`*box`), then free the box
// allocation on both the success and unwind continuations.
598 fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
601 debug!("open_drop_for_box({:?}, {:?})", c, ty);
603 let interior_path = super::move_path_children_matching(
604 self.move_data(), c.path, |proj| match proj {
605 &Projection { elem: ProjectionElem::Deref, .. } => true,
609 let interior = c.lvalue.clone().deref();
610 let inner_c = DropCtxt {
612 unwind: c.unwind.map(|u| {
613 self.box_free_block(c, ty, u, true)
615 succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
620 self.elaborated_drop_block(&inner_c)
// Build the drop for a single enum variant: if the variant has its own
// (downcast) move path, ladder over its fields; otherwise fall back to a
// shared complete-drop block for the whole enum.
623 fn open_drop_for_variant<'a>(&mut self,
624 c: &DropCtxt<'a, 'tcx>,
625 drop_block: &mut Option<BasicBlock>,
626 adt: &'tcx ty::AdtDef,
627 substs: &'tcx Substs<'tcx>,
628 variant_index: usize)
631 let subpath = super::move_path_children_matching(
632 self.move_data(), c.path, |proj| match proj {
634 elem: ProjectionElem::Downcast(_, idx), ..
635 } => idx == variant_index,
639 if let Some(variant_path) = subpath {
640 let base_lv = c.lvalue.clone().elem(
641 ProjectionElem::Downcast(adt, variant_index)
643 let fields = self.move_paths_for_fields(
646 &adt.variants[variant_index],
648 self.drop_ladder(c, fields)
650 // variant not found - drop the entire enum
// Memoize the fallback block so multiple variants share one copy.
651 if let None = *drop_block {
652 *drop_block = Some(self.complete_drop(c, true));
654 return drop_block.unwrap();
// Open drop for an ADT: single-variant types ladder directly over the
// fields; multi-variant enums read the discriminant into a temp and
// `SwitchInt` to the per-variant drop blocks.
658 fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
659 adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
661 debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
663 let mut drop_block = None;
665 match adt.variants.len() {
667 let fields = self.move_paths_for_fields(
673 self.drop_ladder(c, fields)
676 let mut values = Vec::with_capacity(adt.variants.len());
677 let mut blocks = Vec::with_capacity(adt.variants.len() + 1);
678 for (idx, variant) in adt.variants.iter().enumerate() {
679 let discr = ConstInt::new_inttype(variant.disr_val, adt.discr_ty,
680 self.tcx.sess.target.uint_type,
681 self.tcx.sess.target.int_type).unwrap();
683 blocks.push(self.open_drop_for_variant(c, &mut drop_block, adt, substs, idx));
685 // If there are multiple variants, then if something
686 // is present within the enum the discriminant, tracked
687 // by the rest path, must be initialized.
689 // Additionally, we do not want to switch on the
690 // discriminant after it is free-ed, because that
691 // way lies only trouble.
692 let discr_ty = adt.discr_ty.to_ty(self.tcx);
693 let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
694 let switch_block = self.patch.new_block(BasicBlockData {
697 source_info: c.source_info,
698 kind: StatementKind::Assign(discr.clone(),
699 Rvalue::Discriminant(c.lvalue.clone()))
702 terminator: Some(Terminator {
703 source_info: c.source_info,
704 kind: TerminatorKind::SwitchInt {
705 discr: Operand::Consume(discr),
707 values: From::from(values),
710 // targets: variant_drops
713 is_cleanup: c.is_cleanup,
// Guard the whole switch behind the primary drop flag.
715 self.drop_flag_test_block(c, switch_block)
720 /// The slow-path - create an "open", elaborated drop for a type
721 /// which is moved-out-of only partially, and patch `bb` to a jump
722 /// to it. This must not be called on ADTs with a destructor,
723 /// as these can't be moved-out-of, except for `Box<T>`, which is
726 /// This creates a "drop ladder" that drops the needed fields of the
727 /// ADT, both in the success case or if one of the destructors fail.
728 fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
729 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
// Dispatch on the dropped type; only aggregate-like types can be
// open-dropped, anything else is a compiler bug.
731 ty::TyClosure(def_id, substs) => {
732 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
733 self.open_drop_for_tuple(c, &tys)
735 ty::TyTuple(tys, _) => {
736 self.open_drop_for_tuple(c, tys)
738 ty::TyAdt(def, _) if def.is_box() => {
739 self.open_drop_for_box(c, ty.boxed_ty())
741 ty::TyAdt(def, substs) => {
742 self.open_drop_for_adt(c, def, substs)
744 _ => bug!("open drop from non-ADT `{:?}`", ty)
748 /// Return a basic block that drop an lvalue using the context
749 /// and path in `c`. If `update_drop_flag` is true, also
753 /// if(update_drop_flag) FLAG(c.path) = false
755 fn complete_drop<'a>(
757 c: &DropCtxt<'a, 'tcx>,
758 update_drop_flag: bool)
761 debug!("complete_drop({:?},{:?})", c, update_drop_flag);
763 let drop_block = self.drop_block(c);
// Clear the drop flag at the head of the drop block, then guard the
// whole thing behind a flag test.
764 if update_drop_flag {
766 Location { block: drop_block, statement_index: 0 },
768 DropFlagState::Absent
772 self.drop_flag_test_block(c, drop_block)
775 /// Create a simple conditional drop.
778 /// FLAGS(c.lv) = false
780 fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
783 debug!("conditional_drop({:?})", c);
784 let drop_bb = self.drop_block(c);
785 self.drop_flags_for_drop(c, drop_bb);
787 self.drop_flag_test_block(c, drop_bb)
// Create a fresh block with terminator `k`, inheriting `c`'s source info
// and the given cleanup-ness.
790 fn new_block<'a>(&mut self,
791 c: &DropCtxt<'a, 'tcx>,
793 k: TerminatorKind<'tcx>)
796 self.patch.new_block(BasicBlockData {
798 terminator: Some(Terminator {
799 source_info: c.source_info, kind: k
801 is_cleanup: is_cleanup
// Build a raw drop block for `c`, then recursively elaborate the drop
// terminator it contains.
805 fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
806 debug!("elaborated_drop_block({:?})", c);
807 let blk = self.drop_block(c);
808 self.elaborate_drop(c, blk);
// Convenience wrapper: test the flag, falling through to `c.succ`.
812 fn drop_flag_test_block<'a>(&mut self,
813 c: &DropCtxt<'a, 'tcx>,
816 self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
// Guard `on_set` behind the drop flag for `c.path` — unless the
// dataflow answer is statically known, in which case jump directly.
819 fn drop_flag_test_block_with_succ<'a>(&mut self,
820 c: &DropCtxt<'a, 'tcx>,
823 on_unset: BasicBlock)
826 let (maybe_live, maybe_dead) = c.init_data.state(c.path);
827 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
828 c, is_cleanup, on_set, (maybe_live, maybe_dead));
830 match (maybe_live, maybe_dead) {
831 (false, _) => on_unset,
832 (true, false) => on_set,
834 let flag = self.drop_flag(c.path).unwrap();
835 let boolty = self.tcx.types.bool;
836 self.new_block(c, is_cleanup, TerminatorKind::SwitchInt {
837 discr: Operand::Consume(flag),
839 values: BOOL_SWITCH_TRUE.clone(),
840 targets: vec![on_set, on_unset],
// A block containing only `Drop(c.lvalue)`.
846 fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
847 self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
848 location: c.lvalue.clone(),
// Flag-guarded `box_free` call block: skip the free entirely when the
// drop flag says the box is not initialized.
854 fn box_free_block<'a>(
856 c: &DropCtxt<'a, 'tcx>,
861 let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
862 self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
// Build the raw call to the `box_free` lang item, clearing the drop
// flag (if one exists) before the call.
865 fn unelaborated_free_block<'a>(
867 c: &DropCtxt<'a, 'tcx>,
872 let mut statements = vec![];
873 if let Some(&flag) = self.drop_flags.get(&c.path) {
874 statements.push(Statement {
875 source_info: c.source_info,
876 kind: StatementKind::Assign(
878 self.constant_bool(c.source_info.span, false)
// `box_free` returns unit; its result goes into a throwaway temp.
884 let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
885 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
886 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
887 let fty = tcx.item_type(free_func).subst(tcx, substs);
889 self.patch.new_block(BasicBlockData {
890 statements: statements,
891 terminator: Some(Terminator {
892 source_info: c.source_info, kind: TerminatorKind::Call {
893 func: Operand::Constant(Constant {
894 span: c.source_info.span,
896 literal: Literal::Item {
901 args: vec![Operand::Consume(c.lvalue.clone())],
902 destination: Some((unit_temp, target)),
906 is_cleanup: is_cleanup
910 fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
911 // if we have a destructor, we must *not* split the drop.
913 // dataflow can create unneeded children in some cases
914 // - be sure to ignore them.
916 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
919 ty::TyAdt(def, _) => {
920 if def.has_dtor() && !def.is_box() {
// Dataflow gave us children for a dtor type that can't be moved
// out of — warn loudly rather than miscompile.
921 self.tcx.sess.span_warn(
923 &format!("dataflow bug??? moving out of type with dtor {:?}",
// Rvalue for a literal `bool` constant at `span`.
934 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
935 Rvalue::Use(Operand::Constant(Constant {
937 ty: self.tcx.types.bool,
938 literal: Literal::Value { value: ConstVal::Bool(val) }
// Queue `flag = val` at `loc`; a no-op for paths without a drop flag.
942 fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
943 if let Some(&flag) = self.drop_flags.get(&path) {
944 let span = self.patch.source_info_for_location(self.mir, loc).span;
945 let val = self.constant_bool(span, val.value());
946 self.patch.add_assign(loc, Lvalue::Local(flag), val);
// Initialize every drop flag to `false` at function entry.
950 fn drop_flags_on_init(&mut self) {
951 let loc = Location { block: START_BLOCK, statement_index: 0 };
952 let span = self.patch.source_info_for_location(self.mir, loc).span;
953 let false_ = self.constant_bool(span, false);
954 for flag in self.drop_flags.values() {
955 self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());
// For calls with a cleanup edge, set the return lvalue's flags at the
// head of the success target (the call initializes it only there).
959 fn drop_flags_for_fn_rets(&mut self) {
960 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
961 if let TerminatorKind::Call {
962 destination: Some((ref lv, tgt)), cleanup: Some(_), ..
963 } = data.terminator().kind {
964 assert!(!self.patch.is_patched(bb));
966 let loc = Location { block: tgt, statement_index: 0 };
967 let path = self.move_data().rev_lookup.find(lv);
968 on_lookup_result_bits(
969 self.tcx, self.mir, self.move_data(), path,
970 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Mark incoming function arguments as initialized at entry.
976 fn drop_flags_for_args(&mut self) {
977 let loc = Location { block: START_BLOCK, statement_index: 0 };
978 super::drop_flag_effects_for_function_entry(
979 self.tcx, self.mir, self.env, |path, ds| {
980 self.set_drop_flag(loc, path, ds);
// Emit drop-flag updates for every statement and terminator of the
// *pre-existing* blocks that moves from or initializes a tracked path.
985 fn drop_flags_for_locs(&mut self) {
986 // We intentionally iterate only over the *old* basic blocks.
988 // Basic blocks created by drop elaboration update their
989 // drop flags by themselves, to avoid the drop flags being
990 // clobbered before they are read.
992 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
993 debug!("drop_flags_for_locs({:?})", data);
// `i == statements.len()` denotes the terminator position.
994 for i in 0..(data.statements.len()+1) {
995 debug!("drop_flag_for_locs: stmt {}", i);
996 let mut allow_initializations = true;
997 if i == data.statements.len() {
998 match data.terminator().kind {
999 TerminatorKind::Drop { .. } => {
1000 // drop elaboration should handle that by itself
1003 TerminatorKind::DropAndReplace { .. } => {
1004 // this contains the move of the source and
1005 // the initialization of the destination. We
1006 // only want the former - the latter is handled
1007 // by the elaboration code and must be done
1008 // *after* the destination is dropped.
1009 assert!(self.patch.is_patched(bb));
1010 allow_initializations = false;
1013 assert!(!self.patch.is_patched(bb));
1017 let loc = Location { block: bb, statement_index: i };
1018 super::drop_flag_effects_for_location(
1019 self.tcx, self.mir, self.env, loc, |path, ds| {
// When initializations are suppressed (DropAndReplace), only the
// flag-clearing (Absent) effects are emitted here.
1020 if ds == DropFlagState::Absent || allow_initializations {
1021 self.set_drop_flag(loc, path, ds)
1027 // There may be a critical edge after this call,
1028 // so mark the return as initialized *before* the
1030 if let TerminatorKind::Call {
1031 destination: Some((ref lv, _)), cleanup: None, ..
1032 } = data.terminator().kind {
1033 assert!(!self.patch.is_patched(bb));
1035 let loc = Location { block: bb, statement_index: data.statements.len() };
1036 let path = self.move_data().rev_lookup.find(lv);
1037 on_lookup_result_bits(
1038 self.tcx, self.mir, self.move_data(), path,
1039 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Clear the flags of all children of `c.path` at `bb`'s terminator —
// used after an unconditional drop has been emitted there.
1045 fn drop_flags_for_drop<'a>(&mut self,
1046 c: &DropCtxt<'a, 'tcx>,
1049 let loc = self.patch.terminator_loc(self.mir, bb);
1050 on_all_children_bits(
1051 self.tcx, self.mir, self.move_data(), c.path,
1052 |child| self.set_drop_flag(loc, child, DropFlagState::Absent)