1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use super::gather_moves::{HasMoveData, MoveData, MovePathIndex, LookupResult};
12 use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
13 use super::dataflow::{DataflowResults};
14 use super::{drop_flag_effects_for_location, on_all_children_bits};
15 use super::on_lookup_result_bits;
16 use super::{DropFlagState, MoveDataParamEnv};
17 use super::patch::MirPatch;
18 use rustc::ty::{self, Ty, TyCtxt};
19 use rustc::ty::subst::{Kind, Subst, Substs};
20 use rustc::ty::util::IntTypeExt;
22 use rustc::mir::transform::{Pass, MirPass, MirSource};
23 use rustc::middle::const_val::{ConstVal, ConstInt};
24 use rustc::middle::lang_items;
25 use rustc::util::nodemap::FxHashMap;
26 use rustc_data_structures::indexed_set::IdxSetBuf;
27 use rustc_data_structures::indexed_vec::Idx;
// Zero-sized pass marker: drop elaboration rewrites `Drop`/`DropAndReplace`
// MIR terminators into explicit, drop-flag-guarded control flow.
34 pub struct ElaborateDrops;
// NOTE(review): extraction damage throughout this block — the embedded original
// line numbers (36, 37, 38, 40, 42, ...) show gaps, i.e. lines of the original
// file (a `match src` header, closing braces, struct-literal fields, `let`
// bindings for `flow_inits`/`flow_uninits`) are missing. Code is left
// byte-identical below; comments only.
36 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
// Pass entry point: gather move data, run init/uninit dataflow, then build and
// apply a MirPatch that elaborates every drop in `mir`.
37 fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
38 src: MirSource, mir: &mut Mir<'tcx>)
40 debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
// Only function sources are elaborated; the other arms of the (missing)
// `match src` presumably bail out early — TODO confirm against upstream.
42 MirSource::Fn(..) => {},
45 let id = src.item_id();
46 let param_env = ty::ParameterEnvironment::for_item(tcx, id);
// NOTE(review): "¶m_env" is mojibake — the original text "&param_env" was
// mis-decoded ("&para" collapsed into the pilcrow entity). Restore the "&".
47 let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
48 let elaborate_patch = {
50 let env = MoveDataParamEnv {
// Two dataflow analyses over the same move paths: maybe-initialized and
// maybe-uninitialized lvalues (their `let` bindings are among the lost lines).
55 super::do_dataflow(tcx, mir, id, &[],
56 MaybeInitializedLvals::new(tcx, mir, &env),
57 |bd, p| &bd.move_data().move_paths[p]);
59 super::do_dataflow(tcx, mir, id, &[],
60 MaybeUninitializedLvals::new(tcx, mir, &env),
61 |bd, p| &bd.move_data().move_paths[p]);
// Remainder of an `ElaborateDropsCtxt { .. }` struct literal; `tcx`/`mir`/`env`
// fields are among the missing lines.
67 flow_inits: flow_inits,
68 flow_uninits: flow_uninits,
69 drop_flags: FxHashMap(),
70 patch: MirPatch::new(mir),
// Apply the accumulated patch to the MIR body in place.
73 elaborate_patch.apply(mir);
// Marker-trait impl registering ElaborateDrops with the pass infrastructure.
77 impl Pass for ElaborateDrops {}
// Per-location snapshot of the two dataflow results: which move paths are
// maybe-live (maybe-initialized) and maybe-dead (maybe-uninitialized).
// NOTE(review): the struct's closing brace is among the lines lost in extraction.
79 struct InitializationData {
80 live: IdxSetBuf<MovePathIndex>,
81 dead: IdxSetBuf<MovePathIndex>
84 impl InitializationData {
// Advance the snapshot across the statement at `loc`, flipping live/dead bits
// for every move path whose drop-flag state changes there.
// NOTE(review): lines are missing inside (the `loc` parameter, debug! arguments,
// `self.live.add(...)` in the Present arm, and closing braces).
85 fn apply_location<'a,'tcx>(&mut self,
86 tcx: TyCtxt<'a, 'tcx, 'tcx>,
88 env: &MoveDataParamEnv<'tcx>,
91 drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
92 debug!("at location {:?}: setting {:?} to {:?}",
95 DropFlagState::Present => {
97 self.dead.remove(&path);
99 DropFlagState::Absent => {
100 self.dead.add(&path);
101 self.live.remove(&path);
// Returns (maybe_live, maybe_dead) for `path` in this snapshot.
107 fn state(&self, path: MovePathIndex) -> (bool, bool) {
108 (self.live.contains(&path), self.dead.contains(&path))
// Debug impl; the body (presumably an empty/placeholder formatter, since `_f`
// is unused) is among the missing lines — TODO confirm against upstream.
112 impl fmt::Debug for InitializationData {
113 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// Working state for the pass: tcx, the MIR body plus its move data and dataflow
// results, a map from move path to its allocated boolean drop-flag local, and
// the MirPatch accumulating all edits. (The `mir` field line is missing.)
118 struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
119 tcx: TyCtxt<'a, 'tcx, 'tcx>,
121 env: &'a MoveDataParamEnv<'tcx>,
122 flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
123 flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
124 drop_flags: FxHashMap<MovePathIndex, Local>,
125 patch: MirPatch<'tcx>,
// Context for elaborating a single drop: where it came from (`source_info`),
// the init-data snapshot at that point, the lvalue being dropped, and the
// unwind target. Several fields (`is_cleanup`, `path`, `succ`) are among the
// lines lost in extraction.
128 #[derive(Copy, Clone, Debug)]
129 struct DropCtxt<'a, 'tcx: 'a> {
130 source_info: SourceInfo,
133 init_data: &'a InitializationData,
135 lvalue: &'a Lvalue<'tcx>,
138 unwind: Option<BasicBlock>
// NOTE(review): this entire impl suffers the same extraction damage as the rest
// of the file — embedded original line numbers show gaps where match arms,
// struct-literal fields, and closing braces were dropped, and the impl's own
// closing brace lies past the end of the visible chunk. Code is byte-identical;
// comments only.
141 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// Convenience accessors into the shared MoveDataParamEnv.
142 fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
143 fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
// Reconstruct the init/uninit snapshot at `loc`: start from the block-entry
// dataflow sets, then replay the effects of the statements before `loc`.
147 fn initialization_data_at(&self, loc: Location) -> InitializationData {
148 let mut data = InitializationData {
149 live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
151 dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
154 for stmt in 0..loc.statement_index {
155 data.apply_location(self.tcx, self.mir, self.env,
156 Location { block: loc.block, statement_index: stmt });
// Lazily allocate a bool temp as the drop flag for `index`.
// NOTE(review): the `let tcx = self.tcx;` binding used by the closure is one
// of the missing lines.
161 fn create_drop_flag(&mut self, index: MovePathIndex) {
163 let patch = &mut self.patch;
164 self.drop_flags.entry(index).or_insert_with(|| {
165 patch.new_temp(tcx.types.bool)
// Look up the drop-flag local for `index`, if one was allocated.
169 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
170 self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))
173 /// create a patch that elaborates all drops in the input
// Driver: collect flags, elaborate drops, then emit all flag maintenance.
175 fn elaborate(mut self) -> MirPatch<'tcx>
177 self.collect_drop_flags();
179 self.elaborate_drops();
181 self.drop_flags_on_init();
182 self.drop_flags_for_fn_rets();
183 self.drop_flags_for_args();
184 self.drop_flags_for_locs();
// True if the type at `path` needs drop glue in the current param env.
189 fn path_needs_drop(&self, path: MovePathIndex) -> bool
191 let lvalue = &self.move_data().move_paths[path].lvalue;
192 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
193 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
195 self.tcx.type_needs_drop_given_env(ty, self.param_env())
// Walk every Drop/DropAndReplace terminator; allocate a drop flag for each
// dropped child path that is both maybe-live and maybe-dead (i.e. whose state
// is statically unknown and must be tracked at runtime).
198 fn collect_drop_flags(&mut self)
200 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
201 let terminator = data.terminator();
202 let location = match terminator.kind {
203 TerminatorKind::Drop { ref location, .. } |
204 TerminatorKind::DropAndReplace { ref location, .. } => location,
208 let init_data = self.initialization_data_at(Location {
210 statement_index: data.statements.len()
213 let path = self.move_data().rev_lookup.find(location);
214 debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
217 let path = match path {
218 LookupResult::Exact(e) => e,
219 LookupResult::Parent(None) => continue,
// Untracked child of a tracked parent: legal only if the parent is
// definitely initialized; otherwise this is a compiler bug.
220 LookupResult::Parent(Some(parent)) => {
221 let (_maybe_live, maybe_dead) = init_data.state(parent);
223 span_bug!(terminator.source_info.span,
224 "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
231 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
232 if self.path_needs_drop(child) {
233 let (maybe_live, maybe_dead) = init_data.state(child);
234 debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
235 child, location, path, (maybe_live, maybe_dead));
236 if maybe_live && maybe_dead {
237 self.create_drop_flag(child)
// Rewrite every Drop/DropAndReplace terminator via elaborate_drop /
// elaborate_replace. (Missing lines include DropCtxt fields and match closers.)
244 fn elaborate_drops(&mut self)
246 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
247 let loc = Location { block: bb, statement_index: data.statements.len() };
248 let terminator = data.terminator();
250 let resume_block = self.patch.resume_block();
251 match terminator.kind {
252 TerminatorKind::Drop { ref location, target, unwind } => {
253 let init_data = self.initialization_data_at(loc);
254 match self.move_data().rev_lookup.find(location) {
255 LookupResult::Exact(path) => {
256 self.elaborate_drop(&DropCtxt {
257 source_info: terminator.source_info,
258 is_cleanup: data.is_cleanup,
259 init_data: &init_data,
// Cleanup blocks have no unwind edge of their own; otherwise default
// the unwind target to the resume block.
263 unwind: if data.is_cleanup {
266 Some(Option::unwrap_or(unwind, resume_block))
270 LookupResult::Parent(..) => {
271 span_bug!(terminator.source_info.span,
272 "drop of untracked value {:?}", bb);
276 TerminatorKind::DropAndReplace { ref location, ref value,
279 assert!(!data.is_cleanup);
281 self.elaborate_replace(
292 /// Elaborate a MIR `replace` terminator. This instruction
293 /// is not directly handled by translation, and therefore
294 /// must be desugared.
296 /// The desugaring drops the location if needed, and then writes
297 /// the value (including setting the drop flag) over it in *both* arms.
299 /// The `replace` terminator can also be called on lvalues that
300 /// are not tracked by elaboration (for example,
301 /// `replace x[i] <- tmp0`). The borrow checker requires that
302 /// these locations are initialized before the assignment,
303 /// so we just generate an unconditional drop.
304 fn elaborate_replace(
307 location: &Lvalue<'tcx>,
308 value: &Operand<'tcx>,
310 unwind: Option<BasicBlock>)
313 let data = &self.mir[bb];
314 let terminator = data.terminator();
// The assignment performed on both the normal and unwind paths.
316 let assign = Statement {
317 kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
318 source_info: terminator.source_info
// Fresh landing blocks: assign, then goto the original unwind/target.
321 let unwind = unwind.unwrap_or(self.patch.resume_block());
322 let unwind = self.patch.new_block(BasicBlockData {
323 statements: vec![assign.clone()],
324 terminator: Some(Terminator {
325 kind: TerminatorKind::Goto { target: unwind },
331 let target = self.patch.new_block(BasicBlockData {
332 statements: vec![assign],
333 terminator: Some(Terminator {
334 kind: TerminatorKind::Goto { target: target },
337 is_cleanup: data.is_cleanup,
340 match self.move_data().rev_lookup.find(location) {
// Tracked lvalue: elaborate the drop, then mark all children initialized
// at the head of both continuation blocks.
341 LookupResult::Exact(path) => {
342 debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
343 let init_data = self.initialization_data_at(loc);
345 self.elaborate_drop(&DropCtxt {
346 source_info: terminator.source_info,
347 is_cleanup: data.is_cleanup,
348 init_data: &init_data,
354 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
355 self.set_drop_flag(Location { block: target, statement_index: 0 },
356 child, DropFlagState::Present);
357 self.set_drop_flag(Location { block: unwind, statement_index: 0 },
358 child, DropFlagState::Present);
361 LookupResult::Parent(parent) => {
362 // drop and replace behind a pointer/array/whatever. The location
363 // must be initialized.
364 debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
365 self.patch.patch_terminator(bb, TerminatorKind::Drop {
366 location: location.clone(),
374 /// This elaborates a single drop instruction, located at `bb`, and
377 /// The elaborated drop checks the drop flags to only drop what
380 /// In addition, the relevant drop flags also need to be cleared
381 /// to avoid double-drops. However, in the middle of a complex
382 /// drop, one must avoid clearing some of the flags before they
383 /// are read, as that would cause a memory leak.
385 /// In particular, when dropping an ADT, multiple fields may be
386 /// joined together under the `rest` subpath. They are all controlled
387 /// by the primary drop flag, but only the last rest-field dropped
388 /// should clear it (and it must also not clear anything else).
390 /// FIXME: I think we should just control the flags externally
391 /// and then we do not need this machinery.
392 fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
393 debug!("elaborate_drop({:?})", c);
// Classify the drop by scanning children: all-dead, all-live, or mixed.
395 let mut some_live = false;
396 let mut some_dead = false;
397 let mut children_count = 0;
398 on_all_children_bits(
399 self.tcx, self.mir, self.move_data(),
401 if self.path_needs_drop(child) {
402 let (live, dead) = c.init_data.state(child);
403 debug!("elaborate_drop: state({:?}) = {:?}",
404 child, (live, dead));
411 debug!("elaborate_drop({:?}): live - {:?}", c,
412 (some_live, some_dead));
413 match (some_live, some_dead) {
414 (false, false) | (false, true) => {
415 // dead drop - patch it out
416 self.patch.patch_terminator(bb, TerminatorKind::Goto {
421 // static drop - just set the flag
422 self.patch.patch_terminator(bb, TerminatorKind::Drop {
423 location: c.lvalue.clone(),
427 self.drop_flags_for_drop(c, bb);
// Mixed state: single child (or must-complete) gets a conditional drop,
// otherwise an open drop ladder (the `open_drop` arm is among the lost lines).
431 let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
432 self.conditional_drop(c)
436 self.patch.patch_terminator(bb, TerminatorKind::Goto {
443 /// Return the lvalue and move path for each field of `variant`,
444 /// (the move path is `None` if the field is a rest field).
445 fn move_paths_for_fields(&self,
446 base_lv: &Lvalue<'tcx>,
447 variant_path: MovePathIndex,
448 variant: &'tcx ty::VariantDef,
449 substs: &'tcx Substs<'tcx>)
450 -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
452 variant.fields.iter().enumerate().map(|(i, f)| {
454 super::move_path_children_matching(self.move_data(), variant_path, |p| {
457 elem: ProjectionElem::Field(idx, _), ..
458 } => idx.index() == i,
// Normalize the field type in the current environment before projecting.
464 self.tcx.normalize_associated_type_in_env(
465 &f.ty(self.tcx, substs),
468 (base_lv.clone().field(Field::new(i), field_ty), subpath)
472 /// Create one-half of the drop ladder for a list of fields, and return
473 /// the list of steps in it in reverse order.
475 /// `unwind_ladder` is such a list of steps in reverse order,
476 /// which is called instead of the next step if the drop unwinds
477 /// (the first field is never reached). If it is `None`, all
478 /// unwind targets are left blank.
479 fn drop_halfladder<'a>(&mut self,
480 c: &DropCtxt<'a, 'tcx>,
481 unwind_ladder: Option<Vec<BasicBlock>>,
483 fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
487 let mut unwind_succ = if is_cleanup {
493 let mut succ = self.new_block(
494 c, c.is_cleanup, TerminatorKind::Goto { target: succ }
497 // Always clear the "master" drop flag at the bottom of the
498 // ladder. This is needed because the "master" drop flag
499 // protects the ADT's discriminant, which is invalidated
500 // after the ADT is dropped.
502 Location { block: succ, statement_index: 0 },
504 DropFlagState::Absent
// Build the ladder bottom-up: tracked fields get full elaborated drops,
// rest fields get a complete_drop (flag-clearing lines are among the missing).
507 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
508 succ = if let Some(path) = path {
509 debug!("drop_ladder: for std field {} ({:?})", i, lv);
511 self.elaborated_drop_block(&DropCtxt {
512 source_info: c.source_info,
513 is_cleanup: is_cleanup,
514 init_data: c.init_data,
521 debug!("drop_ladder: for rest field {} ({:?})", i, lv);
523 self.complete_drop(&DropCtxt {
524 source_info: c.source_info,
525 is_cleanup: is_cleanup,
526 init_data: c.init_data,
534 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
539 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
541 /// For example, with 3 fields, the drop ladder is
544 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
546 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
548 /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
550 /// ELAB(drop location.1 [target=.c2])
552 /// ELAB(drop location.2 [target=`c.unwind])
553 fn drop_ladder<'a>(&mut self,
554 c: &DropCtxt<'a, 'tcx>,
555 fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
558 debug!("drop_ladder({:?}, {:?})", c, fields);
// Drop only the fields that actually need dropping.
560 let mut fields = fields;
561 fields.retain(|&(ref lvalue, _)| {
562 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
563 self.tcx.type_needs_drop_given_env(ty, self.param_env())
566 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Cleanup code gets no unwind half-ladder of its own.
568 let unwind_ladder = if c.is_cleanup {
571 Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
574 self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
575 .last().cloned().unwrap_or(c.succ)
// Open drop for tuples/closures: one ladder entry per element type.
578 fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
581 debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
583 let fields = tys.iter().enumerate().map(|(i, &ty)| {
584 (c.lvalue.clone().field(Field::new(i), ty),
585 super::move_path_children_matching(
586 self.move_data(), c.path, |proj| match proj {
588 elem: ProjectionElem::Field(f, _), ..
595 self.drop_ladder(c, fields)
// Open drop for `Box<T>`: drop the interior (`*lvalue`), then free the box
// on both the success and unwind paths via box_free_block.
598 fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
601 debug!("open_drop_for_box({:?}, {:?})", c, ty);
603 let interior_path = super::move_path_children_matching(
604 self.move_data(), c.path, |proj| match proj {
605 &Projection { elem: ProjectionElem::Deref, .. } => true,
609 let interior = c.lvalue.clone().deref();
610 let inner_c = DropCtxt {
612 unwind: c.unwind.map(|u| {
613 self.box_free_block(c, ty, u, true)
615 succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
620 self.elaborated_drop_block(&inner_c)
// Open drop for ADTs: single-variant types get one ladder; enums get a ladder
// per present variant plus a SwitchInt on the discriminant.
623 fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
624 adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
626 debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
628 match adt.variants.len() {
630 let fields = self.move_paths_for_fields(
636 self.drop_ladder(c, fields)
639 let mut values = Vec::with_capacity(adt.variants.len());
640 let mut blocks = Vec::with_capacity(adt.variants.len());
641 let mut otherwise = None;
642 for (variant_index, variant) in adt.variants.iter().enumerate() {
643 let discr = ConstInt::new_inttype(variant.disr_val, adt.discr_ty,
644 self.tcx.sess.target.uint_type,
645 self.tcx.sess.target.int_type).unwrap();
646 let subpath = super::move_path_children_matching(
647 self.move_data(), c.path, |proj| match proj {
649 elem: ProjectionElem::Downcast(_, idx), ..
650 } => idx == variant_index,
653 if let Some(variant_path) = subpath {
654 let base_lv = c.lvalue.clone().elem(
655 ProjectionElem::Downcast(adt, variant_index)
657 let fields = self.move_paths_for_fields(
660 &adt.variants[variant_index],
663 blocks.push(self.drop_ladder(c, fields));
665 // variant not found - drop the entire enum
666 if let None = otherwise {
667 otherwise = Some(self.complete_drop(c, true));
671 if let Some(block) = otherwise {
676 // If there are multiple variants, then if something
677 // is present within the enum the discriminant, tracked
678 // by the rest path, must be initialized.
680 // Additionally, we do not want to switch on the
681 // discriminant after it is free-ed, because that
682 // way lies only trouble.
683 let discr_ty = adt.discr_ty.to_ty(self.tcx);
684 let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
685 let switch_block = self.patch.new_block(BasicBlockData {
688 source_info: c.source_info,
689 kind: StatementKind::Assign(discr.clone(),
690 Rvalue::Discriminant(c.lvalue.clone()))
693 terminator: Some(Terminator {
694 source_info: c.source_info,
695 kind: TerminatorKind::SwitchInt {
696 discr: Operand::Consume(discr),
698 values: From::from(values),
702 is_cleanup: c.is_cleanup,
704 self.drop_flag_test_block(c, switch_block)
709 /// The slow-path - create an "open", elaborated drop for a type
710 /// which is moved-out-of only partially, and patch `bb` to a jump
711 /// to it. This must not be called on ADTs with a destructor,
712 /// as these can't be moved-out-of, except for `Box<T>`, which is
715 /// This creates a "drop ladder" that drops the needed fields of the
716 /// ADT, both in the success case or if one of the destructors fail.
717 fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
718 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
// Dispatch on the dropped type (the `match ty.sty` header is a lost line).
720 ty::TyClosure(def_id, substs) => {
721 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
722 self.open_drop_for_tuple(c, &tys)
724 ty::TyTuple(tys, _) => {
725 self.open_drop_for_tuple(c, tys)
727 ty::TyAdt(def, _) if def.is_box() => {
728 self.open_drop_for_box(c, ty.boxed_ty())
730 ty::TyAdt(def, substs) => {
731 self.open_drop_for_adt(c, def, substs)
733 _ => bug!("open drop from non-ADT `{:?}`", ty)
737 /// Return a basic block that drop an lvalue using the context
738 /// and path in `c`. If `update_drop_flag` is true, also
742 /// if(update_drop_flag) FLAG(c.path) = false
744 fn complete_drop<'a>(
746 c: &DropCtxt<'a, 'tcx>,
747 update_drop_flag: bool)
750 debug!("complete_drop({:?},{:?})", c, update_drop_flag);
752 let drop_block = self.drop_block(c);
753 if update_drop_flag {
755 Location { block: drop_block, statement_index: 0 },
757 DropFlagState::Absent
761 self.drop_flag_test_block(c, drop_block)
764 /// Create a simple conditional drop.
767 /// FLAGS(c.lv) = false
769 fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
772 debug!("conditional_drop({:?})", c);
773 let drop_bb = self.drop_block(c);
774 self.drop_flags_for_drop(c, drop_bb);
776 self.drop_flag_test_block(c, drop_bb)
// Allocate a fresh block with the given terminator kind, inheriting `c`'s
// source info and the given cleanup-ness.
779 fn new_block<'a>(&mut self,
780 c: &DropCtxt<'a, 'tcx>,
782 k: TerminatorKind<'tcx>)
785 self.patch.new_block(BasicBlockData {
787 terminator: Some(Terminator {
788 source_info: c.source_info, kind: k
790 is_cleanup: is_cleanup
// A drop block whose drop terminator has itself been elaborated.
794 fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
795 debug!("elaborated_drop_block({:?})", c);
796 let blk = self.drop_block(c);
797 self.elaborate_drop(c, blk);
801 fn drop_flag_test_block<'a>(&mut self,
802 c: &DropCtxt<'a, 'tcx>,
805 self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
// Branch on the runtime drop flag when the path's init state is statically
// unknown; statically-known states short-circuit to on_set/on_unset.
808 fn drop_flag_test_block_with_succ<'a>(&mut self,
809 c: &DropCtxt<'a, 'tcx>,
812 on_unset: BasicBlock)
815 let (maybe_live, maybe_dead) = c.init_data.state(c.path);
816 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
817 c, is_cleanup, on_set, (maybe_live, maybe_dead));
819 match (maybe_live, maybe_dead) {
820 (false, _) => on_unset,
821 (true, false) => on_set,
823 let flag = self.drop_flag(c.path).unwrap();
824 let term = TerminatorKind::if_(self.tcx, Operand::Consume(flag), on_set, on_unset);
825 self.new_block(c, is_cleanup, term)
// A fresh block that unconditionally drops `c.lvalue`.
830 fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
831 self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
832 location: c.lvalue.clone(),
// Free block for Box: call `box_free`, guarded by the drop-flag test.
838 fn box_free_block<'a>(
840 c: &DropCtxt<'a, 'tcx>,
845 let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
846 self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
// Build the raw `box_free::<ty>(lvalue)` call block, clearing the drop flag
// first if one exists.
849 fn unelaborated_free_block<'a>(
851 c: &DropCtxt<'a, 'tcx>,
856 let mut statements = vec![];
857 if let Some(&flag) = self.drop_flags.get(&c.path) {
858 statements.push(Statement {
859 source_info: c.source_info,
860 kind: StatementKind::Assign(
862 self.constant_bool(c.source_info.span, false)
// NOTE(review): the `let tcx = self.tcx;` binding used below is a lost line.
868 let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
869 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
870 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
871 let fty = tcx.item_type(free_func).subst(tcx, substs);
873 self.patch.new_block(BasicBlockData {
874 statements: statements,
875 terminator: Some(Terminator {
876 source_info: c.source_info, kind: TerminatorKind::Call {
877 func: Operand::Constant(Constant {
878 span: c.source_info.span,
880 literal: Literal::Item {
885 args: vec![Operand::Consume(c.lvalue.clone())],
886 destination: Some((unit_temp, target)),
890 is_cleanup: is_cleanup
// True when the drop must not be split into per-field drops (type has a
// destructor); also warns on a suspected dataflow bug.
894 fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
895 // if we have a destuctor, we must *not* split the drop.
897 // dataflow can create unneeded children in some cases
898 // - be sure to ignore them.
900 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
903 ty::TyAdt(def, _) => {
904 if def.has_dtor() && !def.is_box() {
905 self.tcx.sess.span_warn(
907 &format!("dataflow bug??? moving out of type with dtor {:?}",
// Build an Rvalue for a bool constant, used for drop-flag assignments.
918 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
919 Rvalue::Use(Operand::Constant(Constant {
921 ty: self.tcx.types.bool,
922 literal: Literal::Value { value: ConstVal::Bool(val) }
// Emit `flag = <val>` at `loc` if `path` has a tracked drop flag.
926 fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
927 if let Some(&flag) = self.drop_flags.get(&path) {
928 let span = self.patch.source_info_for_location(self.mir, loc).span;
929 let val = self.constant_bool(span, val.value());
930 self.patch.add_assign(loc, Lvalue::Local(flag), val);
// Initialize every drop flag to false at function entry.
934 fn drop_flags_on_init(&mut self) {
935 let loc = Location { block: START_BLOCK, statement_index: 0 };
936 let span = self.patch.source_info_for_location(self.mir, loc).span;
937 let false_ = self.constant_bool(span, false);
938 for flag in self.drop_flags.values() {
939 self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());
// For calls with a cleanup edge, mark the return destination initialized at
// the head of the success block (after the critical edge).
943 fn drop_flags_for_fn_rets(&mut self) {
944 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
945 if let TerminatorKind::Call {
946 destination: Some((ref lv, tgt)), cleanup: Some(_), ..
947 } = data.terminator().kind {
948 assert!(!self.patch.is_patched(bb));
950 let loc = Location { block: tgt, statement_index: 0 };
951 let path = self.move_data().rev_lookup.find(lv);
952 on_lookup_result_bits(
953 self.tcx, self.mir, self.move_data(), path,
954 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Mark function arguments initialized at entry.
960 fn drop_flags_for_args(&mut self) {
961 let loc = Location { block: START_BLOCK, statement_index: 0 };
962 super::drop_flag_effects_for_function_entry(
963 self.tcx, self.mir, self.env, |path, ds| {
964 self.set_drop_flag(loc, path, ds);
969 fn drop_flags_for_locs(&mut self) {
970 // We intentionally iterate only over the *old* basic blocks.
972 // Basic blocks created by drop elaboration update their
973 // drop flags by themselves, to avoid the drop flags being
974 // clobbered before they are read.
976 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
977 debug!("drop_flags_for_locs({:?})", data)R;
978 for i in 0..(data.statements.len()+1) {
979 debug!("drop_flag_for_locs: stmt {}", i);
980 let mut allow_initializations = true;
981 if i == data.statements.len() {
982 match data.terminator().kind {
983 TerminatorKind::Drop { .. } => {
984 // drop elaboration should handle that by itself
987 TerminatorKind::DropAndReplace { .. } => {
988 // this contains the move of the source and
989 // the initialization of the destination. We
990 // only want the former - the latter is handled
991 // by the elaboration code and must be done
992 // *after* the destination is dropped.
993 assert!(self.patch.is_patched(bb));
994 allow_initializations = false;
997 assert!(!self.patch.is_patched(bb));
1001 let loc = Location { block: bb, statement_index: i };
1002 super::drop_flag_effects_for_location(
1003 self.tcx, self.mir, self.env, loc, |path, ds| {
1004 if ds == DropFlagState::Absent || allow_initializations {
1005 self.set_drop_flag(loc, path, ds)
1011 // There may be a critical edge after this call,
1012 // so mark the return as initialized *before* the
1014 if let TerminatorKind::Call {
1015 destination: Some((ref lv, _)), cleanup: None, ..
1016 } = data.terminator().kind {
1017 assert!(!self.patch.is_patched(bb));
1019 let loc = Location { block: bb, statement_index: data.statements.len() };
1020 let path = self.move_data().rev_lookup.find(lv);
1021 on_lookup_result_bits(
1022 self.tcx, self.mir, self.move_data(), path,
1023 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Clear the drop flags for `c.path` and all children at the drop terminator.
1029 fn drop_flags_for_drop<'a>(&mut self,
1030 c: &DropCtxt<'a, 'tcx>,
1033 let loc = self.patch.terminator_loc(self.mir, bb);
1034 on_all_children_bits(
1035 self.tcx, self.mir, self.move_data(), c.path,
1036 |child| self.set_drop_flag(loc, child, DropFlagState::Absent)