1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use super::gather_moves::{HasMoveData, MoveData, MovePathIndex, LookupResult};
12 use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
13 use super::dataflow::{DataflowResults};
14 use super::{drop_flag_effects_for_location, on_all_children_bits};
15 use super::on_lookup_result_bits;
16 use super::{DropFlagState, MoveDataParamEnv};
17 use super::patch::MirPatch;
18 use rustc::ty::{self, Ty, TyCtxt};
19 use rustc::ty::subst::{Kind, Subst, Substs};
20 use rustc::ty::util::IntTypeExt;
22 use rustc::mir::transform::{Pass, MirPass, MirSource};
23 use rustc::middle::const_val::ConstVal;
24 use rustc::middle::lang_items;
25 use rustc::util::nodemap::FxHashMap;
26 use rustc_data_structures::indexed_set::IdxSetBuf;
27 use rustc_data_structures::indexed_vec::Idx;
// NOTE(review): this file is a damaged extraction — stale line numbers are
// baked into every line and the gaps in them show many original lines
// (braces, match arms, fields) are missing. Comments annotate visible code only.

// Marker type for the drop-elaboration MIR pass. It carries no state; the
// working state lives in ElaborateDropsCtxt (constructed per-run below).
34 pub struct ElaborateDrops;
// Pass entry point: gathers move data for the function's MIR, runs the
// maybe-initialized / maybe-uninitialized dataflow analyses, builds an
// ElaborateDropsCtxt, and applies the resulting MirPatch to the MIR.
// NOTE(review): several lines are missing here (the match body around
// MirSource::Fn, the MoveDataParamEnv field initializers, the ctxt fields
// tcx/mir/env, and the `.elaborate()` call suggested by `elaborate_patch`).
36 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
37 fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
38 src: MirSource, mir: &mut Mir<'tcx>)
40 debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
// Only function bodies get drop elaboration; other sources presumably
// return early (the surrounding match arms are missing from this view).
42 MirSource::Fn(..) => {},
45 let id = src.item_id();
46 let param_env = ty::ParameterEnvironment::for_item(tcx, id);
// NOTE(review): "¶m_env" looks like mojibake for "&param_env"
// (an HTML "&para;" entity corruption) — confirm against upstream.
47 let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
48 let elaborate_patch = {
50 let env = MoveDataParamEnv {
// Forward dataflow: which move paths may be initialized on entry.
55 super::do_dataflow(tcx, mir, id, &[],
56 MaybeInitializedLvals::new(tcx, mir, &env),
57 |bd, p| &bd.move_data().move_paths[p]);
// Dual analysis: which move paths may be uninitialized on entry.
59 super::do_dataflow(tcx, mir, id, &[],
60 MaybeUninitializedLvals::new(tcx, mir, &env),
61 |bd, p| &bd.move_data().move_paths[p]);
67 flow_inits: flow_inits,
68 flow_uninits: flow_uninits,
69 drop_flags: FxHashMap(),
70 patch: MirPatch::new(mir),
// Apply the accumulated edits to the MIR in one step.
73 elaborate_patch.apply(mir);
// Blanket marker-trait impl registering ElaborateDrops as a compiler Pass.
77 impl Pass for ElaborateDrops {}
// Per-location snapshot of drop-flag dataflow state: for each move path,
// whether it may be live (initialized) and/or may be dead (uninitialized).
// Both bits can be set at once — that is the "maybe" case needing a flag.
79 struct InitializationData {
80 live: IdxSetBuf<MovePathIndex>,
81 dead: IdxSetBuf<MovePathIndex>
84 impl InitializationData {
// Advance the live/dead sets across the statement at `loc` by applying
// that statement's drop-flag effects (gen/kill of move paths).
85 fn apply_location<'a,'tcx>(&mut self,
86 tcx: TyCtxt<'a, 'tcx, 'tcx>,
88 env: &MoveDataParamEnv<'tcx>,
91 drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
92 debug!("at location {:?}: setting {:?} to {:?}",
// Present: the path became initialized — it is no longer dead.
// NOTE(review): the matching `self.live.add(&path)` line appears
// to be missing from this extraction.
95 DropFlagState::Present => {
97 self.dead.remove(&path);
// Absent: the path was moved/dropped — dead, not live.
99 DropFlagState::Absent => {
100 self.dead.add(&path);
101 self.live.remove(&path);
// Returns (maybe_live, maybe_dead) for a move path at the snapshot point.
107 fn state(&self, path: MovePathIndex) -> (bool, bool) {
108 (self.live.contains(&path), self.dead.contains(&path))
// Debug is required because DropCtxt derives Debug and holds a reference to
// InitializationData; the body (missing here) apparently writes nothing,
// given the formatter is bound as `_f`.
112 impl fmt::Debug for InitializationData {
113 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// All state for one drop-elaboration run: the dataflow results, the map from
// tracked move path to its drop-flag temporary, and the MirPatch that
// accumulates edits. NOTE(review): a `mir` field line appears to be missing
// (methods below reference `self.mir`).
118 struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
119 tcx: TyCtxt<'a, 'tcx, 'tcx>,
121 env: &'a MoveDataParamEnv<'tcx>,
122 flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
123 flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
// Move paths whose initializedness is dynamic get a bool temp ("drop flag").
124 drop_flags: FxHashMap<MovePathIndex, Local>,
125 patch: MirPatch<'tcx>,
// Context for elaborating one drop: where it is, what lvalue/path it drops,
// and where control goes on success/unwind. NOTE(review): several fields are
// missing from this view (methods reference c.is_cleanup, c.path, c.succ).
128 #[derive(Copy, Clone, Debug)]
129 struct DropCtxt<'a, 'tcx: 'a> {
130 source_info: SourceInfo,
133 init_data: &'a InitializationData,
135 lvalue: &'a Lvalue<'tcx>,
// None means the drop has no unwind edge (e.g. already in a cleanup block).
138 unwind: Option<BasicBlock>
141 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// Convenience accessors into the shared MoveDataParamEnv.
142 fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
143 fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
// Compute the live/dead sets at an arbitrary statement: start from the
// block-entry dataflow sets, then replay the effects of each preceding
// statement in the block up to (not including) `loc.statement_index`.
147 fn initialization_data_at(&self, loc: Location) -> InitializationData {
148 let mut data = InitializationData {
149 live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
151 dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
154 for stmt in 0..loc.statement_index {
155 data.apply_location(self.tcx, self.mir, self.env,
156 Location { block: loc.block, statement_index: stmt });
// Allocate (at most once per path) a bool temporary to act as the dynamic
// drop flag for `index`. `patch` is re-borrowed so the closure doesn't
// capture all of `self` while `self.drop_flags` is borrowed mutably.
161 fn create_drop_flag(&mut self, index: MovePathIndex) {
163 let patch = &mut self.patch;
164 self.drop_flags.entry(index).or_insert_with(|| {
165 patch.new_temp(tcx.types.bool)
// Look up the drop-flag local for a path, as an Lvalue, if one exists.
169 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
170 self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))
173 /// create a patch that elaborates all drops in the input
// Driver: first decide which paths need flags, then rewrite the drop
// terminators, then insert all flag assignments (init, call returns,
// argument entry, and per-location effects). Order matters: flag
// insertion relies on the patched blocks created by elaborate_drops().
175 fn elaborate(mut self) -> MirPatch<'tcx>
177 self.collect_drop_flags();
179 self.elaborate_drops();
181 self.drop_flags_on_init();
182 self.drop_flags_for_fn_rets();
183 self.drop_flags_for_args();
184 self.drop_flags_for_locs();
// True iff the type of the lvalue behind `path` needs drop glue in the
// current parameter environment (types without Drop are skipped entirely).
189 fn path_needs_drop(&self, path: MovePathIndex) -> bool
191 let lvalue = &self.move_data().move_paths[path].lvalue;
192 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
193 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
195 self.tcx.type_needs_drop_given_env(ty, self.param_env())
// Scan every Drop / DropAndReplace terminator and create a drop flag for
// each droppable child path whose state at the drop is "maybe live AND
// maybe dead" — i.e. initializedness can only be known at runtime.
198 fn collect_drop_flags(&mut self)
200 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
201 let terminator = data.terminator();
202 let location = match terminator.kind {
203 TerminatorKind::Drop { ref location, .. } |
204 TerminatorKind::DropAndReplace { ref location, .. } => location,
// Dataflow state at the terminator (= after all statements).
208 let init_data = self.initialization_data_at(Location {
210 statement_index: data.statements.len()
213 let path = self.move_data().rev_lookup.find(location);
214 debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
217 let path = match path {
218 LookupResult::Exact(e) => e,
// Untracked lvalue with no tracked parent: nothing to flag.
219 LookupResult::Parent(None) => continue,
// Untracked child of a tracked parent: legal only if the parent
// may be live — a maybe-dead parent here is a compiler bug.
220 LookupResult::Parent(Some(parent)) => {
221 let (_maybe_live, maybe_dead) = init_data.state(parent);
223 span_bug!(terminator.source_info.span,
224 "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
231 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
232 if self.path_needs_drop(child) {
233 let (maybe_live, maybe_dead) = init_data.state(child);
234 debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
235 child, location, path, (maybe_live, maybe_dead));
236 if maybe_live && maybe_dead {
237 self.create_drop_flag(child)
// Rewrite each Drop terminator via elaborate_drop(), and each
// DropAndReplace via elaborate_replace(). Terminators are at
// statement_index == statements.len() by MIR convention.
244 fn elaborate_drops(&mut self)
246 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
247 let loc = Location { block: bb, statement_index: data.statements.len() };
248 let terminator = data.terminator();
250 let resume_block = self.patch.resume_block();
251 match terminator.kind {
252 TerminatorKind::Drop { ref location, target, unwind } => {
253 let init_data = self.initialization_data_at(loc);
254 match self.move_data().rev_lookup.find(location) {
255 LookupResult::Exact(path) => {
256 self.elaborate_drop(&DropCtxt {
257 source_info: terminator.source_info,
258 is_cleanup: data.is_cleanup,
259 init_data: &init_data,
// Cleanup blocks have no unwind edge of their own;
// otherwise default the unwind target to the resume block.
263 unwind: if data.is_cleanup {
266 Some(Option::unwrap_or(unwind, resume_block))
// A Drop of a value move-data never tracked is a compiler bug.
270 LookupResult::Parent(..) => {
271 span_bug!(terminator.source_info.span,
272 "drop of untracked value {:?}", bb);
276 TerminatorKind::DropAndReplace { ref location, ref value,
// DropAndReplace never appears in cleanup blocks.
279 assert!(!data.is_cleanup);
281 self.elaborate_replace(
292 /// Elaborate a MIR `replace` terminator. This instruction
293 /// is not directly handled by translation, and therefore
294 /// must be desugared.
296 /// The desugaring drops the location if needed, and then writes
297 /// the value (including setting the drop flag) over it in *both* arms.
299 /// The `replace` terminator can also be called on lvalues that
300 /// are not tracked by elaboration (for example,
301 /// `replace x[i] <- tmp0`). The borrow checker requires that
302 /// these locations are initialized before the assignment,
303 /// so we just generate an unconditional drop.
304 fn elaborate_replace(
307 location: &Lvalue<'tcx>,
308 value: &Operand<'tcx>,
310 unwind: Option<BasicBlock>)
313 let data = &self.mir[bb];
314 let terminator = data.terminator();
// The assignment performed after the old value is dropped; cloned into
// both the success and the unwind continuation blocks.
316 let assign = Statement {
317 kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
318 source_info: terminator.source_info
// Unwind continuation: do the assignment, then jump to the original
// unwind target (defaulting to the resume block). Note `unwind` is
// shadowed: it now names the new block, used below for flag setting.
321 let unwind = unwind.unwrap_or(self.patch.resume_block());
322 let unwind = self.patch.new_block(BasicBlockData {
323 statements: vec![assign.clone()],
324 terminator: Some(Terminator {
325 kind: TerminatorKind::Goto { target: unwind },
// Success continuation: same shape, jumping to the original target.
// `target` is likewise shadowed by the new block.
331 let target = self.patch.new_block(BasicBlockData {
332 statements: vec![assign],
333 terminator: Some(Terminator {
334 kind: TerminatorKind::Goto { target: target },
337 is_cleanup: data.is_cleanup,
340 match self.move_data().rev_lookup.find(location) {
// Tracked lvalue: elaborate the drop normally, then mark every child
// path as Present at the head of both continuations (the assignment
// re-initializes the destination on both arms).
341 LookupResult::Exact(path) => {
342 debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
343 let init_data = self.initialization_data_at(loc);
345 self.elaborate_drop(&DropCtxt {
346 source_info: terminator.source_info,
347 is_cleanup: data.is_cleanup,
348 init_data: &init_data,
354 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
355 self.set_drop_flag(Location { block: target, statement_index: 0 },
356 child, DropFlagState::Present);
357 self.set_drop_flag(Location { block: unwind, statement_index: 0 },
358 child, DropFlagState::Present);
361 LookupResult::Parent(parent) => {
362 // drop and replace behind a pointer/array/whatever. The location
363 // must be initialized.
364 debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
365 self.patch.patch_terminator(bb, TerminatorKind::Drop {
366 location: location.clone(),
374 /// This elaborates a single drop instruction, located at `bb`, and
377 /// The elaborated drop checks the drop flags to only drop what
380 /// In addition, the relevant drop flags also need to be cleared
381 /// to avoid double-drops. However, in the middle of a complex
382 /// drop, one must avoid clearing some of the flags before they
383 /// are read, as that would cause a memory leak.
385 /// In particular, when dropping an ADT, multiple fields may be
386 /// joined together under the `rest` subpath. They are all controlled
387 /// by the primary drop flag, but only the last rest-field dropped
388 /// should clear it (and it must also not clear anything else).
390 /// FIXME: I think we should just control the flags externally
391 /// and then we do not need this machinery.
392 fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
393 debug!("elaborate_drop({:?})", c);
// Summarize the dataflow state over all droppable children: any child
// maybe-live? any maybe-dead? (child count distinguishes leaf paths).
395 let mut some_live = false;
396 let mut some_dead = false;
397 let mut children_count = 0;
398 on_all_children_bits(
399 self.tcx, self.mir, self.move_data(),
401 if self.path_needs_drop(child) {
402 let (live, dead) = c.init_data.state(child);
403 debug!("elaborate_drop: state({:?}) = {:?}",
404 child, (live, dead));
411 debug!("elaborate_drop({:?}): live - {:?}", c,
412 (some_live, some_dead));
413 match (some_live, some_dead) {
414 (false, false) | (false, true) => {
415 // dead drop - patch it out
416 self.patch.patch_terminator(bb, TerminatorKind::Goto {
421 // static drop - just set the flag
422 self.patch.patch_terminator(bb, TerminatorKind::Drop {
423 location: c.lvalue.clone(),
427 self.drop_flags_for_drop(c, bb);
// Dynamic case: a single leaf (or a must-not-split type) becomes one
// flag-guarded drop; otherwise (missing arm) presumably an open drop.
431 let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
432 self.conditional_drop(c)
436 self.patch.patch_terminator(bb, TerminatorKind::Goto {
443 /// Return the lvalue and move path for each field of `variant`,
444 /// (the move path is `None` if the field is a rest field).
445 fn move_paths_for_fields(&self,
446 base_lv: &Lvalue<'tcx>,
447 variant_path: MovePathIndex,
448 variant: &'tcx ty::VariantDef,
449 substs: &'tcx Substs<'tcx>)
450 -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
452 variant.fields.iter().enumerate().map(|(i, f)| {
// Find the child move path whose projection is Field(i), if tracked.
454 super::move_path_children_matching(self.move_data(), variant_path, |p| {
457 elem: ProjectionElem::Field(idx, _), ..
458 } => idx.index() == i,
// Normalize associated types so the field type is concrete here.
464 self.tcx.normalize_associated_type_in_env(
465 &f.ty(self.tcx, substs),
468 (base_lv.clone().field(Field::new(i), field_ty), subpath)
472 /// Create one-half of the drop ladder for a list of fields, and return
473 /// the list of steps in it in reverse order.
475 /// `unwind_ladder` is such a list of steps in reverse order,
476 /// which is called instead of the next step if the drop unwinds
477 /// (the first field is never reached). If it is `None`, all
478 /// unwind targets are left blank.
479 fn drop_halfladder<'a>(&mut self,
480 c: &DropCtxt<'a, 'tcx>,
481 unwind_ladder: Option<Vec<BasicBlock>>,
483 fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
487 let mut unwind_succ = if is_cleanup {
// Bottom of the ladder: a fresh Goto to the overall successor, so a
// flag-clearing statement can be inserted at its head below.
493 let mut succ = self.new_block(
494 c, c.is_cleanup, TerminatorKind::Goto { target: succ }
497 // Always clear the "master" drop flag at the bottom of the
498 // ladder. This is needed because the "master" drop flag
499 // protects the ADT's discriminant, which is invalidated
500 // after the ADT is dropped.
502 Location { block: succ, statement_index: 0 },
504 DropFlagState::Absent
// Build steps from the last field upward, chaining each step's success
// edge to the previously built one; collected in reverse order.
507 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
// Tracked field ("std field"): full recursive elaboration.
508 succ = if let Some(path) = path {
509 debug!("drop_ladder: for std field {} ({:?})", i, lv);
511 self.elaborated_drop_block(&DropCtxt {
512 source_info: c.source_info,
513 is_cleanup: is_cleanup,
514 init_data: c.init_data,
// Untracked ("rest") field: a complete flag-guarded drop.
521 debug!("drop_ladder: for rest field {} ({:?})", i, lv);
523 self.complete_drop(&DropCtxt {
524 source_info: c.source_info,
525 is_cleanup: is_cleanup,
526 init_data: c.init_data,
// Next step unwinds into the corresponding rung of the unwind ladder.
534 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
539 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
541 /// For example, with 3 fields, the drop ladder is
544 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
546 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
548 /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
550 /// ELAB(drop location.1 [target=.c2])
552 /// ELAB(drop location.2 [target=`c.unwind])
553 fn drop_ladder<'a>(&mut self,
554 c: &DropCtxt<'a, 'tcx>,
555 fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
558 debug!("drop_ladder({:?}, {:?})", c, fields);
// Discard fields whose types need no drop glue before building rungs.
560 let mut fields = fields;
561 fields.retain(|&(ref lvalue, _)| {
562 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
563 self.tcx.type_needs_drop_given_env(ty, self.param_env())
566 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the cleanup (unwind) half first — unless we are already in a
// cleanup block, in which case no separate unwind ladder exists.
568 let unwind_ladder = if c.is_cleanup {
571 Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
// The normal half; its last (topmost) rung is the ladder's entry block.
574 self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
575 .last().cloned().unwrap_or(c.succ)
// Open drop for tuples (and closure environments): pair each element's
// field-projected lvalue with its tracked child move path (if any), then
// build a drop ladder over the elements.
578 fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
581 debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
583 let fields = tys.iter().enumerate().map(|(i, &ty)| {
584 (c.lvalue.clone().field(Field::new(i), ty),
585 super::move_path_children_matching(
586 self.move_data(), c.path, |proj| match proj {
588 elem: ProjectionElem::Field(f, _), ..
595 self.drop_ladder(c, fields)
// Open drop for Box<T>: drop the boxed contents (*box) first, then free
// the allocation via box_free on both the success and unwind paths.
598 fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
601 debug!("open_drop_for_box({:?}, {:?})", c, ty);
// Child move path for the deref projection (the box interior), if tracked.
603 let interior_path = super::move_path_children_matching(
604 self.move_data(), c.path, |proj| match proj {
605 &Projection { elem: ProjectionElem::Deref, .. } => true,
609 let interior = c.lvalue.clone().deref();
610 let inner_c = DropCtxt {
// Both continuations pass through a box_free block so the allocation
// is released whether the interior drop succeeds or unwinds.
612 unwind: c.unwind.map(|u| {
613 self.box_free_block(c, ty, u, true)
615 succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
620 self.elaborated_drop_block(&inner_c)
// Open drop for a struct or enum. Single-variant ADTs get a plain field
// ladder; multi-variant enums get a per-variant ladder selected by a
// SwitchInt on the (freshly read) discriminant.
623 fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
624 adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
626 debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
627 match adt.variants.len() {
628 match adt.variants.len() {
630 let fields = self.move_paths_for_fields(
636 self.drop_ladder(c, fields)
// Multi-variant case: build one (value, block) pair per variant that
// has a tracked downcast path; others fall into a shared `otherwise`.
639 let mut values = Vec::with_capacity(adt.variants.len());
640 let mut blocks = Vec::with_capacity(adt.variants.len());
641 let mut otherwise = None;
642 for (variant_index, discr) in adt.discriminants(self.tcx).enumerate() {
643 let subpath = super::move_path_children_matching(
644 self.move_data(), c.path, |proj| match proj {
646 elem: ProjectionElem::Downcast(_, idx), ..
647 } => idx == variant_index,
650 if let Some(variant_path) = subpath {
651 let base_lv = c.lvalue.clone().elem(
652 ProjectionElem::Downcast(adt, variant_index)
654 let fields = self.move_paths_for_fields(
657 &adt.variants[variant_index],
660 blocks.push(self.drop_ladder(c, fields));
662 // variant not found - drop the entire enum
663 if let None = otherwise {
664 otherwise = Some(self.complete_drop(c, true));
668 if let Some(block) = otherwise {
673 // If there are multiple variants, then if something
674 // is present within the enum the discriminant, tracked
675 // by the rest path, must be initialized.
677 // Additionally, we do not want to switch on the
678 // discriminant after it is free-ed, because that
679 // way lies only trouble.
// Read the discriminant into a temp of the enum's repr type, then
// switch on it to reach the right per-variant drop ladder.
680 let repr_hints = self.tcx.lookup_repr_hints(adt.did);
681 let repr_type = self.tcx.enum_repr_type(repr_hints.get(0));
682 let discr_ty = repr_type.to_ty(self.tcx);
683 let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
684 let switch_block = self.patch.new_block(BasicBlockData {
687 source_info: c.source_info,
688 kind: StatementKind::Assign(discr.clone(),
689 Rvalue::Discriminant(c.lvalue.clone()))
692 terminator: Some(Terminator {
693 source_info: c.source_info,
694 kind: TerminatorKind::SwitchInt {
695 discr: Operand::Consume(discr),
697 values: From::from(values),
701 is_cleanup: c.is_cleanup,
// Guard the whole switch behind the drop flag test.
703 self.drop_flag_test_block(c, switch_block)
708 /// The slow-path - create an "open", elaborated drop for a type
709 /// which is moved-out-of only partially, and patch `bb` to a jump
710 /// to it. This must not be called on ADTs with a destructor,
711 /// as these can't be moved-out-of, except for `Box<T>`, which is
714 /// This creates a "drop ladder" that drops the needed fields of the
715 /// ADT, both in the success case or if one of the destructors fail.
716 fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
717 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
// Dispatch on the dropped type (the enclosing match line is missing):
// closures are dropped like tuples of their upvars.
719 ty::TyClosure(def_id, substs) => {
720 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
721 self.open_drop_for_tuple(c, &tys)
723 ty::TyTuple(tys, _) => {
724 self.open_drop_for_tuple(c, tys)
726 ty::TyAdt(def, _) if def.is_box() => {
727 self.open_drop_for_box(c, ty.boxed_ty())
729 ty::TyAdt(def, substs) => {
730 self.open_drop_for_adt(c, def, substs)
732 _ => bug!("open drop from non-ADT `{:?}`", ty)
736 /// Return a basic block that drop an lvalue using the context
737 /// and path in `c`. If `update_drop_flag` is true, also
741 /// if(update_drop_flag) FLAG(c.path) = false
743 fn complete_drop<'a>(
745 c: &DropCtxt<'a, 'tcx>,
746 update_drop_flag: bool)
749 debug!("complete_drop({:?},{:?})", c, update_drop_flag);
751 let drop_block = self.drop_block(c);
// Optionally clear the flag at the head of the drop block so the value
// cannot be dropped twice.
752 if update_drop_flag {
754 Location { block: drop_block, statement_index: 0 },
756 DropFlagState::Absent
// The drop is only taken when the flag says the value may be live.
760 self.drop_flag_test_block(c, drop_block)
763 /// Create a simple conditional drop.
766 /// FLAGS(c.lv) = false
768 fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
771 debug!("conditional_drop({:?})", c);
772 let drop_bb = self.drop_block(c);
// Clear all child flags at the drop itself, then guard it on the flag.
773 self.drop_flags_for_drop(c, drop_bb);
775 self.drop_flag_test_block(c, drop_bb)
// Create a fresh, statement-less block in the patch with terminator `k`,
// inheriting the drop context's source info.
778 fn new_block<'a>(&mut self,
779 c: &DropCtxt<'a, 'tcx>,
781 k: TerminatorKind<'tcx>)
784 self.patch.new_block(BasicBlockData {
786 terminator: Some(Terminator {
787 source_info: c.source_info, kind: k
789 is_cleanup: is_cleanup
// Create a drop block and recursively elaborate the drop it contains,
// returning the (now elaborated) block.
793 fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
794 debug!("elaborated_drop_block({:?})", c);
795 let blk = self.drop_block(c);
796 self.elaborate_drop(c, blk);
// Shorthand: flag test whose "unset" edge is the context's successor.
800 fn drop_flag_test_block<'a>(&mut self,
801 c: &DropCtxt<'a, 'tcx>,
804 self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
// Route control by the path's initialization state: statically dead goes
// straight to on_unset, statically live to on_set; only the genuinely
// dynamic case emits a runtime test of the drop flag.
807 fn drop_flag_test_block_with_succ<'a>(&mut self,
808 c: &DropCtxt<'a, 'tcx>,
811 on_unset: BasicBlock)
814 let (maybe_live, maybe_dead) = c.init_data.state(c.path);
815 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
816 c, is_cleanup, on_set, (maybe_live, maybe_dead));
818 match (maybe_live, maybe_dead) {
819 (false, _) => on_unset,
820 (true, false) => on_set,
// (true, true): flag must exist — collect_drop_flags created it.
822 let flag = self.drop_flag(c.path).unwrap();
823 let term = TerminatorKind::if_(self.tcx, Operand::Consume(flag), on_set, on_unset);
824 self.new_block(c, is_cleanup, term)
// Create a block containing a plain (unelaborated) Drop of c.lvalue.
829 fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
830 self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
831 location: c.lvalue.clone(),
// Wrap a box_free call block in a drop-flag test: free only if the box
// may still be live, otherwise skip straight to `target`.
837 fn box_free_block<'a>(
839 c: &DropCtxt<'a, 'tcx>,
844 let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
845 self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
// Build a block that clears the box's drop flag (if tracked) and then
// calls the box_free lang item, instantiated at the boxed type, with the
// box itself as argument; control continues at `target`.
848 fn unelaborated_free_block<'a>(
850 c: &DropCtxt<'a, 'tcx>,
855 let mut statements = vec![];
// Clear the flag before freeing, so unwinding from box_free cannot
// re-free the allocation.
856 if let Some(&flag) = self.drop_flags.get(&c.path) {
857 statements.push(Statement {
858 source_info: c.source_info,
859 kind: StatementKind::Assign(
861 self.constant_bool(c.source_info.span, false)
// box_free returns unit; a throwaway temp receives the result.
867 let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
868 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
869 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
870 let fty = tcx.item_type(free_func).subst(tcx, substs);
872 self.patch.new_block(BasicBlockData {
873 statements: statements,
874 terminator: Some(Terminator {
875 source_info: c.source_info, kind: TerminatorKind::Call {
876 func: Operand::Constant(Constant {
877 span: c.source_info.span,
879 literal: Literal::Item {
884 args: vec![Operand::Consume(c.lvalue.clone())],
885 destination: Some((unit_temp, target)),
889 is_cleanup: is_cleanup
// True when the dropped type may not be split into per-field drops —
// i.e. a non-Box ADT with a Drop impl. Reaching this with such a type
// indicates a dataflow bug (moves out of Drop types are forbidden),
// hence the warning rather than silent acceptance.
893 fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
894 // if we have a destructor, we must *not* split the drop.
896 // dataflow can create unneeded children in some cases
897 // - be sure to ignore them.
899 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
902 ty::TyAdt(def, _) => {
903 if def.has_dtor() && !def.is_box() {
904 self.tcx.sess.span_warn(
906 &format!("dataflow bug??? moving out of type with dtor {:?}",
// Build a constant bool Rvalue for use in drop-flag assignments.
917 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
918 Rvalue::Use(Operand::Constant(Constant {
920 ty: self.tcx.types.bool,
921 literal: Literal::Value { value: ConstVal::Bool(val) }
// Assign the flag for `path` (if one exists) at `loc`; untracked paths
// are silently skipped — only maybe-live-maybe-dead paths have flags.
925 fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
926 if let Some(&flag) = self.drop_flags.get(&path) {
927 let span = self.patch.source_info_for_location(self.mir, loc).span;
928 let val = self.constant_bool(span, val.value());
929 self.patch.add_assign(loc, Lvalue::Local(flag), val);
// Initialize every drop flag to false at the very start of the function
// (START_BLOCK, statement 0), before any value can become live.
933 fn drop_flags_on_init(&mut self) {
934 let loc = Location { block: START_BLOCK, statement_index: 0 };
935 let span = self.patch.source_info_for_location(self.mir, loc).span;
936 let false_ = self.constant_bool(span, false);
937 for flag in self.drop_flags.values() {
938 self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());
// For every Call terminator that has a cleanup edge, mark the call's
// destination (and its children) Present at the head of the success
// block — the destination is only initialized if the call returned.
942 fn drop_flags_for_fn_rets(&mut self) {
943 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
944 if let TerminatorKind::Call {
945 destination: Some((ref lv, tgt)), cleanup: Some(_), ..
946 } = data.terminator().kind {
// Only original (unpatched) blocks are expected here.
947 assert!(!self.patch.is_patched(bb));
949 let loc = Location { block: tgt, statement_index: 0 };
950 let path = self.move_data().rev_lookup.find(lv);
951 on_lookup_result_bits(
952 self.tcx, self.mir, self.move_data(), path,
953 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Function arguments arrive initialized: set their flags accordingly at
// function entry.
959 fn drop_flags_for_args(&mut self) {
960 let loc = Location { block: START_BLOCK, statement_index: 0 };
961 super::drop_flag_effects_for_function_entry(
962 self.tcx, self.mir, self.env, |path, ds| {
963 self.set_drop_flag(loc, path, ds);
// Walk every statement (and terminator slot) of the ORIGINAL blocks and
// insert the matching drop-flag updates for each location's effects.
968 fn drop_flags_for_locs(&mut self) {
969 // We intentionally iterate only over the *old* basic blocks.
971 // Basic blocks created by drop elaboration update their
972 // drop flags by themselves, to avoid the drop flags being
973 // clobbered before they are read.
975 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
976 debug!("drop_flags_for_locs({:?})", data);
// Index statements.len() stands for the terminator itself.
977 for i in 0..(data.statements.len()+1) {
978 debug!("drop_flag_for_locs: stmt {}", i);
979 let mut allow_initializations = true;
980 if i == data.statements.len() {
981 match data.terminator().kind {
982 TerminatorKind::Drop { .. } => {
983 // drop elaboration should handle that by itself
986 TerminatorKind::DropAndReplace { .. } => {
987 // this contains the move of the source and
988 // the initialization of the destination. We
989 // only want the former - the latter is handled
990 // by the elaboration code and must be done
991 // *after* the destination is dropped.
992 assert!(self.patch.is_patched(bb));
993 allow_initializations = false;
996 assert!(!self.patch.is_patched(bb));
1000 let loc = Location { block: bb, statement_index: i };
1001 super::drop_flag_effects_for_location(
1002 self.tcx, self.mir, self.env, loc, |path, ds| {
// Absent (clears) always apply; Present (sets) are suppressed
// for DropAndReplace as explained above.
1003 if ds == DropFlagState::Absent || allow_initializations {
1004 self.set_drop_flag(loc, path, ds)
1010 // There may be a critical edge after this call,
1011 // so mark the return as initialized *before* the
// Calls WITHOUT a cleanup edge set their destination flag at the
// call site itself (cleanup-edge calls are handled in
// drop_flags_for_fn_rets instead).
1013 if let TerminatorKind::Call {
1014 destination: Some((ref lv, _)), cleanup: None, ..
1015 } = data.terminator().kind {
1016 assert!(!self.patch.is_patched(bb));
1018 let loc = Location { block: bb, statement_index: data.statements.len() };
1019 let path = self.move_data().rev_lookup.find(lv);
1020 on_lookup_result_bits(
1021 self.tcx, self.mir, self.move_data(), path,
1022 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Clear the flags of every child path of c.path at a drop's terminator
// location — the values are gone once this drop executes.
// NOTE(review): the definition is cut off at the end of this extraction.
1028 fn drop_flags_for_drop<'a>(&mut self,
1029 c: &DropCtxt<'a, 'tcx>,
1032 let loc = self.patch.terminator_loc(self.mir, bb);
1033 on_all_children_bits(
1034 self.tcx, self.mir, self.move_data(), c.path,
1035 |child| self.set_drop_flag(loc, child, DropFlagState::Absent)