1 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
14 use rustc::middle::const_val::ConstInt;
15 use rustc::middle::lang_items;
16 use rustc::ty::{self, Ty};
17 use rustc::ty::subst::{Kind, Substs};
18 use rustc::ty::util::IntTypeExt;
19 use rustc_data_structures::indexed_vec::Idx;
20 use util::patch::MirPatch;
// Whether the value tracked by a move path is currently initialized.
// NOTE(review): this chunk is a sampled view — interior lines (closing
// braces, the `impl`/`match` headers for `value`, the `DropFlagMode`
// variants) are missing; code lines are kept byte-identical.
24 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
25 pub enum DropFlagState {
26 Present, // i.e. initialized
27 Absent, // i.e. deinitialized or "moved"
// The boolean value stored in the drop flag for each state:
// `Present` => true, `Absent` => false.
31 pub fn value(self) -> bool {
33 DropFlagState::Present => true,
34 DropFlagState::Absent => false
// How much of a path's drop-flag tree an operation affects
// (variants not visible in this view — presumably Shallow/Deep,
// which are used below; confirm against the full file).
48 pub enum DropFlagMode {
// Client hook trait for drop elaboration: supplies the MIR being
// patched, the type context, and per-move-path drop-flag knowledge
// that the generic `DropCtxt` machinery below consumes.
53 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
// Opaque, cheaply copyable identifier for a move path.
54 type Path : Copy + fmt::Debug;
// Patch accumulating the new blocks/statements produced here.
56 fn patch(&mut self) -> &mut MirPatch<'tcx>;
57 fn mir(&self) -> &'a Mir<'tcx>;
58 fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx>;
59 fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx>;
// How `path` must be dropped under `mode` (see `DropStyle` uses
// below: Dead / Static / Conditional / Open).
61 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
// Operand holding the run-time drop flag for `path`, if any.
62 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
// Emit code at `location` clearing the drop flag(s) for `path`.
63 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
// Subpath lookups; `None` means that subpath is not tracked
// separately (a "rest" field conditioned on the parent's flag).
66 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
67 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
68 fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
// Bundles everything needed to elaborate one drop: the elaborator
// hooks, the lvalue being dropped, where to go on success (`succ`,
// not visible in this sampled view) and on unwind.
72 struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
73 where D : DropElaborator<'b, 'tcx> + 'l
75 elaborator: &'l mut D,
// Span/scope attached to every statement and terminator we emit.
77 source_info: SourceInfo,
80 lvalue: &'l Lvalue<'tcx>,
// Unwind target; `None` when we are already on a cleanup path.
83 unwind: Option<BasicBlock>,
// Entry point: build a DropCtxt from the pieces and elaborate the
// drop terminator at the given block (the `bb` parameter and some
// other parameters are among the lines missing from this view).
86 pub fn elaborate_drop<'b, 'tcx, D>(
88 source_info: SourceInfo,
90 lvalue: &Lvalue<'tcx>,
93 unwind: Option<BasicBlock>,
95 where D: DropElaborator<'b, 'tcx>
// A cleanup drop cannot itself unwind, so `unwind` must be `None`
// exactly when we are already in a cleanup block.
97 assert_eq!(unwind.is_none(), is_cleanup);
99 elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
103 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
104 where D: DropElaborator<'b, 'tcx>
// Type of `lvalue` in the MIR being elaborated.
106 fn lvalue_ty(&self, lvalue: &Lvalue<'tcx>) -> Ty<'tcx> {
107 lvalue.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
// Convenience accessor for the elaborator's type context.
110 fn tcx(&self) -> ty::TyCtxt<'b, 'tcx, 'tcx> {
111 self.elaborator.tcx()
114 /// This elaborates a single drop instruction, located at `bb`, and
117 /// The elaborated drop checks the drop flags to only drop what
120 /// In addition, the relevant drop flags also need to be cleared
121 /// to avoid double-drops. However, in the middle of a complex
122 /// drop, one must avoid clearing some of the flags before they
123 /// are read, as that would cause a memory leak.
125 /// In particular, when dropping an ADT, multiple fields may be
126 /// joined together under the `rest` subpath. They are all controlled
127 /// by the primary drop flag, but only the last rest-field dropped
128 /// should clear it (and it must also not clear anything else).
130 /// FIXME: I think we should just control the flags externally
131 /// and then we do not need this machinery.
132 pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
133 debug!("elaborate_drop({:?})", self);
134 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
135 debug!("elaborate_drop({:?}): live - {:?}", self, style);
// Dead (match arm header not visible): the value is statically
// known uninitialized, so the drop becomes a plain goto.
138 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Static: statically known initialized — keep a real Drop
// terminator, clearing the flags at the drop itself.
142 DropStyle::Static => {
143 let loc = self.terminator_loc(bb);
144 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
145 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
146 location: self.lvalue.clone(),
// Conditional: guard the (whole-value) drop behind a run-time
// drop-flag test; `complete_drop` builds that test block.
151 DropStyle::Conditional => {
152 let is_cleanup = self.is_cleanup; // FIXME(#6393)
153 let succ = self.succ;
154 let drop_bb = self.complete_drop(
155 is_cleanup, Some(DropFlagMode::Deep), succ);
156 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Open (arm header not visible): partially moved-out value —
// expand field-by-field via `open_drop`.
161 let drop_bb = self.open_drop();
162 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
169 /// Return the lvalue and move path for each field of `variant`,
170 /// (the move path is `None` if the field is a rest field).
171 fn move_paths_for_fields(&self,
172 base_lv: &Lvalue<'tcx>,
173 variant_path: D::Path,
174 variant: &'tcx ty::VariantDef,
175 substs: &'tcx Substs<'tcx>)
176 -> Vec<(Lvalue<'tcx>, Option<D::Path>)>
178 variant.fields.iter().enumerate().map(|(i, f)| {
179 let field = Field::new(i);
180 let subpath = self.elaborator.field_subpath(variant_path, field);
// Normalize the (possibly associated) field type in the current
// parameter environment before projecting into it.
183 self.tcx().normalize_associated_type_in_env(
184 &f.ty(self.tcx(), substs),
185 self.elaborator.param_env()
187 (base_lv.clone().field(field, field_ty), subpath)
// Elaborate the drop of one field/subplace. A field with its own
// move path ("std field") gets a fully elaborated drop keyed on
// that path; a "rest field" (no own path) gets an unconditional
// drop block guarded only by the parent's (`self.path`) drop flag.
191 fn drop_subpath(&mut self,
193 lvalue: &Lvalue<'tcx>,
194 path: Option<D::Path>,
196 unwind: Option<BasicBlock>)
199 if let Some(path) = path {
200 debug!("drop_subpath: for std field {:?}", lvalue);
// Build a child DropCtxt rooted at the field's own path.
203 elaborator: self.elaborator,
204 source_info: self.source_info,
205 path, lvalue, succ, unwind, is_cleanup
206 }.elaborated_drop_block()
208 debug!("drop_subpath: for rest field {:?}", lvalue);
211 elaborator: self.elaborator,
212 source_info: self.source_info,
213 lvalue, succ, unwind, is_cleanup,
214 // Using `self.path` here to condition the drop on
215 // our own drop flag.
217 }.complete_drop(is_cleanup, None, succ)
221 /// Create one-half of the drop ladder for a list of fields, and return
222 /// the list of steps in it in reverse order.
224 /// `unwind_ladder` is such a list of steps in reverse order,
225 /// which is called instead of the next step if the drop unwinds
226 /// (the first field is never reached). If it is `None`, all
227 /// unwind targets are left blank.
228 fn drop_halfladder<'a>(&mut self,
229 unwind_ladder: Option<&[BasicBlock]>,
231 fields: &[(Lvalue<'tcx>, Option<D::Path>)],
// On a cleanup half-ladder there is no further unwind target
// (initialization of `unwind_succ`; alternative arm not visible).
235 let mut unwind_succ = if is_cleanup {
// Landing block: jump to the overall successor.
241 let goto = TerminatorKind::Goto { target: succ };
242 let mut succ = self.new_block(is_cleanup, goto);
244 // Always clear the "master" drop flag at the bottom of the
245 // ladder. This is needed because the "master" drop flag
246 // protects the ADT's discriminant, which is invalidated
247 // after the ADT is dropped.
248 let succ_loc = Location { block: succ, statement_index: 0 };
249 self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
// Chain field drops from last to first; each step becomes the
// successor of the next, pairing with the unwind ladder step-wise.
251 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
252 succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
253 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
258 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
260 /// For example, with 3 fields, the drop ladder is
263 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
265 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
267 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
269 /// ELAB(drop location.1 [target=.c2])
271 /// ELAB(drop location.2 [target=`self.unwind`])
272 fn drop_ladder<'a>(&mut self,
273 fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
274 -> (BasicBlock, Option<BasicBlock>)
276 debug!("drop_ladder({:?}, {:?})", self, fields);
// Skip fields whose types need no drop at all.
278 let mut fields = fields;
279 fields.retain(|&(ref lvalue, _)| {
280 self.tcx().type_needs_drop_given_env(
281 self.lvalue_ty(lvalue), self.elaborator.param_env())
284 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind half-ladder first (only when not already on a
// cleanup path), so the normal half-ladder can target its steps.
286 let unwind_ladder = if self.is_cleanup {
289 let unwind = self.unwind.unwrap(); // FIXME(#6393)
290 Some(self.drop_halfladder(None, unwind, &fields, true))
293 let succ = self.succ; // FIXME(#6393)
294 let is_cleanup = self.is_cleanup;
296 self.drop_halfladder(unwind_ladder.as_ref().map(|x| &**x),
297 succ, &fields, is_cleanup);
// Entry points of both ladders; fall back to the plain successor/
// unwind targets if a ladder turned out empty.
299 (normal_ladder.last().cloned().unwrap_or(succ),
300 unwind_ladder.and_then(|l| l.last().cloned()).or(self.unwind))
// Open drop for a tuple (or closure upvars): one ladder entry per
// element, paths resolved via `field_subpath`.
303 fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
306 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
308 let fields = tys.iter().enumerate().map(|(i, &ty)| {
309 (self.lvalue.clone().field(Field::new(i), ty),
310 self.elaborator.field_subpath(self.path, Field::new(i)))
// Only the normal-path entry block is needed here.
313 self.drop_ladder(fields).0
// Open drop for `Box<T>`: drop the boxed contents (`*self.lvalue`)
// first, then free the allocation via `box_free`, on both the
// normal and the unwind path.
316 fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
318 debug!("open_drop_for_box({:?}, {:?})", self, ty);
320 let interior = self.lvalue.clone().deref();
321 let interior_path = self.elaborator.deref_subpath(self.path);
323 let succ = self.succ; // FIXME(#6393)
324 let is_cleanup = self.is_cleanup;
// The contents-drop continues into the matching free block.
325 let succ = self.box_free_block(ty, succ, is_cleanup);
326 let unwind_succ = self.unwind.map(|u| {
327 self.box_free_block(ty, u, true)
330 self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
// Open drop for a struct/enum/union: drop the contents (unless it
// is a union, whose fields are never dropped implicitly), then the
// `Drop::drop` destructor if the ADT has one.
333 fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
335 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
// An uninhabited (zero-variant) enum can never exist, so the
// drop is unreachable.
336 if adt.variants.len() == 0 {
337 return self.elaborator.patch().new_block(BasicBlockData {
339 terminator: Some(Terminator {
340 source_info: self.source_info,
341 kind: TerminatorKind::Unreachable
343 is_cleanup: self.is_cleanup
// Unions: nothing inside to drop, fall through to destructor.
347 let contents_drop = if adt.is_union() {
348 (self.succ, self.unwind)
350 self.open_drop_for_adt_contents(adt, substs)
353 if adt.has_dtor(self.tcx()) {
354 self.destructor_call_block(contents_drop)
// Drop the fields of an ADT (per-variant for enums). Returns the
// entry blocks for the normal and unwind paths.
360 fn open_drop_for_adt_contents<'a>(&mut self, adt: &'tcx ty::AdtDef,
361 substs: &'tcx Substs<'tcx>)
362 -> (BasicBlock, Option<BasicBlock>) {
363 match adt.variants.len() {
// Single-variant case (arm header not visible): a plain drop
// ladder over the variant's fields — no discriminant switch.
365 let fields = self.move_paths_for_fields(
371 self.drop_ladder(fields)
// Multi-variant case: build one drop ladder per tracked variant
// and switch on the discriminant.
374 let is_cleanup = self.is_cleanup;
375 let succ = self.succ;
376 let unwind = self.unwind; // FIXME(#6393)
378 let mut values = Vec::with_capacity(adt.variants.len());
379 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
380 let mut unwind_blocks = if is_cleanup {
383 Some(Vec::with_capacity(adt.variants.len()))
385 let mut otherwise = None;
386 let mut unwind_otherwise = None;
387 for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
388 let subpath = self.elaborator.downcast_subpath(
389 self.path, variant_index);
390 if let Some(variant_path) = subpath {
// This variant is tracked: drop it field-by-field through a
// Downcast projection.
391 let base_lv = self.lvalue.clone().elem(
392 ProjectionElem::Downcast(adt, variant_index)
394 let fields = self.move_paths_for_fields(
397 &adt.variants[variant_index],
400 if let Some(ref mut unwind_blocks) = unwind_blocks {
401 // We can't use the half-ladder from the original
402 // drop ladder, because this breaks the
403 // "funclet can't have 2 successor funclets"
404 // requirement from MSVC:
406 // switch unwind-switch
408 // v1.0 v2.0 v2.0-unwind v1.0-unwind
410 // v1.1-unwind v2.1-unwind |
412 // \-------------------------------/
414 // Create a duplicate half-ladder to avoid that. We
415 // could technically only do this on MSVC, but I
416 // I want to minimize the divergence between MSVC
419 let unwind = unwind.unwrap();
420 let halfladder = self.drop_halfladder(
421 None, unwind, &fields, true);
423 halfladder.last().cloned().unwrap_or(unwind)
426 let (normal, _) = self.drop_ladder(fields);
427 normal_blocks.push(normal);
429 // variant not found - drop the entire enum
430 if let None = otherwise {
// Untracked variants share a single "otherwise" arm that
// drops the whole value, clearing the shallow flag.
431 otherwise = Some(self.complete_drop(
433 Some(DropFlagMode::Shallow),
435 unwind_otherwise = unwind.map(|unwind| self.complete_drop(
437 Some(DropFlagMode::Shallow),
443 if let Some(block) = otherwise {
444 normal_blocks.push(block);
445 if let Some(ref mut unwind_blocks) = unwind_blocks {
446 unwind_blocks.push(unwind_otherwise.unwrap());
// Final discriminant switches for both paths.
452 (self.adt_switch_block(is_cleanup, adt, normal_blocks, &values, succ),
453 unwind_blocks.map(|unwind_blocks| {
454 self.adt_switch_block(
455 is_cleanup, adt, unwind_blocks, &values, unwind.unwrap()
// Build the block that reads the enum's discriminant into a fresh
// temporary and switches to the per-variant drop blocks, itself
// guarded by a shallow drop-flag test.
462 fn adt_switch_block(&mut self,
464 adt: &'tcx ty::AdtDef,
465 blocks: Vec<BasicBlock>,
469 // If there are multiple variants, then if something
470 // is present within the enum the discriminant, tracked
471 // by the rest path, must be initialized.
473 // Additionally, we do not want to switch on the
474 // discriminant after it is free-ed, because that
475 // way lies only trouble.
476 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
477 let discr = Lvalue::Local(self.new_temp(discr_ty));
478 let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
479 let switch_block = self.elaborator.patch().new_block(BasicBlockData {
482 source_info: self.source_info,
483 kind: StatementKind::Assign(discr.clone(), discr_rv),
486 terminator: Some(Terminator {
487 source_info: self.source_info,
488 kind: TerminatorKind::SwitchInt {
489 discr: Operand::Consume(discr),
491 values: From::from(values.to_owned()),
495 is_cleanup: is_cleanup,
// Only switch when the value is actually initialized.
497 self.drop_flag_test_block(is_cleanup, switch_block, succ)
// Build the block that invokes `Drop::drop(&mut *lvalue)` and then
// continues into the contents-drop (`succ`/`unwind`).
500 fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<BasicBlock>))
503 debug!("destructor_call_block({:?}, {:?})", self, succ);
504 let tcx = self.tcx();
505 let drop_trait = tcx.lang_items.drop_trait().unwrap();
// `Drop` has exactly one associated item: the `drop` method.
506 let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
507 let ty = self.lvalue_ty(self.lvalue);
508 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
// `drop` takes `&mut self`; regions are erased at this stage.
510 let re_erased = tcx.mk_region(ty::ReErased);
511 let ref_ty = tcx.mk_ref(re_erased, ty::TypeAndMut {
513 mutbl: hir::Mutability::MutMutable
515 let ref_lvalue = self.new_temp(ref_ty);
// `drop` returns unit; give the call a throwaway destination.
516 let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
518 self.elaborator.patch().new_block(BasicBlockData {
519 statements: vec![Statement {
520 source_info: self.source_info,
521 kind: StatementKind::Assign(
522 Lvalue::Local(ref_lvalue),
523 Rvalue::Ref(re_erased, BorrowKind::Mut, self.lvalue.clone())
526 terminator: Some(Terminator {
527 kind: TerminatorKind::Call {
528 func: Operand::function_handle(tcx, drop_fn.def_id, substs,
529 self.source_info.span),
530 args: vec![Operand::Consume(Lvalue::Local(ref_lvalue))],
531 destination: Some((unit_temp, succ)),
534 source_info: self.source_info
536 is_cleanup: self.is_cleanup,
540 /// The slow-path - create an "open", elaborated drop for a type
541 /// which is moved-out-of only partially, and patch `bb` to a jump
542 /// to it. This must not be called on ADTs with a destructor,
543 /// as these can't be moved-out-of, except for `Box<T>`, which is
546 /// This creates a "drop ladder" that drops the needed fields of the
547 /// ADT, both in the success case or if one of the destructors fail.
548 fn open_drop<'a>(&mut self) -> BasicBlock {
549 let ty = self.lvalue_ty(self.lvalue);
550 let is_cleanup = self.is_cleanup; // FIXME(#6393)
551 let succ = self.succ;
// Dispatch on the dropped type (the `match ty.sty` header is
// among the lines missing from this view).
553 ty::TyClosure(def_id, substs) => {
// Closures drop their upvars like a tuple of captures.
554 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
555 self.open_drop_for_tuple(&tys)
557 ty::TyTuple(tys, _) => {
558 self.open_drop_for_tuple(tys)
560 ty::TyAdt(def, _) if def.is_box() => {
561 self.open_drop_for_box(ty.boxed_ty())
563 ty::TyAdt(def, substs) => {
564 self.open_drop_for_adt(def, substs)
// Trait objects can't be moved out of field-wise; a deep
// conditional drop of the whole value suffices.
566 ty::TyDynamic(..) => {
567 self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
569 ty::TyArray(..) | ty::TySlice(..) => {
570 // FIXME(#34708): handle partially-dropped
571 // array/slice elements.
572 self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
574 _ => bug!("open drop from non-ADT `{:?}`", ty)
578 /// Return a basic block that drop an lvalue using the context
579 /// and path in `c`. If `mode` is something, also clear `c`
582 /// if FLAG(self.path)
583 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
585 fn complete_drop<'a>(&mut self,
587 drop_mode: Option<DropFlagMode>,
588 succ: BasicBlock) -> BasicBlock
590 debug!("complete_drop({:?},{:?})", self, drop_mode);
592 let drop_block = self.drop_block(is_cleanup, succ);
// Clear the flag at the top of the drop block so it cannot be
// dropped a second time.
593 if let Some(mode) = drop_mode {
594 let block_start = Location { block: drop_block, statement_index: 0 };
595 self.elaborator.clear_drop_flag(block_start, self.path, mode);
// Only take the drop block when the flag says "initialized".
598 self.drop_flag_test_block(is_cleanup, drop_block, succ)
// Create a fresh Drop block for this context and immediately
// elaborate it (recursively expanding it per the drop style).
601 fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
602 debug!("elaborated_drop_block({:?})", self);
603 let is_cleanup = self.is_cleanup; // FIXME(#6393)
604 let succ = self.succ;
605 let blk = self.drop_block(is_cleanup, succ);
606 self.elaborate_drop(blk);
// Free block for `Box<T>`, guarded by the drop flag: only call
// `box_free` if the box is still initialized.
610 fn box_free_block<'a>(
616 let block = self.unelaborated_free_block(ty, target, is_cleanup);
617 self.drop_flag_test_block(is_cleanup, block, target)
// Unconditional call to the `box_free` lang item for a box of
// `ty`, continuing to `target`; clears the shallow drop flag at
// the top of the new block.
620 fn unelaborated_free_block<'a>(
626 let tcx = self.tcx();
627 let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
628 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
629 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
631 let call = TerminatorKind::Call {
632 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
633 args: vec![Operand::Consume(self.lvalue.clone())],
634 destination: Some((unit_temp, target)),
637 let free_block = self.new_block(is_cleanup, call);
639 let block_start = Location { block: free_block, statement_index: 0 };
640 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
// New block containing just a Drop terminator for `self.lvalue`.
// A cleanup block gets no unwind edge (it may not unwind again).
644 fn drop_block<'a>(&mut self, is_cleanup: bool, succ: BasicBlock) -> BasicBlock {
645 let block = TerminatorKind::Drop {
646 location: self.lvalue.clone(),
648 unwind: if is_cleanup { None } else { self.unwind }
650 self.new_block(is_cleanup, block)
// Route control flow based on the (shallow) drop style of the path:
// statically dead => `on_unset`, statically live => `on_set`,
// otherwise emit a run-time `if flag` test block.
653 fn drop_flag_test_block(&mut self,
656 on_unset: BasicBlock)
659 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
660 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
661 self, is_cleanup, on_set, style);
664 DropStyle::Dead => on_unset,
665 DropStyle::Static => on_set,
666 DropStyle::Conditional | DropStyle::Open => {
// Conditional/open drops must have a flag to consult.
667 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
668 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
669 self.new_block(is_cleanup, term)
// Add a fresh block with terminator `k` (no statements) to the patch.
674 fn new_block<'a>(&mut self,
676 k: TerminatorKind<'tcx>)
679 self.elaborator.patch().new_block(BasicBlockData {
681 terminator: Some(Terminator {
682 source_info: self.source_info, kind: k
684 is_cleanup: is_cleanup
// Allocate a fresh temporary local of type `ty` in the patch.
688 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
689 self.elaborator.patch().new_temp(ty, self.source_info.span)
// Location of `bb`'s terminator, accounting for patched blocks.
692 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
693 let mir = self.elaborator.mir();
694 self.elaborator.patch().terminator_loc(mir, bb)