1 // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 // Code relating to taking, dropping, etc as well as type descriptors.
18 use llvm::{ValueRef, True, get_param};
20 use middle::lang_items::ExchangeFreeFnLangItem;
22 use middle::subst::{Subst, Substs};
28 use trans::cleanup::CleanupMethods;
32 use trans::debuginfo::DebugLoc;
34 use trans::machine::*;
36 use trans::type_::Type;
37 use trans::type_of::{type_of, sizing_type_of, align_of};
38 use middle::ty::{self, Ty};
39 use util::ppaux::{ty_to_short_str, Repr};
42 use arena::TypedArena;
44 use std::ffi::CString;
46 use syntax::parse::token;
// Emits a call to the `exchange_free` lang item to release a heap (`Box`)
// allocation whose size and alignment are runtime LLVM values.
// NOTE(review): this is a sampled listing — the parameter list (`v`, `size`,
// `align`, `debug_loc`, `ccx`, presumably) and the tail of the body are
// elided; comments describe only the visible code.
48 pub fn trans_exchange_free_dyn<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
53 -> Block<'blk, 'tcx> {
54 let _icx = push_ctxt("trans_exchange_free");
// Resolve the lang-item free function and call it with the pointer cast to
// i8* plus the dynamic size/align operands.
56 callee::trans_lang_call(cx,
57 langcall(cx, None, "", ExchangeFreeFnLangItem),
58 &[PointerCast(cx, v, Type::i8p(ccx)), size, align],
// Convenience wrapper over `trans_exchange_free_dyn` for the common case
// where size and alignment are compile-time constants: it wraps them in
// `C_uint` constants and delegates.
// NOTE(review): sampled listing — the parameter list and trailing arguments
// of the delegated call are elided here.
63 pub fn trans_exchange_free<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
68 -> Block<'blk, 'tcx> {
69 trans_exchange_free_dyn(cx,
71 C_uint(cx.ccx(), size),
72 C_uint(cx.ccx(), align),
// Frees a heap allocation holding a value of statically-known (sized) type
// `content_ty`, computing the size/alignment from the type itself.
// Zero-sized contents are skipped entirely because `Box<ZeroSizeType>`
// never allocated in the first place.
// NOTE(review): sampled listing — parameters (`ptr`, `content_ty`,
// `debug_loc`) and the else/closing lines are elided.
76 pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
80 -> Block<'blk, 'tcx> {
// Only valid for sized types: unsized contents need the dynamic path.
81 assert!(type_is_sized(bcx.ccx().tcx(), content_ty));
82 let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
83 let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);
85 // `Box<ZeroSizeType>` does not allocate.
86 if content_size != 0 {
87 let content_align = align_of(bcx.ccx(), content_ty);
88 trans_exchange_free(bcx, ptr, content_size, content_align, debug_loc)
// Normalizes a type for drop-glue lookup so that types with identical drop
// behavior share one glue function (reducing the number of glues emitted).
// Visible cases: unsized types are kept as-is (they may need a vtable ptr),
// types that need no drop are collapsed, and `Box<T>` of a no-drop sized `T`
// is special-cased when the allocation is zero-sized.
// NOTE(review): sampled listing — the `tcx` binding, the match header, the
// returned substitute types, and remaining arms are elided.
94 pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
95 t: Ty<'tcx>) -> Ty<'tcx> {
97 // Even if there is no dtor for t, there might be one deeper down and we
98 // might need to pass in the vtable ptr.
99 if !type_is_sized(tcx, t) {
102 if !type_needs_drop(tcx, t) {
// `Box` of a droppable-free, sized payload: if the payload is zero-sized
// the box owns no allocation, so its glue can be shared too.
106 ty::ty_uniq(typ) if !type_needs_drop(tcx, typ)
107 && type_is_sized(tcx, typ) => {
108 let llty = sizing_type_of(ccx, typ);
109 // `Box<ZeroSizeType>` does not allocate.
110 if llsize_of_alloc(ccx, llty) == 0 {
// Emits a call to the drop glue for `t` on the value *pointed to* by `v`.
// If `t` needs no drop this is a no-op. The pointer is cast to the glue's
// normalized type (`get_drop_glue_type`) when that differs from `t`.
// NOTE(review): sampled listing — parameters (`v`, `t`, `debug_loc`), the
// `ccx` binding line, and the else-branch of the cast are elided.
120 pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
124 -> Block<'blk, 'tcx> {
125 // NB: v is an *alias* of type t here, not a direct value.
126 debug!("drop_ty(t={})", t.repr(bcx.tcx()));
127 let _icx = push_ctxt("drop_ty");
128 if type_needs_drop(bcx.tcx(), t) {
130 let glue = get_drop_glue(ccx, t);
131 let glue_type = get_drop_glue_type(ccx, t);
// Glue is keyed on the normalized type; cast the pointer to match when the
// normalization changed the type.
132 let ptr = if glue_type != t {
133 PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
138 Call(bcx, glue, &[ptr], None, debug_loc);
// Drops an *immediate* (by-value) LLVM value: drop glue takes its argument
// by alias, so the value is first spilled to a stack slot via `alloca` +
// `store_ty`, then `drop_ty` is invoked on that slot.
// NOTE(review): sampled listing — the parameter list (`v`, `t`,
// `debug_loc`) is elided.
143 pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
147 -> Block<'blk, 'tcx> {
148 let _icx = push_ctxt("drop_ty_immediate");
149 let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
150 store_ty(bcx, v, vp, t);
151 drop_ty(bcx, vp, t, debug_loc)
// Returns (creating and memoizing on first use) the drop-glue function for
// type `t`. The type is first normalized via `get_drop_glue_type`, then the
// `drop_glues` cache is consulted; on a miss, a glue function is declared
// (reusing an available symbol if one exists) and its body generated.
// NOTE(review): sampled listing — the cache-miss arm headers, the `sym`
// binding, and the closing return are elided.
154 pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
155 debug!("make drop glue for {}", ppaux::ty_to_string(ccx.tcx(), t));
156 let t = get_drop_glue_type(ccx, t);
157 debug!("drop glue type {}", ppaux::ty_to_string(ccx.tcx(), t));
// Fast path: glue already built for this normalized type.
158 match ccx.drop_glues().borrow().get(&t) {
159 Some(&glue) => return glue,
// Glue takes a pointer to `t` if sized; for unsized `t` it takes a pointer
// to `Box<t>` (a fat pointer carrying the extra info).
163 let llty = if type_is_sized(ccx.tcx(), t) {
164 type_of(ccx, t).ptr_to()
166 type_of(ccx, ty::mk_uniq(ccx.tcx(), t)).ptr_to()
169 let llfnty = Type::glue_fn(ccx, llty);
// Reuse a previously-declared symbol when available; otherwise declare a
// fresh generic glue function.
171 let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
173 let glue = decl_cdecl_fn(ccx, &old_sym[], llfnty, ty::mk_nil(ccx.tcx()));
177 let (sym, glue) = declare_generic_glue(ccx, t, llfnty, "drop");
182 ccx.drop_glues().borrow_mut().insert(t, glue);
184 // To avoid infinite recursion, don't `make_drop_glue` until after we've
185 // added the entry to the `drop_glues` cache.
188 ccx.available_drop_glues().borrow_mut().insert(t, sym);
189 // We're creating a new drop glue, so also generate a body.
190 make_generic_glue(ccx, t, glue, make_drop_glue, "drop");
// Emits a *conditional* struct drop: loads the struct's drop flag (a bool
// stored in the value by the old drop-flag scheme) and only runs
// `trans_struct_drop` when the flag is set.
// NOTE(review): sampled listing — the `t`/`v0` parameters, the sized
// branch's body, and the closing lines are elided.
198 fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
201 dtor_did: ast::DefId,
202 class_did: ast::DefId,
203 substs: &subst::Substs<'tcx>)
204 -> Block<'blk, 'tcx> {
205 let repr = adt::represent_type(bcx.ccx(), t);
// For unsized structs, the struct data is behind the fat pointer's address
// word rather than being `v0` itself.
206 let struct_data = if type_is_sized(bcx.tcx(), t) {
209 let llval = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
212 let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
213 with_cond(bcx, load_ty(bcx, drop_flag.val, bcx.tcx().types.bool), |cx| {
214 trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
// Emits code that runs a struct/enum's user-defined `Drop` impl and then
// drops its fields. Field drops are scheduled as cleanups *before* invoking
// the user dtor so they still run if the dtor panics; iteration is in
// reverse field order so the scheduled cleanups fire in declaration order.
// NOTE(review): sampled listing — the `t`/`v0` parameters, several branch
// bodies (the sized `struct_data` case, the `self_ty` extraction, the
// sized-field `val` case), and the schedule_drop_mem arguments are elided.
218 fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
221 dtor_did: ast::DefId,
222 class_did: ast::DefId,
223 substs: &subst::Substs<'tcx>)
226 let repr = adt::represent_type(bcx.ccx(), t);
228 // Find and call the actual destructor
229 let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t,
232 // The first argument is the "self" argument for drop
// Recover the dtor's LLVM parameter types from the function value itself,
// so the self pointer can be cast to exactly what the dtor expects.
233 let params = unsafe {
234 let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
235 ty.element_type().func_params()
// Determine the Rust-level `self` type from the dtor's (monomorphized)
// signature; anything but a bare fn type here is a compiler bug.
238 let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs);
239 let self_ty = match fty.sty {
240 ty::ty_bare_fn(_, ref f) => {
241 let sig = ty::erase_late_bound_regions(bcx.tcx(), &f.sig);
242 assert!(sig.inputs.len() == 1);
245 _ => bcx.sess().bug(&format!("Expected function type, found {}",
246 bcx.ty_to_string(fty))[])
// Split fat pointers into (data, info); sized values carry no extra info.
249 let (struct_data, info) = if type_is_sized(bcx.tcx(), t) {
252 let data = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
253 let info = GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]);
254 (Load(bcx, data), Some(Load(bcx, info)))
257 adt::fold_variants(bcx, &*repr, struct_data, |variant_cx, st, value| {
258 // Be sure to put all of the fields into a scope so we can use an invoke
259 // instruction to call the user destructor but still call the field
260 // destructors if the user destructor panics.
261 let field_scope = variant_cx.fcx.push_custom_cleanup_scope();
263 // Class dtors have no explicit args, so the params should
264 // just consist of the environment (self).
265 assert_eq!(params.len(), 1);
266 let self_arg = if type_is_fat_ptr(bcx.tcx(), self_ty) {
267 // The dtor expects a fat pointer, so make one, even if we have to fake it.
268 let boxed_ty = ty::mk_open(bcx.tcx(), t);
269 let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_self");
270 Store(bcx, value, GEPi(bcx, scratch.val, &[0, abi::FAT_PTR_ADDR]));
272 // If we just had a thin pointer, make a fat pointer by sticking
273 // null where we put the unsizing info. This works because t
274 // is a sized type, so we will only unpack the fat pointer, never
275 // use the fake info.
276 info.unwrap_or(C_null(Type::i8p(bcx.ccx()))),
277 GEPi(bcx, scratch.val, &[0, abi::FAT_PTR_EXTRA]));
278 PointerCast(variant_cx, scratch.val, params[0])
280 PointerCast(variant_cx, value, params[0])
282 let args = vec!(self_arg);
284 // Add all the fields as a value which needs to be cleaned at the end of
285 // this scope. Iterate in reverse order so a Drop impl doesn't reverse
286 // the order in which fields get dropped.
287 for (i, ty) in st.fields.iter().enumerate().rev() {
288 let llfld_a = adt::struct_field_ptr(variant_cx, &*st, value, i, false);
// Unsized fields need a synthesized fat pointer (data + the struct's own
// unsizing info) for their cleanup to operate on.
290 let val = if type_is_sized(bcx.tcx(), *ty) {
293 let boxed_ty = ty::mk_open(bcx.tcx(), *ty);
294 let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_field");
295 Store(bcx, llfld_a, GEPi(bcx, scratch.val, &[0, abi::FAT_PTR_ADDR]));
296 Store(bcx, info.unwrap(), GEPi(bcx, scratch.val, &[0, abi::FAT_PTR_EXTRA]));
299 variant_cx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),
// Invoke (not call) the user dtor so field cleanups run on unwind, then
// pop the scope to emit the field drops on the normal path.
303 let dtor_ty = ty::mk_ctor_fn(bcx.tcx(),
305 &[get_drop_glue_type(bcx.ccx(), t)],
306 ty::mk_nil(bcx.tcx()));
307 let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, DebugLoc::None);
309 variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
// Computes the runtime (size, align) of a possibly-unsized value as LLVM
// values. Sized types are resolved statically; for DSTs the `info` word of
// the fat pointer supplies the missing data: a struct tail recurses into its
// last field, a trait object reads entries 1 and 2 of its vtable, and
// slices/str multiply the stored length by the element size.
// NOTE(review): sampled listing — the outer `match` header, part of the
// `Select` computing the max alignment, and some closing lines are elided.
314 fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
315 -> (ValueRef, ValueRef) {
316 debug!("calculate size of DST: {}; with lost info: {}",
317 bcx.ty_to_string(t), bcx.val_to_string(info));
// Sized case: everything is known statically.
318 if type_is_sized(bcx.tcx(), t) {
319 let sizing_type = sizing_type_of(bcx.ccx(), t);
320 let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
321 let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
322 return (size, align);
// Unsized struct: static prefix size/align plus the dynamically-sized last
// field, combined as sum-of-sizes and max-of-aligns.
325 ty::ty_struct(id, substs) => {
327 // First get the size of all statically known fields.
328 // Don't use type_of::sizing_type_of because that expects t to be sized.
329 assert!(!ty::type_is_simd(bcx.tcx(), t));
330 let repr = adt::represent_type(ccx, t);
331 let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
332 let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
333 let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));
335 // Recurse to get the size of the dynamically sized field (must be
337 let fields = ty::struct_fields(bcx.tcx(), id, substs);
338 let last_field = fields[fields.len()-1];
339 let field_ty = last_field.mt.ty;
340 let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);
342 // Return the sum of sizes and max of aligns.
343 let size = Add(bcx, sized_size, unsized_size, DebugLoc::None);
344 let align = Select(bcx,
354 ty::ty_trait(..) => {
355 // info points to the vtable and the second entry in the vtable is the
356 // dynamic size of the object.
357 let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
358 let size_ptr = GEPi(bcx, info, &[1]);
359 let align_ptr = GEPi(bcx, info, &[2]);
360 (Load(bcx, size_ptr), Load(bcx, align_ptr))
362 ty::ty_vec(_, None) | ty::ty_str => {
363 let unit_ty = ty::sequence_element_type(bcx.tcx(), t);
364 // The info in this case is the length of the str, so the size is that
365 // times the unit size.
366 let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
367 let unit_align = llalign_of_min(bcx.ccx(), llunit_ty);
368 let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
369 (Mul(bcx, info, C_uint(bcx.ccx(), unit_size), DebugLoc::None),
370 C_uint(bcx.ccx(), unit_align))
// Any other unsized type reaching here is a compiler bug.
372 _ => bcx.sess().bug(&format!("Unexpected unsized type, found {}",
373 bcx.ty_to_string(t))[])
// Body generator for drop glue: given an alias `v0` of type `t`, emits the
// code that destroys the value. Dispatches on `t`'s kind — boxed values
// (with null checks, since a moved-out box is nulled), structs/enums with
// and without user dtors, closures, trait objects, and slices/str — and
// falls back to structural field-by-field drops.
// NOTE(review): sampled listing — the outer `match t.sty` header, several
// arm headers (e.g. the `ty_str` box case at 388, the sized-box case near
// 419), call argument tails, and closing braces are elided.
377 fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
378 -> Block<'blk, 'tcx> {
379 // NB: v0 is an *alias* of type t here, not a direct value.
380 let _icx = push_ctxt("make_drop_glue");
// Box<T>: inner dispatch on the boxed content's type.
382 ty::ty_uniq(content_ty) => {
383 match content_ty.sty {
384 ty::ty_vec(ty, None) => {
385 tvec::make_drop_glue_unboxed(bcx, v0, ty, true)
388 let unit_ty = ty::sequence_element_type(bcx.tcx(), content_ty);
389 tvec::make_drop_glue_unboxed(bcx, v0, unit_ty, true)
// Box<Trait>: null-check the data pointer, then call the destructor slot
// loaded from the vtable.
391 ty::ty_trait(..) => {
392 let lluniquevalue = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
393 // Only drop the value when it is non-null
394 let concrete_ptr = Load(bcx, lluniquevalue);
395 with_cond(bcx, IsNotNull(bcx, concrete_ptr), |bcx| {
396 let dtor_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]));
397 let dtor = Load(bcx, dtor_ptr);
400 &[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
// Box of an unsized struct: drop contents, then free using the size/align
// computed from the fat pointer's info word.
406 ty::ty_struct(..) if !type_is_sized(bcx.tcx(), content_ty) => {
407 let llval = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
408 let llbox = Load(bcx, llval);
409 let not_null = IsNotNull(bcx, llbox);
410 with_cond(bcx, not_null, |bcx| {
411 let bcx = drop_ty(bcx, v0, content_ty, DebugLoc::None);
412 let info = GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]);
413 let info = Load(bcx, info);
414 let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);
415 trans_exchange_free_dyn(bcx, llbox, llsize, llalign, DebugLoc::None)
// Box of a sized type: drop contents then free with static size/align.
419 assert!(type_is_sized(bcx.tcx(), content_ty));
421 let llbox = Load(bcx, llval);
422 let not_null = IsNotNull(bcx, llbox);
423 with_cond(bcx, not_null, |bcx| {
424 let bcx = drop_ty(bcx, llbox, content_ty, DebugLoc::None);
425 trans_exchange_free_ty(bcx, llbox, content_ty, DebugLoc::None)
// Structs/enums: run the user dtor if one exists (respecting the drop flag
// only for sized values — see FIXME #16758), else drop fields structurally.
430 ty::ty_struct(did, substs) | ty::ty_enum(did, substs) => {
432 match ty::ty_dtor(tcx, did) {
433 ty::TraitDtor(dtor, true) => {
434 // FIXME(16758) Since the struct is unsized, it is hard to
435 // find the drop flag (which is at the end of the struct).
436 // Lets just ignore the flag and pretend everything will be
438 if type_is_sized(bcx.tcx(), t) {
439 trans_struct_drop_flag(bcx, t, v0, dtor, did, substs)
441 // Give the user a heads up that we are doing something
442 // stupid and dangerous.
443 bcx.sess().warn(&format!("Ignoring drop flag in destructor for {}\
444 because the struct is unsized. See issue\
446 bcx.ty_to_string(t))[]);
447 trans_struct_drop(bcx, t, v0, dtor, did, substs)
450 ty::TraitDtor(dtor, false) => {
451 trans_struct_drop(bcx, t, v0, dtor, did, substs)
454 // No dtor? Just the default case
455 iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
459 ty::ty_closure(..) => {
460 iter_structural_ty(bcx,
463 |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
465 ty::ty_trait(..) => {
466 // No need to do a null check here (as opposed to the Box<trait case
467 // above), because this happens for a trait field in an unsized
468 // struct. If anything is null, it is the whole struct and we won't
470 let lluniquevalue = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
471 let dtor_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]));
472 let dtor = Load(bcx, dtor_ptr);
475 &[PointerCast(bcx, Load(bcx, lluniquevalue), Type::i8p(bcx.ccx()))],
480 ty::ty_vec(_, None) | ty::ty_str => {
481 let unit_ty = ty::sequence_element_type(bcx.tcx(), t);
482 tvec::make_drop_glue_unboxed(bcx, v0, unit_ty, false)
// Fallback: any remaining sized structural type has its components dropped
// one by one; non-structural or no-drop types emit nothing.
485 assert!(type_is_sized(bcx.tcx(), t));
486 if type_needs_drop(bcx.tcx(), t) && ty::type_is_structural(t) {
487 iter_structural_ty(bcx,
490 |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
498 // Generates the declaration for (but doesn't emit) a type descriptor.
// Declares the LLVM global for `t`'s tydesc (size, align, mangled name,
// interned type-name string) without filling in the glue pointers; bodies
// are attached later by `emit_tydescs`. Must run before `emit_tydescs`
// flips `finished_tydescs` — hence the assert.
// NOTE(review): sampled listing — the constructed `tydesc_info` return
// value and parts of the `LLVMAddGlobal` call are elided.
499 pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>)
500 -> tydesc_info<'tcx> {
501 // If emit_tydescs already ran, then we shouldn't be creating any new
503 assert!(!ccx.finished_tydescs().get());
505 let llty = type_of(ccx, t);
// Optional diagnostics: `-Z count-type-sizes` prints size/type pairs.
507 if ccx.sess().count_type_sizes() {
508 println!("{}\t{}", llsize_of_real(ccx, llty),
509 ppaux::ty_to_string(ccx.tcx(), t));
512 let llsize = llsize_of(ccx, llty);
513 let llalign = llalign_of(ccx, llty);
514 let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
515 debug!("+++ declare_tydesc {} {}", ppaux::ty_to_string(ccx.tcx(), t), name);
516 let buf = CString::from_slice(name.as_bytes());
518 llvm::LLVMAddGlobal(ccx.llmod(), ccx.tydesc_type().to_ref(),
// Record the symbol so the mangler never reuses it.
521 note_unique_llvm_symbol(ccx, name);
523 let ty_name = token::intern_and_get_ident(
524 &ppaux::ty_to_string(ccx.tcx(), t)[]);
525 let ty_name = C_str_slice(ccx, ty_name);
527 debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t));
// Declares (without defining) a C-ABI glue function named
// `glue_<name>` mangled with the type, records the symbol as used, and
// returns (symbol, llvm function value) for `get_drop_glue` to cache.
// NOTE(review): sampled listing — the middle arguments of the mangling
// call are elided.
537 fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>,
538 llfnty: Type, name: &str) -> (String, ValueRef) {
539 let _icx = push_ctxt("declare_generic_glue");
540 let fn_nm = mangle_internal_name_by_type_and_seq(
543 &format!("glue_{}", name)[]);
544 let llfn = decl_cdecl_fn(ccx, &fn_nm[], llfnty, ty::mk_nil(ccx.tcx()));
545 note_unique_llvm_symbol(ccx, fn_nm.clone());
546 return (fn_nm, llfn);
// Builds the body of a previously-declared glue function: sets up a fresh
// `FunctionContext` (unit return, empty substs), fetches the single by-alias
// pointer parameter, runs the supplied `helper` (e.g. `make_drop_glue`) to
// fill in the body, and finishes the function.
// NOTE(review): sampled listing — the remaining parameters (`t`, `llfn`,
// `helper`, `name`, presumably) and the trailing return are elided.
549 fn make_generic_glue<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
555 F: for<'blk> FnOnce(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>,
557 let _icx = push_ctxt("make_generic_glue");
558 let glue_name = format!("glue {} {}", name, ty_to_short_str(ccx.tcx(), t));
559 let _s = StatRecorder::new(ccx, glue_name);
561 let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
// The arena must outlive the FunctionContext allocated from it, so both are
// declared together before initialization.
562 let (arena, fcx): (TypedArena<_>, FunctionContext);
563 arena = TypedArena::new();
564 fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false,
565 ty::FnConverging(ty::mk_nil(ccx.tcx())),
566 empty_substs, None, &arena);
568 let bcx = init_function(&fcx, false, ty::FnConverging(ty::mk_nil(ccx.tcx())));
570 update_linkage(ccx, llfn, None, OriginalTranslation);
572 ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
573 // All glue functions take values passed *by alias*; this is a
574 // requirement since in many contexts glue is invoked indirectly and
575 // the caller has no idea if it's dealing with something that can be
578 // llfn is expected be declared to take a parameter of the appropriate
579 // type, so we don't need to explicitly cast the function parameter.
581 let llrawptr0 = get_param(llfn, fcx.arg_pos(0) as c_uint);
582 let bcx = helper(bcx, llrawptr0, t);
583 finish_fn(&fcx, bcx, ty::FnConverging(ty::mk_nil(ccx.tcx())), DebugLoc::None);
// Finalizes all declared tydescs: forbids further declarations, then for
// each cached tydesc builds the drop glue, casts it to the singleton generic
// glue-fn type, packs it into the tydesc struct, and installs that struct as
// the initializer of the tydesc global (constant, internal linkage).
// NOTE(review): sampled listing — some struct fields of `C_named_struct`
// and the surrounding unsafe/loop braces are elided.
588 pub fn emit_tydescs(ccx: &CrateContext) {
589 let _icx = push_ctxt("emit_tydescs");
590 // As of this point, allow no more tydescs to be created.
591 ccx.finished_tydescs().set(true);
592 let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to();
593 for (_, ti) in &*ccx.tydescs().borrow() {
594 // Each of the glue functions needs to be cast to a generic type
595 // before being put into the tydesc because we only have a singleton
596 // tydesc type. Then we'll recast each function to its real type when
598 let drop_glue = consts::ptrcast(get_drop_glue(ccx, ti.ty), glue_fn_ty);
599 ccx.stats().n_real_glues.set(ccx.stats().n_real_glues.get() + 1);
601 let tydesc = C_named_struct(ccx.tydesc_type(),
604 drop_glue, // drop_glue
// Attach the initializer and mark the global immutable + internal.
608 let gvar = ti.tydesc;
609 llvm::LLVMSetInitializer(gvar, tydesc);
610 llvm::LLVMSetGlobalConstant(gvar, True);
611 llvm::SetLinkage(gvar, llvm::InternalLinkage);