1 // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 // Code relating to taking, dropping, etc as well as type descriptors.
18 use lib::llvm::{llvm, ValueRef, True};
20 use middle::lang_items::{FreeFnLangItem, ExchangeFreeFnLangItem};
22 use middle::trans::adt;
23 use middle::trans::base::*;
24 use middle::trans::build::*;
25 use middle::trans::callee;
26 use middle::trans::cleanup;
27 use middle::trans::cleanup::CleanupMethods;
28 use middle::trans::common::*;
29 use middle::trans::expr;
30 use middle::trans::machine::*;
31 use middle::trans::reflect;
32 use middle::trans::tvec;
33 use middle::trans::type_::Type;
34 use middle::trans::type_of::{type_of, sizing_type_of};
36 use util::ppaux::ty_to_short_str;
39 use arena::TypedArena;
40 use std::c_str::ToCStr;
44 use syntax::parse::token;
46 pub fn trans_free<'a>(cx: &'a Block<'a>, v: ValueRef) -> &'a Block<'a> {
47 let _icx = push_ctxt("trans_free");
48 callee::trans_lang_call(cx,
49 langcall(cx, None, "", FreeFnLangItem),
50 [PointerCast(cx, v, Type::i8p(cx.ccx()))],
51 Some(expr::Ignore)).bcx
54 fn trans_exchange_free<'a>(cx: &'a Block<'a>, v: ValueRef, size: u64,
55 align: u64) -> &'a Block<'a> {
56 let _icx = push_ctxt("trans_exchange_free");
58 callee::trans_lang_call(cx,
59 langcall(cx, None, "", ExchangeFreeFnLangItem),
60 [PointerCast(cx, v, Type::i8p(ccx)), C_uint(ccx, size as uint), C_uint(ccx, align as uint)],
61 Some(expr::Ignore)).bcx
64 pub fn trans_exchange_free_ty<'a>(bcx: &'a Block<'a>, ptr: ValueRef,
65 content_ty: ty::t) -> &'a Block<'a> {
66 let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
67 let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);
69 // `Box<ZeroSizeType>` does not allocate.
70 if content_size != 0 {
71 let content_align = llalign_of_min(bcx.ccx(), sizing_type);
72 trans_exchange_free(bcx, ptr, content_size, content_align)
78 pub fn take_ty<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
80 // NB: v is an *alias* of type t here, not a direct value.
81 let _icx = push_ctxt("take_ty");
82 match ty::get(t).sty {
83 ty::ty_box(_) => incr_refcnt_of_boxed(bcx, v),
84 _ if ty::type_is_structural(t)
85 && ty::type_needs_drop(bcx.tcx(), t) => {
86 iter_structural_ty(bcx, v, t, take_ty)
/// Maps `t` to the canonical type whose drop glue it can share, so that
/// structurally equivalent types reuse a single glue function (e.g.
/// boxes of non-droppable contents all collapse to a box of `i8`).
///
/// NOTE(review): several lines of this function appear to be missing
/// from this excerpt (the `tcx` binding, the early-return body, the
/// default arm of the inner match, and closing braces); the code below
/// is preserved exactly as found.
pub fn get_drop_glue_type(ccx: &CrateContext, t: ty::t) -> ty::t {
    // Types needing no drop at all can share one trivial glue.
    if !ty::type_needs_drop(tcx, t) {
    match ty::get(t).sty {
        // `@T` whose contents need no drop reduces to `@i8`.
        ty::ty_box(typ) if !ty::type_needs_drop(tcx, typ) =>
            ty::mk_box(tcx, ty::mk_i8()),

        // `~T` whose contents need no drop: unsized payloads (vectors,
        // strings, trait objects) keep their own type ...
        ty::ty_uniq(typ) if !ty::type_needs_drop(tcx, typ) => {
            match ty::get(typ).sty {
                ty::ty_vec(_, None) | ty::ty_str | ty::ty_trait(..) => t,
                // ... otherwise the allocation size is consulted:
                let llty = sizing_type_of(ccx, typ);
                // `Box<ZeroSizeType>` does not allocate.
                if llsize_of_alloc(ccx, llty) == 0 {
                // Sized, non-zero-size payloads reduce to `~i8`.
                ty::mk_uniq(tcx, ty::mk_i8())
119 pub fn drop_ty<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
121 // NB: v is an *alias* of type t here, not a direct value.
122 let _icx = push_ctxt("drop_ty");
124 if ty::type_needs_drop(bcx.tcx(), t) {
125 let glue = get_drop_glue(ccx, t);
126 let glue_type = get_drop_glue_type(ccx, t);
127 let ptr = if glue_type != t {
128 PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
132 Call(bcx, glue, [ptr], []);
137 pub fn drop_ty_immediate<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
139 let _icx = push_ctxt("drop_ty_immediate");
140 let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
145 pub fn get_drop_glue(ccx: &CrateContext, t: ty::t) -> ValueRef {
146 let t = get_drop_glue_type(ccx, t);
147 match ccx.drop_glues.borrow().find(&t) {
148 Some(&glue) => return glue,
152 let llfnty = Type::glue_fn(ccx, type_of(ccx, t).ptr_to());
153 let glue = declare_generic_glue(ccx, t, llfnty, "drop");
155 ccx.drop_glues.borrow_mut().insert(t, glue);
157 make_generic_glue(ccx, t, glue, make_drop_glue, "drop");
162 pub fn lazily_emit_visit_glue(ccx: &CrateContext, ti: &tydesc_info) -> ValueRef {
163 let _icx = push_ctxt("lazily_emit_visit_glue");
165 let llfnty = Type::glue_fn(ccx, type_of(ccx, ti.ty).ptr_to());
167 match ti.visit_glue.get() {
168 Some(visit_glue) => visit_glue,
170 debug!("+++ lazily_emit_tydesc_glue VISIT {}", ppaux::ty_to_string(ccx.tcx(), ti.ty));
171 let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "visit");
172 ti.visit_glue.set(Some(glue_fn));
173 make_generic_glue(ccx, ti.ty, glue_fn, make_visit_glue, "visit");
174 debug!("--- lazily_emit_tydesc_glue VISIT {}", ppaux::ty_to_string(ccx.tcx(), ti.ty));
// See [Note-arg-mode]
/// Invokes the visit (reflection) glue for `v`: directly via the
/// statically known glue function when `static_ti` is available,
/// otherwise indirectly through the tydesc's visit-glue slot.
///
/// NOTE(review): this excerpt appears to be missing several lines (the
/// `ccx` binding, the `else` arm of `llrawptr`, and most arms of the
/// final `match`); the code below is preserved exactly as found.
pub fn call_visit_glue(bcx: &Block, v: ValueRef, tydesc: ValueRef,
                       static_ti: Option<&tydesc_info>) {
    let _icx = push_ctxt("call_tydesc_glue_full");

    // NB: Don't short-circuit even if this block is unreachable because
    // GC-based cleanup needs to the see that the roots are live.
    if bcx.unreachable.get() && !ccx.sess().no_landing_pads() { return; }

    // Materialize the glue lazily when static type info is on hand.
    let static_glue_fn = static_ti.map(|sti| lazily_emit_visit_glue(ccx, sti));

    // When static type info is available, avoid casting to a generic pointer.
    let llrawptr = if static_glue_fn.is_none() {
        PointerCast(bcx, v, Type::i8p(ccx))

    match static_glue_fn {
        // Select out the glue function to call from the tydesc
        let llfnptr = GEPi(bcx, tydesc, [0u, abi::tydesc_field_visit_glue]);

    Call(bcx, llfn, [llrawptr], []);
/// Emits the body of the visit glue for `t`: casts the argument to the
/// visitor trait-object type and dispatches into the reflection
/// machinery.
///
/// NOTE(review): lines appear to be missing from this excerpt (the
/// return type, the success/failure arms around `visitor_object_ty`,
/// and the function's closing); the code below is preserved as found.
fn make_visit_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
    let _icx = push_ctxt("make_visit_glue");
    let (visitor_trait, object_ty) = match ty::visitor_object_ty(bcx.tcx(),
            // A missing visitor trait is unrecoverable at trans time.
            bcx.tcx().sess.fatal(s.as_slice());
    let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to());
    bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, visitor_trait.def_id);
/// Drop glue for a struct with a destructor guarded by a drop flag:
/// the destructor (and field drops) run only when the embedded flag
/// reads true, so already-dropped values are skipped.
///
/// NOTE(review): the `t: ty::t` and `v0: ValueRef` parameter lines and
/// the return type appear to be missing from this excerpt; the code
/// below is preserved exactly as found.
fn trans_struct_drop_flag<'a>(bcx: &'a Block<'a>,
                              dtor_did: ast::DefId,
                              class_did: ast::DefId,
                              substs: &subst::Substs)
    let repr = adt::represent_type(bcx.ccx(), t);
    let drop_flag = adt::trans_drop_flag_ptr(bcx, &*repr, v0);
    // Conditionally emit the real drop, keyed on the flag value.
    with_cond(bcx, load_ty(bcx, drop_flag, ty::mk_bool()), |cx| {
        trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
/// Drop glue for a struct with a user-defined destructor: invokes the
/// `Drop` impl through `invoke`, then drops each field via cleanups
/// scheduled in a custom scope so field drops run even if the user
/// destructor unwinds.
///
/// NOTE(review): the `t`/`v0` parameter lines and a few interior lines
/// appear to be missing from this excerpt; the code below is preserved
/// exactly as found.
fn trans_struct_drop<'a>(bcx: &'a Block<'a>,
                         dtor_did: ast::DefId,
                         class_did: ast::DefId,
                         substs: &subst::Substs)
    let repr = adt::represent_type(bcx.ccx(), t);

    // Find and call the actual destructor
    let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t,

    // The second argument is the "self" argument for drop
    let params = unsafe {
        let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
        ty.element_type().func_params()

    // Class dtors have no explicit args, so the params should
    // just consist of the environment (self)
    assert_eq!(params.len(), 1);

    // Be sure to put all of the fields into a scope so we can use an invoke
    // instruction to call the user destructor but still call the field
    // destructors if the user destructor fails.
    let field_scope = bcx.fcx.push_custom_cleanup_scope();

    let self_arg = PointerCast(bcx, v0, *params.get(0));
    let args = vec!(self_arg);

    // Add all the fields as a value which needs to be cleaned at the end of
    let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
    for (i, fld) in field_tys.iter().enumerate() {
        let llfld_a = adt::trans_field_ptr(bcx, &*repr, v0, 0, i);
        // Field drops fire from the custom scope, so they run whether
        // the destructor returns normally or unwinds.
        bcx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),

    let dtor_ty = ty::mk_ctor_fn(bcx.tcx(), ast::DUMMY_NODE_ID,
                                 [get_drop_glue_type(bcx.ccx(), t)], ty::mk_nil());
    // `invoke` (not a plain call) so the scheduled field cleanups still
    // run on unwind.
    let (_, bcx) = invoke(bcx, dtor_addr, args, dtor_ty, None);

    bcx.fcx.pop_and_trans_custom_cleanup_scope(bcx, field_scope)
/// Emits the body of the drop glue for type `t`, dispatching on the
/// type's shape: managed boxes, owned pointers (with per-payload cases),
/// structs with destructors, owned closures, and a structural fallback.
///
/// NOTE(review): a number of closing braces and some match-arm headers
/// appear to be missing from this excerpt; the code below is preserved
/// exactly as found, with comments marking each visible case.
fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'a> {
    // NB: v0 is an *alias* of type t here, not a direct value.
    let _icx = push_ctxt("make_drop_glue");
    match ty::get(t).sty {
        // Managed box: decrement the refcount, freeing at zero.
        ty::ty_box(body_ty) => {
            decr_refcnt_maybe_free(bcx, v0, body_ty)
        // Owned (unique) pointer: behavior depends on the pointee kind.
        ty::ty_uniq(content_ty) => {
            match ty::get(content_ty).sty {
                // `~[T]`: drop the elements, then free the allocation;
                // guarded on non-null since moved-from boxes are nulled.
                ty::ty_vec(mt, None) => {
                    let llbox = Load(bcx, v0);
                    let not_null = IsNotNull(bcx, llbox);
                    with_cond(bcx, not_null, |bcx| {
                        let bcx = tvec::make_drop_glue_unboxed(bcx, llbox, mt.ty);
                        // FIXME: #13994: the old `Box<[T]>` will not support sized deallocation
                        trans_exchange_free(bcx, llbox, 0, 8)
                // (string payload case) drop the bytes, then free.
                    let llbox = Load(bcx, v0);
                    let not_null = IsNotNull(bcx, llbox);
                    with_cond(bcx, not_null, |bcx| {
                        let unit_ty = ty::sequence_element_type(bcx.tcx(), t);
                        let bcx = tvec::make_drop_glue_unboxed(bcx, llbox, unit_ty);
                        // FIXME: #13994: the old `Box<str>` will not support sized deallocation
                        trans_exchange_free(bcx, llbox, 0, 8)
                // `~Trait`: call the destructor out of the vtable on the
                // boxed value, when it is non-null.
                ty::ty_trait(..) => {
                    let lluniquevalue = GEPi(bcx, v0, [0, abi::trt_field_box]);
                    // Only drop the value when it is non-null
                    with_cond(bcx, IsNotNull(bcx, Load(bcx, lluniquevalue)), |bcx| {
                        let dtor_ptr = Load(bcx, GEPi(bcx, v0, [0, abi::trt_field_vtable]));
                        let dtor = Load(bcx, dtor_ptr);
                            [PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
                // Default sized payload: drop the contents, then free by
                // content type (size/align derived from the type).
                    let llbox = Load(bcx, v0);
                    let not_null = IsNotNull(bcx, llbox);
                    with_cond(bcx, not_null, |bcx| {
                        let bcx = drop_ty(bcx, llbox, content_ty);
                        trans_exchange_free_ty(bcx, llbox, content_ty)
        // Structs: run the user dtor (with or without a drop flag), or
        // just drop the fields when there is no dtor.
        ty::ty_struct(did, ref substs) => {
            match ty::ty_dtor(tcx, did) {
                ty::TraitDtor(dtor, true) => {
                    trans_struct_drop_flag(bcx, t, v0, dtor, did, substs)
                ty::TraitDtor(dtor, false) => {
                    trans_struct_drop(bcx, t, v0, dtor, did, substs)
                // No dtor? Just the default case
                    iter_structural_ty(bcx, v0, t, drop_ty)
        // Owned closure: run the environment's destructor through the
        // box's tydesc slot, then free the environment allocation.
        ty::ty_closure(ref f) if f.store == ty::UniqTraitStore => {
            let box_cell_v = GEPi(bcx, v0, [0u, abi::fn_field_box]);
            let env = Load(bcx, box_cell_v);
            let env_ptr_ty = Type::at_box(bcx.ccx(), Type::i8(bcx.ccx())).ptr_to();
            let env = PointerCast(bcx, env, env_ptr_ty);
            with_cond(bcx, IsNotNull(bcx, env), |bcx| {
                let dtor_ptr = GEPi(bcx, env, [0u, abi::box_field_tydesc]);
                let dtor = Load(bcx, dtor_ptr);
                let cdata = GEPi(bcx, env, [0u, abi::box_field_body]);
                Call(bcx, dtor, [PointerCast(bcx, cdata, Type::i8p(bcx.ccx()))], []);

                // Free the environment itself
                // FIXME: #13994: pass align and size here
                trans_exchange_free(bcx, env, 0, 8)
        // Fallback: structural types with droppable contents iterate
        // their fields; everything else needs no glue at all.
            if ty::type_needs_drop(bcx.tcx(), t) &&
               ty::type_is_structural(t) {
                iter_structural_ty(bcx, v0, t, drop_ty)
/// Drop glue for a managed (`@`) box: decrements the refcount and, when
/// it reaches zero, drops the box body and frees the allocation. A null
/// box pointer (e.g. after a move) skips everything.
///
/// NOTE(review): the `fcx`/`ccx` bindings and the tail of the function
/// (the join into `next_bcx` and its return) appear to be missing from
/// this excerpt; the code below is preserved exactly as found.
fn decr_refcnt_maybe_free<'a>(bcx: &'a Block<'a>,
                              box_ptr_ptr: ValueRef,
                              t: ty::t) -> &'a Block<'a> {
    let _icx = push_ctxt("decr_refcnt_maybe_free");

    // Three-block CFG: decrement, conditional free, rejoin.
    let decr_bcx = fcx.new_temp_block("decr");
    let free_bcx = fcx.new_temp_block("free");
    let next_bcx = fcx.new_temp_block("next");

    // Null box pointer: nothing to do, branch straight to the join.
    let box_ptr = Load(bcx, box_ptr_ptr);
    let llnotnull = IsNotNull(bcx, box_ptr);
    CondBr(bcx, llnotnull, decr_bcx.llbb, next_bcx.llbb);

    // Decrement the refcount in place; free only when it hits zero.
    let rc_ptr = GEPi(decr_bcx, box_ptr, [0u, abi::box_field_refcnt]);
    let rc = Sub(decr_bcx, Load(decr_bcx, rc_ptr), C_int(ccx, 1));
    Store(decr_bcx, rc, rc_ptr);
    CondBr(decr_bcx, IsNull(decr_bcx, rc), free_bcx.llbb, next_bcx.llbb);

    // Free path: drop the body, then release the box allocation.
    let v = Load(free_bcx, box_ptr_ptr);
    let body = GEPi(free_bcx, v, [0u, abi::box_field_body]);
    let free_bcx = drop_ty(free_bcx, body, t);
    let free_bcx = trans_free(free_bcx, v);
    Br(free_bcx, next_bcx.llbb);
412 fn incr_refcnt_of_boxed<'a>(bcx: &'a Block<'a>,
413 box_ptr_ptr: ValueRef) -> &'a Block<'a> {
414 let _icx = push_ctxt("incr_refcnt_of_boxed");
416 let box_ptr = Load(bcx, box_ptr_ptr);
417 let rc_ptr = GEPi(bcx, box_ptr, [0u, abi::box_field_refcnt]);
418 let rc = Load(bcx, rc_ptr);
419 let rc = Add(bcx, rc, C_int(ccx, 1));
420 Store(bcx, rc, rc_ptr);
// Generates the declaration for (but doesn't emit) a type descriptor.
/// Declares the tydesc global for `t`: computes size/alignment, mangles
/// and registers a unique symbol, adds the LLVM global, and interns the
/// human-readable type name. Glue slots are filled in later by
/// `emit_tydescs`.
///
/// NOTE(review): the end of this function (construction of the returned
/// `tydesc_info` record) appears to be partially missing from this
/// excerpt; the code below is preserved exactly as found.
pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info {
    // If emit_tydescs already ran, then we shouldn't be creating any new
    assert!(!ccx.finished_tydescs.get());

    let llty = type_of(ccx, t);

    // Optional diagnostics: report the size of each translated type.
    if ccx.sess().count_type_sizes() {
        println!("{}\t{}", llsize_of_real(ccx, llty),
                 ppaux::ty_to_string(ccx.tcx(), t));

    let llsize = llsize_of(ccx, llty);
    let llalign = llalign_of(ccx, llty);
    let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
    debug!("+++ declare_tydesc {} {}", ppaux::ty_to_string(ccx.tcx(), t), name);
    let gvar = name.as_slice().with_c_str(|buf| {
            llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type().to_ref(), buf)
    note_unique_llvm_symbol(ccx, name);

    // Interned, human-readable type name stored in the descriptor.
    let ty_name = token::intern_and_get_ident(
        ppaux::ty_to_string(ccx.tcx(), t).as_slice());
    let ty_name = C_str_slice(ccx, ty_name);

    debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t));
        // Visit glue is generated lazily; starts unset.
        visit_glue: Cell::new(None),
464 fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type,
465 name: &str) -> ValueRef {
466 let _icx = push_ctxt("declare_generic_glue");
467 let fn_nm = mangle_internal_name_by_type_and_seq(
470 format!("glue_{}", name).as_slice());
471 debug!("{} is for type {}", fn_nm, ppaux::ty_to_string(ccx.tcx(), t));
472 let llfn = decl_cdecl_fn(ccx, fn_nm.as_slice(), llfnty, ty::mk_nil());
473 note_unique_llvm_symbol(ccx, fn_nm);
/// Builds the body of a previously declared glue function `llfn`: sets
/// up a fresh function context, marks the function internal, and
/// delegates the type-specific code generation to `helper`.
///
/// NOTE(review): part of the parameter list (the `t`, `llfn`, and
/// `name` arguments) and the closing of the function appear to be
/// missing from this excerpt; the code below is preserved as found.
fn make_generic_glue(ccx: &CrateContext,
                     helper: <'a> |&'a Block<'a>, ValueRef, ty::t|
    let _icx = push_ctxt("make_generic_glue");
    let glue_name = format!("glue {} {}", name, ty_to_short_str(ccx.tcx(), t));
    let _s = StatRecorder::new(ccx, glue_name);

    // Glue functions take no substitutions and return nil.
    let arena = TypedArena::new();
    let empty_param_substs = param_substs::empty();
    let fcx = new_fn_ctxt(ccx, llfn, -1, false, ty::mk_nil(),
                          &empty_param_substs, None, &arena);

    let bcx = init_function(&fcx, false, ty::mk_nil());

    lib::llvm::SetLinkage(llfn, lib::llvm::InternalLinkage);
    ccx.stats.n_glues_created.set(ccx.stats.n_glues_created.get() + 1u);
    // All glue functions take values passed *by alias*; this is a
    // requirement since in many contexts glue is invoked indirectly and
    // the caller has no idea if it's dealing with something that can be

    // llfn is expected be declared to take a parameter of the appropriate
    // type, so we don't need to explicitly cast the function parameter.

    let llrawptr0 = unsafe { llvm::LLVMGetParam(llfn, fcx.arg_pos(0) as c_uint) };
    let bcx = helper(bcx, llrawptr0, t);
    finish_fn(&fcx, bcx, ty::mk_nil());
/// Finalizes every declared tydesc: casts each type's drop glue (and
/// visit glue, when one was emitted) to the generic glue-fn pointer
/// type, writes the descriptor initializer, and freezes the global.
/// After this runs, `declare_tydesc` asserts against new declarations.
///
/// NOTE(review): some arms of the visit-glue match and fields of the
/// descriptor struct literal appear to be missing from this excerpt;
/// the code below is preserved exactly as found.
pub fn emit_tydescs(ccx: &CrateContext) {
    let _icx = push_ctxt("emit_tydescs");
    // As of this point, allow no more tydescs to be created.
    ccx.finished_tydescs.set(true);
    let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to();
    for (_, ti) in ccx.tydescs.borrow().iter() {
        // Each of the glue functions needs to be cast to a generic type
        // before being put into the tydesc because we only have a singleton
        // tydesc type. Then we'll recast each function to its real type when
        let drop_glue = unsafe {
            llvm::LLVMConstPointerCast(get_drop_glue(ccx, ti.ty), glue_fn_ty.to_ref())
        ccx.stats.n_real_glues.set(ccx.stats.n_real_glues.get() + 1);

        // Visit glue may never have been requested; track null vs real
        // slots in the stats counters.
        match ti.visit_glue.get() {
                ccx.stats.n_null_glues.set(ccx.stats.n_null_glues.get() +
                ccx.stats.n_real_glues.set(ccx.stats.n_real_glues.get() +
                llvm::LLVMConstPointerCast(v, glue_fn_ty.to_ref())

        // Write the initializer and freeze the descriptor global.
        let tydesc = C_named_struct(ccx.tydesc_type(),
                                    drop_glue, // drop_glue
                                    visit_glue, // visit_glue

        let gvar = ti.tydesc;
        llvm::LLVMSetInitializer(gvar, tydesc);
        llvm::LLVMSetGlobalConstant(gvar, True);
        lib::llvm::SetLinkage(gvar, lib::llvm::InternalLinkage);