1 //! See librustc_codegen_llvm/meth.rs for reference
// Layout of a trait-object vtable, in pointer-sized slots:
//   slot 0 = `drop_in_place` fn pointer, slot 1 = object size,
//   slot 2 = object alignment; concrete trait methods start at slot 3
//   (matching the `(idx + 3)` offset used when loading a method below).
5 const DROP_FN_INDEX: usize = 0;
6 const SIZE_INDEX: usize = 1;
7 const ALIGN_INDEX: usize = 2;
// Loads the `drop_in_place` function pointer out of a vtable: it lives at
// byte offset `DROP_FN_INDEX * usize_size` (i.e. slot 0).
// NOTE(review): this listing elides several body lines (the load itself and
// the function's close); only the visible offset arithmetic is documented.
9 pub fn drop_fn_of_obj<'a, 'tcx: 'a>(
10 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
// One vtable slot is one target `usize`, measured in bytes.
13 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the drop-fn slot within the vtable.
18 (DROP_FN_INDEX * usize_size) as i32,
// Loads the pointee's size out of a vtable (slot `SIZE_INDEX` == 1).
// NOTE(review): interior body lines are elided in this listing.
22 pub fn size_of_obj<'a, 'tcx: 'a>(
23 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
// One vtable slot is one target `usize`, measured in bytes.
26 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the size slot within the vtable.
31 (SIZE_INDEX * usize_size) as i32,
// Loads the pointee's minimum alignment out of a vtable (slot `ALIGN_INDEX` == 2).
// NOTE(review): interior body lines are elided in this listing.
35 pub fn min_align_of_obj<'a, 'tcx: 'a>(
36 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
// One vtable slot is one target `usize`, measured in bytes.
39 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the align slot within the vtable.
44 (ALIGN_INDEX * usize_size) as i32,
// Splits a fat (trait-object) pointer into its data pointer and a loaded
// function reference for the method at vtable slot `idx`.
// NOTE(review): several interior lines are elided in this listing.
48 pub fn get_ptr_and_method_ref<'a, 'tcx: 'a>(
49 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
// A `Box<dyn Trait>` carries the same (data, vtable) pair; re-type it as
// `*mut T` so the generic pair load below applies uniformly.
53 let arg = if arg.layout().ty.is_box() {
54 // Cast `Box<T>` to `*mut T` so `load_value_pair` works
55 arg.unchecked_cast_to(fx.layout_of(fx.tcx.mk_mut_ptr(arg.layout().ty.boxed_ty())))
// First word is the data pointer, second word is the vtable pointer.
60 let (ptr, vtable) = arg.load_value_pair(fx);
61 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes();
62 let func_ref = fx.bcx.ins().load(
// Methods start at slot 3, after the drop-fn / size / align header slots.
66 ((idx + 3) * usize_size as usize) as i32,
// Returns a runtime pointer to the vtable of `ty` for `trait_ref`, building
// and caching the backing data object on first use.
// NOTE(review): interior body lines are elided in this listing.
71 pub fn get_vtable<'a, 'tcx: 'a>(
72 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
// `None` trait_ref corresponds to the no-principal-trait case.
74 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
// One vtable per (concrete type, trait) pair, memoized in `fx.caches`.
76 let data_id = if let Some(data_id) = fx.caches.vtables.get(&(ty, trait_ref)) {
79 let data_id = build_vtable(fx, ty, trait_ref);
80 fx.caches.vtables.insert((ty, trait_ref), data_id);
// Import the data object into the current function and take its address.
84 let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
85 fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
// Builds the static data object holding the vtable of `ty` for `trait_ref`:
// [drop_in_place, size, align, method 0, method 1, ...], one usize per slot.
// NOTE(review): several interior lines are elided in this listing.
88 fn build_vtable<'a, 'tcx: 'a>(
89 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
91 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
94 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Slot 0: monomorphized drop glue for `ty`.
96 let drop_in_place_fn = import_function(
99 crate::rustc_mir::monomorphize::resolve_drop_in_place(tcx, ty),
// The two `None`s are the size and align slots: plain integers written via
// `write_usize` below, not relocated function pointers.
102 let mut components: Vec<_> = vec![Some(drop_in_place_fn), None, None];
// With no principal trait there are no method slots to emit.
105 let methods = if let Some(trait_ref) = trait_ref {
106 methods_root = tcx.vtable_methods(trait_ref.with_self_ty(tcx, ty));
// `None` entries stay `None` and end up as zeroed (null) vtable slots;
// `Some` entries are resolved to concrete monomorphized functions.
111 let methods = methods.cloned().map(|opt_mth| {
112 opt_mth.map_or(None, |(def_id, substs)| {
113 Some(import_function(
116 Instance::resolve(tcx, ParamEnv::reveal_all(), def_id, substs).unwrap(),
120 components.extend(methods);
// Zero-initialize the whole vtable image, then patch the non-zero slots.
122 let mut data_ctx = DataContext::new();
123 let mut data = ::std::iter::repeat(0u8)
124 .take(components.len() * usize_size)
125 .collect::<Vec<u8>>()
// Size and align of the concrete `ty`, stored as raw target usizes.
128 let layout = tcx.layout_of(ParamEnv::reveal_all().and(ty)).unwrap();
129 write_usize(fx.tcx, &mut data, SIZE_INDEX, layout.size.bytes());
130 write_usize(fx.tcx, &mut data, ALIGN_INDEX, layout.align.abi.bytes());
131 data_ctx.define(data);
// Record a relocation for every function-pointer slot (drop glue + methods).
133 for (i, component) in components.into_iter().enumerate() {
134 if let Some(func_id) = component {
135 let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
136 data_ctx.write_function_addr((i * usize_size) as u32, func_ref);
// Symbol name encodes both the trait and the concrete type for debugging.
143 &format!("vtable.{:?}.for.{:?}", trait_ref, ty),
148 fx.module.define_data(data_id, &data_ctx).unwrap();
// Writes `num` into `buf` at vtable slot `idx`, using the target's pointer
// size and byte order as reported by the type context.
// NOTE(review): interior body lines are elided in this listing.
152 fn write_usize(tcx: TyCtxt, buf: &mut [u8], idx: usize, num: u64) {
153 use byteorder::{BigEndian, LittleEndian, WriteBytesExt};
// Query the target's `usize` layout; each vtable slot is that many bytes.
156 .layout_of(ParamEnv::reveal_all().and(tcx.types.usize))
// Window over exactly the `idx`-th slot; `write_uint` fills all of it.
160 let mut target = &mut buf[idx * usize_size..(idx + 1) * usize_size];
// Use the *target* endianness from the data layout, not the host's.
162 match tcx.data_layout.endian {
163 layout::Endian::Little => target.write_uint::<LittleEndian>(num, usize_size),
164 layout::Endian::Big => target.write_uint::<BigEndian>(num, usize_size),