1 //! See librustc_codegen_llvm/meth.rs for reference
// Slot indices of the fixed vtable header. Every vtable starts with three
// usize-sized slots — drop-in-place fn ptr, object size, object alignment —
// written by build_vtable below; trait-method pointers follow at slot 3+.
5 const DROP_FN_INDEX: usize = 0;
6 const SIZE_INDEX: usize = 1;
7 const ALIGN_INDEX: usize = 2;
/// Memory flags applied to every load from a vtable.
///
/// NOTE(review): the tail of this body is elided in this excerpt;
/// presumably it returns `flags` — confirm against the full file.
9 fn vtable_memflags() -> MemFlags {
10 let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
11 flags.set_readonly(); // A vtable is always read-only.
/// Loads the drop-in-place function pointer from the given vtable
/// (header slot `DROP_FN_INDEX`, byte offset 0).
15 pub fn drop_fn_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
16 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// NOTE(review): intervening lines elided; this offset presumably feeds a
// `fx.bcx.ins().load(..)` from `vtable` using vtable_memflags() — confirm.
21 (DROP_FN_INDEX * usize_size) as i32,
/// Loads the size of the pointed-to object from the given vtable
/// (header slot `SIZE_INDEX`).
25 pub fn size_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
26 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// NOTE(review): intervening lines elided; offset presumably passed to a
// vtable load, mirroring drop_fn_of_obj — confirm in full file.
31 (SIZE_INDEX * usize_size) as i32,
/// Loads the minimum alignment of the pointed-to object from the given
/// vtable (header slot `ALIGN_INDEX`).
35 pub fn min_align_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
36 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// NOTE(review): intervening lines elided; offset presumably passed to a
// vtable load, mirroring drop_fn_of_obj — confirm in full file.
41 (ALIGN_INDEX * usize_size) as i32,
/// Splits a fat (trait-object) value into its data pointer and the
/// function pointer loaded from the vtable.
///
/// NOTE(review): several parameters/lines are elided in this excerpt;
/// `idx` appears to be the method's index within the trait's method list.
45 pub fn get_ptr_and_method_ref<'tcx>(
46 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
// Fat pointers are scalar pairs: (data ptr, vtable ptr).
50 let (ptr, vtable) = arg.load_scalar_pair(fx);
51 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes();
52 let func_ref = fx.bcx.ins().load(
// `idx + 3` skips the three fixed header slots (drop fn, size, align).
56 ((idx + 3) * usize_size as usize) as i32,
/// Returns the vtable for `(ty, trait_ref)` as a pointer-typed SSA value.
///
/// Vtables are memoized in `fx.caches.vtables`: on a cache miss the data
/// object is built once via `build_vtable` and the resulting `DataId` is
/// reused for all later requests with the same key.
61 pub fn get_vtable<'tcx>(
62 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
64 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
66 let data_id = if let Some(data_id) = fx.caches.vtables.get(&(ty, trait_ref)) {
69 let data_id = build_vtable(fx, ty, trait_ref);
70 fx.caches.vtables.insert((ty, trait_ref), data_id);
// Materialize the data object's address inside the current function.
74 let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
75 fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
/// Builds the vtable data object for `(ty, trait_ref)` and returns its
/// `DataId`.
///
/// Layout (one usize-sized slot each):
///   [0] drop_in_place fn ptr, [1] size, [2] align, [3..] trait methods.
/// Function-pointer slots are emitted as relocations; size/align are
/// written as raw target-endian bytes via `write_usize`.
78 fn build_vtable<'tcx>(
79 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
81 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
84 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
86 let drop_in_place_fn =
87 import_function(tcx, fx.module, Instance::resolve_drop_in_place(tcx, ty));
// `None` placeholders for the size/align slots, which hold plain integers
// rather than function relocations.
89 let mut components: Vec<_> = vec![Some(drop_in_place_fn), None, None];
92 let methods = if let Some(trait_ref) = trait_ref {
93 methods_root = tcx.vtable_methods(trait_ref.with_self_ty(tcx, ty));
// A `None` method entry is a non-object-safe/unresolvable slot; it stays
// a null (all-zero) slot in the emitted vtable.
98 let methods = methods.cloned().map(|opt_mth| {
99 opt_mth.map_or(None, |(def_id, substs)| {
100 Some(import_function(
103 Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), def_id, substs).unwrap(),
107 components.extend(methods);
109 let mut data_ctx = DataContext::new();
// Zero-initialized backing buffer, one usize slot per component.
110 let mut data = ::std::iter::repeat(0u8)
111 .take(components.len() * usize_size)
112 .collect::<Vec<u8>>()
115 let layout = tcx.layout_of(ParamEnv::reveal_all().and(ty)).unwrap();
116 write_usize(fx.tcx, &mut data, SIZE_INDEX, layout.size.bytes());
117 write_usize(fx.tcx, &mut data, ALIGN_INDEX, layout.align.abi.bytes());
118 data_ctx.define(data);
// Patch function-pointer slots with relocations to the imported functions.
120 for (i, component) in components.into_iter().enumerate() {
121 if let Some(func_id) = component {
122 let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
123 data_ctx.write_function_addr((i * usize_size) as u32, func_ref);
// NOTE(review): data declaration lines elided; the name below presumably
// feeds `fx.module.declare_data(..)` — confirm in full file.
130 &format!("vtable.{:?}.for.{:?}", trait_ref, ty),
// Two functions may race to define the same vtable symbol; a duplicate
// definition is therefore tolerated, not treated as an error.
145 match fx.module.define_data(data_id, &data_ctx) {
146 Ok(()) | Err(cranelift_module::ModuleError::DuplicateDefinition(_)) => {}
/// Writes `num` as a target-endian, usize-sized integer into slot `idx`
/// of `buf` (slots are `usize_size` bytes apart).
///
/// NOTE(review): lines computing `usize_size` from the layout, and the
/// `.unwrap()` of the write result, are elided in this excerpt — confirm.
153 fn write_usize(tcx: TyCtxt, buf: &mut [u8], idx: usize, num: u64) {
154 use byteorder::{BigEndian, LittleEndian, WriteBytesExt};
157 .layout_of(ParamEnv::reveal_all().and(tcx.types.usize))
161 let mut target = &mut buf[idx * usize_size..(idx + 1) * usize_size];
// Endianness comes from the compilation target, not the host.
163 match tcx.data_layout.endian {
164 layout::Endian::Little => target.write_uint::<LittleEndian>(num, usize_size),
165 layout::Endian::Big => target.write_uint::<BigEndian>(num, usize_size),