1 //! See librustc_codegen_llvm/meth.rs for reference
// Fixed vtable slot indices for the three mandatory leading entries:
// the drop-in-place function pointer, the object's size, and its alignment.
// Trait method pointers follow these, starting at slot 3 (see the
// `(idx + 3)` offset used when loading a method from the vtable).
5 const DROP_FN_INDEX: usize = 0;
6 const SIZE_INDEX: usize = 1;
7 const ALIGN_INDEX: usize = 2;
// Memory flags applied to every vtable load: `trusted` marks the access as
// aligned and non-trapping, and `readonly` records that vtable contents are
// immutable, letting the code generator freely reorder/CSE these loads.
9 fn vtable_memflags() -> MemFlags {
10 let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
11 flags.set_readonly(); // A vtable is always read-only.
// Loads the drop-in-place function pointer out of slot `DROP_FN_INDEX` of the
// given `vtable` pointer. The byte offset is slot index times the size of a
// target `usize` (i.e. one pointer-sized slot per entry).
15 pub(crate) fn drop_fn_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
16 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Offset of the drop fn slot, in bytes, as required by the load instruction.
21 (DROP_FN_INDEX * usize_size) as i32,
// Loads the size of the concrete (erased) type from slot `SIZE_INDEX` of the
// given `vtable` pointer; this is the value written by `build_vtable` from
// `layout.size.bytes()`.
25 pub(crate) fn size_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
26 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the size slot within the vtable.
31 (SIZE_INDEX * usize_size) as i32,
// Loads the minimum alignment of the concrete (erased) type from slot
// `ALIGN_INDEX` of the given `vtable` pointer; this is the value written by
// `build_vtable` from `layout.align.abi.bytes()`.
35 pub(crate) fn min_align_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
36 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the alignment slot within the vtable.
41 (ALIGN_INDEX * usize_size) as i32,
// Splits a trait-object value into its data pointer and vtable pointer, then
// loads the function pointer for virtual method `idx` from the vtable.
45 pub(crate) fn get_ptr_and_method_ref<'tcx>(
46 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
// A fat pointer held in registers is a ScalarPair (data ptr, vtable ptr);
// otherwise it is read out of memory via `try_to_ptr`.
50 let (ptr, vtable) = if let Abi::ScalarPair(_, _) = arg.layout().abi {
51 arg.load_scalar_pair(fx)
53 let (ptr, vtable) = arg.try_to_ptr().unwrap();
60 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes();
61 let func_ref = fx.bcx.ins().load(
// Method slots start after the 3 fixed entries (drop fn, size, align),
// hence `idx + 3` pointer-sized slots from the vtable base.
65 ((idx + 3) * usize_size as usize) as i32,
// Returns a pointer value to the vtable for `layout.ty` as `trait_ref`
// (`None` presumably means a trait object with no principal trait — TODO
// confirm against callers). Vtables are memoized in `fx.vtables` keyed by
// `(ty, trait_ref)` so each one is built at most once per function context.
70 pub(crate) fn get_vtable<'tcx>(
71 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
72 layout: TyAndLayout<'tcx>,
73 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
// Fast path: reuse a previously built vtable for this (type, trait) pair.
75 let data_id = if let Some(data_id) = fx.vtables.get(&(layout.ty, trait_ref)) {
// Slow path: build the vtable data object and remember its id.
78 let data_id = build_vtable(fx, layout, trait_ref);
79 fx.vtables.insert((layout.ty, trait_ref), data_id);
// Materialize the data object's address as an SSA value in this function.
83 let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
84 fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
// Builds the vtable data object for `layout.ty` as `trait_ref` and defines it
// in the module, returning its `DataId`. Layout: slot 0 = drop-in-place fn,
// slot 1 = size, slot 2 = align, slots 3.. = resolved trait methods (a `None`
// component leaves its slot as zero bytes — presumably a non-callable entry;
// TODO confirm against rustc's vtable_methods contract).
87 fn build_vtable<'tcx>(
88 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
89 layout: TyAndLayout<'tcx>,
90 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
// One pointer-sized slot per vtable entry.
93 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Slot 0: the drop glue for the concrete type.
95 let drop_in_place_fn =
96 import_function(tcx, fx.module, Instance::resolve_drop_in_place(tcx, layout.ty));
// The two `None`s reserve the size and align slots, which are filled as raw
// integers by `write_usize` below rather than as function relocations.
98 let mut components: Vec<_> = vec![Some(drop_in_place_fn), None, None];
// With no principal trait there are no method slots to append.
101 let methods = if let Some(trait_ref) = trait_ref {
102 methods_root = tcx.vtable_methods(trait_ref.with_self_ty(tcx, layout.ty));
// Resolve each vtable method to a concrete monomorphized function.
107 let methods = methods.cloned().map(|opt_mth| {
108 opt_mth.map_or(None, |(def_id, substs)| {
109 Some(import_function(
112 Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), def_id, substs).unwrap(),
116 components.extend(methods);
118 let mut data_ctx = DataContext::new();
// Zero-filled backing buffer: one usize-sized slot per component.
119 let mut data = ::std::iter::repeat(0u8)
120 .take(components.len() * usize_size)
121 .collect::<Vec<u8>>()
// Fill the size and align slots with target-endian integers.
124 write_usize(fx.tcx, &mut data, SIZE_INDEX, layout.size.bytes());
125 write_usize(fx.tcx, &mut data, ALIGN_INDEX, layout.align.abi.bytes());
126 data_ctx.define(data);
// Emit a function-address relocation for every function-valued slot.
128 for (i, component) in components.into_iter().enumerate() {
129 if let Some(func_id) = component {
130 let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
131 data_ctx.write_function_addr((i * usize_size) as u32, func_ref);
// Symbol name for the vtable data object (debugging/identification only).
139 "__vtable.{}.for.{:?}",
142 .map(|trait_ref| format!("{:?}", trait_ref.skip_binder()).into())
143 .unwrap_or(std::borrow::Cow::Borrowed("???")),
// A duplicate definition means another codegen path already emitted an
// identical vtable under this name; that is benign, so it is ignored.
161 match fx.module.define_data(data_id, &data_ctx) {
162 Ok(()) | Err(cranelift_module::ModuleError::DuplicateDefinition(_)) => {}
// Writes `num` into `buf` at vtable slot `idx` as a target-`usize`-sized
// integer, honoring the target's endianness (not the host's).
169 fn write_usize(tcx: TyCtxt<'_>, buf: &mut [u8], idx: usize, num: u64) {
170 use byteorder::{BigEndian, LittleEndian, WriteBytesExt};
// Query the target's usize layout to get the slot width in bytes.
173 .layout_of(ParamEnv::reveal_all().and(tcx.types.usize))
// Borrow exactly the one slot being written; panics if idx is out of range.
177 let mut target = &mut buf[idx * usize_size..(idx + 1) * usize_size];
// Target endianness decides the byte order of the stored integer.
179 match tcx.data_layout.endian {
180 rustc_target::abi::Endian::Little => target.write_uint::<LittleEndian>(num, usize_size),
181 rustc_target::abi::Endian::Big => target.write_uint::<BigEndian>(num, usize_size),