1 //! See librustc_codegen_llvm/meth.rs for reference
// Slot indices (in pointer-sized units) of the three fixed entries at the
// start of every vtable: the drop-in-place function pointer, then the
// concrete type's size and alignment. Trait methods follow these slots —
// see the `(idx + 3)` offset computation in `get_ptr_and_method_ref` below.
5 const DROP_FN_INDEX: usize = 0;
6 const SIZE_INDEX: usize = 1;
7 const ALIGN_INDEX: usize = 2;
// Memory flags attached to every load from a vtable, letting Cranelift
// treat the access as non-trapping, aligned, and read-only.
9 fn vtable_memflags() -> MemFlags {
10 let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
11 flags.set_readonly(); // A vtable is always read-only.
// Fetch the drop-in-place function pointer for a trait object from its
// vtable. The visible byte offset is `DROP_FN_INDEX * usize_size` (i.e. 0);
// the elided lines presumably issue the actual vtable load — confirm
// against the full file.
15 pub(crate) fn drop_fn_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
// Pointer-sized slot width on the target, in bytes.
16 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the drop-fn slot within the vtable.
21 (DROP_FN_INDEX * usize_size) as i32,
// Fetch the dynamic size of a trait object's concrete type from its vtable
// (slot `SIZE_INDEX`). The elided lines presumably issue the load using the
// visible byte offset — confirm against the full file.
25 pub(crate) fn size_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
// Pointer-sized slot width on the target, in bytes.
26 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the size slot within the vtable.
31 (SIZE_INDEX * usize_size) as i32,
// Fetch the minimum alignment of a trait object's concrete type from its
// vtable (slot `ALIGN_INDEX`). The elided lines presumably issue the load
// using the visible byte offset — confirm against the full file.
35 pub(crate) fn min_align_of_obj(fx: &mut FunctionCx<'_, '_, impl Backend>, vtable: Value) -> Value {
// Pointer-sized slot width on the target, in bytes.
36 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Byte offset of the align slot within the vtable.
41 (ALIGN_INDEX * usize_size) as i32,
// Split a fat trait-object value into its data pointer and the function
// pointer of method number `idx`, loaded from the vtable. Methods live
// after the three fixed slots (drop fn, size, align), hence `idx + 3`.
45 pub(crate) fn get_ptr_and_method_ref<'tcx>(
46 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
// A ScalarPair ABI means the fat pointer is held in two SSA values
// (data ptr + vtable ptr) and can be loaded directly as a pair.
50 let (ptr, vtable) = if let Abi::ScalarPair(_, _) = arg.layout().abi {
51 arg.load_scalar_pair(fx)
// Otherwise the value is behind a pointer; NOTE(review): the elided branch
// presumably covers by-ref trait objects — `try_to_ptr().unwrap()` will
// panic if `arg` has no backing pointer.
53 let (ptr, vtable) = arg.try_to_ptr().unwrap();
54 (ptr.get_addr(fx), vtable.unwrap())
// Pointer-sized slot width on the target, in bytes.
57 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes();
58 let func_ref = fx.bcx.ins().load(
// Skip the 3 fixed slots, then index into the method entries.
62 ((idx + 3) * usize_size as usize) as i32,
// Return an SSA pointer to the vtable for `layout.ty` implementing
// `trait_ref`. Vtables are built at most once per (type, trait) pair and
// memoized in `fx.cx.vtables`; subsequent calls reuse the cached data id.
67 pub(crate) fn get_vtable<'tcx>(
68 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
69 layout: TyAndLayout<'tcx>,
// `None` trait_ref is accepted here; the cache key includes it, so a
// vtable without a principal trait gets its own entry.
70 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
// Cache hit: reuse the previously defined data object.
72 let data_id = if let Some(data_id) = fx.cx.vtables.get(&(layout.ty, trait_ref)) {
// Cache miss: build the vtable data and remember it for next time.
75 let data_id = build_vtable(fx, layout, trait_ref);
76 fx.cx.vtables.insert((layout.ty, trait_ref), data_id);
// Make the data object addressable from the current function and
// materialize its address as a pointer-typed SSA value.
80 let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
81 fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
// Construct the vtable data object for `layout.ty` / `trait_ref`:
// a zero-initialized byte buffer of one pointer-sized slot per component,
// with size/align written as plain integers and function addresses patched
// in via relocations. Returns the module-level data id (elided here).
84 fn build_vtable<'tcx>(
85 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
86 layout: TyAndLayout<'tcx>,
87 trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
// Pointer-sized slot width on the target, in bytes.
90 let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
// Slot 0: the monomorphized (and polymorphized) drop_in_place instance.
92 let drop_in_place_fn = import_function(
95 Instance::resolve_drop_in_place(tcx, layout.ty).polymorphize(fx.tcx),
// Slots 1 and 2 are `None` placeholders — size and align are raw usizes,
// not function pointers, and are filled in by `write_usize` below.
98 let mut components: Vec<_> = vec![Some(drop_in_place_fn), None, None];
// Method entries only exist when there is a principal trait.
101 let methods = if let Some(trait_ref) = trait_ref {
102 methods_root = tcx.vtable_methods(trait_ref.with_self_ty(tcx, layout.ty));
// Resolve each vtable method to a concrete instance; `None` entries
// (e.g. methods not object-safe for this instantiation) stay null slots.
107 let methods = methods.cloned().map(|opt_mth| {
108 opt_mth.map_or(None, |(def_id, substs)| {
109 Some(import_function(
112 Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), def_id, substs)
114 .polymorphize(fx.tcx),
118 components.extend(methods);
120 let mut data_ctx = DataContext::new();
// Zero-initialize the whole table; unset slots remain null.
121 let mut data = ::std::iter::repeat(0u8)
122 .take(components.len() * usize_size)
123 .collect::<Vec<u8>>()
// Write the size and align slots as target-endian integers.
126 write_usize(fx.tcx, &mut data, SIZE_INDEX, layout.size.bytes());
127 write_usize(fx.tcx, &mut data, ALIGN_INDEX, layout.align.abi.bytes());
128 data_ctx.define(data);
// Emit a relocation for every function-pointer slot so the linker/JIT
// patches in the real address.
130 for (i, component) in components.into_iter().enumerate() {
131 if let Some(func_id) = component {
132 let func_ref = fx.cx.module.declare_func_in_data(func_id, &mut data_ctx);
133 data_ctx.write_function_addr((i * usize_size) as u32, func_ref);
// Symbol name for the vtable data object (declaration line elided).
142 "__vtable.{}.for.{:?}.{}",
145 .map(|trait_ref| format!("{:?}", trait_ref.skip_binder()).into())
// No principal trait: use a placeholder name component.
146 .unwrap_or(std::borrow::Cow::Borrowed("???")),
165 fx.cx.module.define_data(data_id, &data_ctx).unwrap();
// Write `num` into slot `idx` of `buf` as a target-pointer-sized integer,
// honoring the target's endianness. `idx` is in slots, not bytes.
170 fn write_usize(tcx: TyCtxt<'_>, buf: &mut [u8], idx: usize, num: u64) {
171 use byteorder::{BigEndian, LittleEndian, WriteBytesExt};
// Determine the target's usize width in bytes (binding line elided).
174 .layout_of(ParamEnv::reveal_all().and(tcx.types.usize))
// Take exactly the one slot being written; panics if `buf` is too short.
178 let mut target = &mut buf[idx * usize_size..(idx + 1) * usize_size];
// Byte order must match the *target*, not the host.
180 match tcx.data_layout.endian {
181 rustc_target::abi::Endian::Little => target.write_uint::<LittleEndian>(num, usize_size),
182 rustc_target::abi::Endian::Big => target.write_uint::<BigEndian>(num, usize_size),