// Alignment of 128-bit types is not currently handled; this will
// need to be fixed when PowerPC vector support is added.
5 use crate::abi::call::{ArgAbi, FnAbi, Reg, RegKind, Uniform};
6 use crate::abi::{Endian, HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
7 use crate::spec::HasTargetSpec;
/// The two 64-bit PowerPC ELF calling conventions this module distinguishes.
#[derive(Debug, Clone, Copy, PartialEq)]
enum ABI {
    ELFv1, // original ABI used for powerpc64 (big-endian)
    ELFv2, // newer ABI used for powerpc64le and musl (both endians)
}
// Bring the variants into scope so the classification code below can
// refer to them as bare `ELFv1` / `ELFv2`.
use ABI::*;
16 fn is_homogeneous_aggregate<'a, Ty, C>(
18 arg: &mut ArgAbi<'a, Ty>,
22 Ty: TyLayoutMethods<'a, C> + Copy,
23 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout,
25 arg.layout.homogeneous_aggregate(cx).unit().and_then(|unit| {
26 // ELFv1 only passes one-member aggregates transparently.
27 // ELFv2 passes up to eight uniquely addressable members.
28 if (abi == ELFv1 && arg.layout.size > unit.size)
29 || arg.layout.size > unit.size.checked_mul(8, cx).unwrap()
34 let valid_unit = match unit.kind {
35 RegKind::Integer => false,
36 RegKind::Float => true,
37 RegKind::Vector => arg.layout.size.bits() == 128,
40 valid_unit.then_some(Uniform { unit, total: arg.layout.size })
44 fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, abi: ABI)
46 Ty: TyLayoutMethods<'a, C> + Copy,
47 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout,
49 if !ret.layout.is_aggregate() {
50 ret.extend_integer_width_to(64);
54 // The ELFv1 ABI doesn't return aggregates in registers
60 if let Some(uniform) = is_homogeneous_aggregate(cx, ret, abi) {
65 let size = ret.layout.size;
66 let bits = size.bits();
68 let unit = if cx.data_layout().endian == Endian::Big {
69 Reg { kind: RegKind::Integer, size }
72 } else if bits <= 16 {
74 } else if bits <= 32 {
80 ret.cast_to(Uniform { unit, total: size });
87 fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI)
89 Ty: TyLayoutMethods<'a, C> + Copy,
90 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout,
92 if !arg.layout.is_aggregate() {
93 arg.extend_integer_width_to(64);
97 if let Some(uniform) = is_homogeneous_aggregate(cx, arg, abi) {
102 let size = arg.layout.size;
103 let (unit, total) = if size.bits() <= 64 {
104 // Aggregates smaller than a doubleword should appear in
105 // the least-significant bits of the parameter doubleword.
106 (Reg { kind: RegKind::Integer, size }, size)
108 // Aggregates larger than a doubleword should be padded
109 // at the tail to fill out a whole number of doublewords.
110 let reg_i64 = Reg::i64();
111 (reg_i64, size.align_to(reg_i64.align(cx)))
114 arg.cast_to(Uniform { unit, total });
117 pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
119 Ty: TyLayoutMethods<'a, C> + Copy,
120 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec,
122 let abi = if cx.target_spec().target_env == "musl" {
125 match cx.data_layout().endian {
126 Endian::Big => ELFv1,
127 Endian::Little => ELFv2,
131 if !fn_abi.ret.is_ignore() {
132 classify_ret(cx, &mut fn_abi.ret, abi);
135 for arg in &mut fn_abi.args {
139 classify_arg(cx, arg, abi);