// FIXME:
// Alignment of 128 bit types is not currently handled, this will
// need to be fixed when PowerPC vector support is added.
5 use crate::abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
6 use crate::abi::{Endian, HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
7 use crate::spec::HasTargetSpec;
/// The two 64-bit PowerPC ELF calling conventions this module distinguishes.
/// The choice affects how aggregates are passed and returned (see
/// `is_homogeneous_aggregate` and `classify_ret_ty` below).
#[derive(Debug, Clone, Copy, PartialEq)]
enum ABI {
    ELFv1, // original ABI used for powerpc64 (big-endian)
    ELFv2, // newer ABI used for powerpc64le and musl (both endians)
}
// The classifier functions below refer to the variants unqualified.
use ABI::*;
16 fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, abi: ABI)
18 where Ty: TyLayoutMethods<'a, C> + Copy,
19 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
21 arg.layout.homogeneous_aggregate(cx).unit().and_then(|unit| {
22 // ELFv1 only passes one-member aggregates transparently.
23 // ELFv2 passes up to eight uniquely addressable members.
24 if (abi == ELFv1 && arg.layout.size > unit.size)
25 || arg.layout.size > unit.size.checked_mul(8, cx).unwrap() {
29 let valid_unit = match unit.kind {
30 RegKind::Integer => false,
31 RegKind::Float => true,
32 RegKind::Vector => arg.layout.size.bits() == 128
38 total: arg.layout.size
46 fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>, abi: ABI)
47 where Ty: TyLayoutMethods<'a, C> + Copy,
48 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
50 if !ret.layout.is_aggregate() {
51 ret.extend_integer_width_to(64);
55 // The ELFv1 ABI doesn't return aggregates in registers
61 if let Some(uniform) = is_homogeneous_aggregate(cx, ret, abi) {
66 let size = ret.layout.size;
67 let bits = size.bits();
69 let unit = if cx.data_layout().endian == Endian::Big {
70 Reg { kind: RegKind::Integer, size }
73 } else if bits <= 16 {
75 } else if bits <= 32 {
91 fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, abi: ABI)
92 where Ty: TyLayoutMethods<'a, C> + Copy,
93 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
95 if !arg.layout.is_aggregate() {
96 arg.extend_integer_width_to(64);
100 if let Some(uniform) = is_homogeneous_aggregate(cx, arg, abi) {
101 arg.cast_to(uniform);
105 let size = arg.layout.size;
106 let (unit, total) = if size.bits() <= 64 {
107 // Aggregates smaller than a doubleword should appear in
108 // the least-significant bits of the parameter doubleword.
109 (Reg { kind: RegKind::Integer, size }, size)
111 // Aggregates larger than a doubleword should be padded
112 // at the tail to fill out a whole number of doublewords.
113 let reg_i64 = Reg::i64();
114 (reg_i64, size.align_to(reg_i64.align(cx)))
117 arg.cast_to(Uniform {
123 pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
124 where Ty: TyLayoutMethods<'a, C> + Copy,
125 C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
127 let abi = if cx.target_spec().target_env == "musl" {
130 match cx.data_layout().endian {
131 Endian::Big => ELFv1,
132 Endian::Little => ELFv2
136 if !fty.ret.is_ignore() {
137 classify_ret_ty(cx, &mut fty.ret, abi);
140 for arg in &mut fty.args {
141 if arg.is_ignore() { continue; }
142 classify_arg_ty(cx, arg, abi);