use gccjit::{LValue, RValue, ToRValue, Type};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::{
    BaseTypeMethods,
    ConstMethods,
    DerivedTypeMethods,
    MiscMethods,
    StaticMethods,
};
use rustc_middle::mir::Mutability;
use rustc_middle::ty::ScalarInt;
use rustc_middle::ty::layout::{TyAndLayout, LayoutOf};
use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar};
use rustc_span::Symbol;
use rustc_target::abi::{self, HasDataLayout, Pointer, Size};

use crate::consts::const_alloc_to_gcc;
use crate::context::CodegenCx;
use crate::type_of::LayoutGccExt;
22 impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
23 pub fn const_bytes(&self, bytes: &[u8]) -> RValue<'gcc> {
24 bytes_in_context(self, bytes)
27 fn global_string(&self, string: &str) -> LValue<'gcc> {
28 // TODO(antoyo): handle non-null-terminated strings.
29 let string = self.context.new_string_literal(&*string);
30 let sym = self.generate_local_symbol_name("str");
31 let global = self.declare_private_global(&sym, self.val_ty(string));
32 global.global_set_initializer_rvalue(string);
34 // TODO(antoyo): set linkage.
38 pub fn bytes_in_context<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, bytes: &[u8]) -> RValue<'gcc> {
39 let context = &cx.context;
40 let byte_type = context.new_type::<u8>();
41 let typ = context.new_array_type(None, byte_type, bytes.len() as i32);
42 let elements: Vec<_> =
44 .map(|&byte| context.new_rvalue_from_int(byte_type, byte as i32))
46 context.new_array_constructor(None, typ, &elements)
49 pub fn type_is_pointer<'gcc>(typ: Type<'gcc>) -> bool {
50 typ.get_pointee().is_some()
53 impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
54 fn const_null(&self, typ: Type<'gcc>) -> RValue<'gcc> {
55 if type_is_pointer(typ) {
56 self.context.new_null(typ)
59 self.const_int(typ, 0)
63 fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
64 let local = self.current_func.borrow().expect("func")
65 .new_local(None, typ, "undefined");
66 if typ.is_struct().is_some() {
67 // NOTE: hack to workaround a limitation of the rustc API: see comment on
68 // CodegenCx.structs_as_pointer
69 let pointer = local.get_address(None);
70 self.structs_as_pointer.borrow_mut().insert(pointer);
78 fn const_int(&self, typ: Type<'gcc>, int: i64) -> RValue<'gcc> {
79 self.gcc_int(typ, int)
82 fn const_uint(&self, typ: Type<'gcc>, int: u64) -> RValue<'gcc> {
83 self.gcc_uint(typ, int)
86 fn const_uint_big(&self, typ: Type<'gcc>, num: u128) -> RValue<'gcc> {
87 self.gcc_uint_big(typ, num)
90 fn const_bool(&self, val: bool) -> RValue<'gcc> {
91 self.const_uint(self.type_i1(), val as u64)
94 fn const_i32(&self, i: i32) -> RValue<'gcc> {
95 self.const_int(self.type_i32(), i as i64)
98 fn const_u32(&self, i: u32) -> RValue<'gcc> {
99 self.const_uint(self.type_u32(), i as u64)
102 fn const_u64(&self, i: u64) -> RValue<'gcc> {
103 self.const_uint(self.type_u64(), i)
106 fn const_usize(&self, i: u64) -> RValue<'gcc> {
107 let bit_size = self.data_layout().pointer_size.bits();
109 // make sure it doesn't overflow
110 assert!(i < (1 << bit_size));
113 self.const_uint(self.usize_type, i)
116 fn const_u8(&self, _i: u8) -> RValue<'gcc> {
120 fn const_real(&self, _t: Type<'gcc>, _val: f64) -> RValue<'gcc> {
124 fn const_str(&self, s: Symbol) -> (RValue<'gcc>, RValue<'gcc>) {
125 let s_str = s.as_str();
126 let str_global = *self.const_str_cache.borrow_mut().entry(s).or_insert_with(|| {
127 self.global_string(s_str)
129 let len = s_str.len();
130 let cs = self.const_ptrcast(str_global.get_address(None),
131 self.type_ptr_to(self.layout_of(self.tcx.types.str_).gcc_type(self, true)),
133 (cs, self.const_usize(len as u64))
136 fn const_struct(&self, values: &[RValue<'gcc>], packed: bool) -> RValue<'gcc> {
137 let fields: Vec<_> = values.iter()
138 .map(|value| value.get_type())
140 // TODO(antoyo): cache the type? It's anonymous, so probably not.
141 let typ = self.type_struct(&fields, packed);
142 let struct_type = typ.is_struct().expect("struct type");
143 self.context.new_struct_constructor(None, struct_type.as_type(), None, values)
146 fn const_to_opt_uint(&self, _v: RValue<'gcc>) -> Option<u64> {
151 fn const_to_opt_u128(&self, _v: RValue<'gcc>, _sign_ext: bool) -> Option<u128> {
156 fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, ty: Type<'gcc>) -> RValue<'gcc> {
157 let bitsize = if layout.is_bool() { 1 } else { layout.value.size(self).bits() };
159 Scalar::Int(ScalarInt::ZST) => {
160 assert_eq!(0, layout.value.size(self).bytes());
161 self.const_undef(self.type_ix(0))
163 Scalar::Int(int) => {
164 let data = int.assert_bits(layout.value.size(self));
166 // FIXME(antoyo): there's some issues with using the u128 code that follows, so hard-code
167 // the paths for floating-point values.
168 if ty == self.float_type {
169 return self.context.new_rvalue_from_double(ty, f32::from_bits(data as u32) as f64);
171 else if ty == self.double_type {
172 return self.context.new_rvalue_from_double(ty, f64::from_bits(data as u64));
175 let value = self.const_uint_big(self.type_ix(bitsize), data);
176 // TODO(bjorn3): assert size is correct
177 self.const_bitcast(value, ty)
179 Scalar::Ptr(ptr, _size) => {
180 let (alloc_id, offset) = ptr.into_parts();
182 match self.tcx.global_alloc(alloc_id) {
183 GlobalAlloc::Memory(alloc) => {
184 let init = const_alloc_to_gcc(self, alloc);
185 let alloc = alloc.inner();
187 match alloc.mutability {
188 Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
189 _ => self.static_addr_of(init, alloc.align, None),
191 if !self.sess().fewer_names() {
192 // TODO(antoyo): set value name.
196 GlobalAlloc::Function(fn_instance) => {
197 self.get_fn_addr(fn_instance)
199 GlobalAlloc::Static(def_id) => {
200 assert!(self.tcx.is_static(def_id));
201 self.get_static(def_id).get_address(None)
204 let ptr_type = base_addr.get_type();
205 let base_addr = self.const_bitcast(base_addr, self.usize_type);
206 let offset = self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64);
207 let ptr = self.const_bitcast(base_addr + offset, ptr_type);
208 if layout.value != Pointer {
209 self.const_bitcast(ptr.dereference(None).to_rvalue(), ty)
212 self.const_bitcast(ptr, ty)
218 fn const_data_from_alloc(&self, alloc: ConstAllocation<'tcx>) -> Self::Value {
219 const_alloc_to_gcc(self, alloc)
222 fn from_const_alloc(&self, layout: TyAndLayout<'tcx>, alloc: ConstAllocation<'tcx>, offset: Size) -> PlaceRef<'tcx, RValue<'gcc>> {
223 assert_eq!(alloc.inner().align, layout.align.abi);
224 let ty = self.type_ptr_to(layout.gcc_type(self, true));
226 if layout.size == Size::ZERO {
227 let value = self.const_usize(alloc.inner().align.bytes());
228 self.context.new_cast(None, value, ty)
231 let init = const_alloc_to_gcc(self, alloc);
232 let base_addr = self.static_addr_of(init, alloc.inner().align, None);
234 let array = self.const_bitcast(base_addr, self.type_i8p());
235 let value = self.context.new_array_access(None, array, self.const_usize(offset.bytes())).get_address(None);
236 self.const_bitcast(value, ty)
238 PlaceRef::new_sized(value, layout)
241 fn const_ptrcast(&self, val: RValue<'gcc>, ty: Type<'gcc>) -> RValue<'gcc> {
242 self.context.new_cast(None, val, ty)
246 pub trait SignType<'gcc, 'tcx> {
247 fn is_signed(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
248 fn is_unsigned(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
249 fn to_signed(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
250 fn to_unsigned(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
253 impl<'gcc, 'tcx> SignType<'gcc, 'tcx> for Type<'gcc> {
254 fn is_signed(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
255 self.is_i8(cx) || self.is_i16(cx) || self.is_i32(cx) || self.is_i64(cx) || self.is_i128(cx)
258 fn is_unsigned(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
259 self.is_u8(cx) || self.is_u16(cx) || self.is_u32(cx) || self.is_u64(cx) || self.is_u128(cx)
262 fn to_signed(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
266 else if self.is_u16(cx) {
269 else if self.is_u32(cx) {
272 else if self.is_u64(cx) {
275 else if self.is_u128(cx) {
283 fn to_unsigned(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
287 else if self.is_i16(cx) {
290 else if self.is_i32(cx) {
293 else if self.is_i64(cx) {
296 else if self.is_i128(cx) {
305 pub trait TypeReflection<'gcc, 'tcx> {
306 fn is_uchar(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
307 fn is_ushort(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
308 fn is_uint(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
309 fn is_ulong(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
310 fn is_ulonglong(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
312 fn is_i8(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
313 fn is_u8(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
314 fn is_i16(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
315 fn is_u16(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
316 fn is_i32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
317 fn is_u32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
318 fn is_i64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
319 fn is_u64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
320 fn is_i128(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
321 fn is_u128(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
323 fn is_f32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
324 fn is_f64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool;
327 impl<'gcc, 'tcx> TypeReflection<'gcc, 'tcx> for Type<'gcc> {
328 fn is_uchar(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
329 self.unqualified() == cx.u8_type
332 fn is_ushort(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
333 self.unqualified() == cx.u16_type
336 fn is_uint(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
337 self.unqualified() == cx.uint_type
340 fn is_ulong(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
341 self.unqualified() == cx.ulong_type
344 fn is_ulonglong(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
345 self.unqualified() == cx.ulonglong_type
348 fn is_i8(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
349 self.unqualified() == cx.i8_type
352 fn is_u8(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
353 self.unqualified() == cx.u8_type
356 fn is_i16(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
357 self.unqualified() == cx.i16_type
360 fn is_u16(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
361 self.unqualified() == cx.u16_type
364 fn is_i32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
365 self.unqualified() == cx.i32_type
368 fn is_u32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
369 self.unqualified() == cx.u32_type
372 fn is_i64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
373 self.unqualified() == cx.i64_type
376 fn is_u64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
377 self.unqualified() == cx.u64_type
380 fn is_i128(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
381 self.unqualified() == cx.i128_type.unqualified()
384 fn is_u128(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
385 self.unqualified() == cx.u128_type.unqualified()
388 fn is_f32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
389 self.unqualified() == cx.context.new_type::<f32>()
392 fn is_f64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool {
393 self.unqualified() == cx.context.new_type::<f64>()