fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    base: Value,
    extra: Option<Value>,
    layout: TyLayout<'tcx>,
    field: mir::Field,
) -> (Value, TyLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    let simple = |fx: &mut FunctionCx<'_, 'tcx, _>| {
        if field_offset.bytes() > 0 {
            (
                fx.bcx.ins().iadd_imm(base, field_offset.bytes() as i64),
                field_layout,
            )
        } else {
            (base, field_layout)
        }
    };

    if let Some(extra) = extra {
        if !field_layout.is_unsized() {
            return simple(fx);
        }
        match field_layout.ty.kind {
            ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(fx),
            ty::Adt(def, _) if def.repr.packed() => {
                assert_eq!(layout.align.abi.bytes(), 1);
                return simple(fx);
            }
            _ => {
                // We have to align the offset for DSTs.
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) =
                    crate::unsize::size_and_align_of_dst(fx, field_layout.ty, extra);
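                // Round `unaligned_offset` up to `unsized_align` with the usual
                // bit trick: `(offset + align - 1) & -align`. For example, with
                // offset 5 and align 8: (5 + 7) & -8 = 8.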
                let one = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (fx.bcx.ins().iadd(base, offset), field_layout)
            }
        }
    } else {
        simple(fx)
    }
}
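
/// Offset of the second scalar of a `ScalarPair`: the size of the first scalar
/// rounded up to the alignment of the second.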
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: &Scalar, b_scalar: &Scalar) -> i32 {
    let b_offset = a_scalar
        .value
        .size(&tcx)
        .align_to(b_scalar.value.align(&tcx).abi);
    b_offset.bytes().try_into().unwrap()
}
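
/// A computed value: either stored in memory (`ByRef`) or held in one
/// (`ByVal`) or two (`ByValPair`) SSA values.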
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Value),
    ByVal(Value),
    ByValPair(Value, Value),
}
impl<'tcx> CValue<'tcx> {
    pub fn by_ref(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(value), layout)
    }

    pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub fn layout(&self) -> TyLayout<'tcx> {
        self.1
    }
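
    /// Spill the value to a stack slot if it is not already in memory and
    /// return the address of its backing storage.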
    pub fn force_stack(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(value) => value,
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout.ty);
                cplace.write_cvalue(fx, self);
                cplace.to_addr(fx)
            }
        }
    }
    pub fn try_to_addr(self) -> Option<Value> {
        match self.0 {
            CValueInner::ByRef(addr) => Some(addr),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }
    /// Load a value whose `layout.abi` is `Abi::Scalar`.
    pub fn load_scalar(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let scalar = match layout.abi {
                    layout::Abi::Scalar(ref scalar) => scalar.clone(),
                    _ => unreachable!("load_scalar({:?})", self),
                };
                let clif_ty = scalar_to_clif_type(fx.tcx, scalar);
                fx.bcx.ins().load(clif_ty, MemFlags::new(), addr, 0)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }
    /// Load a value pair whose `layout.abi` is `Abi::ScalarPair`.
    pub fn load_scalar_pair(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let (a_scalar, b_scalar) = match &layout.abi {
                    layout::Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar.clone());
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar.clone());
                let val1 = fx.bcx.ins().load(clif_ty1, MemFlags::new(), addr, 0);
                let val2 = fx.bcx.ins().load(clif_ty2, MemFlags::new(), addr, b_offset);
                (val1, val2)
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }
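
    /// Project to a field of this value. The value must be `ByRef`; immediates
    /// have no field addresses to hand out.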
    pub fn value_field(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        let base = match self.0 {
            CValueInner::ByRef(addr) => addr,
            _ => bug!("value_field for {:?}", self),
        };
        let (field_ptr, field_layout) = codegen_field(fx, base, None, layout, field);
        CValue::by_ref(field_ptr, field_layout)
    }
    pub fn unsize_value(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }
    /// If `ty` is signed, `const_val` must already be sign extended.
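    /// 128-bit constants are materialized as two `iconst.i64`s joined with
    /// `iconcat`, since Cranelift immediates are at most 64 bits wide.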
    pub fn const_val(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
        const_val: u128,
    ) -> CValue<'tcx> {
        let clif_ty = fx.clif_type(ty).unwrap();
        let layout = fx.layout_of(ty);

        let val = match ty.kind {
            ty::TyKind::Uint(UintTy::U128) | ty::TyKind::Int(IntTy::I128) => {
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx
                    .bcx
                    .ins()
                    .iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::TyKind::Bool => {
                assert!(
                    const_val == 0 || const_val == 1,
                    "Invalid bool 0x{:032X}",
                    const_val
                );
                fx.bcx.ins().iconst(types::I8, const_val as i64)
            }
            ty::TyKind::Uint(_) | ty::TyKind::Ref(..) | ty::TyKind::RawPtr(..) => fx
                .bcx
                .ins()
                .iconst(clif_ty, u64::try_from(const_val).expect("uint") as i64),
            ty::TyKind::Int(_) => fx.bcx.ins().iconst(clif_ty, const_val as i128 as i64),
            _ => panic!(
                "CValue::const_val for non bool/integer/pointer type {:?} is not allowed",
                ty
            ),
        };

        CValue::by_val(val, layout)
    }
    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        CValue(self.0, layout)
    }
}
/// A place where you can write a value to or read a value from.
#[derive(Debug, Copy, Clone)]
pub struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyLayout<'tcx>,
}

#[derive(Debug, Copy, Clone)]
pub enum CPlaceInner {
    Var(Local),
    Addr(Value, Option<Value>),
    Stack(StackSlot),
    NoPlace,
}
impl<'tcx> CPlace<'tcx> {
    pub fn layout(&self) -> TyLayout<'tcx> {
        self.layout
    }

    pub fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::NoPlace,
            layout,
        }
    }
    pub fn new_stack_slot(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        let layout = fx.layout_of(ty);
        assert!(!layout.is_unsized());
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::NoPlace,
                layout,
            };
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace {
            inner: CPlaceInner::Stack(stack_slot),
            layout,
        }
    }
    pub fn new_var(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx
            .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace {
            inner: CPlaceInner::Var(local),
            layout,
        }
    }
    pub fn for_addr(addr: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(addr, None),
            layout,
        }
    }

    pub fn for_addr_with_extra(addr: Value, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(addr, Some(extra)),
            layout,
        }
    }
    pub fn to_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(var) => {
                let val = fx.bcx.use_var(mir_var(var));
                fx.bcx
                    .set_val_label(val, cranelift::codegen::ir::ValueLabel::from_u32(var.as_u32()));
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(addr, extra) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(addr, layout)
            }
            CPlaceInner::Stack(stack_slot) => CValue::by_ref(
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                layout,
            ),
            // ZSTs have no backing storage; hand out a dangling but non-null pointer.
            CPlaceInner::NoPlace => CValue::by_ref(
                fx.bcx
                    .ins()
                    .iconst(fx.pointer_type, fx.pointer_type.bytes() as i64),
                layout,
            ),
        }
    }
    pub fn to_addr(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        match self.to_addr_maybe_unsized(fx) {
            (addr, None) => addr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }
    pub fn to_addr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(addr, extra) => (addr, extra),
            CPlaceInner::Stack(stack_slot) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                None,
            ),
            CPlaceInner::NoPlace => (
                fx.bcx.ins().iconst(
                    fx.pointer_type,
                    i64::try_from(self.layout.align.pref.bytes()).unwrap(),
                ),
                None,
            ),
            CPlaceInner::Var(_) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }
    pub fn write_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, from: CValue<'tcx>) {
        use rustc::hir::Mutability::*;

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;
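
        // Debug helper: assert that a value of type `from_ty` may be written to
        // a place of type `to_ty`, ignoring lifetimes (regions are already
        // erased at this point).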
        fn assert_assignable<'tcx>(
            fx: &FunctionCx<'_, 'tcx, impl Backend>,
            from_ty: Ty<'tcx>,
            to_ty: Ty<'tcx>,
        ) {
            match (&from_ty.kind, &to_ty.kind) {
                (ty::Ref(_, t, Immutable), ty::Ref(_, u, Immutable))
                | (ty::Ref(_, t, Mutable), ty::Ref(_, u, Immutable))
                | (ty::Ref(_, t, Mutable), ty::Ref(_, u, Mutable)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, Immutable), ty::Ref(_, _, Mutable)) => panic!(
                    "Can't assign value of type {} to place of type {}",
                    from_ty, to_ty
                ),
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    let from_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from_traits);
                    let to_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_traits);
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    assert_eq!(
                        from_ty, to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty, to_ty, fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);
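
        // Determine the destination address. SSA variables and zero-sized
        // places are handled without going through memory at all.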
        let dst_layout = self.layout();
        let addr = match self.inner {
            CPlaceInner::Var(var) => {
                let data = from.load_scalar(fx);
                fx.bcx
                    .set_val_label(data, cranelift::codegen::ir::ValueLabel::from_u32(var.as_u32()));
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlaceInner::Addr(addr, None) => addr,
            CPlaceInner::Stack(stack_slot) => {
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0)
            }
            CPlaceInner::NoPlace => {
                if dst_layout.abi != Abi::Uninhabited {
                    assert_eq!(dst_layout.size.bytes(), 0, "{:?}", dst_layout);
                }
                return;
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };
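
        // Write the value at `addr` according to how it is currently represented.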
        match from.0 {
            CValueInner::ByVal(val) => {
                fx.bcx.ins().store(MemFlags::new(), val, addr, 0);
            }
            CValueInner::ByValPair(value, extra) => match dst_layout.abi {
                Abi::ScalarPair(ref a_scalar, ref b_scalar) => {
                    let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                    fx.bcx.ins().store(MemFlags::new(), value, addr, 0);
                    fx.bcx.ins().store(MemFlags::new(), extra, addr, b_offset);
                }
                _ => bug!(
                    "Non ScalarPair abi {:?} for ByValPair CValue",
                    dst_layout.abi
                ),
            },
            CValueInner::ByRef(from_addr) => {
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }
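
    /// Project to a field of this place. For unsized fields the metadata of
    /// the place is carried over to the resulting place.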
    pub fn place_field(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_addr_maybe_unsized(fx);

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            CPlace::for_addr_with_extra(field_ptr, extra.unwrap(), field_layout)
        } else {
            CPlace::for_addr(field_ptr, field_layout)
        }
    }
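
    /// Index into an array or slice place: the element address is
    /// `base + index * size_of::<Elem>()`.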
    pub fn place_index(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, addr) = match self.layout().ty.kind {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_addr(fx)),
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_addr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        let offset = fx
            .bcx
            .ins()
            .imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_addr(fx.bcx.ins().iadd(addr, offset), elem_layout)
    }
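
    /// Dereference this pointer place. Fat pointers produce a place that keeps
    /// the metadata (slice length or vtable) as its `extra` value.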
    pub fn place_deref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_addr_with_extra(addr, extra, inner_layout)
        } else {
            CPlace::for_addr(self.to_cvalue(fx).load_scalar(fx), inner_layout)
        }
    }
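
    /// Write a pointer to this place into `dest`, as a thin or fat pointer
    /// depending on whether the pointee type carries metadata.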
    pub fn write_place_ref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (value, extra) = self.to_addr_maybe_unsized(fx);
            let ptr = CValue::by_val_pair(
                value,
                extra.expect("unsized type without metadata"),
                dest.layout(),
            );
            dest.write_cvalue(fx, ptr);
        } else {
            let ptr = CValue::by_val(self.to_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        }
    }
    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        match self.inner {
            CPlaceInner::NoPlace => {
                assert!(layout.size.bytes() == 0);
            }
            _ => {}
        }
        CPlace {
            inner: self.inner,
            layout,
        }
    }
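
    /// Reinterpret this place as a specific enum variant, adjusting only the
    /// layout; the address is unchanged.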
    pub fn downcast_variant(
        self,
        fx: &FunctionCx<'_, 'tcx, impl Backend>,
        variant: VariantIdx,
    ) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}