use crate::prelude::*;

fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    base: Value,
    layout: TyLayout<'tcx>,
    field: mir::Field,
) -> (Value, TyLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_ty = layout.field(&*fx, field.index());
    if field_offset.bytes() > 0 {
        (
            fx.bcx.ins().iadd_imm(base, field_offset.bytes() as i64),
            field_ty,
        )
    } else {
        (base, field_ty)
    }
}

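/// Offset of the second scalar in a `ScalarPair` layout: the first scalar's
/// size rounded up to the second scalar's ABI alignment. For example, for a
/// hypothetical pair of a `u8` and a `u64`, `a` occupies one byte and `b`
/// must be 8-byte aligned, so the returned offset is 8.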
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: &Scalar, b_scalar: &Scalar) -> i32 {
    let b_offset = a_scalar
        .value
        .size(&tcx)
        .align_to(b_scalar.value.align(&tcx).abi);
    b_offset.bytes().try_into().unwrap()
}

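/// A read-only value, independent of any particular location. The layout
/// describes the value's type; the private `CValueInner` records how the
/// value is represented at runtime.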
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

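/// How a `CValue` is represented: behind a pointer (`ByRef`), as a single
/// Cranelift SSA value (`ByVal`), or as two SSA values (`ByValPair`, used
/// for `ScalarPair` layouts such as fat pointers).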
#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Value),
    ByVal(Value),
    ByValPair(Value, Value),
}

impl<'tcx> CValue<'tcx> {
    pub fn by_ref(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(value), layout)
    }

    pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub fn layout(&self) -> TyLayout<'tcx> {
        self.1
    }

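    /// Spill the value to a stack slot if it is not already in memory and
    /// return its address.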
    pub fn force_stack<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(value) => value,
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout.ty);
                cplace.write_cvalue(fx, self);
                cplace.to_addr(fx)
            }
        }
    }

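    /// Return the address if the value is already `ByRef`, without forcing a
    /// spill; `None` otherwise.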
    pub fn try_to_addr(self) -> Option<Value> {
        match self.0 {
            CValueInner::ByRef(addr) => Some(addr),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value whose `layout.abi` is `Abi::Scalar`.
    pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let scalar = match layout.abi {
                    layout::Abi::Scalar(ref scalar) => scalar.clone(),
                    _ => unreachable!(),
                };
                let clif_ty = scalar_to_clif_type(fx.tcx, scalar);
                fx.bcx.ins().load(clif_ty, MemFlags::new(), addr, 0)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair whose `layout.abi` is `Abi::ScalarPair`.
    pub fn load_scalar_pair<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let (a_scalar, b_scalar) = match &layout.abi {
                    layout::Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!(),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar.clone());
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar.clone());
                let val1 = fx.bcx.ins().load(clif_ty1, MemFlags::new(), addr, 0);
                let val2 = fx.bcx.ins().load(clif_ty2, MemFlags::new(), addr, b_offset);
                (val1, val2)
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

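    /// Project to a field of an aggregate. Only implemented for `ByRef`
    /// values; the result is a `ByRef` value at the field's offset.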
    pub fn value_field<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        let base = match self.0 {
            CValueInner::ByRef(addr) => addr,
            _ => bug!("value_field for {:?}", self),
        };

        let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
        CValue::by_ref(field_ptr, field_layout)
    }

    pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

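    /// Create a constant of boolean, integer, or pointer type.
    ///
    /// A minimal usage sketch (hypothetical call site; assumes `fx` is the
    /// current `FunctionCx`): `CValue::const_val(fx, fx.tcx.types.i64,
    /// -1i64 as u128)`. The `as u128` cast performs the required sign
    /// extension.
    ///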
    /// If `ty` is signed, `const_val` must already be sign extended.
    pub fn const_val<'a>(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
        const_val: u128,
    ) -> CValue<'tcx> {
        let clif_ty = fx.clif_type(ty).unwrap();
        let layout = fx.layout_of(ty);

        let val = match ty.sty {
            ty::TyKind::Uint(UintTy::U128) | ty::TyKind::Int(IntTy::I128) => {
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx
                    .bcx
                    .ins()
                    .iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::TyKind::Bool => {
                assert!(
                    const_val == 0 || const_val == 1,
                    "Invalid bool 0x{:032X}",
                    const_val
                );
                fx.bcx.ins().iconst(types::I8, const_val as i64)
            }
            ty::TyKind::Uint(_) | ty::TyKind::Ref(..) | ty::TyKind::RawPtr(..) => fx
                .bcx
                .ins()
                .iconst(clif_ty, u64::try_from(const_val).expect("uint") as i64),
            ty::TyKind::Int(_) => fx.bcx.ins().iconst(clif_ty, const_val as i128 as i64),
            _ => panic!(
                "CValue::const_val for non bool/integer/pointer type {:?} is not allowed",
                ty
            ),
        };

        CValue::by_val(val, layout)
    }

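    /// Reinterpret the value as having a different layout without changing
    /// its runtime representation. No compatibility check is performed.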
    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        CValue(self.0, layout)
    }
}

/// A place where a value can be written to or read from.
#[derive(Debug, Copy, Clone)]
pub struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyLayout<'tcx>,
}

#[derive(Debug, Copy, Clone)]
pub enum CPlaceInner {
    /// An SSA variable (a Cranelift variable derived from the MIR local).
    Var(Local),
    /// A memory address, plus optional metadata for unsized places.
    Addr(Value, Option<Value>),
    /// A slot in the function's stack frame.
    Stack(StackSlot),
    /// A zero-sized place that needs no storage.
    NoPlace,
}

impl<'tcx> CPlace<'tcx> {
    pub fn layout(&self) -> TyLayout<'tcx> {
        self.layout
    }

    pub fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::NoPlace,
            layout,
        }
    }

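    /// Create a place backed by a fresh stack slot. Zero-sized types get
    /// `NoPlace`, since they need no storage.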
    pub fn new_stack_slot(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        let layout = fx.layout_of(ty);
        assert!(!layout.is_unsized());
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::NoPlace,
                layout,
            };
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace {
            inner: CPlaceInner::Stack(stack_slot),
            layout,
        }
    }

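    /// Create a place backed by a Cranelift variable for the given MIR local.
    /// The type must map to a single Cranelift scalar type (note the
    /// `clif_type(..).unwrap()`).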
    pub fn new_var(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx
            .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace {
            inner: CPlaceInner::Var(local),
            layout,
        }
    }

    pub fn for_addr(addr: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(addr, None),
            layout,
        }
    }

    pub fn for_addr_with_extra(addr: Value, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(addr, Some(extra)),
            layout,
        }
    }

    pub fn to_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(var) => CValue::by_val(fx.bcx.use_var(mir_var(var)), layout),
            CPlaceInner::Addr(addr, extra) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(addr, layout)
            }
            CPlaceInner::Stack(stack_slot) => CValue::by_ref(
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                layout,
            ),
            // A zero-sized place has no storage, so any non-null address will
            // do; nothing is ever actually read from it.
            CPlaceInner::NoPlace => CValue::by_ref(
                fx.bcx
                    .ins()
                    .iconst(fx.pointer_type, fx.pointer_type.bytes() as i64),
                layout,
            ),
        }
    }

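    /// Get the address of a sized place; calls `bug!` if the place turns out
    /// to be unsized.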
    pub fn to_addr(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        match self.to_addr_maybe_unsized(fx) {
            (addr, None) => addr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub fn to_addr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(addr, extra) => (addr, extra),
            CPlaceInner::Stack(stack_slot) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                None,
            ),
            // NoPlace is zero-sized; the address (an arbitrary constant here)
            // must never actually be dereferenced.
            CPlaceInner::NoPlace => (fx.bcx.ins().iconst(fx.pointer_type, 45), None),
            CPlaceInner::Var(_) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }

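    /// Store `from` into this place. First checks that the source type is
    /// assignable to the destination type, then writes using whichever
    /// strategy fits the representations: defining the SSA variable, one or
    /// two scalar stores, or a small memcpy for `ByRef` values.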
    pub fn write_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, from: CValue<'tcx>) {
        use rustc::hir::Mutability::*;

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;

        fn assert_assignable<'tcx>(
            fx: &FunctionCx<'_, 'tcx, impl Backend>,
            from_ty: Ty<'tcx>,
            to_ty: Ty<'tcx>,
        ) {
            match (&from_ty.sty, &to_ty.sty) {
                (ty::Ref(_, t, MutImmutable), ty::Ref(_, u, MutImmutable))
                | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutImmutable))
                | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutMutable)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, MutImmutable), ty::Ref(_, _, MutMutable)) => panic!(
                    "Can't assign value of type {} to place of type {}",
                    from_ty, to_ty
                ),
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    let from_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from_traits);
                    let to_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_traits);
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    assert_eq!(
                        from_ty, to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty, to_ty, fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);

        let dst_layout = self.layout();
        let addr = match self.inner {
            CPlaceInner::Var(var) => {
                let data = from.load_scalar(fx);
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlaceInner::Addr(addr, None) => addr,
            CPlaceInner::Stack(stack_slot) => {
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0)
            }
            CPlaceInner::NoPlace => {
                if dst_layout.abi != Abi::Uninhabited {
                    assert_eq!(dst_layout.size.bytes(), 0, "{:?}", dst_layout);
                }
                return;
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        match from.0 {
            CValueInner::ByVal(val) => {
                fx.bcx.ins().store(MemFlags::new(), val, addr, 0);
            }
            CValueInner::ByValPair(value, extra) => match dst_layout.abi {
                Abi::ScalarPair(ref a_scalar, ref b_scalar) => {
                    let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                    fx.bcx.ins().store(MemFlags::new(), value, addr, 0);
                    fx.bcx.ins().store(MemFlags::new(), extra, addr, b_offset);
                }
                _ => bug!(
                    "Non ScalarPair abi {:?} for ByValPair CValue",
                    dst_layout.abi
                ),
            },
            CValueInner::ByRef(from_addr) => {
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }

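    /// Project to a field of this place. For unsized fields the metadata of
    /// the base place is carried over.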
    pub fn place_field(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_addr_maybe_unsized(fx);

        let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
        if field_layout.is_unsized() {
            CPlace::for_addr_with_extra(field_ptr, extra.unwrap(), field_layout)
        } else {
            CPlace::for_addr(field_ptr, field_layout)
        }
    }

    pub fn place_index(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, addr) = match self.layout().ty.sty {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_addr(fx)),
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_addr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        // Byte offset of the element: index * size_of::<Elem>().
        let offset = fx
            .bcx
            .ins()
            .imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_addr(fx.bcx.ins().iadd(addr, offset), elem_layout)
    }

    pub fn place_deref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if !inner_layout.is_unsized() {
            CPlace::for_addr(self.to_cvalue(fx).load_scalar(fx), inner_layout)
        } else {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_addr_with_extra(addr, extra, inner_layout)
        }
    }

    pub fn write_place_ref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if !self.layout().is_unsized() {
            let ptr = CValue::by_val(self.to_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        } else {
            let (value, extra) = self.to_addr_maybe_unsized(fx);
            let ptr = CValue::by_val_pair(
                value,
                extra.expect("unsized type without metadata"),
                dest.layout(),
            );
            dest.write_cvalue(fx, ptr);
        }
    }

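    /// Reinterpret this place as having a different layout while keeping the
    /// same backing storage. Presumably only valid for layout-compatible
    /// casts, such as the enum variant downcast below.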
    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        match self.inner {
            CPlaceInner::NoPlace => {
                assert!(layout.size.bytes() == 0);
            }
            _ => {}
        }
        CPlace {
            inner: self.inner,
            layout,
        }
    }

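    /// Project to the layout of one specific enum variant of this place's
    /// type.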
    pub fn downcast_variant(
        self,
        fx: &FunctionCx<'_, 'tcx, impl Backend>,
        variant: VariantIdx,
    ) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}