fn codegen_field<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    base: Value,
    layout: TyLayout<'tcx>,
    field: mir::Field,
) -> (Value, TyLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_ty = layout.field(&*fx, field.index());
    if field_offset.bytes() > 0 {
        let field_ptr = fx.bcx.ins().iadd_imm(base, field_offset.bytes() as i64);
        (field_ptr, field_ty)
    } else {
        (base, field_ty)
    }
}
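
/// Compute the offset of the second scalar of a `ScalarPair`: the size of the
/// first scalar rounded up to the ABI alignment of the second.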
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: &Scalar, b_scalar: &Scalar) -> i32 {
    let b_offset = a_scalar.value.size(&tcx).align_to(b_scalar.value.align(&tcx).abi);
    b_offset.bytes().try_into().unwrap()
}
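
/// A read-only value.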
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Value),
    ByVal(Value),
    ByValPair(Value, Value),
}
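
/// Store a single scalar to `addr + offset`. An `i128` is split into two `i64`
/// halves and written as two adjacent 8-byte stores.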
fn store_scalar<'a, 'tcx: 'a>(fx: &mut FunctionCx<'a, 'tcx, impl Backend>, value: Value, addr: Value, offset: i32) {
    if fx.bcx.func.dfg.value_type(value) == types::I128 {
        let (a, b) = fx.bcx.ins().isplit(value);
        fx.bcx.ins().store(MemFlags::new(), a, addr, offset);
        fx.bcx.ins().store(MemFlags::new(), b, addr, offset + 8);
    } else {
        fx.bcx.ins().store(MemFlags::new(), value, addr, offset);
    }
}
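
/// Load a single scalar of Cranelift type `clif_ty` from `addr + offset`. An
/// `i128` is reassembled from two adjacent `i64` loads.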
fn load_scalar<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    clif_ty: Type,
    addr: Value,
    offset: i32,
) -> Value {
    if clif_ty == types::I128 {
        let a = fx.bcx.ins().load(types::I64, MemFlags::new(), addr, offset);
        let b = fx.bcx.ins().load(types::I64, MemFlags::new(), addr, offset + 8);
        fx.bcx.ins().iconcat(a, b)
    } else {
        fx.bcx.ins().load(clif_ty, MemFlags::new(), addr, offset)
    }
}

impl<'tcx> CValue<'tcx> {
    pub fn by_ref(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(value), layout)
    }

    pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub fn layout(&self) -> TyLayout<'tcx> {
        self.1
    }
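
    /// Spill the value to a stack slot if it is not already in memory and
    /// return the address it can be read from.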
    pub fn force_stack<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(value) => value,
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout.ty);
                cplace.write_cvalue(fx, self);
                cplace.to_addr(fx)
            }
        }
    }

    /// Load a value whose `layout.abi` is `Abi::Scalar`.
    pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let scalar = match layout.abi {
                    layout::Abi::Scalar(ref scalar) => scalar.clone(),
                    _ => unreachable!(),
                };
                let clif_ty = scalar_to_clif_type(fx.tcx, scalar);
                load_scalar(fx, clif_ty, addr, 0)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a pair of values whose `layout.abi` is `Abi::ScalarPair`.
    pub fn load_scalar_pair<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let (a_scalar, b_scalar) = match &layout.abi {
                    layout::Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!(),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar.clone());
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar.clone());
                let val1 = load_scalar(fx, clif_ty1, addr, 0);
                let val2 = load_scalar(fx, clif_ty2, addr, b_offset);
                (val1, val2)
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }
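
    /// Project to a field of this value. Only `ByRef` values are supported;
    /// the field is returned by reference as well.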
    pub fn value_field<'a>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        let base = match self.0 {
            CValueInner::ByRef(addr) => addr,
            _ => bug!("value_field for {:?}", self),
        };
        let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
        CValue::by_ref(field_ptr, field_layout)
    }

    pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }
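
    /// Create a constant `CValue` of type `ty` from an `i64`. For 128-bit
    /// types the upper 64 bits are filled with zero.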
    pub fn const_val<'a>(
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
        const_val: i64,
    ) -> CValue<'tcx> {
        let clif_ty = fx.clif_type(ty).unwrap();
        let layout = fx.layout_of(ty);
        let val = if clif_ty == types::I128 {
            // FIXME don't assume little-endian arch
            let lsb = fx.bcx.ins().iconst(types::I64, const_val);
            let msb = fx.bcx.ins().iconst(types::I64, 0);
            fx.bcx.ins().iconcat(lsb, msb)
        } else {
            fx.bcx.ins().iconst(clif_ty, const_val)
        };
        CValue::by_val(val, layout)
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        CValue(self.0, layout)
    }
}

/// A place where a value can be written to or read from.
#[derive(Debug, Copy, Clone)]
pub enum CPlace<'tcx> {
    /// A Cranelift SSA variable holding a scalar value.
    Var(Local, TyLayout<'tcx>),
    /// A value in memory at the given address, with optional unsized metadata.
    Addr(Value, Option<Value>, TyLayout<'tcx>),
    /// A value stored in an explicit stack slot.
    Stack(StackSlot, TyLayout<'tcx>),
    /// A zero-sized place that needs no storage.
    NoPlace(TyLayout<'tcx>),
}

impl<'a, 'tcx: 'a> CPlace<'tcx> {
    pub fn layout(&self) -> TyLayout<'tcx> {
        match *self {
            CPlace::Var(_, layout)
            | CPlace::Addr(_, _, layout)
            | CPlace::Stack(_, layout)
            | CPlace::NoPlace(layout) => layout,
        }
    }

    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace::NoPlace(layout)
    }

    pub fn new_stack_slot(
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        let layout = fx.layout_of(ty);
        assert!(!layout.is_unsized());
        if layout.size.bytes() == 0 {
            return CPlace::NoPlace(layout);
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace::Stack(stack_slot, layout)
    }

    pub fn new_var(
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx.declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace::Var(local, layout)
    }

    pub fn for_addr(addr: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace::Addr(addr, None, layout)
    }

    pub fn for_addr_with_extra(addr: Value, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace::Addr(addr, Some(extra), layout)
    }
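
    /// Read this place as a `CValue`: by value for variables, by reference
    /// for everything that lives in memory.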
    pub fn to_cvalue(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> CValue<'tcx> {
        match self {
            CPlace::Var(var, layout) => CValue::by_val(fx.bcx.use_var(mir_var(var)), layout),
            CPlace::Addr(addr, extra, layout) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(addr, layout)
            }
            CPlace::Stack(stack_slot, layout) => CValue::by_ref(
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                layout,
            ),
            // A zero-sized value is never actually loaded, so any well-aligned non-null address works.
            CPlace::NoPlace(layout) => CValue::by_ref(
                fx.bcx.ins().iconst(fx.pointer_type, fx.pointer_type.bytes() as i64),
                layout,
            ),
        }
    }
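
    /// Get the address of this place, asserting that it carries no unsized metadata.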
    pub fn to_addr(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value {
        match self.to_addr_maybe_unsized(fx) {
            (addr, None) => addr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub fn to_addr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    ) -> (Value, Option<Value>) {
        match self {
            CPlace::Addr(addr, extra, _layout) => (addr, extra),
            CPlace::Stack(stack_slot, _layout) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                None,
            ),
            // Dummy address: a NoPlace is zero-sized, so it is never dereferenced.
            CPlace::NoPlace(_) => (fx.bcx.ins().iconst(fx.pointer_type, 45), None),
            CPlace::Var(_, _) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }
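
    /// Write `from` into this place. The types are checked for compatibility,
    /// then the value is stored as a scalar, a scalar pair or a memcpy
    /// depending on its representation.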
    pub fn write_cvalue(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, from: CValue<'tcx>) {
        use rustc::hir::Mutability::*;

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;

        fn assert_assignable<'a, 'tcx: 'a>(fx: &FunctionCx<'a, 'tcx, impl Backend>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) {
            match (&from_ty.sty, &to_ty.sty) {
                (ty::Ref(_, t, MutImmutable), ty::Ref(_, u, MutImmutable))
                | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutImmutable))
                | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutMutable)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, MutImmutable), ty::Ref(_, _, MutMutable)) => {
                    panic!("Can't assign value of type {} to place of type {}", from_ty, to_ty)
                }
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    let from_traits = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        from_traits,
                    );
                    let to_traits = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        to_traits,
                    );
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    assert_eq!(
                        from_ty, to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty, to_ty, fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);

        let (addr, dst_layout) = match self {
            CPlace::Var(var, _) => {
                let data = from.load_scalar(fx);
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlace::Addr(addr, None, dst_layout) => (addr, dst_layout),
            CPlace::Stack(stack_slot, dst_layout) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                dst_layout,
            ),
            CPlace::NoPlace(layout) => {
                if layout.abi != Abi::Uninhabited {
                    assert_eq!(layout.size.bytes(), 0, "{:?}", layout);
                }
                return;
            }
            CPlace::Addr(_, _, _) => bug!("Can't write value to unsized place {:?}", self),
        };

        match from.0 {
            CValueInner::ByVal(val) => {
                store_scalar(fx, val, addr, 0);
            }
            CValueInner::ByValPair(value, extra) => match dst_layout.abi {
                Abi::ScalarPair(ref a_scalar, ref b_scalar) => {
                    let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                    store_scalar(fx, value, addr, 0);
                    store_scalar(fx, extra, addr, b_offset);
                }
                _ => bug!("Non ScalarPair abi {:?} for ByValPair CValue", dst_layout.abi),
            },
            CValueInner::ByRef(from_addr) => {
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                // Copy `size` bytes from `from_addr` to `addr`.
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }

    pub fn place_field(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, field: mir::Field) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_addr_maybe_unsized(fx);
        let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
        let extra = if field_layout.is_unsized() {
            assert!(extra.is_some());
            extra
        } else {
            None
        };
        CPlace::Addr(field_ptr, extra, field_layout)
    }
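
    /// Index into an array or slice, yielding a place for the selected element.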
    pub fn place_index(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, index: Value) -> CPlace<'tcx> {
        let (elem_layout, addr) = match self.layout().ty.sty {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_addr(fx)),
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_addr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };
        let offset = fx.bcx.ins().imul_imm(index, elem_layout.size.bytes() as i64);
        CPlace::Addr(fx.bcx.ins().iadd(addr, offset), None, elem_layout)
    }
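
    /// Dereference a pointer place, keeping the metadata if the pointee is unsized.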
    pub fn place_deref(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if !inner_layout.is_unsized() {
            CPlace::Addr(self.to_cvalue(fx).load_scalar(fx), None, inner_layout)
        } else {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::Addr(addr, Some(extra), inner_layout)
        }
    }
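
    /// Write a (thin or fat) pointer to this place into `dest`.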
    pub fn write_place_ref(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if !self.layout().is_unsized() {
            let ptr = CValue::by_val(self.to_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        } else {
            let (value, extra) = self.to_addr_maybe_unsized(fx);
            let ptr = CValue::by_val_pair(value, extra.expect("unsized type without metadata"), dest.layout());
            dest.write_cvalue(fx, ptr);
        }
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        match self {
            CPlace::Var(var, _) => CPlace::Var(var, layout),
            CPlace::Addr(addr, extra, _) => CPlace::Addr(addr, extra, layout),
            CPlace::Stack(stack_slot, _) => CPlace::Stack(stack_slot, layout),
            CPlace::NoPlace(_) => {
                assert!(layout.size.bytes() == 0);
                CPlace::NoPlace(layout)
            }
        }
    }
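
    /// Reinterpret this place with the layout of the given enum variant.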
    pub fn downcast_variant(self, fx: &FunctionCx<'a, 'tcx, impl Backend>, variant: VariantIdx) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}