[rust.git] / src/value_and_place.rs
Make it possible to use value_field for SIMD values stored ByVal
use crate::prelude::*;

use cranelift_codegen::ir::immediates::Offset32;

fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    base: Pointer,
    extra: Option<Value>,
    layout: TyLayout<'tcx>,
    field: mir::Field,
) -> (Pointer, TyLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    let simple = |fx: &mut FunctionCx<_>| {
        (
            base.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()),
            field_layout,
        )
    };

    if let Some(extra) = extra {
        if !field_layout.is_unsized() {
            return simple(fx);
        }
        match field_layout.ty.kind {
            ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(fx),
            ty::Adt(def, _) if def.repr.packed() => {
                assert_eq!(layout.align.abi.bytes(), 1);
                return simple(fx);
            }
            _ => {
                // We have to align the offset for DSTs.
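                // The instructions below compute
                //     offset = (unaligned_offset + unsized_align - 1) & -unsized_align,
                // i.e. the field offset rounded up to the dynamically computed alignment
                // of the unsized tail.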
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) =
                    crate::unsize::size_and_align_of_dst(fx, field_layout.ty, extra);

                let one = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (
                    base.offset_value(fx, offset),
                    field_layout,
                )
            }
        }
    } else {
        simple(fx)
    }
}

fn scalar_pair_calculate_b_offset(
    tcx: TyCtxt<'_>,
    a_scalar: &Scalar,
    b_scalar: &Scalar,
) -> Offset32 {
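    // The second element starts at the end of the first, rounded up to the
    // second element's alignment.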
    let b_offset = a_scalar
        .value
        .size(&tcx)
        .align_to(b_scalar.value.align(&tcx).abi);
    Offset32::new(b_offset.bytes().try_into().unwrap())
}

/// A read-only value
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Pointer),
    ByVal(Value),
    ByValPair(Value, Value),
}

impl<'tcx> CValue<'tcx> {
    pub fn by_ref(ptr: Pointer, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr), layout)
    }

    pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub fn layout(&self) -> TyLayout<'tcx> {
        self.1
    }

    pub fn force_stack<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Pointer {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr) => ptr,
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout.ty);
                cplace.write_cvalue(fx, self);
                cplace.to_ptr(fx)
            }
        }
    }

    pub fn try_to_addr(self) -> Option<Value> {
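        // Only a ByRef value whose pointer has no constant offset from its base
        // address can be returned directly.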
        match self.0 {
            CValueInner::ByRef(ptr) => {
                if let Some((base_addr, offset)) = ptr.try_get_addr_and_offset() {
                    if offset == Offset32::new(0) {
                        Some(base_addr)
                    } else {
                        None
                    }
                } else {
                    None
                }
            }
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value whose `layout.abi` is `Abi::Scalar` or `Abi::Vector`
    pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr) => {
                let clif_ty = match layout.abi {
                    layout::Abi::Scalar(ref scalar) => scalar_to_clif_type(fx.tcx, scalar.clone()),
                    layout::Abi::Vector { ref element, count } => {
                        scalar_to_clif_type(fx.tcx, element.clone())
                            .by(u16::try_from(count).unwrap())
                            .unwrap()
                    }
                    _ => unreachable!(),
                };
                ptr.load(fx, clif_ty, MemFlags::new())
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair whose `layout.abi` is `Abi::ScalarPair`
    pub fn load_scalar_pair<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr) => {
                let (a_scalar, b_scalar) = match &layout.abi {
                    layout::Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar.clone());
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar.clone());
                let val1 = ptr.load(fx, clif_ty1, MemFlags::new());
                let val2 = ptr.offset(fx, b_offset).load(fx, clif_ty2, MemFlags::new());
                (val1, val2)
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

    pub fn value_field<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        match self.0 {
            CValueInner::ByVal(val) => {
                match layout.abi {
                    layout::Abi::Vector { element: _, count } => {
                        let count = u8::try_from(count).expect("SIMD type with more than 255 lanes???");
                        let field = u8::try_from(field.index()).unwrap();
                        assert!(field < count);
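                        // The vector is kept in a register (ByVal), so the requested
                        // lane can be extracted directly instead of going through memory.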
                        let lane = fx.bcx.ins().extractlane(val, field);
                        let field_layout = layout.field(&*fx, usize::from(field));
                        CValue::by_val(lane, field_layout)
                    }
                    _ => unreachable!("value_field for ByVal with abi {:?}", layout.abi),
                }
            }
            CValueInner::ByRef(ptr) => {
                let (field_ptr, field_layout) = codegen_field(fx, ptr, None, layout, field);
                CValue::by_ref(field_ptr, field_layout)
            }
            _ => bug!("value_field for {:?}", self),
        }
    }

    pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

    /// If `ty` is signed, `const_val` must already be sign extended.
    pub fn const_val<'a>(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
        const_val: u128,
    ) -> CValue<'tcx> {
        let clif_ty = fx.clif_type(ty).unwrap();
        let layout = fx.layout_of(ty);

        let val = match ty.kind {
            ty::TyKind::Uint(UintTy::U128) | ty::TyKind::Int(IntTy::I128) => {
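                // There is no native 128-bit Cranelift type, so the constant is built
                // by concatenating its low and high 64-bit halves.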
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx
                    .bcx
                    .ins()
                    .iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::TyKind::Bool => {
                assert!(
                    const_val == 0 || const_val == 1,
                    "Invalid bool 0x{:032X}",
                    const_val
                );
                fx.bcx.ins().iconst(types::I8, const_val as i64)
            }
            ty::TyKind::Uint(_) | ty::TyKind::Ref(..) | ty::TyKind::RawPtr(..) => fx
                .bcx
                .ins()
                .iconst(clif_ty, u64::try_from(const_val).expect("uint") as i64),
            ty::TyKind::Int(_) => fx.bcx.ins().iconst(clif_ty, const_val as i128 as i64),
            _ => panic!(
                "CValue::const_val for non bool/integer/pointer type {:?} is not allowed",
                ty
            ),
        };

        CValue::by_val(val, layout)
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        CValue(self.0, layout)
    }
}

/// A place where a value can be written to or read from
#[derive(Debug, Copy, Clone)]
pub struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyLayout<'tcx>,
}

#[derive(Debug, Copy, Clone)]
pub enum CPlaceInner {
    Var(Local),
    Addr(Pointer, Option<Value>),
    NoPlace,
}

impl<'tcx> CPlace<'tcx> {
    pub fn layout(&self) -> TyLayout<'tcx> {
        self.layout
    }

    pub fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::NoPlace,
            layout,
        }
    }

    pub fn new_stack_slot(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        let layout = fx.layout_of(ty);
        assert!(!layout.is_unsized());
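        // Zero-sized types need no storage at all; represent them as NoPlace.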
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::NoPlace,
                layout,
            };
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace {
            inner: CPlaceInner::Addr(Pointer::stack_slot(stack_slot), None),
            layout,
        }
    }

    pub fn new_var(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx
            .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace {
            inner: CPlaceInner::Var(local),
            layout,
        }
    }

    pub fn for_ptr(ptr: Pointer, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(ptr, None),
            layout,
        }
    }

    pub fn for_ptr_with_extra(ptr: Pointer, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(ptr, Some(extra)),
            layout,
        }
    }

    pub fn to_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(var) => {
                let val = fx.bcx.use_var(mir_var(var));
                fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::from_u32(var.as_u32()));
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(ptr, extra) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(ptr, layout)
            }
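            // NoPlace has no backing memory; hand out a dangling but well-aligned
            // address, which is fine for the zero-sized values NoPlace is used for.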
            CPlaceInner::NoPlace => CValue::by_ref(
                Pointer::const_addr(fx, i64::try_from(self.layout.align.pref.bytes()).unwrap()),
                layout,
            ),
        }
    }

    pub fn to_ptr(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Pointer {
        match self.to_ptr_maybe_unsized(fx) {
            (ptr, None) => ptr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub fn to_ptr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Pointer, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(ptr, extra) => (ptr, extra),
            CPlaceInner::NoPlace => {
                (
                    Pointer::const_addr(fx, i64::try_from(self.layout.align.pref.bytes()).unwrap()),
                    None,
                )
            }
            CPlaceInner::Var(_) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }

    pub fn write_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, from: CValue<'tcx>) {
        #[cfg(debug_assertions)]
        {
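            // In debug builds, annotate the last emitted instruction with the
            // MIR-level assignment so the CLIF output is easier to follow.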
            use cranelift_codegen::cursor::{Cursor, CursorPosition};
            let cur_ebb = match fx.bcx.cursor().position() {
                CursorPosition::After(ebb) => ebb,
                _ => unreachable!(),
            };
            fx.add_comment(
                fx.bcx.func.layout.last_inst(cur_ebb).unwrap(),
                format!("write_cvalue: {:?} <- {:?}", self, from),
            );
        }

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;

        fn assert_assignable<'tcx>(
            fx: &FunctionCx<'_, 'tcx, impl Backend>,
            from_ty: Ty<'tcx>,
            to_ty: Ty<'tcx>,
        ) {
            match (&from_ty.kind, &to_ty.kind) {
                (ty::Ref(_, t, Mutability::Not), ty::Ref(_, u, Mutability::Not))
                | (ty::Ref(_, t, Mutability::Mut), ty::Ref(_, u, Mutability::Not))
                | (ty::Ref(_, t, Mutability::Mut), ty::Ref(_, u, Mutability::Mut)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, Mutability::Not), ty::Ref(_, _, Mutability::Mut)) => panic!(
                    "Can't assign value of type {} to place of type {}",
                    from_ty, to_ty
                ),
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    let from_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from_traits);
                    let to_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_traits);
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    assert_eq!(
                        from_ty,
                        to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty,
                        to_ty,
                        fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);

        let dst_layout = self.layout();
        let to_ptr = match self.inner {
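            // An SSA variable has no memory address; just redefine it with the new
            // value and return early.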
            CPlaceInner::Var(var) => {
                let data = from.load_scalar(fx);
                fx.bcx.set_val_label(data, cranelift_codegen::ir::ValueLabel::from_u32(var.as_u32()));
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlaceInner::Addr(ptr, None) => ptr,
            CPlaceInner::NoPlace => {
                if dst_layout.abi != Abi::Uninhabited {
                    assert_eq!(dst_layout.size.bytes(), 0, "{:?}", dst_layout);
                }
                return;
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        match from.0 {
            CValueInner::ByVal(val) => {
                to_ptr.store(fx, val, MemFlags::new());
            }
            CValueInner::ByValPair(value, extra) => match dst_layout.abi {
                Abi::ScalarPair(ref a_scalar, ref b_scalar) => {
                    let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                    to_ptr.store(fx, value, MemFlags::new());
                    to_ptr.offset(fx, b_offset).store(fx, extra, MemFlags::new());
                }
                _ => bug!(
                    "Non ScalarPair abi {:?} for ByValPair CValue",
                    dst_layout.abi
                ),
            },
            CValueInner::ByRef(from_ptr) => {
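                // Both source and destination live in memory; copy the destination's
                // size in bytes with a small inline memcpy.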
                let from_addr = from_ptr.get_addr(fx);
                let to_addr = to_ptr.get_addr(fx);
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    to_addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }

    pub fn place_field(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_ptr_maybe_unsized(fx);

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout)
        } else {
            CPlace::for_ptr(field_ptr, field_layout)
        }
    }

    pub fn place_index(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, ptr) = match self.layout().ty.kind {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_ptr(fx)),
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_ptr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

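        // Byte offset of the element: index * size_of::<element>().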
        let offset = fx
            .bcx
            .ins()
            .imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_ptr(ptr.offset_value(fx, offset), elem_layout)
    }

    pub fn place_deref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
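            // Pointers to unsized types are (address, metadata) pairs.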
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_ptr_with_extra(Pointer::new(addr), extra, inner_layout)
        } else {
            CPlace::for_ptr(Pointer::new(self.to_cvalue(fx).load_scalar(fx)), inner_layout)
        }
    }

    pub fn write_place_ref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (ptr, extra) = self.to_ptr_maybe_unsized(fx);
            let ptr = CValue::by_val_pair(
                ptr.get_addr(fx),
                extra.expect("unsized type without metadata"),
                dest.layout(),
            );
            dest.write_cvalue(fx, ptr);
        } else {
            let ptr = CValue::by_val(self.to_ptr(fx).get_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        }
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        match self.inner {
            CPlaceInner::NoPlace => {
                assert!(layout.size.bytes() == 0);
            }
            _ => {}
        }
        CPlace {
            inner: self.inner,
            layout,
        }
    }

    pub fn downcast_variant(
        self,
        fx: &FunctionCx<'_, 'tcx, impl Backend>,
        variant: VariantIdx,
    ) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}