use crate::prelude::*;

use cranelift_codegen::ir::immediates::Offset32;

fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    base: Pointer,
    extra: Option<Value>,
    layout: TyLayout<'tcx>,
    field: mir::Field,
) -> (Pointer, TyLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    let simple = |fx: &mut FunctionCx<_>| {
        (
            base.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()),
            field_layout,
        )
    };

    if let Some(extra) = extra {
        if !field_layout.is_unsized() {
            return simple(fx);
        }
        match field_layout.ty.kind {
            ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(fx),
            ty::Adt(def, _) if def.repr.packed() => {
                assert_eq!(layout.align.abi.bytes(), 1);
                return simple(fx);
            }
            _ => {
                // We have to align the offset for DSTs.
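                // This computes `align_up(unaligned_offset, unsized_align)`, i.e.
                // `(offset + align - 1) & -align`, where `-align` is produced as
                // `0 - align` below.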
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) = crate::unsize::size_and_align_of_dst(fx, field_layout, extra);

                let one = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (
                    base.offset_value(fx, offset),
                    field_layout,
                )
            }
        }
    } else {
        simple(fx)
    }
}

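/// Offset of the second component of a scalar pair: the size of the first
/// component rounded up to the alignment of the second.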
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: &Scalar, b_scalar: &Scalar) -> Offset32 {
    let b_offset = a_scalar
        .value
        .size(&tcx)
        .align_to(b_scalar.value.align(&tcx).abi);
    Offset32::new(b_offset.bytes().try_into().unwrap())
}

/// A read-only value
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

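// How a `CValue` is represented:
// - `ByRef`: the value lives in memory behind a pointer
// - `ByVal`: a single Cranelift SSA value
// - `ByValPair`: two SSA values, e.g. the (data, meta) halves of a fat pointer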
#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Pointer),
    ByVal(Value),
    ByValPair(Value, Value),
}

impl<'tcx> CValue<'tcx> {
    pub fn by_ref(ptr: Pointer, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr), layout)
    }

    pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub fn layout(&self) -> TyLayout<'tcx> {
        self.1
    }

    // FIXME remove
    pub fn force_stack<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Pointer {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr) => ptr,
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout);
                cplace.write_cvalue(fx, self);
                cplace.to_ptr(fx)
            }
        }
    }

    pub fn try_to_addr(self) -> Option<Value> {
        match self.0 {
            CValueInner::ByRef(ptr) => {
                if let Some((base_addr, offset)) = ptr.try_get_addr_and_offset() {
                    if offset == Offset32::new(0) {
                        Some(base_addr)
                    } else {
                        None
                    }
                } else {
                    None
                }
            }
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value with layout.abi of scalar
    pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr) => {
                let clif_ty = match layout.abi {
                    layout::Abi::Scalar(ref scalar) => scalar_to_clif_type(fx.tcx, scalar.clone()),
                    layout::Abi::Vector { ref element, count } => {
                        scalar_to_clif_type(fx.tcx, element.clone())
                            .by(u16::try_from(count).unwrap()).unwrap()
                    }
                    _ => unreachable!(),
                };
                ptr.load(fx, clif_ty, MemFlags::new())
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair with layout.abi of scalar pair
    pub fn load_scalar_pair<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr) => {
                let (a_scalar, b_scalar) = match &layout.abi {
                    layout::Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar.clone());
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar.clone());
                let val1 = ptr.load(fx, clif_ty1, MemFlags::new());
                let val2 = ptr.offset(fx, b_offset).load(fx, clif_ty2, MemFlags::new());
                (val1, val2)
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

    pub fn value_field<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        match self.0 {
            CValueInner::ByVal(val) => {
                match layout.abi {
                    layout::Abi::Vector { element: _, count } => {
                        let count = u8::try_from(count).expect("SIMD type with more than 255 lanes???");
                        let field = u8::try_from(field.index()).unwrap();
                        assert!(field < count);
                        let lane = fx.bcx.ins().extractlane(val, field);
                        let field_layout = layout.field(&*fx, usize::from(field));
                        CValue::by_val(lane, field_layout)
                    }
                    _ => unreachable!("value_field for ByVal with abi {:?}", layout.abi),
                }
            }
            CValueInner::ByRef(ptr) => {
                let (field_ptr, field_layout) = codegen_field(fx, ptr, None, layout, field);
                CValue::by_ref(field_ptr, field_layout)
            }
            _ => bug!("value_field for {:?}", self),
        }
    }

    pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

    /// If `ty` is signed, `const_val` must already be sign extended.
    pub fn const_val(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        layout: TyLayout<'tcx>,
        const_val: u128,
    ) -> CValue<'tcx> {
        let clif_ty = fx.clif_type(layout.ty).unwrap();

        match layout.ty.kind {
            ty::TyKind::Bool => {
                assert!(const_val == 0 || const_val == 1, "Invalid bool 0x{:032X}", const_val);
            }
            _ => {}
        }

        let val = match layout.ty.kind {
            ty::TyKind::Uint(UintTy::U128) | ty::TyKind::Int(IntTy::I128) => {
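                // Cranelift's `iconst` only takes a 64-bit immediate, so the
                // 128-bit constant is assembled from its low and high halves.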
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx
                    .bcx
                    .ins()
                    .iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::TyKind::Bool | ty::TyKind::Char | ty::TyKind::Uint(_) | ty::TyKind::Ref(..)
            | ty::TyKind::RawPtr(..) => {
                fx
                    .bcx
                    .ins()
                    .iconst(clif_ty, u64::try_from(const_val).expect("uint") as i64)
            }
            ty::TyKind::Int(_) => {
                let const_val = rustc::mir::interpret::sign_extend(const_val, layout.size);
                fx.bcx.ins().iconst(clif_ty, i64::try_from(const_val as i128).unwrap())
            }
            ty::TyKind::Float(FloatTy::F32) => {
                fx.bcx.ins().f32const(Ieee32::with_bits(u32::try_from(const_val).unwrap()))
            }
            ty::TyKind::Float(FloatTy::F64) => {
                fx.bcx.ins().f64const(Ieee64::with_bits(u64::try_from(const_val).unwrap()))
            }
            _ => panic!(
                "CValue::const_val for non bool/char/float/integer/pointer type {:?} is not allowed",
                layout.ty
            ),
        };

        CValue::by_val(val, layout)
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        CValue(self.0, layout)
    }
}

/// A place where you can write a value to or read a value from
#[derive(Debug, Copy, Clone)]
pub struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyLayout<'tcx>,
}

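// How a `CPlace` is addressed:
// - `Var`: a Cranelift variable, kept in SSA form instead of in memory
// - `Addr`: a memory location, plus optional metadata for unsized places
// - `NoPlace`: a zero-sized place that needs no storage at all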
#[derive(Debug, Copy, Clone)]
pub enum CPlaceInner {
    Var(Local),
    Addr(Pointer, Option<Value>),
    NoPlace,
}

impl<'tcx> CPlace<'tcx> {
    pub fn layout(&self) -> TyLayout<'tcx> {
        self.layout
    }

    pub fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::NoPlace,
            layout,
        }
    }

    pub fn new_stack_slot(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        assert!(!layout.is_unsized());
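        // Zero-sized types need no backing storage, so hand out `NoPlace` instead
        // of allocating an empty stack slot.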
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::NoPlace,
                layout,
            };
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace {
            inner: CPlaceInner::Addr(Pointer::stack_slot(stack_slot), None),
            layout,
        }
    }

    pub fn new_var(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx
            .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace {
            inner: CPlaceInner::Var(local),
            layout,
        }
    }

    pub fn for_ptr(ptr: Pointer, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(ptr, None),
            layout,
        }
    }

    pub fn for_ptr_with_extra(ptr: Pointer, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(ptr, Some(extra)),
            layout,
        }
    }

    pub fn to_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(var) => {
                let val = fx.bcx.use_var(mir_var(var));
                fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::from_u32(var.as_u32()));
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(ptr, extra) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(ptr, layout)
            }
            CPlaceInner::NoPlace => CValue::by_ref(
                Pointer::const_addr(fx, i64::try_from(self.layout.align.pref.bytes()).unwrap()),
                layout,
            ),
        }
    }

    pub fn to_ptr(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Pointer {
        match self.to_ptr_maybe_unsized(fx) {
            (ptr, None) => ptr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub fn to_ptr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Pointer, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(ptr, extra) => (ptr, extra),
            CPlaceInner::NoPlace => {
                (
                    Pointer::const_addr(fx, i64::try_from(self.layout.align.pref.bytes()).unwrap()),
                    None,
                )
            }
            CPlaceInner::Var(_) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }

    pub fn write_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, from: CValue<'tcx>) {
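        // In debug builds, attach a comment to the last emitted instruction so the
        // written-out clif IR shows which place/value this store corresponds to.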
        #[cfg(debug_assertions)]
        {
            use cranelift_codegen::cursor::{Cursor, CursorPosition};
            let cur_ebb = match fx.bcx.cursor().position() {
                CursorPosition::After(ebb) => ebb,
                _ => unreachable!(),
            };
            fx.add_comment(
                fx.bcx.func.layout.last_inst(cur_ebb).unwrap(),
                format!("write_cvalue: {:?} <- {:?}", self, from),
            );
        }

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;

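        // The source type must be assignable to the destination type: identical
        // except for lifetimes, `&mut T` -> `&T` weakening, and late-bound regions
        // in fn pointers and trait objects.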
        fn assert_assignable<'tcx>(
            fx: &FunctionCx<'_, 'tcx, impl Backend>,
            from_ty: Ty<'tcx>,
            to_ty: Ty<'tcx>,
        ) {
            match (&from_ty.kind, &to_ty.kind) {
                (ty::Ref(_, t, Mutability::Not), ty::Ref(_, u, Mutability::Not))
                | (ty::Ref(_, t, Mutability::Mut), ty::Ref(_, u, Mutability::Not))
                | (ty::Ref(_, t, Mutability::Mut), ty::Ref(_, u, Mutability::Mut)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, Mutability::Not), ty::Ref(_, _, Mutability::Mut)) => panic!(
                    "Can't assign value of type {} to place of type {}",
                    from_ty, to_ty
                ),
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    let from_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from_traits);
                    let to_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_traits);
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    assert_eq!(
                        from_ty,
                        to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty,
                        to_ty,
                        fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);

        let dst_layout = self.layout();
        let to_ptr = match self.inner {
            CPlaceInner::Var(var) => {
                let data = from.load_scalar(fx);
                fx.bcx.set_val_label(data, cranelift_codegen::ir::ValueLabel::from_u32(var.as_u32()));
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlaceInner::Addr(ptr, None) => ptr,
            CPlaceInner::NoPlace => {
                if dst_layout.abi != Abi::Uninhabited {
                    assert_eq!(dst_layout.size.bytes(), 0, "{:?}", dst_layout);
                }
                return;
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        match self.layout().abi {
            // FIXME make Abi::Vector work too
            Abi::Scalar(_) => {
                let val = from.load_scalar(fx);
                to_ptr.store(fx, val, MemFlags::new());
                return;
            }
            Abi::ScalarPair(ref a_scalar, ref b_scalar) => {
                let (value, extra) = from.load_scalar_pair(fx);
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                to_ptr.store(fx, value, MemFlags::new());
                to_ptr.offset(fx, b_offset).store(fx, extra, MemFlags::new());
                return;
            }
            _ => {}
        }

        match from.0 {
            CValueInner::ByVal(val) => {
                to_ptr.store(fx, val, MemFlags::new());
            }
            CValueInner::ByValPair(_, _) => {
                bug!(
                    "Non ScalarPair abi {:?} for ByValPair CValue",
                    dst_layout.abi
                );
            }
            CValueInner::ByRef(from_ptr) => {
                let from_addr = from_ptr.get_addr(fx);
                let to_addr = to_ptr.get_addr(fx);
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    to_addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }

    pub fn place_field(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_ptr_maybe_unsized(fx);

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout)
        } else {
            CPlace::for_ptr(field_ptr, field_layout)
        }
    }

    pub fn place_index(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, ptr) = match self.layout().ty.kind {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_ptr(fx)),
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_ptr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

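        // Byte offset of the indexed element: `index * elem_layout.size`.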
        let offset = fx
            .bcx
            .ins()
            .imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_ptr(ptr.offset_value(fx, offset), elem_layout)
    }

    pub fn place_deref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
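        // A pointer to an unsized type is an (addr, metadata) pair; keep the
        // metadata so the resulting place can itself be used as an unsized place.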
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_ptr_with_extra(Pointer::new(addr), extra, inner_layout)
        } else {
            CPlace::for_ptr(Pointer::new(self.to_cvalue(fx).load_scalar(fx)), inner_layout)
        }
    }

    pub fn write_place_ref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (ptr, extra) = self.to_ptr_maybe_unsized(fx);
            let ptr = CValue::by_val_pair(
                ptr.get_addr(fx),
                extra.expect("unsized type without metadata"),
                dest.layout(),
            );
            dest.write_cvalue(fx, ptr);
        } else {
            let ptr = CValue::by_val(self.to_ptr(fx).get_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        }
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        match self.inner {
            CPlaceInner::NoPlace => {
                assert!(layout.size.bytes() == 0);
            }
            _ => {}
        }
        CPlace {
            inner: self.inner,
            layout,
        }
    }

    pub fn downcast_variant(
        self,
        fx: &FunctionCx<'_, 'tcx, impl Backend>,
        variant: VariantIdx,
    ) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}