// rust.git: src/value_and_place.rs ([WIP] debuginfo for locals)
use crate::prelude::*;

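/// Computes the address and layout of `field` inside a value that starts at `base`.
/// For sized fields this is just a constant offset; for unsized tail fields the offset
/// is aligned at runtime using the size/align information derived from `extra` (the
/// pointer metadata of the outer value).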
fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    base: Value,
    extra: Option<Value>,
    layout: TyLayout<'tcx>,
    field: mir::Field,
) -> (Value, TyLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    let simple = |fx: &mut FunctionCx<_>| {
        if field_offset.bytes() > 0 {
            (
                fx.bcx.ins().iadd_imm(base, field_offset.bytes() as i64),
                field_layout,
            )
        } else {
            (base, field_layout)
        }
    };

    if let Some(extra) = extra {
        if !field_layout.is_unsized() {
            return simple(fx);
        }
        match field_layout.ty.kind {
            ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(fx),
            ty::Adt(def, _) if def.repr.packed() => {
                assert_eq!(layout.align.abi.bytes(), 1);
                return simple(fx);
            }
            _ => {
                // We have to align the offset for DSTs.
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) =
                    crate::unsize::size_and_align_of_dst(fx, field_layout.ty, extra);

                // Round `unaligned_offset` up to the dynamic alignment:
                // offset = (unaligned_offset + align - 1) & -align
                let one = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(pointer_ty(fx.tcx), 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (fx.bcx.ins().iadd(base, offset), field_layout)
            }
        }
    } else {
        simple(fx)
    }
}

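/// Returns the offset in bytes of the second scalar of a `ScalarPair` layout:
/// the size of the first scalar rounded up to the alignment of the second.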
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: &Scalar, b_scalar: &Scalar) -> i32 {
    let b_offset = a_scalar
        .value
        .size(&tcx)
        .align_to(b_scalar.value.align(&tcx).abi);
    b_offset.bytes().try_into().unwrap()
}

/// A read-only value
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Value),
    ByVal(Value),
    ByValPair(Value, Value),
}

impl<'tcx> CValue<'tcx> {
    pub fn by_ref(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(value), layout)
    }

    pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub fn layout(&self) -> TyLayout<'tcx> {
        self.1
    }

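    /// Returns the address of this value, spilling it to a new stack slot first if it is
    /// not already stored in memory.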
    pub fn force_stack<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(value) => value,
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout.ty);
                cplace.write_cvalue(fx, self);
                cplace.to_addr(fx)
            }
        }
    }

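    /// Returns the address of this value if it is stored in memory (`ByRef`), `None` otherwise.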
    pub fn try_to_addr(self) -> Option<Value> {
        match self.0 {
            CValueInner::ByRef(addr) => Some(addr),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value with a `layout.abi` of `Abi::Scalar`.
    pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let scalar = match layout.abi {
                    layout::Abi::Scalar(ref scalar) => scalar.clone(),
                    _ => unreachable!(),
                };
                let clif_ty = scalar_to_clif_type(fx.tcx, scalar);
                fx.bcx.ins().load(clif_ty, MemFlags::new(), addr, 0)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair with a `layout.abi` of `Abi::ScalarPair`.
    pub fn load_scalar_pair<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(addr) => {
                let (a_scalar, b_scalar) = match &layout.abi {
                    layout::Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar.clone());
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar.clone());
                let val1 = fx.bcx.ins().load(clif_ty1, MemFlags::new(), addr, 0);
                let val2 = fx.bcx.ins().load(clif_ty2, MemFlags::new(), addr, b_offset);
                (val1, val2)
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

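    /// Projects to the given field of this value. The value must be stored in memory (`ByRef`).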
    pub fn value_field<'a>(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        let base = match self.0 {
            CValueInner::ByRef(addr) => addr,
            _ => bug!("value_field for {:?}", self),
        };

        let (field_ptr, field_layout) = codegen_field(fx, base, None, layout, field);
        CValue::by_ref(field_ptr, field_layout)
    }

    pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

    /// If `ty` is signed, `const_val` must already be sign extended.
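    /// For example `-1i8` must be passed as `u128::max_value()` (all 128 bits set), not `0xff`.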
    pub fn const_val<'a>(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
        const_val: u128,
    ) -> CValue<'tcx> {
        let clif_ty = fx.clif_type(ty).unwrap();
        let layout = fx.layout_of(ty);

        let val = match ty.kind {
            ty::TyKind::Uint(UintTy::U128) | ty::TyKind::Int(IntTy::I128) => {
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx
                    .bcx
                    .ins()
                    .iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::TyKind::Bool => {
                assert!(
                    const_val == 0 || const_val == 1,
                    "Invalid bool 0x{:032X}",
                    const_val
                );
                fx.bcx.ins().iconst(types::I8, const_val as i64)
            }
            ty::TyKind::Uint(_) | ty::TyKind::Ref(..) | ty::TyKind::RawPtr(..) => fx
                .bcx
                .ins()
                .iconst(clif_ty, u64::try_from(const_val).expect("uint") as i64),
            ty::TyKind::Int(_) => fx.bcx.ins().iconst(clif_ty, const_val as i128 as i64),
            _ => panic!(
                "CValue::const_val for non bool/integer/pointer type {:?} is not allowed",
                ty
            ),
        };

        CValue::by_val(val, layout)
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        CValue(self.0, layout)
    }
}

/// A place where you can write a value to or read a value from
#[derive(Debug, Copy, Clone)]
pub struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyLayout<'tcx>,
}

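/// How a `CPlace` is stored:
///
/// * `Var`: a Cranelift SSA variable holding the value of the given MIR local
/// * `Addr`: a memory address plus optional metadata (e.g. slice length or vtable) for
///   unsized places
/// * `Stack`: a dedicated stack slot
/// * `NoPlace`: no storage at all, used for zero-sized (and uninhabited) types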
#[derive(Debug, Copy, Clone)]
pub enum CPlaceInner {
    Var(Local),
    Addr(Value, Option<Value>),
    Stack(StackSlot),
    NoPlace,
}

impl<'tcx> CPlace<'tcx> {
    pub fn layout(&self) -> TyLayout<'tcx> {
        self.layout
    }

    pub fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::NoPlace,
            layout,
        }
    }

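    /// Creates a place backed by a new stack slot. Zero-sized types get `NoPlace`
    /// instead of an allocation.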
    pub fn new_stack_slot(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        let layout = fx.layout_of(ty);
        assert!(!layout.is_unsized());
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::NoPlace,
                layout,
            };
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace {
            inner: CPlaceInner::Stack(stack_slot),
            layout,
        }
    }

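    /// Creates a place backed by a Cranelift variable for the given MIR local. This only
    /// works for types with a scalar Cranelift representation (`fx.clif_type` must succeed).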
    pub fn new_var(
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx
            .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace {
            inner: CPlaceInner::Var(local),
            layout,
        }
    }

    pub fn for_addr(addr: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(addr, None),
            layout,
        }
    }

    pub fn for_addr_with_extra(addr: Value, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace {
            inner: CPlaceInner::Addr(addr, Some(extra)),
            layout,
        }
    }

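    /// Reads the current value of this place. For `Var` places the loaded value is also
    /// given a value label so it can be tracked for debuginfo.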
    pub fn to_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(var) => {
                let val = fx.bcx.use_var(mir_var(var));
                fx.bcx.set_val_label(
                    val,
                    cranelift::codegen::ir::ValueLabel::from_u32(var.as_u32()),
                );
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(addr, extra) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(addr, layout)
            }
            CPlaceInner::Stack(stack_slot) => CValue::by_ref(
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                layout,
            ),
            CPlaceInner::NoPlace => CValue::by_ref(
                fx.bcx
                    .ins()
                    .iconst(fx.pointer_type, fx.pointer_type.bytes() as i64),
                layout,
            ),
        }
    }

    pub fn to_addr(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> Value {
        match self.to_addr_maybe_unsized(fx) {
            (addr, None) => addr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub fn to_addr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    ) -> (Value, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(addr, extra) => (addr, extra),
            CPlaceInner::Stack(stack_slot) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                None,
            ),
            CPlaceInner::NoPlace => (
                fx.bcx.ins().iconst(
                    fx.pointer_type,
                    i64::try_from(self.layout.align.pref.bytes()).unwrap(),
                ),
                None,
            ),
            CPlaceInner::Var(_) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }

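    /// Stores `from` into this place, after checking that the source type is actually
    /// assignable to the destination type.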
    pub fn write_cvalue(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, from: CValue<'tcx>) {
        use rustc::hir::Mutability::*;

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;

        fn assert_assignable<'tcx>(
            fx: &FunctionCx<'_, 'tcx, impl Backend>,
            from_ty: Ty<'tcx>,
            to_ty: Ty<'tcx>,
        ) {
            match (&from_ty.kind, &to_ty.kind) {
                (ty::Ref(_, t, Immutable), ty::Ref(_, u, Immutable))
                | (ty::Ref(_, t, Mutable), ty::Ref(_, u, Immutable))
                | (ty::Ref(_, t, Mutable), ty::Ref(_, u, Mutable)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, Immutable), ty::Ref(_, _, Mutable)) => panic!(
                    "Can't assign value of type {} to place of type {}",
                    from_ty, to_ty
                ),
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    let from_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from_traits);
                    let to_traits = fx
                        .tcx
                        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_traits);
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    assert_eq!(
                        from_ty, to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty, to_ty, fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);

        let dst_layout = self.layout();
        let addr = match self.inner {
            CPlaceInner::Var(var) => {
                let data = from.load_scalar(fx);
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlaceInner::Addr(addr, None) => addr,
            CPlaceInner::Stack(stack_slot) => {
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0)
            }
            CPlaceInner::NoPlace => {
                if dst_layout.abi != Abi::Uninhabited {
                    assert_eq!(dst_layout.size.bytes(), 0, "{:?}", dst_layout);
                }
                return;
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        match from.0 {
            CValueInner::ByVal(val) => {
                fx.bcx.ins().store(MemFlags::new(), val, addr, 0);
            }
            CValueInner::ByValPair(value, extra) => match dst_layout.abi {
                Abi::ScalarPair(ref a_scalar, ref b_scalar) => {
                    let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                    fx.bcx.ins().store(MemFlags::new(), value, addr, 0);
                    fx.bcx.ins().store(MemFlags::new(), extra, addr, b_offset);
                }
                _ => bug!(
                    "Non ScalarPair abi {:?} for ByValPair CValue",
                    dst_layout.abi
                ),
            },
            CValueInner::ByRef(from_addr) => {
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }

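    /// Projects to the given field of this place. If the field is unsized, the resulting
    /// place inherits the metadata of this place.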
    pub fn place_field(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_addr_maybe_unsized(fx);

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            CPlace::for_addr_with_extra(field_ptr, extra.unwrap(), field_layout)
        } else {
            CPlace::for_addr(field_ptr, field_layout)
        }
    }

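    /// Projects to the element at `index` of this array or slice place.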
    pub fn place_index(
        self,
        fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, addr) = match self.layout().ty.kind {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_addr(fx)),
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_addr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        let offset = fx
            .bcx
            .ins()
            .imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_addr(fx.bcx.ins().iadd(addr, offset), elem_layout)
    }

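    /// Dereferences this place of pointer type, producing a place for the pointee.
    /// Wide pointers yield an unsized place that carries the pointer metadata.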
    pub fn place_deref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_addr_with_extra(addr, extra, inner_layout)
        } else {
            CPlace::for_addr(self.to_cvalue(fx).load_scalar(fx), inner_layout)
        }
    }

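    /// Writes a pointer to this place into `dest`, including the metadata for unsized places.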
    pub fn write_place_ref(self, fx: &mut FunctionCx<'_, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (value, extra) = self.to_addr_maybe_unsized(fx);
            let ptr = CValue::by_val_pair(
                value,
                extra.expect("unsized type without metadata"),
                dest.layout(),
            );
            dest.write_cvalue(fx, ptr);
        } else {
            let ptr = CValue::by_val(self.to_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        }
    }

    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        if let CPlaceInner::NoPlace = self.inner {
            assert!(layout.size.bytes() == 0);
        }
        CPlace {
            inner: self.inner,
            layout,
        }
    }

    pub fn downcast_variant(
        self,
        fx: &FunctionCx<'_, 'tcx, impl Backend>,
        variant: VariantIdx,
    ) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}