// src/value_and_place.rs (rustc_codegen_cranelift)
// Upstream commit: "Fix `write_cvalue` for `ByValPair` when the cvalue is not trivially a pair"
1 use crate::prelude::*;
2
3 fn codegen_field<'a, 'tcx: 'a>(
4     fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
5     base: Value,
6     layout: TyLayout<'tcx>,
7     field: mir::Field,
8 ) -> (Value, TyLayout<'tcx>) {
9     let field_offset = layout.fields.offset(field.index());
10     let field_ty = layout.field(&*fx, field.index());
11     if field_offset.bytes() > 0 {
12         (
13             fx.bcx.ins().iadd_imm(base, field_offset.bytes() as i64),
14             field_ty,
15         )
16     } else {
17         (base, field_ty)
18     }
19 }
20
/// A read-only value
#[derive(Debug, Copy, Clone)]
pub struct CValue<'tcx>(CValueInner, TyLayout<'tcx>);

/// Internal representation of a `CValue`: either the address of the value's
/// in-memory representation, or the value itself held in one or two SSA values.
#[derive(Debug, Copy, Clone)]
enum CValueInner {
    // Address of the value in memory; read back via `load_scalar`/`load_scalar_pair`
    // or copied with a memcpy in `CPlace::write_cvalue`.
    ByRef(Value),
    // The value itself, for layouts with `Abi::Scalar` (see `load_scalar`).
    ByVal(Value),
    // Both halves of a value with `Abi::ScalarPair` (see `load_scalar_pair`).
    ByValPair(Value, Value),
}
31
32 impl<'tcx> CValue<'tcx> {
33     pub fn by_ref(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
34         CValue(CValueInner::ByRef(value), layout)
35     }
36
37     pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
38         CValue(CValueInner::ByVal(value), layout)
39     }
40
41     pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
42         CValue(CValueInner::ByValPair(value, extra), layout)
43     }
44
45     pub fn layout(&self) -> TyLayout<'tcx> {
46         self.1
47     }
48
49     pub fn force_stack<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value
50     where
51         'tcx: 'a,
52     {
53         let layout = self.1;
54         match self.0 {
55             CValueInner::ByRef(value) => value,
56             CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
57                 let cplace = CPlace::new_stack_slot(fx, layout.ty);
58                 cplace.write_cvalue(fx, self);
59                 cplace.to_addr(fx)
60             }
61         }
62     }
63
64     /// Load a value with layout.abi of scalar
65     pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value
66     where
67         'tcx: 'a,
68     {
69         let layout = self.1;
70         match self.0 {
71             CValueInner::ByRef(addr) => {
72                 let scalar = match layout.abi {
73                     layout::Abi::Scalar(ref scalar) => scalar.clone(),
74                     _ => unreachable!(),
75                 };
76                 let clif_ty = scalar_to_clif_type(fx.tcx, scalar);
77                 fx.bcx.ins().load(clif_ty, MemFlags::new(), addr, 0)
78             }
79             CValueInner::ByVal(value) => value,
80             CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
81         }
82     }
83
84     /// Load a value pair with layout.abi of scalar pair
85     pub fn load_scalar_pair<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> (Value, Value)
86     where
87         'tcx: 'a,
88     {
89         let layout = self.1;
90         match self.0 {
91             CValueInner::ByRef(addr) => {
92                 let (a, b) = match &layout.abi {
93                     layout::Abi::ScalarPair(a, b) => (a.clone(), b.clone()),
94                     _ => unreachable!(),
95                 };
96                 let clif_ty1 = scalar_to_clif_type(fx.tcx, a.clone());
97                 let clif_ty2 = scalar_to_clif_type(fx.tcx, b);
98                 let val1 = fx.bcx.ins().load(clif_ty1, MemFlags::new(), addr, 0);
99                 let val2 = fx.bcx.ins().load(
100                     clif_ty2,
101                     MemFlags::new(),
102                     addr,
103                     a.value.size(&fx.tcx).bytes() as i32,
104                 );
105                 (val1, val2)
106             }
107             CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
108             CValueInner::ByValPair(val1, val2) => (val1, val2),
109         }
110     }
111
112     pub fn value_field<'a>(
113         self,
114         fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
115         field: mir::Field,
116     ) -> CValue<'tcx>
117     where
118         'tcx: 'a,
119     {
120         let layout = self.1;
121         let base = match self.0 {
122             CValueInner::ByRef(addr) => addr,
123             _ => bug!("place_field for {:?}", self),
124         };
125
126         let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
127         CValue::by_ref(field_ptr, field_layout)
128     }
129
130     pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
131         crate::unsize::coerce_unsized_into(fx, self, dest);
132     }
133
134     pub fn const_val<'a>(
135         fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
136         ty: Ty<'tcx>,
137         const_val: i64,
138     ) -> CValue<'tcx>
139     where
140         'tcx: 'a,
141     {
142         let clif_ty = fx.clif_type(ty).unwrap();
143         let layout = fx.layout_of(ty);
144         CValue::by_val(fx.bcx.ins().iconst(clif_ty, const_val), layout)
145     }
146
147     pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
148         CValue(self.0, layout)
149     }
150 }
151
/// A place where you can write a value to or read a value from
#[derive(Debug, Copy, Clone)]
pub enum CPlace<'tcx> {
    // An SSA variable for the given MIR local (declared via `declare_var` in `new_var`).
    Var(Local, TyLayout<'tcx>),
    // An address, plus optional metadata (`Some(extra)`) for unsized places.
    Addr(Value, Option<Value>, TyLayout<'tcx>),
    // A dedicated stack slot.
    Stack(StackSlot, TyLayout<'tcx>),
    // A zero-sized place; no storage exists (see `new_stack_slot`).
    NoPlace(TyLayout<'tcx>),
}
160
impl<'a, 'tcx: 'a> CPlace<'tcx> {
    /// The type layout of this place, regardless of representation.
    pub fn layout(&self) -> TyLayout<'tcx> {
        match *self {
            CPlace::Var(_, layout)
            | CPlace::Addr(_, _, layout)
            | CPlace::Stack(_, layout)
            | CPlace::NoPlace(layout) => layout,
        }
    }

    /// A place with no backing storage, for zero-sized types.
    pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace::NoPlace(layout)
    }

    /// Allocate a stack slot large enough to hold a sized `ty`.
    /// Zero-sized types get `NoPlace` instead of a real slot.
    pub fn new_stack_slot(
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        let layout = fx.layout_of(ty);
        assert!(!layout.is_unsized());
        if layout.size.bytes() == 0 {
            return CPlace::NoPlace(layout);
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });
        CPlace::Stack(stack_slot, layout)
    }

    /// Declare a Cranelift SSA variable for the MIR local and wrap it as a place.
    /// Panics (via `unwrap`) if the type has no scalar Cranelift representation.
    pub fn new_var(
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        local: Local,
        layout: TyLayout<'tcx>,
    ) -> CPlace<'tcx> {
        fx.bcx
            .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
        CPlace::Var(local, layout)
    }

    /// A sized place at the given address.
    pub fn for_addr(addr: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace::Addr(addr, None, layout)
    }

    /// An unsized place at the given address with its metadata (`extra`).
    pub fn for_addr_with_extra(addr: Value, extra: Value, layout: TyLayout<'tcx>) -> CPlace<'tcx> {
        CPlace::Addr(addr, Some(extra), layout)
    }

    /// Read this place as a value. `Var` becomes `ByVal`; everything else
    /// becomes a `ByRef` at the place's address.
    pub fn to_cvalue(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> CValue<'tcx> {
        match self {
            CPlace::Var(var, layout) => CValue::by_val(fx.bcx.use_var(mir_var(var)), layout),
            CPlace::Addr(addr, extra, layout) => {
                assert!(extra.is_none(), "unsized values are not yet supported");
                CValue::by_ref(addr, layout)
            }
            CPlace::Stack(stack_slot, layout) => CValue::by_ref(
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                layout,
            ),
            // NOTE(review): a dummy non-null address; presumably never
            // dereferenced because NoPlace is only created for zero-sized
            // layouts (see `new_stack_slot`) — TODO confirm.
            CPlace::NoPlace(layout) => CValue::by_ref(
                fx.bcx
                    .ins()
                    .iconst(fx.pointer_type, fx.pointer_type.bytes() as i64),
                layout,
            ),
        }
    }

    /// The address of a *sized* place; panics on unsized (`Some(extra)`) places.
    pub fn to_addr(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value {
        match self.to_addr_maybe_unsized(fx) {
            (addr, None) => addr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    /// The address of this place plus, for unsized places, its metadata.
    /// `Var` places have no address and are a bug to call this on.
    pub fn to_addr_maybe_unsized(
        self,
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    ) -> (Value, Option<Value>) {
        match self {
            CPlace::Addr(addr, extra, _layout) => (addr, extra),
            CPlace::Stack(stack_slot, _layout) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                None,
            ),
            // NOTE(review): 45 looks like an arbitrary non-null dummy address
            // for zero-sized places; it is unaligned, which presumably doesn't
            // matter for ZSTs — TODO confirm and consider an aligned constant.
            CPlace::NoPlace(_) => (fx.bcx.ins().iconst(fx.pointer_type, 45), None),
            CPlace::Var(_, _) => bug!("Expected CPlace::Addr, found CPlace::Var"),
        }
    }

    /// Store `from` into this place.
    ///
    /// First type-checks assignability (allowing `&mut T -> &T`, lifetime
    /// erasure on fn pointers and trait objects), then dispatches on the
    /// place representation: `Var` gets an SSA def, `NoPlace` is a no-op for
    /// ZSTs, and memory places get scalar stores or a memcpy.
    pub fn write_cvalue(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, from: CValue<'tcx>) {
        use rustc::hir::Mutability::*;

        let from_ty = from.layout().ty;
        let to_ty = self.layout().ty;

        // Debug-style sanity check that `from_ty` may be assigned to `to_ty`
        // modulo regions and mutability weakening. Panics on mismatch.
        fn assert_assignable<'a, 'tcx: 'a>(fx: &FunctionCx<'a, 'tcx, impl Backend>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) {
            match (&from_ty.sty, &to_ty.sty) {
                (ty::Ref(_, t, MutImmutable), ty::Ref(_, u, MutImmutable))
                | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutImmutable))
                | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutMutable)) => {
                    assert_assignable(fx, t, u);
                    // &mut T -> &T is allowed
                    // &'a T -> &'b T is allowed
                }
                (ty::Ref(_, _, MutImmutable), ty::Ref(_, _, MutMutable)) => {
                    panic!("Cant assign value of type {} to place of type {}", from_ty, to_ty)
                }
                (ty::FnPtr(_), ty::FnPtr(_)) => {
                    // Compare signatures with late-bound regions erased.
                    let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &from_ty.fn_sig(fx.tcx),
                    );
                    let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        &to_ty.fn_sig(fx.tcx),
                    );
                    assert_eq!(
                        from_sig, to_sig,
                        "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                        from_sig, to_sig, fx,
                    );
                    // fn(&T) -> for<'l> fn(&'l T) is allowed
                }
                (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
                    // Compare trait object predicates with late-bound regions erased.
                    let from_traits = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        from_traits,
                    );
                    let to_traits = fx.tcx.normalize_erasing_late_bound_regions(
                        ParamEnv::reveal_all(),
                        to_traits,
                    );
                    assert_eq!(
                        from_traits, to_traits,
                        "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                        from_traits, to_traits, fx,
                    );
                    // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
                }
                _ => {
                    // All other type pairs must match exactly.
                    assert_eq!(
                        from_ty,
                        to_ty,
                        "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                        from_ty,
                        to_ty,
                        fx,
                    );
                }
            }
        }

        assert_assignable(fx, from_ty, to_ty);

        // Resolve the destination to an address, or finish early for the
        // representations that don't involve memory.
        let (addr, dst_layout) = match self {
            CPlace::Var(var, _) => {
                // SSA variables hold a single scalar; define it directly.
                let data = from.load_scalar(fx);
                fx.bcx.def_var(mir_var(var), data);
                return;
            }
            CPlace::Addr(addr, None, dst_layout) => (addr, dst_layout),
            CPlace::Stack(stack_slot, dst_layout) => (
                fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
                dst_layout,
            ),
            CPlace::NoPlace(layout) => {
                // Zero-sized writes are no-ops; both sides must be ZSTs.
                assert_eq!(layout.size.bytes(), 0);
                assert_eq!(from.layout().size.bytes(), 0);
                return;
            }
            // Addr with Some(extra): writing to unsized places is unsupported.
            CPlace::Addr(_, _, _) => bug!("Can't write value to unsized place {:?}", self),
        };

        match from.0 {
            CValueInner::ByVal(val) => {
                fx.bcx.ins().store(MemFlags::new(), val, addr, 0);
            }
            CValueInner::ByValPair(value, extra) => {
                // Use the *destination* layout for the offset of the second
                // component, since the source layout may not itself be a
                // trivial ScalarPair.
                match dst_layout.abi {
                    Abi::ScalarPair(ref a, _) => {
                        fx.bcx.ins().store(MemFlags::new(), value, addr, 0);
                        fx.bcx.ins().store(
                            MemFlags::new(),
                            extra,
                            addr,
                            a.value.size(&fx.tcx).bytes() as u32 as i32,
                        );
                    }
                    _ => bug!(
                        "Non ScalarPair abi {:?} for ByValPair CValue",
                        dst_layout.abi
                    ),
                }
            }
            CValueInner::ByRef(from_addr) => {
                // Bulk copy from the source's memory representation.
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memcpy(
                    fx.module.target_config(),
                    addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                );
            }
        }
    }

    /// Project to a field of this place. Metadata is kept only when the
    /// field itself is unsized (e.g. a trailing unsized field).
    pub fn place_field(
        self,
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        let (base, extra) = self.to_addr_maybe_unsized(fx);

        let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
        let extra = if field_layout.is_unsized() {
            assert!(extra.is_some());
            extra
        } else {
            None
        };
        CPlace::Addr(field_ptr, extra, field_layout)
    }

    /// Project to `self[index]` for arrays and slices.
    /// Note: no bounds check is emitted here.
    pub fn place_index(
        self,
        fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, addr) = match self.layout().ty.sty {
            ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_addr(fx)),
            // For slices, drop the length metadata; only the data pointer is needed.
            ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_addr_maybe_unsized(fx).0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        // addr + index * size_of::<elem>()
        let offset = fx
            .bcx
            .ins()
            .imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::Addr(fx.bcx.ins().iadd(addr, offset), None, elem_layout)
    }

    /// Dereference a pointer place. Thin pointers load a single scalar; fat
    /// pointers (ScalarPair abi) load both the data pointer and the metadata.
    pub fn place_deref(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if !inner_layout.is_unsized() {
            CPlace::Addr(self.to_cvalue(fx).load_scalar(fx), None, inner_layout)
        } else {
            match self.layout().abi {
                Abi::ScalarPair(ref a, ref b) => {
                    let addr = self.to_addr(fx);
                    let ptr =
                        fx.bcx
                            .ins()
                            .load(scalar_to_clif_type(fx.tcx, a.clone()), MemFlags::new(), addr, 0);
                    // Metadata is stored right after the data pointer.
                    let extra = fx.bcx.ins().load(
                        scalar_to_clif_type(fx.tcx, b.clone()),
                        MemFlags::new(),
                        addr,
                        a.value.size(&fx.tcx).bytes() as u32 as i32,
                    );
                    CPlace::Addr(ptr, Some(extra), inner_layout)
                }
                _ => bug!(
                    "Fat ptr doesn't have abi ScalarPair, but it has {:?}",
                    self.layout().abi
                ),
            }
        }
    }

    /// Write a reference to this place into `dest`. Sized places store a thin
    /// pointer; unsized places store the (pointer, metadata) pair per the
    /// destination's ScalarPair layout.
    pub fn write_place_ref(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
        if !self.layout().is_unsized() {
            let ptr = CValue::by_val(self.to_addr(fx), dest.layout());
            dest.write_cvalue(fx, ptr);
        } else {
            let (value, extra) = self.to_addr_maybe_unsized(fx);

            match dest.layout().abi {
                Abi::ScalarPair(ref a, _) => {
                    let dest_addr = dest.to_addr(fx);
                    fx.bcx.ins().store(MemFlags::new(), value, dest_addr, 0);
                    fx.bcx.ins().store(
                        MemFlags::new(),
                        extra.expect("unsized type without metadata"),
                        dest_addr,
                        a.value.size(&fx.tcx).bytes() as u32 as i32,
                    );
                }
                _ => bug!(
                    "Non ScalarPair abi {:?} in write_place_ref dest",
                    dest.layout().abi
                ),
            }
        }
    }

    /// Reinterpret this place with a different (sized) layout, keeping the
    /// same storage. `NoPlace` may only be cast to another zero-sized layout.
    pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
        assert!(!self.layout().is_unsized());
        match self {
            CPlace::Var(var, _) => CPlace::Var(var, layout),
            CPlace::Addr(addr, extra, _) => CPlace::Addr(addr, extra, layout),
            CPlace::Stack(stack_slot, _) => CPlace::Stack(stack_slot, layout),
            CPlace::NoPlace(_) => {
                assert!(layout.size.bytes() == 0);
                CPlace::NoPlace(layout)
            }
        }
    }

    /// View this place as the given enum variant (layout change only;
    /// no discriminant is read or written here).
    pub fn downcast_variant(
        self,
        fx: &FunctionCx<'a, 'tcx, impl Backend>,
        variant: VariantIdx,
    ) -> Self {
        let layout = self.layout().for_variant(fx, variant);
        self.unchecked_cast_to(layout)
    }
}