// Compute the address and layout of one field of an aggregate.
// Given a pointer to the start of the aggregate (`base`, elided from this
// excerpt) and the aggregate's `layout`, returns the field's address and
// its `TyLayout`.
// NOTE(review): this excerpt is elided — the parameters `base: Value` and
// `field: mir::Field` and the function's tail are not visible here; comments
// describe only the visible lines.
3 fn codegen_field<'a, 'tcx: 'a>(
4 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
6 layout: TyLayout<'tcx>,
8 ) -> (Value, TyLayout<'tcx>) {
// Byte offset of the requested field within the aggregate's layout.
9 let field_offset = layout.fields.offset(field.index());
// Layout of the field's own type.
10 let field_ty = layout.field(&*fx, field.index());
// Only emit an add when the offset is non-zero; the zero-offset branch is
// elided and presumably returns `base` unchanged — TODO confirm.
11 if field_offset.bytes() > 0 {
13 fx.bcx.ins().iadd_imm(base, field_offset.bytes() as i64),
// Codegen-time representation of an rvalue: either stored in memory
// (`ByRef`) or held directly in one or two SSA `Value`s. Every variant
// carries the `TyLayout` of the Rust type it represents.
// NOTE(review): the enum's closing brace is elided from this excerpt.
22 #[derive(Debug, Copy, Clone)]
23 pub enum CValue<'tcx> {
// Pointer to the value in memory.
24 ByRef(Value, TyLayout<'tcx>),
// A single immediate SSA value (see `load_scalar`).
25 ByVal(Value, TyLayout<'tcx>),
// Two immediate SSA values (see `load_scalar_pair`).
26 ByValPair(Value, Value, TyLayout<'tcx>),
// Methods on `CValue`.
// NOTE(review): this excerpt is elided throughout — method bodies are missing
// closing braces, `match` headers and some arms; comments describe only the
// visible lines.
29 impl<'tcx> CValue<'tcx> {
// Constructor: wrap an address as a by-reference value.
30 pub fn by_ref(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
31 CValue::ByRef(value, layout)
// Constructor: wrap a single SSA value as an immediate.
34 pub fn by_val(value: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
35 CValue::ByVal(value, layout)
// Constructor: wrap two SSA values as an immediate pair.
38 pub fn by_val_pair(value: Value, extra: Value, layout: TyLayout<'tcx>) -> CValue<'tcx> {
39 CValue::ByValPair(value, extra, layout)
// The layout of the Rust type this value represents; stored identically
// in every variant.
42 pub fn layout(&self) -> TyLayout<'tcx> {
44 CValue::ByRef(_, layout)
45 | CValue::ByVal(_, layout)
46 | CValue::ByValPair(_, _, layout) => layout,
// Return an address holding this value, spilling immediates to a fresh
// stack slot if needed.
50 pub fn force_stack<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value
// Already in memory: its address is the answer.
55 CValue::ByRef(value, _layout) => value,
// Immediate: allocate a stack slot of the value's size and store into it.
56 CValue::ByVal(value, layout) => {
57 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
58 kind: StackSlotKind::ExplicitSlot,
59 size: layout.size.bytes() as u32,
62 let addr = fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0);
63 fx.bcx.ins().store(MemFlags::new(), value, addr, 0);
// Immediate pair: spill both components at their field offsets, computed
// via `codegen_field` for fields 0 and 1.
66 CValue::ByValPair(value, extra, layout) => {
67 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
68 kind: StackSlotKind::ExplicitSlot,
69 size: layout.size.bytes() as u32,
// NOTE(review): uses hardcoded `types::I64` here instead of
// `fx.pointer_type` as the ByVal arm above does — presumably assumes a
// 64-bit target; confirm before porting.
72 let base = fx.bcx.ins().stack_addr(types::I64, stack_slot, 0);
73 let a_addr = codegen_field(fx, base, layout, mir::Field::new(0)).0;
74 let b_addr = codegen_field(fx, base, layout, mir::Field::new(1)).0;
75 fx.bcx.ins().store(MemFlags::new(), value, a_addr, 0);
76 fx.bcx.ins().store(MemFlags::new(), extra, b_addr, 0);
82 /// Load a value with layout.abi of scalar
83 pub fn load_scalar<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value
// In memory: determine the Cranelift type from the scalar ABI, then load.
88 CValue::ByRef(addr, layout) => {
89 let scalar = match layout.abi {
90 layout::Abi::Scalar(ref scalar) => scalar.clone(),
93 let clif_ty = scalar_to_clif_type(fx.tcx, scalar);
94 fx.bcx.ins().load(clif_ty, MemFlags::new(), addr, 0)
// Already an immediate: return it as-is.
96 CValue::ByVal(value, _layout) => value,
// Pairs must go through `load_scalar_pair` instead.
97 CValue::ByValPair(_, _, _layout) => bug!("Please use load_scalar_pair for ByValPair"),
101 /// Load a value pair with layout.abi of scalar pair
102 pub fn load_scalar_pair<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> (Value, Value)
// In memory: load both components; the second sits at an offset equal to
// the size of the first scalar (see the `a.value.size(...)` offset below).
107 CValue::ByRef(addr, layout) => {
108 let (a, b) = match &layout.abi {
109 layout::Abi::ScalarPair(a, b) => (a.clone(), b.clone()),
112 let clif_ty1 = scalar_to_clif_type(fx.tcx, a.clone());
113 let clif_ty2 = scalar_to_clif_type(fx.tcx, b);
114 let val1 = fx.bcx.ins().load(clif_ty1, MemFlags::new(), addr, 0);
115 let val2 = fx.bcx.ins().load(
// Offset of the second component = byte size of the first scalar.
// NOTE(review): this ignores padding/alignment between the pair's
// components — presumably valid for the scalar-pair ABI; confirm.
119 a.value.size(&fx.tcx).bytes() as i32,
// Singles must go through `load_scalar` instead.
123 CValue::ByVal(_, _layout) => bug!("Please use load_scalar for ByVal"),
124 CValue::ByValPair(val1, val2, _layout) => (val1, val2),
// Project a field out of a by-reference value; only `ByRef` is supported.
128 pub fn value_field<'a>(
130 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
136 let (base, layout) = match self {
137 CValue::ByRef(addr, layout) => (addr, layout),
138 _ => bug!("place_field for {:?}", self),
141 let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
142 CValue::ByRef(field_ptr, field_layout)
// Perform an unsizing coercion of this value into `dest`; delegates to the
// `unsize` module.
145 pub fn unsize_value<'a>(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
146 crate::unsize::coerce_unsized_into(fx, self, dest);
// Build a constant immediate of type `ty` from `const_val` (parameters
// partly elided from this excerpt).
// NOTE(review): `fx.clif_type(ty).unwrap()` panics if `ty` has no scalar
// Cranelift representation — presumably callers only pass such types.
149 pub fn const_val<'a>(
150 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
157 let clif_ty = fx.clif_type(ty).unwrap();
158 let layout = fx.layout_of(ty);
159 CValue::ByVal(fx.bcx.ins().iconst(clif_ty, const_val), layout)
// Reinterpret this value with a different layout without changing the
// underlying SSA values or address.
162 pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
164 CValue::ByRef(addr, _) => CValue::ByRef(addr, layout),
165 CValue::ByVal(val, _) => CValue::ByVal(val, layout),
166 CValue::ByValPair(val, extra, _) => CValue::ByValPair(val, extra, layout),
171 /// A place where you can write a value to or read a value from
// NOTE(review): the enum's closing brace is elided from this excerpt.
172 #[derive(Debug, Copy, Clone)]
173 pub enum CPlace<'tcx> {
// A Cranelift variable bound to a MIR local (no memory address; see the
// `bug!` in `to_addr_maybe_unsized`).
174 Var(Local, TyLayout<'tcx>),
// An address, plus optional extra metadata used for unsized places (see
// `place_deref` / `write_place_ref`).
175 Addr(Value, Option<Value>, TyLayout<'tcx>),
// A named stack slot in the current function.
176 Stack(StackSlot, TyLayout<'tcx>),
// A zero-sized place with no storage (asserted ZST in `write_cvalue`).
177 NoPlace(TyLayout<'tcx>),
// Methods on `CPlace`.
// NOTE(review): this excerpt is elided throughout — method bodies are missing
// closing braces, `match` headers, some parameters and arms; comments describe
// only the visible lines.
180 impl<'a, 'tcx: 'a> CPlace<'tcx> {
// The layout of the Rust type stored in this place; present in every variant.
181 pub fn layout(&self) -> TyLayout<'tcx> {
183 CPlace::Var(_, layout)
184 | CPlace::Addr(_, _, layout)
185 | CPlace::Stack(_, layout)
186 | CPlace::NoPlace(layout) => layout,
// Constructor for a storage-less (zero-sized) place.
190 pub fn no_place(layout: TyLayout<'tcx>) -> CPlace<'tcx> {
191 CPlace::NoPlace(layout)
// Allocate a stack slot sized for `ty` (parameter elided from this excerpt).
// ZSTs get `NoPlace` instead of a real slot; unsized types are rejected.
194 pub fn new_stack_slot(
195 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
198 let layout = fx.layout_of(ty);
199 assert!(!layout.is_unsized());
200 if layout.size.bytes() == 0 {
201 return CPlace::NoPlace(layout);
204 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
205 kind: StackSlotKind::ExplicitSlot,
206 size: layout.size.bytes() as u32,
209 CPlace::Stack(stack_slot, layout)
// (Method header elided.) Declares a Cranelift variable for a MIR local and
// wraps it as a `Var` place.
// NOTE(review): `fx.clif_type(layout.ty).unwrap()` panics for types without
// a scalar Cranelift representation — presumably callers guarantee this.
213 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
215 layout: TyLayout<'tcx>,
218 .declare_var(mir_var(local), fx.clif_type(layout.ty).unwrap());
219 CPlace::Var(local, layout)
// Read this place as a `CValue`.
222 pub fn to_cvalue(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> CValue<'tcx> {
// Variables become immediates via `use_var`.
224 CPlace::Var(var, layout) => CValue::ByVal(fx.bcx.use_var(mir_var(var)), layout),
225 CPlace::Addr(addr, extra, layout) => {
226 assert!(extra.is_none(), "unsized values are not yet supported");
227 CValue::ByRef(addr, layout)
229 CPlace::Stack(stack_slot, layout) => CValue::ByRef(
230 fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
// ZST place: fabricate a dangling-but-aligned-looking constant address
// (the pointer size itself) — never dereferenced for a ZST, presumably.
233 CPlace::NoPlace(layout) => CValue::ByRef(
236 .iconst(fx.pointer_type, fx.pointer_type.bytes() as i64),
// Address of a sized place; bugs out on unsized (metadata-carrying) places.
242 pub fn to_addr(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> Value {
243 match self.to_addr_maybe_unsized(fx) {
244 (addr, None) => addr,
245 (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
// Address of this place plus optional unsized metadata. `Var` has no
// address and is a bug to ask for.
249 pub fn to_addr_maybe_unsized(
251 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
252 ) -> (Value, Option<Value>) {
254 CPlace::Addr(addr, extra, _layout) => (addr, extra),
255 CPlace::Stack(stack_slot, _layout) => (
256 fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
// Arbitrary dummy address (45) for a zero-sized place — never read or
// written, presumably; note `to_cvalue` uses a different dummy constant.
259 CPlace::NoPlace(_) => (fx.bcx.ins().iconst(fx.pointer_type, 45), None),
260 CPlace::Var(_, _) => bug!("Expected CPlace::Addr, found CPlace::Var"),
// Store `from` into this place. First type-checks assignability (debug-time
// sanity), then dispatches on the destination kind, then on the source kind.
264 pub fn write_cvalue(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, from: CValue<'tcx>) {
265 use rustc::hir::Mutability::*;
267 let from_ty = from.layout().ty;
268 let to_ty = self.layout().ty;
// Inner helper: panic unless a value of `from_ty` may be written to a place
// of `to_ty`, modulo lifetimes (regions are erased below).
270 fn assert_assignable<'a, 'tcx: 'a>(fx: &FunctionCx<'a, 'tcx, impl Backend>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) {
271 match (&from_ty.sty, &to_ty.sty) {
272 (ty::Ref(_, t, MutImmutable), ty::Ref(_, u, MutImmutable))
273 | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutImmutable))
274 | (ty::Ref(_, t, MutMutable), ty::Ref(_, u, MutMutable)) => {
// Recurse into the pointee types.
275 assert_assignable(fx, t, u);
276 // &mut T -> &T is allowed
277 // &'a T -> &'b T is allowed
// The one forbidden direction: shared -> mutable reference.
279 (ty::Ref(_, _, MutImmutable), ty::Ref(_, _, MutMutable)) => {
280 panic!("Cant assign value of type {} to place of type {}", from_ty, to_ty)
// Fn pointers: compare signatures after erasing late-bound regions.
282 (ty::FnPtr(_), ty::FnPtr(_)) => {
283 let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
284 ParamEnv::reveal_all(),
285 &from_ty.fn_sig(fx.tcx),
287 let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
288 ParamEnv::reveal_all(),
289 &to_ty.fn_sig(fx.tcx),
293 "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
294 from_sig, to_sig, fx,
296 // fn(&T) -> for<'l> fn(&'l T) is allowed
// Trait objects: compare trait lists after erasing late-bound regions.
298 (ty::Dynamic(from_traits, _), ty::Dynamic(to_traits, _)) => {
299 let from_traits = fx.tcx.normalize_erasing_late_bound_regions(
300 ParamEnv::reveal_all(),
303 let to_traits = fx.tcx.normalize_erasing_late_bound_regions(
304 ParamEnv::reveal_all(),
308 from_traits, to_traits,
309 "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
310 from_traits, to_traits, fx,
312 // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
// (Elided catch-all arm) — any other mismatched pair fails with this message.
318 "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
327 assert_assignable(fx, from_ty, to_ty);
// Resolve the destination to a raw address + layout (or handle the
// non-memory cases and return early — early returns elided here).
329 let (addr, dst_layout) = match self {
// Variable destination: define the Cranelift var from a scalar load.
330 CPlace::Var(var, _) => {
331 let data = from.load_scalar(fx);
332 fx.bcx.def_var(mir_var(var), data);
335 CPlace::Addr(addr, None, dst_layout) => (addr, dst_layout),
336 CPlace::Stack(stack_slot, dst_layout) => (
337 fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0),
// ZST destination: nothing to store; both sides must be zero-sized.
340 CPlace::NoPlace(layout) => {
341 assert_eq!(layout.size.bytes(), 0);
342 assert_eq!(from.layout().size.bytes(), 0);
// `Addr` with `Some` metadata (unsized destination) is unsupported.
345 CPlace::Addr(_, _, _) => bug!("Can't write value to unsized place {:?}", self),
// Now store the source according to its representation.
349 CValue::ByVal(val, _src_layout) => {
350 fx.bcx.ins().store(MemFlags::new(), val, addr, 0);
// Pair: store each component at its field offset in the DESTINATION layout.
352 CValue::ByValPair(val1, val2, _src_layout) => {
353 let val1_offset = dst_layout.fields.offset(0).bytes() as i32;
354 let val2_offset = dst_layout.fields.offset(1).bytes() as i32;
355 fx.bcx.ins().store(MemFlags::new(), val1, addr, val1_offset);
356 fx.bcx.ins().store(MemFlags::new(), val2, addr, val2_offset);
// Memory-to-memory: memcpy `dst_layout.size` bytes with both alignments.
358 CValue::ByRef(from, src_layout) => {
359 let size = dst_layout.size.bytes();
360 let src_align = src_layout.align.abi.bytes() as u8;
361 let dst_align = dst_layout.align.abi.bytes() as u8;
362 fx.bcx.emit_small_memcpy(
363 fx.module.target_config(),
// (Method header elided — presumably `place_field`.) Projects a field,
// propagating unsized metadata when the field itself is unsized.
376 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
379 let layout = self.layout();
380 let (base, extra) = self.to_addr_maybe_unsized(fx);
382 let (field_ptr, field_layout) = codegen_field(fx, base, layout, field);
// Keep the metadata only if the projected field is unsized; an unsized
// field requires the parent to have carried metadata.
383 let extra = if field_layout.is_unsized() {
384 assert!(extra.is_some());
389 CPlace::Addr(field_ptr, extra, field_layout)
// (Method header elided — presumably `place_index`.) Computes
// `addr + index * elem_size` for array/slice indexing.
394 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
397 let (elem_layout, addr) = match self.layout().ty.sty {
398 ty::Array(elem_ty, _) => (fx.layout_of(elem_ty), self.to_addr(fx)),
// Slices are unsized: take only the data pointer, drop the length.
399 ty::Slice(elem_ty) => (fx.layout_of(elem_ty), self.to_addr_maybe_unsized(fx).0),
400 _ => bug!("place_index({:?})", self.layout().ty),
// Scale the index by the element size (offset computation partly elided).
406 .imul_imm(index, elem_layout.size.bytes() as i64);
408 CPlace::Addr(fx.bcx.ins().iadd(addr, offset), None, elem_layout)
// Dereference this place. Thin pointers load a single scalar address;
// fat pointers (ScalarPair ABI) also load the metadata word.
411 pub fn place_deref(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>) -> CPlace<'tcx> {
412 let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
413 if !inner_layout.is_unsized() {
414 CPlace::Addr(self.to_cvalue(fx).load_scalar(fx), None, inner_layout)
416 match self.layout().abi {
417 Abi::ScalarPair(ref a, ref b) => {
418 let addr = self.to_addr(fx);
// First component: the data pointer.
422 .load(scalar_to_clif_type(fx.tcx, a.clone()), MemFlags::new(), addr, 0);
// Second component: the metadata, at offset = size of the first scalar.
423 let extra = fx.bcx.ins().load(
424 scalar_to_clif_type(fx.tcx, b.clone()),
427 a.value.size(&fx.tcx).bytes() as u32 as i32,
429 CPlace::Addr(ptr, Some(extra), inner_layout)
432 "Fat ptr doesn't have abi ScalarPair, but it has {:?}",
// Write a reference to `self` into `dest`: a thin pointer for sized types,
// pointer + metadata (fat pointer) for unsized ones.
439 pub fn write_place_ref(self, fx: &mut FunctionCx<'a, 'tcx, impl Backend>, dest: CPlace<'tcx>) {
440 if !self.layout().is_unsized() {
441 let ptr = CValue::ByVal(self.to_addr(fx), dest.layout());
442 dest.write_cvalue(fx, ptr);
444 let (value, extra) = self.to_addr_maybe_unsized(fx);
446 match dest.layout().abi {
447 Abi::ScalarPair(ref a, _) => {
448 let dest_addr = dest.to_addr(fx);
// Store data pointer at offset 0, metadata after the first scalar.
449 fx.bcx.ins().store(MemFlags::new(), value, dest_addr, 0);
452 extra.expect("unsized type without metadata"),
454 a.value.size(&fx.tcx).bytes() as u32 as i32,
458 "Non ScalarPair abi {:?} in write_place_ref dest",
// Reinterpret this (sized) place with a different layout, keeping the same
// storage. `NoPlace` may only be cast to another ZST layout.
465 pub fn unchecked_cast_to(self, layout: TyLayout<'tcx>) -> Self {
466 assert!(!self.layout().is_unsized());
468 CPlace::Var(var, _) => CPlace::Var(var, layout),
469 CPlace::Addr(addr, extra, _) => CPlace::Addr(addr, extra, layout),
470 CPlace::Stack(stack_slot, _) => CPlace::Stack(stack_slot, layout),
471 CPlace::NoPlace(_) => {
472 assert!(layout.size.bytes() == 0);
473 CPlace::NoPlace(layout)
// View this place as a specific enum variant by swapping in the variant's
// layout (parameters partly elided from this excerpt).
478 pub fn downcast_variant(
480 fx: &FunctionCx<'a, 'tcx, impl Backend>,
483 let layout = self.layout().for_variant(fx, variant);
484 self.unchecked_cast_to(layout)