1 //! Reading and writing values from/to memory, handling LocalValue and the ByRef optimization,
2 //! reading/writing discriminants
7 use rustc::ty::layout::{self, Size, Align, IntegerExt, LayoutOf, TyLayout, Primitive};
8 use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
9 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
10 use rustc::mir::interpret::{
11 GlobalId, Value, Scalar, FrameInfo, AllocType,
12 EvalResult, EvalErrorKind, Pointer, ConstValue,
16 use super::{Place, PlaceExtra, Memory, Frame,
17 HasMemory, MemoryKind,
18 Machine, ValTy, EvalContext};
20 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
// NOTE(review): the item this derive applies to (presumably the `LocalValue`
// enum with `Dead`/`Live` variants, judging by the match arms below), the
// enclosing `impl`, and the `match self {` opener of `access` are elided
// from this excerpt.
/// Reads the value stored in this local, failing with `DeadLocal` if the
/// local's storage is dead.
27 pub fn access(self) -> EvalResult<'static, Value> {
29 LocalValue::Dead => err!(DeadLocal),
30 LocalValue::Live(val) => Ok(val),
35 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
/// Writes a pointer-valued scalar `val` into `dest` by wrapping it into a
/// `Value` and forwarding to the generic `write_value` path.
36 pub fn write_ptr(&mut self, dest: Place, val: Scalar, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
38 value: val.to_value(),
41 self.write_value(valty, dest)
// NOTE(review): the head of this signature (`pub fn write_scalar(...)`) is
// elided from this excerpt. Like `write_ptr` above, it wraps the scalar into
// a `Value` and forwards to `write_value`.
47 val: impl Into<ScalarMaybeUndef>,
49 ) -> EvalResult<'tcx> {
51 value: Value::Scalar(val.into()),
54 self.write_value(valty, dest)
/// Writes `src_val` (of type `dest_ty`) into the place `dest`, dispatching
/// on whether the destination is raw memory or a stack local.
59 ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
61 ) -> EvalResult<'tcx> {
62 //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
63 // Note that it is really important that the type here is the right one, and matches the type things are read at.
64 // In case `src_val` is a `ScalarPair`, we don't do any magic here to handle padding properly, which is only
65 // correct if we never look at this data with the wrong type.
// Destination is a memory location: write straight through to the pointer.
68 Place::Ptr { ptr, align, extra } => {
69 assert_eq!(extra, PlaceExtra::None);
70 self.write_value_to_ptr(src_val, ptr.unwrap_or_err()?, align, dest_ty)
// Destination is a local: the local's current value decides whether the
// write must go through memory (see `write_value_possibly_by_val`).
73 Place::Local { frame, local } => {
74 let old_val = self.stack[frame].locals[local].access()?;
75 self.write_value_possibly_by_val(
77 |this, val| this.stack[frame].set_local(local, val),
85 // The cases here can be a bit subtle. Read carefully!
/// Core write logic for locals: decides whether the write must go through
/// memory (destination already `ByRef`), must duplicate an allocation
/// (source `ByRef`, by-value semantics), or can stay purely by-value; the
/// `write_dest` closure performs the final store into the local.
86 fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
92 ) -> EvalResult<'tcx> {
93 // FIXME: this should be a layout check, not underlying value
94 if let Value::ByRef(dest_ptr, align) = old_dest_val {
95 // If the value is already `ByRef` (that is, backed by an `Allocation`),
96 // then we must write the new value into this allocation, because there may be
97 // other pointers into the allocation. These other pointers are logically
98 // pointers into the local variable, and must be able to observe the change.
100 // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
101 // knew for certain that there were no outstanding pointers to this allocation.
102 self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
103 } else if let Value::ByRef(src_ptr, align) = src_val {
104 // If the value is not `ByRef`, then we know there are no pointers to it
105 // and we can simply overwrite the `Value` in the locals array directly.
107 // In this specific case, where the source value is `ByRef`, we must duplicate
108 // the allocation, because this is a by-value operation. It would be incorrect
109 // if they referred to the same allocation, since then a change to one would
110 // implicitly change the other.
112 // It is a valid optimization to attempt reading a primitive value out of the
113 // source and write that into the destination without making an allocation, so
115 if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
116 write_dest(self, src_val)?;
// Not readable as a primitive: copy into a fresh allocation so the two
// values do not alias.
118 let layout = self.layout_of(dest_ty)?;
119 let dest_ptr = self.alloc_ptr(layout)?.into();
120 self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size, false)?;
121 write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
124 // Finally, we have the simple case where neither source nor destination are
125 // `ByRef`. We may simply copy the source value over the destination.
126 write_dest(self, src_val)?;
/// Stores `value` into raw memory at `dest` (with `dest_align`), laid out
/// according to `dest_ty`'s layout: a memory-to-memory copy for `ByRef`,
/// or scalar writes for `Scalar`/`ScalarPair`.
131 pub fn write_value_to_ptr(
137 ) -> EvalResult<'tcx> {
138 let layout = self.layout_of(dest_ty)?;
139 trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
141 Value::ByRef(ptr, align) => {
// Source already lives in memory: a plain (non-overlapping) copy suffices.
142 self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size, false)
144 Value::Scalar(scalar) => {
// Signedness for the scalar write is only derivable from an
// `Abi::Scalar` integer layout; other layouts fall through (elided arm).
145 let signed = match layout.abi {
146 layout::Abi::Scalar(ref scal) => match scal.value {
147 layout::Primitive::Int(_, signed) => signed,
152 self.memory.write_scalar(dest, dest_align, scalar, layout.size, layout.align, signed)
154 Value::ScalarPair(a_val, b_val) => {
155 trace!("write_value_to_ptr valpair: {:#?}", layout)
156 let (a, b) = match layout.abi {
157 layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
158 _ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout)
160 let (a_size, b_size) = (a.size(&self), b.size(&self));
161 let (a_align, b_align) = (a.align(&self), b.align(&self));
// The second element starts at the first's size rounded up to the
// second element's alignment.
163 let b_offset = a_size.abi_align(b_align);
164 let b_ptr = dest.ptr_offset(b_offset, &self)?.into();
165 // TODO: What about signedness?
166 self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, a_align, false)?;
167 self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, b_align, false)
/// Attempts to read a `Value` of type `ty` out of memory at `ptr`.
/// Returns `Ok(None)` when the type's layout is not a (pair of) scalar(s),
/// in which case the caller must keep the data `ByRef`.
172 pub fn try_read_value(&self, ptr: Scalar, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
173 let layout = self.layout_of(ty)?;
174 self.memory.check_align(ptr, ptr_align)?;
// Zero-sized types carry no data; represent them as a zero-sized scalar.
176 if layout.size.bytes() == 0 {
177 return Ok(Some(Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 }))));
180 let ptr = ptr.to_ptr()?;
183 layout::Abi::Scalar(..) => {
184 let scalar = self.memory.read_scalar(ptr, ptr_align, layout.size)?;
185 Ok(Some(Value::Scalar(scalar)))
187 layout::Abi::ScalarPair(ref a, ref b) => {
188 let (a, b) = (&a.value, &b.value);
189 let (a_size, b_size) = (a.size(self), b.size(self));
// Mirrors the pair-offset computation in `write_value_to_ptr`.
191 let b_offset = a_size.abi_align(b.align(self));
192 let b_ptr = ptr.offset(b_offset, self)?.into();
193 let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
194 let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
195 Ok(Some(Value::ScalarPair(a_val, b_val)))
/// Like `try_read_value`, but the caller guarantees `ty` is scalar-shaped;
/// a `None` result from the underlying read is an interpreter bug.
201 pub fn read_value(&self, ptr: Scalar, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
202 if let Some(val) = self.try_read_value(ptr, align, ty)? {
205 bug!("primitive read failed for type: {:?}", ty);
/// Evaluates a MIR operand and reduces the result to a single `Scalar`
/// via `value_to_scalar`.
209 pub(super) fn eval_operand_to_scalar(
211 op: &mir::Operand<'tcx>,
212 ) -> EvalResult<'tcx, Scalar> {
213 let valty = self.eval_operand(op)?;
214 self.value_to_scalar(valty)
/// Evaluates a slice of call operands into `ValTy` arguments, failing on
/// the first operand that does not evaluate.
217 pub(crate) fn operands_to_args(
219 ops: &[mir::Operand<'tcx>],
220 ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
222 .map(|op| self.eval_operand(op))
/// Evaluates a single MIR operand (a Copy/Move of a place, or a Constant)
/// to a typed value, monomorphizing its type with the current substs.
226 pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
227 use rustc::mir::Operand::*;
228 let ty = self.monomorphize(op.ty(self.mir(), *self.tcx), self.substs());
230 // FIXME: do some more logic on `move` to invalidate the old location
234 value: self.eval_and_read_place(place)?,
239 Constant(ref constant) => {
240 let value = self.const_to_value(constant.literal.val)?;
/// Frees the backing allocation of a local, if it was forced into memory
/// (`ByRef`); by-value locals have nothing to deallocate.
250 pub fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
251 // FIXME: should we tell the user that there was a local which was never written to?
252 if let LocalValue::Live(Value::ByRef(ptr, _align)) = local {
253 trace!("deallocating local");
254 let ptr = ptr.to_ptr()?;
// `dump_alloc` is debugging output only; the actual free follows.
255 self.memory.dump_alloc(ptr.alloc_id);
256 self.memory.deallocate_local(ptr)?;
/// Turns a `Value` into an addressable `Place`: `ByRef` values are used
/// as-is, while `Scalar`/`ScalarPair` values are spilled into a fresh
/// allocation first.
261 pub fn allocate_place_for_value(
264 layout: TyLayout<'tcx>,
265 variant: Option<usize>,
266 ) -> EvalResult<'tcx, Place> {
267 let (ptr, align) = match value {
268 Value::ByRef(ptr, align) => (ptr, align),
269 Value::ScalarPair(..) | Value::Scalar(_) => {
270 let ptr = self.alloc_ptr(layout)?.into();
271 self.write_value_to_ptr(value, ptr, layout.align, layout.ty)?;
// Record the downcast variant (if any) as place metadata.
278 extra: variant.map_or(PlaceExtra::None, PlaceExtra::DowncastVariant),
/// Forces a place to be backed by memory. A local currently stored by-value
/// is spilled into a fresh allocation and the local itself is updated to
/// `ByRef` (so the old value stays observable through it); `Place::Ptr`
/// passes through unchanged.
282 pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
283 let new_place = match place {
284 Place::Local { frame, local } => {
285 match self.stack[frame].locals[local].access()? {
// Already in memory: reuse the existing allocation.
286 Value::ByRef(ptr, align) => {
290 extra: PlaceExtra::None,
// By-value local: allocate, re-point the local at the allocation,
// then write the old value into it.
294 let ty = self.stack[frame].mir.local_decls[local].ty;
295 let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
296 let layout = self.layout_of(ty)?;
297 let ptr = self.alloc_ptr(layout)?;
298 self.stack[frame].locals[local] =
299 LocalValue::Live(Value::ByRef(ptr.into(), layout.align)); // it stays live
301 let place = Place::from_ptr(ptr, layout.align);
302 self.write_value(ValTy { value: val, ty }, place)?;
307 Place::Ptr { .. } => place,
312 /// Convert to ByVal or ScalarPair *if possible*, leave `ByRef` otherwise
/// (best-effort: a `None` from `try_read_value` keeps the original value).
313 pub fn try_read_by_ref(&self, mut val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
314 if let Value::ByRef(ptr, align) = val {
315 if let Some(read_val) = self.try_read_value(ptr, align, ty)? {
/// Reduces a `ValTy` to a single `Scalar`, reading through `ByRef` values
/// first. `ScalarPair` (e.g. fat pointers) cannot be reduced and is a bug
/// at this point.
322 pub fn value_to_scalar(
324 ValTy { value, ty } : ValTy<'tcx>,
325 ) -> EvalResult<'tcx, Scalar> {
326 let value = match value {
// Load through the reference; `read_value` only yields scalar-shaped values.
327 Value::ByRef(ptr, align) => self.read_value(ptr, align, ty)?,
328 scalar_or_pair => scalar_or_pair,
331 Value::ByRef(..) => bug!("read_value can't result in `ByRef`"),
333 Value::Scalar(scalar) => scalar.unwrap_or_err(),
335 Value::ScalarPair(..) => bug!("value_to_scalar can't work with fat pointers"),
/// Marks a local as live, initializing it with a type-appropriate
/// "uninitialized" value (see `init_value`); returns the local's previous
/// state.
339 pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
340 trace!("{:?} is now live", local);
342 let ty = self.frame().mir.local_decls[local].ty;
343 let init = self.init_value(ty)?;
344 // StorageLive *always* kills the value that's currently stored
345 Ok(mem::replace(&mut self.frame_mut().locals[local], LocalValue::Live(init)))
/// Produces the initial value for a freshly-live local of type `ty`:
/// undef scalars where the layout allows, otherwise a fresh (ByRef)
/// allocation for aggregates.
348 pub(super) fn init_value(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
349 let ty = self.monomorphize(ty, self.substs());
350 let layout = self.layout_of(ty)?;
351 Ok(match layout.abi {
352 layout::Abi::Scalar(..) => Value::Scalar(ScalarMaybeUndef::Undef),
353 layout::Abi::ScalarPair(..) => Value::ScalarPair(
354 ScalarMaybeUndef::Undef,
355 ScalarMaybeUndef::Undef,
// Non-scalar layouts must live in memory from the start.
357 _ => Value::ByRef(self.alloc_ptr(layout)?.into(), layout.align),
361 /// reads a tag and produces the corresponding variant index
362 pub fn read_discriminant_as_variant_index(
365 layout: TyLayout<'tcx>,
366 ) -> EvalResult<'tcx, usize> {
367 match layout.variants {
// Untagged single-variant layouts have a statically known index.
368 ty::layout::Variants::Single { index } => Ok(index),
369 ty::layout::Variants::Tagged { .. } => {
370 let discr_val = self.read_discriminant_value(place, layout)?;
// Map the discriminant *value* back to the variant's position in
// declaration order; an unmatched value is an invalid discriminant.
374 .expect("tagged layout for non adt")
375 .discriminants(self.tcx.tcx)
376 .position(|var| var.val == discr_val)
377 .ok_or_else(|| EvalErrorKind::InvalidDiscriminant.into())
379 ty::layout::Variants::NicheFilling { .. } => {
// For niche-filled enums, `read_discriminant_value` already returns
// the variant index itself (see its NicheFilling arm).
380 let discr_val = self.read_discriminant_value(place, layout)?;
381 assert_eq!(discr_val as usize as u128, discr_val);
382 Ok(discr_val as usize)
/// Reads the raw discriminant value of the enum stored at `place`.
/// For `Tagged` layouts this returns the typeck-level discriminant; for
/// `NicheFilling` layouts it returns the decoded *variant index* as u128.
387 pub fn read_discriminant_value(
390 layout: TyLayout<'tcx>,
391 ) -> EvalResult<'tcx, u128> {
392 trace!("read_discriminant_value {:#?}", layout);
393 if layout.abi == layout::Abi::Uninhabited {
397 match layout.variants {
// Single-variant: the discriminant is statically known (non-ADTs fall
// back to the elided `map_or` default).
398 layout::Variants::Single { index } => {
399 let discr_val = layout.ty.ty_adt_def().map_or(
401 |def| def.discriminant_for_variant(*self.tcx, index).val);
402 return Ok(discr_val);
404 layout::Variants::Tagged { .. } |
405 layout::Variants::NicheFilling { .. } => {},
// The tag/niche is field 0 of the enum layout.
407 let discr_place_val = self.read_place(place)?;
408 let (discr_val, discr) = self.read_field(discr_place_val, None, mir::Field::new(0), layout)?;
409 trace!("discr value: {:?}, {:?}", discr_val, discr);
410 let raw_discr = self.value_to_scalar(ValTy {
414 let discr_val = match layout.variants {
415 layout::Variants::Single { .. } => bug!(),
416 // FIXME: should we catch invalid discriminants here?
417 layout::Variants::Tagged { .. } => {
418 if discr.ty.is_signed() {
419 let i = raw_discr.to_bits(discr.size)? as i128;
420 // going from layout tag type to typeck discriminant type
421 // requires first sign extending with the layout discriminant
422 let shift = 128 - discr.size.bits();
423 let sexted = (i << shift) >> shift;
424 // and then zeroing with the typeck discriminant type
425 let discr_ty = layout
427 .ty_adt_def().expect("tagged layout corresponds to adt")
430 let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
431 let shift = 128 - discr_ty.size().bits();
432 let truncatee = sexted as u128;
433 (truncatee << shift) >> shift
435 raw_discr.to_bits(discr.size)?
438 layout::Variants::NicheFilling {
444 let variants_start = *niche_variants.start() as u128;
445 let variants_end = *niche_variants.end() as u128;
// NOTE(review): this arm (lines 448-450) appears to handle a pointer-
// valued niche (match head elided) — only a single niched variant at
// niche_start 0 can be represented then; confirm against full source.
448 assert!(niche_start == 0);
449 assert!(variants_start == variants_end);
450 dataful_variant as u128
452 Scalar::Bits { bits: raw_discr, size } => {
453 assert_eq!(size as u64, discr.size.bytes());
// Shift the niche value into the variant-index range; out-of-range
// values denote the dataful variant.
454 let discr = raw_discr.wrapping_sub(niche_start)
455 .wrapping_add(variants_start);
456 if variants_start <= discr && discr <= variants_end {
459 dataful_variant as u128
/// Writes the tag/niche encoding of `variant_index` into the enum at
/// `dest` (of type `dest_ty`), according to the layout's variant scheme.
470 pub fn write_discriminant_value(
474 variant_index: usize,
475 ) -> EvalResult<'tcx> {
476 let layout = self.layout_of(dest_ty)?;
478 match layout.variants {
// Single-variant layouts store no tag; writing any other index is only
// legal if that variant is uninhabited.
479 layout::Variants::Single { index } => {
480 if index != variant_index {
481 // If the layout of an enum is `Single`, all
482 // other variants are necessarily uninhabited.
483 assert_eq!(layout.for_variant(&self, variant_index).abi,
484 layout::Abi::Uninhabited);
487 layout::Variants::Tagged { ref tag, .. } => {
488 let discr_val = dest_ty.ty_adt_def().unwrap()
489 .discriminant_for_variant(*self.tcx, variant_index)
492 // raw discriminants for enums are isize or bigger during
493 // their computation, but the in-memory tag is the smallest possible
// Truncate the discriminant to the in-memory tag size via shift pair.
495 let size = tag.value.size(self.tcx.tcx);
496 let shift = 128 - size.bits();
497 let discr_val = (discr_val << shift) >> shift;
// The tag is field 0 of the enum layout (mirrors the read path).
499 let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
500 self.write_scalar(discr_dest, Scalar::Bits {
502 size: size.bytes() as u8,
505 layout::Variants::NicheFilling {
// The dataful variant needs no write — its data occupies the niche;
// all other variants encode as (index - start) + niche_start.
511 if variant_index != dataful_variant {
512 let (niche_dest, niche) =
513 self.place_field(dest, mir::Field::new(0), layout)?;
514 let niche_value = ((variant_index - niche_variants.start()) as u128)
515 .wrapping_add(niche_start);
516 self.write_scalar(niche_dest, Scalar::Bits {
518 size: niche.size.bytes() as u8,
/// Allocates the bytes of `s` in interpreter memory and returns a
/// pointer+length (fat pointer) value to them.
527 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
528 let ptr = self.memory.allocate_bytes(s.as_bytes());
529 Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
/// Converts a typeck-level `ConstValue` into an interpreter `Value`,
/// evaluating unevaluated constants and materializing by-ref constants
/// into interpreter memory.
532 pub fn const_to_value(
534 val: ConstValue<'tcx>,
535 ) -> EvalResult<'tcx, Value> {
537 ConstValue::Unevaluated(def_id, substs) => {
538 let instance = self.resolve(def_id, substs)?;
539 self.read_global_as_value(GlobalId {
544 ConstValue::ByRef(alloc, offset) => {
545 // FIXME: Allocate new AllocId for all constants inside
// Clones the constant's allocation into interpreter memory and points
// `offset` bytes into the fresh copy.
546 let id = self.memory.allocate_value(alloc.clone(), MemoryKind::Stack)?;
547 Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align))
549 ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a.into(), b.into())),
550 ConstValue::Scalar(val) => Ok(Value::Scalar(val.into())),
555 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
/// Overwrites the value of a live local in this frame; writing to a dead
/// local is an error.
556 pub(super) fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
557 match self.locals[local] {
558 LocalValue::Dead => err!(DeadLocal),
559 LocalValue::Live(ref mut local) => {
566 /// Returns the old value of the local
/// after marking its storage dead (StorageDead).
567 pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
568 trace!("{:?} is now dead", local);
570 mem::replace(&mut self.locals[local], LocalValue::Dead)