ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
}
-impl<'tcx, Tag> Immediate<Tag> {
- #[inline]
- pub fn from_scalar(val: Scalar<Tag>) -> Self {
- Immediate::Scalar(ScalarMaybeUndef::Scalar(val))
+impl<Tag> From<ScalarMaybeUndef<Tag>> for Immediate<Tag> {
+ #[inline(always)]
+ fn from(val: ScalarMaybeUndef<Tag>) -> Self {
+ Immediate::Scalar(val)
+ }
+}
+
+impl<Tag> From<Scalar<Tag>> for Immediate<Tag> {
+ #[inline(always)]
+ fn from(val: Scalar<Tag>) -> Self {
+ Immediate::Scalar(val.into())
}
+}
+impl<'tcx, Tag> Immediate<Tag> {
pub fn new_slice(
val: Scalar<Tag>,
len: u64,
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ImmTy<'tcx, Tag=()> {
- pub imm: Immediate<Tag>,
+ pub(crate) imm: Immediate<Tag>,
pub layout: TyLayout<'tcx>,
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct OpTy<'tcx, Tag=()> {
- op: Operand<Tag>,
+ op: Operand<Tag>, // Keep this private, it helps enforce invariants
pub layout: TyLayout<'tcx>,
}
}
}
-impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag>
-{
+impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
#[inline]
pub fn from_scalar(val: Scalar<Tag>, layout: TyLayout<'tcx>) -> Self {
- ImmTy { imm: Immediate::from_scalar(val), layout }
+ ImmTy { imm: val.into(), layout }
+ }
+
+ #[inline]
+ pub fn from_uint(i: impl Into<u128>, layout: TyLayout<'tcx>) -> Self {
+ Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
+ }
+
+ #[inline]
+ pub fn from_int(i: impl Into<i128>, layout: TyLayout<'tcx>) -> Self {
+ Self::from_scalar(Scalar::from_int(i, layout.size), layout)
}
#[inline]
return Ok(None);
}
- let ptr = match self.check_mplace_access(mplace, None)? {
+ let ptr = match self.check_mplace_access(mplace, None)
+ .expect("places should be checked on creation")
+ {
Some(ptr) => ptr,
None => return Ok(Some(ImmTy { // zero-sized type
- imm: Immediate::Scalar(Scalar::zst().into()),
+ imm: Scalar::zst().into(),
layout: mplace.layout,
})),
};
.get(ptr.alloc_id)?
.read_scalar(self, ptr, mplace.layout.size)?;
Ok(Some(ImmTy {
- imm: Immediate::Scalar(scalar),
+ imm: scalar.into(),
layout: mplace.layout,
}))
}
let field = field.try_into().unwrap();
let field_layout = op.layout.field(self, field)?;
if field_layout.is_zst() {
- let immediate = Immediate::Scalar(Scalar::zst().into());
+ let immediate = Scalar::zst().into();
return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
}
let offset = op.layout.fields.offset(field);
// extract fields from types with `ScalarPair` ABI
Immediate::ScalarPair(a, b) => {
let val = if offset.bytes() == 0 { a } else { b };
- Immediate::Scalar(val)
+ Immediate::from(val)
},
Immediate::Scalar(val) =>
bug!("field access on non aggregate {:#?}, {:#?}", val, op.layout),
Deref => self.deref_operand(base)?.into(),
Subslice { .. } | ConstantIndex { .. } | Index(_) => if base.layout.is_zst() {
OpTy {
- op: Operand::Immediate(Immediate::Scalar(Scalar::zst().into())),
+ op: Operand::Immediate(Scalar::zst().into()),
// the actual index doesn't matter, so we just pick a convenient one like 0
layout: base.layout.field(self, 0)?,
}
let layout = self.layout_of_local(frame, local, layout)?;
let op = if layout.is_zst() {
// Do not read from ZST, they might not be initialized
- Operand::Immediate(Immediate::Scalar(Scalar::zst().into()))
+ Operand::Immediate(Scalar::zst().into())
} else {
frame.locals[local].access()?
};
// avoid allocations.
pub(super) fn eval_place_to_op(
&self,
- mir_place: &mir::Place<'tcx>,
+ place: &mir::Place<'tcx>,
layout: Option<TyLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
use rustc::mir::PlaceBase;
- mir_place.iterate(|place_base, place_projection| {
- let mut op = match place_base {
- PlaceBase::Local(mir::RETURN_PLACE) =>
- throw_unsup!(ReadFromReturnPointer),
- PlaceBase::Local(local) => {
- // Do not use the layout passed in as argument if the base we are looking at
- // here is not the entire place.
- // FIXME use place_projection.is_empty() when is available
- let layout = if mir_place.projection.is_none() {
- layout
- } else {
- None
- };
-
- self.access_local(self.frame(), *local, layout)?
- }
- PlaceBase::Static(place_static) => {
- self.eval_static_to_mplace(place_static)?.into()
- }
- };
+ let mut op = match &place.base {
+ PlaceBase::Local(mir::RETURN_PLACE) =>
+ throw_unsup!(ReadFromReturnPointer),
+ PlaceBase::Local(local) => {
+ // Do not use the layout passed in as argument if the base we are looking at
+ // here is not the entire place.
                // FIXME use place_projection.is_empty() when it is available
+ let layout = if place.projection.is_empty() {
+ layout
+ } else {
+ None
+ };
- for proj in place_projection {
- op = self.operand_projection(op, &proj.elem)?
+ self.access_local(self.frame(), *local, layout)?
+ }
+ PlaceBase::Static(place_static) => {
+ self.eval_static_to_mplace(&place_static)?.into()
}
+ };
- trace!("eval_place_to_op: got {:?}", *op);
- Ok(op)
- })
+ for elem in place.projection.iter() {
+ op = self.operand_projection(op, elem)?
+ }
+
+ trace!("eval_place_to_op: got {:?}", *op);
+ Ok(op)
}
/// Evaluate the operand, returning a place where you can then find the data.
Move(ref place) =>
self.eval_place_to_op(place, layout)?,
- Constant(ref constant) => self.eval_const_to_op(constant.literal, layout)?,
+ Constant(ref constant) => {
+ let val = self.subst_from_frame_and_normalize_erasing_regions(constant.literal);
+ self.eval_const_to_op(val, layout)?
+ }
};
trace!("{:?}: {:?}", mir_op, *op);
Ok(op)
// Used when the miri-engine runs into a constant and for extracting information from constants
// in patterns via the `const_eval` module
+ /// The `val` and `layout` are assumed to already be in our interpreter
+ /// "universe" (param_env).
crate fn eval_const_to_op(
&self,
val: &'tcx ty::Const<'tcx>,
// Early-return cases.
match val.val {
ConstValue::Param(_) =>
- // FIXME(oli-obk): try to monomorphize
throw_inval!(TooGeneric),
ConstValue::Unevaluated(def_id, substs) => {
let instance = self.resolve(def_id, substs)?;
}
// Other cases need layout.
let layout = from_known_layout(layout, || {
- self.layout_of(self.monomorphize(val.ty)?)
+ self.layout_of(val.ty)
})?;
let op = match val.val {
- ConstValue::ByRef { offset, align, alloc } => {
+ ConstValue::ByRef { alloc, offset } => {
let id = self.tcx.alloc_map.lock().create_memory_alloc(alloc);
// We rely on mutability being set correctly in that allocation to prevent writes
// where none should happen.
let ptr = self.tag_static_base_pointer(Pointer::new(id, offset));
- Operand::Indirect(MemPlace::from_ptr(ptr, align))
+ Operand::Indirect(MemPlace::from_ptr(ptr, layout.align.abi))
},
ConstValue::Scalar(x) =>
- Operand::Immediate(Immediate::Scalar(tag_scalar(x).into())),
+ Operand::Immediate(tag_scalar(x).into()),
ConstValue::Slice { data, start, end } => {
// We rely on mutability being set correctly in `data` to prevent writes
// where none should happen.
// post-process
Ok(match *discr_kind {
layout::DiscriminantKind::Tag => {
- let bits_discr = match raw_discr.to_bits(discr_val.layout.size) {
- Ok(raw_discr) => raw_discr,
- Err(_) =>
- throw_unsup!(InvalidDiscriminant(raw_discr.erase_tag())),
- };
+ let bits_discr = raw_discr
+ .not_undef()
+ .and_then(|raw_discr| self.force_bits(raw_discr, discr_val.layout.size))
+ .map_err(|_| err_unsup!(InvalidDiscriminant(raw_discr.erase_tag())))?;
let real_discr = if discr_val.layout.ty.is_signed() {
// going from layout tag type to typeck discriminant type
// requires first sign extending with the layout discriminant