let normalized_op = if normalize {
ecx.try_read_immediate(op)?
} else {
- match op.op {
+ match *op {
Operand::Indirect(mplace) => Err(mplace),
Operand::Immediate(val) => Ok(val)
}
Ok(ty::Const { val, ty: op.layout.ty })
}
-pub fn lazy_const_to_op<'tcx>(
- ecx: &CompileTimeEvalContext<'_, '_, 'tcx>,
- cnst: ty::LazyConst<'tcx>,
- ty: ty::Ty<'tcx>,
-) -> EvalResult<'tcx, OpTy<'tcx>> {
- let op = ecx.const_value_to_op(cnst)?;
- Ok(OpTy { op, layout: ecx.layout_of(ty)? })
-}
-
fn eval_body_and_ecx<'a, 'mir, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
cid: GlobalId<'tcx>,
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env);
let result = (|| {
// get the operand again
- let op = lazy_const_to_op(&ecx, ty::LazyConst::Evaluated(value), value.ty)?;
+ let op = ecx.lazy_const_to_op(ty::LazyConst::Evaluated(value), value.ty)?;
// downcast
let down = match variant {
None => op,
) -> EvalResult<'tcx, VariantIdx> {
trace!("const_variant_index: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env);
- let op = lazy_const_to_op(&ecx, ty::LazyConst::Evaluated(val), val.ty)?;
+ let op = ecx.lazy_const_to_op(ty::LazyConst::Evaluated(val), val.ty)?;
Ok(ecx.read_discriminant(op)?.1)
}
use rustc::mir::CastKind;
use rustc_apfloat::Float;
-use super::{EvalContext, Machine, PlaceTy, OpTy, Immediate};
+use super::{EvalContext, Machine, PlaceTy, OpTy, ImmTy, Immediate};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
assert_eq!(src.layout.fields.offset(i).bytes(), 0);
assert_eq!(src_field_layout.size, src.layout.size);
// just sawp out the layout
- OpTy { op: src.op, layout: src_field_layout }
+ OpTy::from(ImmTy { imm: src.to_immediate(), layout: src_field_layout })
}
};
if src_field.layout.ty == dst_field.layout.ty {
ConstValue, Pointer, Scalar,
EvalResult, EvalErrorKind,
};
-use super::{EvalContext, Machine, MemPlace, MPlaceTy, MemoryKind};
+use super::{
+ EvalContext, Machine, AllocMap, Allocation, AllocationExtra,
+ MemPlace, MPlaceTy, PlaceTy, Place, MemoryKind,
+};
pub use rustc::mir::interpret::ScalarMaybeUndef;
/// A `Value` represents a single immediate self-contained Rust value.
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ImmTy<'tcx, Tag=()> {
- immediate: Immediate<Tag>,
+ crate imm: Immediate<Tag>, // ideally we'd make this private, but const_prop needs this
pub layout: TyLayout<'tcx>,
}
type Target = Immediate<Tag>;
#[inline(always)]
fn deref(&self) -> &Immediate<Tag> {
- &self.immediate
+ &self.imm
}
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct OpTy<'tcx, Tag=()> {
- crate op: Operand<Tag>, // ideally we'd make this private, but const_prop needs this
+ op: Operand<Tag>,
pub layout: TyLayout<'tcx>,
}
#[inline(always)]
fn from(val: ImmTy<'tcx, Tag>) -> Self {
OpTy {
- op: Operand::Immediate(val.immediate),
+ op: Operand::Immediate(val.imm),
layout: val.layout
}
}
&self,
op: OpTy<'tcx, M::PointerTag>
) -> EvalResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
- if let Ok(immediate) = self.try_read_immediate(op)? {
- Ok(ImmTy { immediate, layout: op.layout })
+ if let Ok(imm) = self.try_read_immediate(op)? {
+ Ok(ImmTy { imm, layout: op.layout })
} else {
bug!("primitive read failed for type: {:?}", op.layout.ty);
}
Ok(OpTy { op, layout })
}
+ /// Every place can be read from, so we can turn them into an operand
+ #[inline(always)]
+ pub fn place_to_op(
+ &self,
+ place: PlaceTy<'tcx, M::PointerTag>
+ ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+ let op = match *place {
+ Place::Ptr(mplace) => {
+ Operand::Indirect(mplace)
+ }
+ Place::Local { frame, local } =>
+ *self.stack[frame].locals[local].access()?
+ };
+ Ok(OpTy { op, layout: place.layout })
+ }
+
// Evaluate a place with the goal of reading from it. This lets us sometimes
// avoid allocations.
fn eval_place_to_op(
.collect()
}
- // Used when miri runs into a constant, and by CTFE.
- // FIXME: CTFE should use allocations, then we can make this private (embed it into
- // `eval_operand`, ideally).
- pub(crate) fn const_value_to_op(
+ // Used when Miri runs into a constant, and (indirectly through lazy_const_to_op) by CTFE.
+ fn const_value_to_op(
&self,
val: ty::LazyConst<'tcx>,
) -> EvalResult<'tcx, Operand<M::PointerTag>> {
}
}
+
+impl<'a, 'mir, 'tcx, M> EvalContext<'a, 'mir, 'tcx, M>
+where
+ M: Machine<'a, 'mir, 'tcx, PointerTag=()>,
+ // FIXME: Working around https://github.com/rust-lang/rust/issues/24159
+ M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<(), M::AllocExtra>)>,
+ M::AllocExtra: AllocationExtra<(), M::MemoryExtra>,
+{
+ // FIXME: CTFE should use allocations, then we can remove this.
+ pub(crate) fn lazy_const_to_op(
+ &self,
+ cnst: ty::LazyConst<'tcx>,
+ ty: ty::Ty<'tcx>,
+ ) -> EvalResult<'tcx, OpTy<'tcx>> {
+ let op = self.const_value_to_op(cnst)?;
+ Ok(OpTy { op, layout: self.layout_of(ty)? })
+ }
+}
}
}
-impl<'tcx, Tag: ::std::fmt::Debug> OpTy<'tcx, Tag> {
+impl<'tcx, Tag: ::std::fmt::Debug + Copy> OpTy<'tcx, Tag> {
#[inline(always)]
pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Immediate<Tag>> {
- match self.op {
+ match *self {
Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
Operand::Immediate(imm) => Err(imm),
}
Deref => self.deref_operand(base.into())?,
Index(local) => {
- let n = *self.frame().locals[local].access()?;
- let n_layout = self.layout_of(self.tcx.types.usize)?;
- let n = self.read_scalar(OpTy { op: n, layout: n_layout })?;
+ let layout = self.layout_of(self.tcx.types.usize)?;
+ let n = self.access_local(self.frame(), local, Some(layout))?;
+ let n = self.read_scalar(n)?;
let n = n.to_bits(self.tcx.data_layout.pointer_size)?;
self.mplace_field(base, u64::try_from(n).unwrap())?
}
Ok(())
}
- /// Every place can be read from, so we can turm them into an operand
- #[inline(always)]
- pub fn place_to_op(
- &self,
- place: PlaceTy<'tcx, M::PointerTag>
- ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
- let op = match place.place {
- Place::Ptr(mplace) => {
- Operand::Indirect(mplace)
- }
- Place::Local { frame, local } =>
- *self.stack[frame].locals[local].access()?
- };
- Ok(OpTy { op, layout: place.layout })
- }
-
pub fn raw_const_to_mplace(
&self,
raw: RawConst<'tcx>,
use rustc::mir::interpret::{EvalResult, PointerArithmetic, EvalErrorKind, Scalar};
use super::{
- EvalContext, Machine, Immediate, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
+ EvalContext, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let mut args = args.to_vec();
let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
- args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?;
- args[0].op = Operand::Immediate(Immediate::Scalar(ptr.ptr.into())); // strip vtable
+ args[0] = OpTy::from(ImmTy { // strip vtable
+ layout: self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?,
+ imm: Immediate::Scalar(ptr.ptr.into())
+ });
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
_ => (instance, place),
};
- let arg = OpTy {
- op: Operand::Immediate(place.to_ref()),
+ let arg = ImmTy {
+ imm: place.to_ref(),
layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
};
instance,
span,
Abi::Rust,
- &[arg],
+ &[arg.into()],
Some(dest.into()),
Some(target),
)
let (ty, poly_trait_ref) = self.tcx.erase_regions(&(ty, poly_trait_ref));
if let Some(&vtable) = self.vtables.get(&(ty, poly_trait_ref)) {
+ // This means we guarantee that there are no duplicate vtables, we will
+ // always use the same vtable for the same (Type, Trait) combination.
+ // That's not what happens in rustc, but emulating per-crate deduplication
+ // does not sound like it actually makes anything any better.
return Ok(Pointer::from(vtable).with_default_tag());
}
HasTyCtxt, TargetDataLayout, HasDataLayout,
};
-use crate::interpret::{self, EvalContext, ScalarMaybeUndef, Immediate, OpTy, MemoryKind};
+use crate::interpret::{EvalContext, ScalarMaybeUndef, Immediate, OpTy, ImmTy, MemoryKind};
use crate::const_eval::{
CompileTimeInterpreter, error_to_const_error, eval_promoted, mk_eval_cx,
- lazy_const_to_op,
};
use crate::transform::{MirPass, MirSource};
source_info: SourceInfo,
) -> Option<Const<'tcx>> {
self.ecx.tcx.span = source_info.span;
- match lazy_const_to_op(&self.ecx, *c.literal, c.ty) {
+ match self.ecx.lazy_const_to_op(*c.literal, c.ty) {
Ok(op) => {
Some((op, c.span))
},
Rvalue::Len(_) => None,
Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some((
- OpTy {
- op: interpret::Operand::Immediate(Immediate::Scalar(
+ ImmTy {
+ imm: Immediate::Scalar(
Scalar::Bits {
bits: n as u128,
size: self.tcx.data_layout.pointer_size.bytes() as u8,
}.into()
- )),
+ ),
layout: self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
- },
+ }.into(),
span,
)))
}
// Now run the actual operation.
this.ecx.unary_op(op, prim, arg.layout)
})?;
- let res = OpTy {
- op: interpret::Operand::Immediate(Immediate::Scalar(val.into())),
+ let res = ImmTy {
+ imm: Immediate::Scalar(val.into()),
layout: place_layout,
};
- Some((res, span))
+ Some((res.into(), span))
}
Rvalue::CheckedBinaryOp(op, ref left, ref right) |
Rvalue::BinaryOp(op, ref left, ref right) => {
}
Immediate::Scalar(val.into())
};
- let res = OpTy {
- op: interpret::Operand::Immediate(val),
+ let res = ImmTy {
+ imm: val,
layout: place_layout,
};
- Some((res, span))
+ Some((res.into(), span))
},
}
}