From: bors Date: Fri, 3 Aug 2018 14:28:12 +0000 (+0000) Subject: Auto merge of #52712 - oli-obk:const_eval_cleanups, r=RalfJung X-Git-Url: https://git.lizzy.rs/?a=commitdiff_plain;h=59fa6bd6c14eebd213884da08a33639b4c848bb3;hp=88e0ff14a81a2122222e32cf7c285f585c516cfd;p=rust.git Auto merge of #52712 - oli-obk:const_eval_cleanups, r=RalfJung Reintroduce `Undef` and properly check constant value sizes r? @RalfJung cc @eddyb basically all kinds of silent failures that never occurred are assertions now --- diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index cb685f83aba..f13e26fee3e 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -392,6 +392,11 @@ fn hash_stable(&self, } } +impl_stable_hash_for!(enum mir::interpret::ScalarMaybeUndef { + Scalar(v), + Undef +}); + impl_stable_hash_for!(enum mir::interpret::Value { Scalar(v), ScalarPair(a, b), @@ -466,9 +471,9 @@ fn hash_stable(&self, mem::discriminant(self).hash_stable(hcx, hasher); match *self { - Bits { bits, defined } => { + Bits { bits, size } => { bits.hash_stable(hcx, hasher); - defined.hash_stable(hcx, hasher); + size.hash_stable(hcx, hasher); }, Ptr(ptr) => ptr.hash_stable(hcx, hasher), } diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs index 4164fe3fd93..a0980b06230 100644 --- a/src/librustc/mir/interpret/mod.rs +++ b/src/librustc/mir/interpret/mod.rs @@ -13,7 +13,7 @@ macro_rules! err { FrameInfo, ConstEvalResult, }; -pub use self::value::{Scalar, Value, ConstValue}; +pub use self::value::{Scalar, Value, ConstValue, ScalarMaybeUndef}; use std::fmt; use mir; diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs index ffd138c9c48..f569f4def14 100644 --- a/src/librustc/mir/interpret/value.rs +++ b/src/librustc/mir/interpret/value.rs @@ -15,22 +15,24 @@ pub enum ConstValue<'tcx> { /// to allow HIR creation to happen for everything before needing to be able to run constant /// evaluation Unevaluated(DefId, &'tcx Substs<'tcx>), - /// Used only for types with layout::abi::Scalar ABI and ZSTs which use Scalar::undef() + /// Used only for types with layout::abi::Scalar ABI and ZSTs Scalar(Scalar), /// Used only for types with layout::abi::ScalarPair - ScalarPair(Scalar, Scalar), + /// + /// The second field may be undef in case of `Option::None` + ScalarPair(Scalar, ScalarMaybeUndef), /// Used only for the remaining cases. An allocation + offset into the allocation ByRef(&'tcx Allocation, Size), } impl<'tcx> ConstValue<'tcx> { #[inline] - pub fn from_byval_value(val: Value) -> Self { - match val { + pub fn from_byval_value(val: Value) -> EvalResult<'static, Self> { + Ok(match val { Value::ByRef(..) => bug!(), - Value::ScalarPair(a, b) => ConstValue::ScalarPair(a, b), - Value::Scalar(val) => ConstValue::Scalar(val), - } + Value::ScalarPair(a, b) => ConstValue::ScalarPair(a.unwrap_or_err()?, b), + Value::Scalar(val) => ConstValue::Scalar(val.unwrap_or_err()?), + }) } #[inline] @@ -38,18 +40,13 @@ pub fn to_byval_value(&self) -> Option { match *self { ConstValue::Unevaluated(..) | ConstValue::ByRef(..) 
=> None, - ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a, b)), - ConstValue::Scalar(val) => Some(Value::Scalar(val)), + ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a.into(), b)), + ConstValue::Scalar(val) => Some(Value::Scalar(val.into())), } } #[inline] - pub fn from_scalar(val: Scalar) -> Self { - ConstValue::Scalar(val) - } - - #[inline] - pub fn to_scalar(&self) -> Option { + pub fn try_to_scalar(&self) -> Option { match *self { ConstValue::Unevaluated(..) | ConstValue::ByRef(..) | @@ -60,12 +57,12 @@ pub fn to_scalar(&self) -> Option { #[inline] pub fn to_bits(&self, size: Size) -> Option { - self.to_scalar()?.to_bits(size).ok() + self.try_to_scalar()?.to_bits(size).ok() } #[inline] pub fn to_ptr(&self) -> Option { - self.to_scalar()?.to_ptr().ok() + self.try_to_scalar()?.to_ptr().ok() } } @@ -81,8 +78,8 @@ pub fn to_ptr(&self) -> Option { #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)] pub enum Value { ByRef(Scalar, Align), - Scalar(Scalar), - ScalarPair(Scalar, Scalar), + Scalar(ScalarMaybeUndef), + ScalarPair(ScalarMaybeUndef, ScalarMaybeUndef), } impl<'tcx> ty::TypeFoldable<'tcx> for Value { @@ -98,23 +95,27 @@ impl<'tcx> Scalar { pub fn ptr_null(cx: C) -> Self { Scalar::Bits { bits: 0, - defined: cx.data_layout().pointer_size.bits() as u8, + size: cx.data_layout().pointer_size.bytes() as u8, } } + pub fn to_value_with_len(self, len: u64, cx: C) -> Value { + ScalarMaybeUndef::Scalar(self).to_value_with_len(len, cx) + } + + pub fn to_value_with_vtable(self, vtable: Pointer) -> Value { + ScalarMaybeUndef::Scalar(self).to_value_with_vtable(vtable) + } + pub fn ptr_signed_offset(self, i: i64, cx: C) -> EvalResult<'tcx, Self> { let layout = cx.data_layout(); match self { - Scalar::Bits { bits, defined } => { - let pointer_size = layout.pointer_size.bits() as u8; - if defined < pointer_size { - err!(ReadUndefBytes) - } else { - Ok(Scalar::Bits { - bits: layout.signed_offset(bits as u64, i)? as u128, - defined: pointer_size, - }) - } + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, layout.pointer_size.bytes()); + Ok(Scalar::Bits { + bits: layout.signed_offset(bits as u64, i)? as u128, + size, + }) } Scalar::Ptr(ptr) => ptr.signed_offset(i, layout).map(Scalar::Ptr), } @@ -123,65 +124,43 @@ pub fn ptr_signed_offset(self, i: i64, cx: C) -> EvalResult<'t pub fn ptr_offset(self, i: Size, cx: C) -> EvalResult<'tcx, Self> { let layout = cx.data_layout(); match self { - Scalar::Bits { bits, defined } => { - let pointer_size = layout.pointer_size.bits() as u8; - if defined < pointer_size { - err!(ReadUndefBytes) - } else { - Ok(Scalar::Bits { - bits: layout.offset(bits as u64, i.bytes())? as u128, - defined: pointer_size, - }) - } + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, layout.pointer_size.bytes()); + Ok(Scalar::Bits { + bits: layout.offset(bits as u64, i.bytes())? 
as u128, + size, + }) } Scalar::Ptr(ptr) => ptr.offset(i, layout).map(Scalar::Ptr), } } - pub fn ptr_wrapping_signed_offset(self, i: i64, cx: C) -> EvalResult<'tcx, Self> { + pub fn ptr_wrapping_signed_offset(self, i: i64, cx: C) -> Self { let layout = cx.data_layout(); match self { - Scalar::Bits { bits, defined } => { - let pointer_size = layout.pointer_size.bits() as u8; - if defined < pointer_size { - err!(ReadUndefBytes) - } else { - Ok(Scalar::Bits { - bits: layout.wrapping_signed_offset(bits as u64, i) as u128, - defined: pointer_size, - }) - } + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, layout.pointer_size.bytes()); + Scalar::Bits { + bits: layout.wrapping_signed_offset(bits as u64, i) as u128, + size, + } } - Scalar::Ptr(ptr) => Ok(Scalar::Ptr(ptr.wrapping_signed_offset(i, layout))), + Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, layout)), } } - pub fn is_null_ptr(self, cx: C) -> EvalResult<'tcx, bool> { + pub fn is_null_ptr(self, cx: C) -> bool { match self { - Scalar::Bits { - bits, defined, - } => if defined < cx.data_layout().pointer_size.bits() as u8 { - err!(ReadUndefBytes) - } else { - Ok(bits == 0) + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, cx.data_layout().pointer_size.bytes()); + bits == 0 }, - Scalar::Ptr(_) => Ok(false), + Scalar::Ptr(_) => false, } } - pub fn to_value_with_len(self, len: u64, cx: C) -> Value { - Value::ScalarPair(self, Scalar::Bits { - bits: len as u128, - defined: cx.data_layout().pointer_size.bits() as u8, - }) - } - - pub fn to_value_with_vtable(self, vtable: Pointer) -> Value { - Value::ScalarPair(self, Scalar::Ptr(vtable)) - } - pub fn to_value(self) -> Value { - Value::Scalar(self) + Value::Scalar(ScalarMaybeUndef::Scalar(self)) } } @@ -199,8 +178,9 @@ fn from(ptr: Pointer) -> Self { pub enum Scalar { /// The raw bytes of a simple value. Bits { - /// The first `defined` number of bits are valid - defined: u8, + /// The first `size` bytes are the value. + /// Do not try to read less or more bytes that that + size: u8, bits: u128, }, @@ -210,25 +190,63 @@ pub enum Scalar { Ptr(Pointer), } -impl<'tcx> Scalar { - pub fn undef() -> Self { - Scalar::Bits { bits: 0, defined: 0 } +#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)] +pub enum ScalarMaybeUndef { + Scalar(Scalar), + Undef, +} + +impl From for ScalarMaybeUndef { + fn from(s: Scalar) -> Self { + ScalarMaybeUndef::Scalar(s) + } +} + +impl ScalarMaybeUndef { + pub fn unwrap_or_err(self) -> EvalResult<'static, Scalar> { + match self { + ScalarMaybeUndef::Scalar(scalar) => Ok(scalar), + ScalarMaybeUndef::Undef => err!(ReadUndefBytes), + } + } + + pub fn to_value_with_len(self, len: u64, cx: C) -> Value { + Value::ScalarPair(self, Scalar::Bits { + bits: len as u128, + size: cx.data_layout().pointer_size.bytes() as u8, + }.into()) } + pub fn to_value_with_vtable(self, vtable: Pointer) -> Value { + Value::ScalarPair(self, Scalar::Ptr(vtable).into()) + } + + pub fn ptr_offset(self, i: Size, cx: C) -> EvalResult<'tcx, Self> { + match self { + ScalarMaybeUndef::Scalar(scalar) => { + scalar.ptr_offset(i, cx).map(ScalarMaybeUndef::Scalar) + }, + ScalarMaybeUndef::Undef => Ok(ScalarMaybeUndef::Undef) + } + } +} + +impl<'tcx> Scalar { pub fn from_bool(b: bool) -> Self { - // FIXME: can we make defined `1`? 
- Scalar::Bits { bits: b as u128, defined: 8 } + Scalar::Bits { bits: b as u128, size: 1 } } pub fn from_char(c: char) -> Self { - Scalar::Bits { bits: c as u128, defined: 32 } + Scalar::Bits { bits: c as u128, size: 4 } } - pub fn to_bits(self, size: Size) -> EvalResult<'tcx, u128> { + pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> { match self { - Scalar::Bits { .. } if size.bits() == 0 => bug!("to_bits cannot be used with zsts"), - Scalar::Bits { bits, defined } if size.bits() <= defined as u64 => Ok(bits), - Scalar::Bits { .. } => err!(ReadUndefBytes), + Scalar::Bits { bits, size } => { + assert_eq!(target_size.bytes(), size as u64); + assert_ne!(size, 0, "to_bits cannot be used with zsts"); + Ok(bits) + } Scalar::Ptr(_) => err!(ReadPointerAsBytes), } } @@ -256,8 +274,8 @@ pub fn is_ptr(self) -> bool { pub fn to_bool(self) -> EvalResult<'tcx, bool> { match self { - Scalar::Bits { bits: 0, defined: 8 } => Ok(false), - Scalar::Bits { bits: 1, defined: 8 } => Ok(true), + Scalar::Bits { bits: 0, size: 1 } => Ok(false), + Scalar::Bits { bits: 1, size: 1 } => Ok(true), _ => err!(InvalidBool), } } diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 4bfb4c96497..dae5709ba11 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -17,7 +17,7 @@ use hir::def_id::DefId; use hir::{self, HirId, InlineAsm}; use middle::region; -use mir::interpret::{EvalErrorKind, Scalar, Value}; +use mir::interpret::{EvalErrorKind, Scalar, Value, ScalarMaybeUndef}; use mir::visit::MirVisitable; use rustc_apfloat::ieee::{Double, Single}; use rustc_apfloat::Float; @@ -1465,10 +1465,10 @@ pub fn fmt_successor_labels(&self) -> Vec> { .map(|&u| { let mut s = String::new(); print_miri_value( - Value::Scalar(Scalar::Bits { + Scalar::Bits { bits: u, - defined: size.bits() as u8, - }), + size: size.bytes() as u8, + }.to_value(), switch_ty, &mut s, ).unwrap(); @@ -2225,45 +2225,58 @@ pub fn fmt_const_val(fmt: &mut W, const_val: &ty::Const) -> fmt::Resul pub fn print_miri_value(value: Value, ty: Ty, f: &mut W) -> fmt::Result { use ty::TypeVariants::*; - match (value, &ty.sty) { - (Value::Scalar(Scalar::Bits { bits: 0, .. }), &TyBool) => write!(f, "false"), - (Value::Scalar(Scalar::Bits { bits: 1, .. }), &TyBool) => write!(f, "true"), - (Value::Scalar(Scalar::Bits { bits, .. }), &TyFloat(ast::FloatTy::F32)) => { - write!(f, "{}f32", Single::from_bits(bits)) - } - (Value::Scalar(Scalar::Bits { bits, .. }), &TyFloat(ast::FloatTy::F64)) => { - write!(f, "{}f64", Double::from_bits(bits)) - } - (Value::Scalar(Scalar::Bits { bits, .. }), &TyUint(ui)) => write!(f, "{:?}{}", bits, ui), - (Value::Scalar(Scalar::Bits { bits, .. }), &TyInt(i)) => { - let bit_width = ty::tls::with(|tcx| { - let ty = tcx.lift_to_global(&ty).unwrap(); - tcx.layout_of(ty::ParamEnv::empty().and(ty)) - .unwrap() - .size - .bits() - }); - let shift = 128 - bit_width; - write!(f, "{:?}{}", ((bits as i128) << shift) >> shift, i) - } - (Value::Scalar(Scalar::Bits { bits, .. }), &TyChar) => { - write!(f, "{:?}", ::std::char::from_u32(bits as u32).unwrap()) + // print some primitives + if let Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits, .. 
})) = value { + match ty.sty { + TyBool if bits == 0 => return write!(f, "false"), + TyBool if bits == 1 => return write!(f, "true"), + TyFloat(ast::FloatTy::F32) => return write!(f, "{}f32", Single::from_bits(bits)), + TyFloat(ast::FloatTy::F64) => return write!(f, "{}f64", Double::from_bits(bits)), + TyUint(ui) => return write!(f, "{:?}{}", bits, ui), + TyInt(i) => { + let bit_width = ty::tls::with(|tcx| { + let ty = tcx.lift_to_global(&ty).unwrap(); + tcx.layout_of(ty::ParamEnv::empty().and(ty)) + .unwrap() + .size + .bits() + }); + let shift = 128 - bit_width; + return write!(f, "{:?}{}", ((bits as i128) << shift) >> shift, i); + } + TyChar => return write!(f, "{:?}", ::std::char::from_u32(bits as u32).unwrap()), + _ => {}, } - (_, &TyFnDef(did, _)) => write!(f, "{}", item_path_str(did)), - ( - Value::ScalarPair(Scalar::Ptr(ptr), Scalar::Bits { bits: len, .. }), - &TyRef(_, &ty::TyS { sty: TyStr, .. }, _), - ) => ty::tls::with(|tcx| match tcx.alloc_map.lock().get(ptr.alloc_id) { - Some(interpret::AllocType::Memory(alloc)) => { - assert_eq!(len as usize as u128, len); - let slice = &alloc.bytes[(ptr.offset.bytes() as usize)..][..(len as usize)]; - let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri"); - write!(f, "{:?}", s) + } + // print function definitons + if let TyFnDef(did, _) = ty.sty { + return write!(f, "{}", item_path_str(did)); + } + // print string literals + if let Value::ScalarPair(ptr, len) = value { + if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = ptr { + if let ScalarMaybeUndef::Scalar(Scalar::Bits { bits: len, .. }) = len { + if let TyRef(_, &ty::TyS { sty: TyStr, .. }, _) = ty.sty { + return ty::tls::with(|tcx| { + let alloc = tcx.alloc_map.lock().get(ptr.alloc_id); + if let Some(interpret::AllocType::Memory(alloc)) = alloc { + assert_eq!(len as usize as u128, len); + let slice = &alloc + .bytes + [(ptr.offset.bytes() as usize)..] 
+ [..(len as usize)]; + let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri"); + write!(f, "{:?}", s) + } else { + write!(f, "pointer to erroneous constant {:?}, {:?}", ptr, len) + } + }); + } } - _ => write!(f, "pointer to erroneous constant {:?}, {:?}", ptr, len), - }), - _ => write!(f, "{:?}:{}", value, ty), + } } + // just raw dump everything else + write!(f, "{:?}:{}", value, ty) } fn item_path_str(def_id: DefId) -> String { diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index dd381888243..96b4edce86b 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -1887,22 +1887,13 @@ pub fn from_const_value( }) } - #[inline] - pub fn from_byval_value( - tcx: TyCtxt<'_, '_, 'tcx>, - val: Value, - ty: Ty<'tcx>, - ) -> &'tcx Self { - Self::from_const_value(tcx, ConstValue::from_byval_value(val), ty) - } - #[inline] pub fn from_scalar( tcx: TyCtxt<'_, '_, 'tcx>, val: Scalar, ty: Ty<'tcx>, ) -> &'tcx Self { - Self::from_const_value(tcx, ConstValue::from_scalar(val), ty) + Self::from_const_value(tcx, ConstValue::Scalar(val), ty) } #[inline] @@ -1918,12 +1909,12 @@ pub fn from_bits( let shift = 128 - size.bits(); let truncated = (bits << shift) >> shift; assert_eq!(truncated, bits, "from_bits called with untruncated value"); - Self::from_scalar(tcx, Scalar::Bits { bits, defined: size.bits() as u8 }, ty.value) + Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value) } #[inline] pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self { - Self::from_scalar(tcx, Scalar::undef(), ty) + Self::from_scalar(tcx, Scalar::Bits { bits: 0, size: 0 }, ty) } #[inline] @@ -1960,11 +1951,6 @@ pub fn to_byval_value(&self) -> Option { self.val.to_byval_value() } - #[inline] - pub fn to_scalar(&self) -> Option { - self.val.to_scalar() - } - #[inline] pub fn assert_bits( &self, diff --git a/src/librustc_codegen_llvm/mir/constant.rs b/src/librustc_codegen_llvm/mir/constant.rs index 341ed9df64b..267db4467c2 100644 --- a/src/librustc_codegen_llvm/mir/constant.rs +++ b/src/librustc_codegen_llvm/mir/constant.rs @@ -39,10 +39,12 @@ pub fn scalar_to_llvm( ) -> &'ll Value { let bitsize = if layout.is_bool() { 1 } else { layout.value.size(cx).bits() }; match cv { - Scalar::Bits { defined, .. } if (defined as u64) < bitsize || defined == 0 => { - C_undef(Type::ix(cx, bitsize)) + Scalar::Bits { size: 0, .. } => { + assert_eq!(0, layout.value.size(cx).bytes()); + C_undef(Type::ix(cx, 0)) }, - Scalar::Bits { bits, .. 
} => { + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, layout.value.size(cx).bytes()); let llval = C_uint_big(Type::ix(cx, bitsize), bits); if layout.value == layout::Pointer { unsafe { llvm::LLVMConstIntToPtr(llval, llty) } @@ -192,7 +194,7 @@ pub fn simd_shuffle_indices( mir::Field::new(field as usize), c, )?; - if let Some(prim) = field.to_scalar() { + if let Some(prim) = field.val.try_to_scalar() { let layout = bx.cx.layout_of(field_ty); let scalar = match layout.abi { layout::Abi::Scalar(ref x) => x, diff --git a/src/librustc_codegen_llvm/mir/operand.rs b/src/librustc_codegen_llvm/mir/operand.rs index 1296f5e4b14..f8166ee6491 100644 --- a/src/librustc_codegen_llvm/mir/operand.rs +++ b/src/librustc_codegen_llvm/mir/operand.rs @@ -10,7 +10,7 @@ use rustc::mir::interpret::ConstEvalErr; use rustc::mir; -use rustc::mir::interpret::ConstValue; +use rustc::mir::interpret::{ConstValue, ScalarMaybeUndef}; use rustc::ty; use rustc::ty::layout::{self, Align, LayoutOf, TyLayout}; use rustc_data_structures::indexed_vec::Idx; @@ -110,12 +110,16 @@ pub fn from_const(bx: &Builder<'a, 'll, 'tcx>, a_scalar, layout.scalar_pair_element_llvm_type(bx.cx, 0, true), ); - let b_llval = scalar_to_llvm( - bx.cx, - b, - b_scalar, - layout.scalar_pair_element_llvm_type(bx.cx, 1, true), - ); + let b_layout = layout.scalar_pair_element_llvm_type(bx.cx, 1, true); + let b_llval = match b { + ScalarMaybeUndef::Scalar(b) => scalar_to_llvm( + bx.cx, + b, + b_scalar, + b_layout, + ), + ScalarMaybeUndef::Undef => C_undef(b_layout), + }; OperandValue::Pair(a_llval, b_llval) }, ConstValue::ByRef(alloc, offset) => { diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 4a7225c3a76..70148fc9176 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -151,14 +151,14 @@ pub fn const_eval_literal( let trunc = |n| { let param_ty = self.param_env.and(self.tcx.lift_to_global(&ty).unwrap()); - let bit_width = self.tcx.layout_of(param_ty).unwrap().size.bits(); - trace!("trunc {} with size {} and shift {}", n, bit_width, 128 - bit_width); - let shift = 128 - bit_width; + let width = self.tcx.layout_of(param_ty).unwrap().size; + trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits()); + let shift = 128 - width.bits(); let result = (n << shift) >> shift; trace!("trunc result: {}", result); ConstValue::Scalar(Scalar::Bits { bits: result, - defined: bit_width as u8, + size: width.bytes() as u8, }) }; @@ -168,7 +168,7 @@ pub fn const_eval_literal( let s = s.as_str(); let id = self.tcx.allocate_bytes(s.as_bytes()); let value = Scalar::Ptr(id.into()).to_value_with_len(s.len() as u64, self.tcx); - ConstValue::from_byval_value(value) + ConstValue::from_byval_value(value).unwrap() }, LitKind::ByteStr(ref data) => { let id = self.tcx.allocate_bytes(data); @@ -176,7 +176,7 @@ pub fn const_eval_literal( }, LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits { bits: n as u128, - defined: 8, + size: 1, }), LitKind::Int(n, _) if neg => { let n = n as i128; @@ -194,14 +194,8 @@ pub fn const_eval_literal( }; parse_float(n, fty) } - LitKind::Bool(b) => ConstValue::Scalar(Scalar::Bits { - bits: b as u128, - defined: 8, - }), - LitKind::Char(c) => ConstValue::Scalar(Scalar::Bits { - bits: c as u128, - defined: 32, - }), + LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)), + LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)), }; ty::Const::from_const_value(self.tcx, lit, ty) } diff --git a/src/librustc_mir/hair/pattern/mod.rs 
b/src/librustc_mir/hair/pattern/mod.rs index 53511c1c127..d614131c526 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -19,7 +19,7 @@ use interpret::{const_val_field, const_variant_index, self}; use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability}; -use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, Value}; +use rustc::mir::interpret::{Scalar, GlobalId, ConstValue}; use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region}; use rustc::ty::subst::{Substs, Kind}; use rustc::hir::{self, PatKind, RangeEnd}; @@ -1080,8 +1080,9 @@ pub fn compare_const_vals<'a, 'tcx>( l.partial_cmp(&r) }, ty::TyInt(_) => { - let a = interpret::sign_extend(tcx, a, ty.value).expect("layout error for TyInt"); - let b = interpret::sign_extend(tcx, b, ty.value).expect("layout error for TyInt"); + let layout = tcx.layout_of(ty).ok()?; + let a = interpret::sign_extend(a, layout); + let b = interpret::sign_extend(b, layout); Some((a as i128).cmp(&(b as i128))) }, _ => Some(a.cmp(&b)), @@ -1090,17 +1091,24 @@ pub fn compare_const_vals<'a, 'tcx>( if let ty::TyRef(_, rty, _) = ty.value.sty { if let ty::TyStr = rty.sty { - match (a.to_byval_value(), b.to_byval_value()) { + match (a.val, b.val) { ( - Some(Value::ScalarPair( + ConstValue::ScalarPair( Scalar::Ptr(ptr_a), len_a, - )), - Some(Value::ScalarPair( + ), + ConstValue::ScalarPair( Scalar::Ptr(ptr_b), len_b, - )) + ), ) if ptr_a.offset.bytes() == 0 && ptr_b.offset.bytes() == 0 => { + let len_a = len_a.unwrap_or_err().ok(); + let len_b = len_b.unwrap_or_err().ok(); + if len_a.is_none() || len_b.is_none() { + tcx.sess.struct_err("str slice len is undef").delay_as_bug(); + } + let len_a = len_a?; + let len_b = len_b?; if let Ok(len_a) = len_a.to_bits(tcx.data_layout.pointer_size) { if let Ok(len_b) = len_b.to_bits(tcx.data_layout.pointer_size) { if len_a == len_b { @@ -1142,7 +1150,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind, let s = s.as_str(); let id = tcx.allocate_bytes(s.as_bytes()); let value = Scalar::Ptr(id.into()).to_value_with_len(s.len() as u64, tcx); - ConstValue::from_byval_value(value) + ConstValue::from_byval_value(value).unwrap() }, LitKind::ByteStr(ref data) => { let id = tcx.allocate_bytes(data); @@ -1150,7 +1158,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind, }, LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits { bits: n as u128, - defined: 8, + size: 1, }), LitKind::Int(n, _) => { enum Int { @@ -1188,10 +1196,10 @@ enum Int { Int::Signed(IntTy::I128)| Int::Unsigned(UintTy::U128) => n, _ => bug!(), }; - let defined = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size.bits() as u8; + let size = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size.bytes() as u8; ConstValue::Scalar(Scalar::Bits { bits: n, - defined, + size, }) }, LitKind::Float(n, fty) => { @@ -1204,14 +1212,8 @@ enum Int { }; parse_float(n, fty, neg).map_err(|_| LitToConstError::UnparseableFloat)? 
} - LitKind::Bool(b) => ConstValue::Scalar(Scalar::Bits { - bits: b as u128, - defined: 8, - }), - LitKind::Char(c) => ConstValue::Scalar(Scalar::Bits { - bits: c as u128, - defined: 32, - }), + LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)), + LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)), }; Ok(ty::Const::from_const_value(tcx, lit, ty)) } @@ -1224,7 +1226,7 @@ pub fn parse_float<'tcx>( let num = num.as_str(); use rustc_apfloat::ieee::{Single, Double}; use rustc_apfloat::Float; - let (bits, defined) = match fty { + let (bits, size) = match fty { ast::FloatTy::F32 => { num.parse::().map_err(|_| ())?; let mut f = num.parse::().unwrap_or_else(|e| { @@ -1233,7 +1235,7 @@ pub fn parse_float<'tcx>( if neg { f = -f; } - (f.to_bits(), 32) + (f.to_bits(), 4) } ast::FloatTy::F64 => { num.parse::().map_err(|_| ())?; @@ -1243,9 +1245,9 @@ pub fn parse_float<'tcx>( if neg { f = -f; } - (f.to_bits(), 64) + (f.to_bits(), 8) } }; - Ok(ConstValue::Scalar(Scalar::Bits { bits, defined })) + Ok(ConstValue::Scalar(Scalar::Bits { bits, size })) } diff --git a/src/librustc_mir/interpret/cast.rs b/src/librustc_mir/interpret/cast.rs index 7bcf4ef6588..4e705254331 100644 --- a/src/librustc_mir/interpret/cast.rs +++ b/src/librustc_mir/interpret/cast.rs @@ -1,5 +1,5 @@ use rustc::ty::{self, Ty}; -use rustc::ty::layout::{self, LayoutOf}; +use rustc::ty::layout::{self, LayoutOf, TyLayout}; use syntax::ast::{FloatTy, IntTy, UintTy}; use rustc_apfloat::ieee::{Single, Double}; @@ -18,11 +18,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { dest_ty: Ty<'tcx>, dest: Place, ) -> EvalResult<'tcx> { + let src_layout = self.layout_of(src.ty)?; + let dst_layout = self.layout_of(dest_ty)?; use rustc::mir::CastKind::*; match kind { Unsize => { - let src_layout = self.layout_of(src.ty)?; - let dst_layout = self.layout_of(dest_ty)?; self.unsize_into(src.value, src_layout, dest, dst_layout)?; } @@ -57,16 +57,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { let discr_val = def .discriminant_for_variant(*self.tcx, index) .val; - let defined = self - .layout_of(dest_ty) - .unwrap() - .size - .bits() as u8; return self.write_scalar( dest, Scalar::Bits { bits: discr_val, - defined, + size: dst_layout.size.bytes() as u8, }, dest_ty); } @@ -76,9 +71,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { } let src_val = self.value_to_scalar(src)?; - let dest_val = self.cast_scalar(src_val, src.ty, dest_ty)?; + let dest_val = self.cast_scalar(src_val, src_layout, dst_layout)?; let valty = ValTy { - value: Value::Scalar(dest_val), + value: Value::Scalar(dest_val.into()), ty: dest_ty, }; self.write_value(valty, dest)?; @@ -100,7 +95,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { ).ok_or_else(|| EvalErrorKind::TooGeneric.into()); let fn_ptr = self.memory.create_fn_alloc(instance?); let valty = ValTy { - value: Value::Scalar(fn_ptr.into()), + value: Value::Scalar(Scalar::Ptr(fn_ptr.into()).into()), ty: dest_ty, }; self.write_value(valty, dest)?; @@ -136,7 +131,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { ); let fn_ptr = self.memory.create_fn_alloc(instance); let valty = ValTy { - value: Value::Scalar(fn_ptr.into()), + value: Value::Scalar(Scalar::Ptr(fn_ptr.into()).into()), ty: dest_ty, }; self.write_value(valty, dest)?; @@ -151,20 +146,19 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { pub(super) fn cast_scalar( 
&self, val: Scalar, - src_ty: Ty<'tcx>, - dest_ty: Ty<'tcx>, + src_layout: TyLayout<'tcx>, + dest_layout: TyLayout<'tcx>, ) -> EvalResult<'tcx, Scalar> { use rustc::ty::TypeVariants::*; - trace!("Casting {:?}: {:?} to {:?}", val, src_ty, dest_ty); + trace!("Casting {:?}: {:?} to {:?}", val, src_layout.ty, dest_layout.ty); match val { - Scalar::Bits { defined: 0, .. } => Ok(val), - Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_ty), - Scalar::Bits { bits, .. } => { - // TODO(oli-obk): check defined bits here - match src_ty.sty { - TyFloat(fty) => self.cast_from_float(bits, fty, dest_ty), - _ => self.cast_from_int(bits, src_ty, dest_ty), + Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_layout.ty), + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, src_layout.size.bytes()); + match src_layout.ty.sty { + TyFloat(fty) => self.cast_from_float(bits, fty, dest_layout.ty), + _ => self.cast_from_int(bits, src_layout, dest_layout), } } } @@ -173,56 +167,58 @@ pub(super) fn cast_scalar( fn cast_from_int( &self, v: u128, - src_ty: Ty<'tcx>, - dest_ty: Ty<'tcx>, + src_layout: TyLayout<'tcx>, + dest_layout: TyLayout<'tcx>, ) -> EvalResult<'tcx, Scalar> { - let signed = self.layout_of(src_ty)?.abi.is_signed(); + let signed = src_layout.abi.is_signed(); let v = if signed { - self.sign_extend(v, src_ty)? + self.sign_extend(v, src_layout) } else { v }; - trace!("cast_from_int: {}, {}, {}", v, src_ty, dest_ty); + trace!("cast_from_int: {}, {}, {}", v, src_layout.ty, dest_layout.ty); use rustc::ty::TypeVariants::*; - match dest_ty.sty { + match dest_layout.ty.sty { TyInt(_) | TyUint(_) => { - let v = self.truncate(v, dest_ty)?; + let v = self.truncate(v, dest_layout); Ok(Scalar::Bits { bits: v, - defined: self.layout_of(dest_ty).unwrap().size.bits() as u8, + size: dest_layout.size.bytes() as u8, }) } TyFloat(FloatTy::F32) if signed => Ok(Scalar::Bits { bits: Single::from_i128(v as i128).value.to_bits(), - defined: 32, + size: 4, }), TyFloat(FloatTy::F64) if signed => Ok(Scalar::Bits { bits: Double::from_i128(v as i128).value.to_bits(), - defined: 64, + size: 8, }), TyFloat(FloatTy::F32) => Ok(Scalar::Bits { bits: Single::from_u128(v).value.to_bits(), - defined: 32, + size: 4, }), TyFloat(FloatTy::F64) => Ok(Scalar::Bits { bits: Double::from_u128(v).value.to_bits(), - defined: 64, + size: 8, }), - TyChar if v as u8 as u128 == v => Ok(Scalar::Bits { bits: v, defined: 32 }), - TyChar => err!(InvalidChar(v)), + TyChar => { + assert_eq!(v as u8 as u128, v); + Ok(Scalar::Bits { bits: v, size: 4 }) + }, // No alignment check needed for raw pointers. But we have to truncate to target ptr size. TyRawPtr(_) => { Ok(Scalar::Bits { bits: self.memory.truncate_to_ptr(v).0 as u128, - defined: self.memory.pointer_size().bits() as u8, + size: self.memory.pointer_size().bytes() as u8, }) }, // Casts to bool are not permitted by rustc, no need to handle them here. 
- _ => err!(Unimplemented(format!("int to {:?} cast", dest_ty))), + _ => err!(Unimplemented(format!("int to {:?} cast", dest_layout.ty))), } } @@ -236,11 +232,11 @@ fn cast_from_float(&self, bits: u128, fty: FloatTy, dest_ty: Ty<'tcx>) -> EvalRe match fty { FloatTy::F32 => Ok(Scalar::Bits { bits: Single::from_bits(bits).to_u128(width).value, - defined: width as u8, + size: (width / 8) as u8, }), FloatTy::F64 => Ok(Scalar::Bits { bits: Double::from_bits(bits).to_u128(width).value, - defined: width as u8, + size: (width / 8) as u8, }), } }, @@ -250,11 +246,11 @@ fn cast_from_float(&self, bits: u128, fty: FloatTy, dest_ty: Ty<'tcx>) -> EvalRe match fty { FloatTy::F32 => Ok(Scalar::Bits { bits: Single::from_bits(bits).to_i128(width).value as u128, - defined: width as u8, + size: (width / 8) as u8, }), FloatTy::F64 => Ok(Scalar::Bits { bits: Double::from_bits(bits).to_i128(width).value as u128, - defined: width as u8, + size: (width / 8) as u8, }), } }, @@ -262,24 +258,24 @@ fn cast_from_float(&self, bits: u128, fty: FloatTy, dest_ty: Ty<'tcx>) -> EvalRe TyFloat(FloatTy::F32) if fty == FloatTy::F64 => { Ok(Scalar::Bits { bits: Single::to_bits(Double::from_bits(bits).convert(&mut false).value), - defined: 32, + size: 4, }) }, // f32 -> f64 TyFloat(FloatTy::F64) if fty == FloatTy::F32 => { Ok(Scalar::Bits { bits: Double::to_bits(Single::from_bits(bits).convert(&mut false).value), - defined: 64, + size: 8, }) }, // identity cast TyFloat(FloatTy:: F64) => Ok(Scalar::Bits { bits, - defined: 64, + size: 8, }), TyFloat(FloatTy:: F32) => Ok(Scalar::Bits { bits, - defined: 32, + size: 4, }), _ => err!(Unimplemented(format!("float to {:?} cast", dest_ty))), } diff --git a/src/librustc_mir/interpret/const_eval.rs b/src/librustc_mir/interpret/const_eval.rs index 873fef75bb9..9d66a0b396b 100644 --- a/src/librustc_mir/interpret/const_eval.rs +++ b/src/librustc_mir/interpret/const_eval.rs @@ -2,11 +2,12 @@ use std::error::Error; use rustc::hir; -use rustc::mir::interpret::{ConstEvalErr}; +use rustc::mir::interpret::{ConstEvalErr, ScalarMaybeUndef}; use rustc::mir; use rustc::ty::{self, TyCtxt, Ty, Instance}; use rustc::ty::layout::{self, LayoutOf, Primitive, TyLayout}; use rustc::ty::subst::Subst; +use rustc_data_structures::indexed_vec::IndexVec; use syntax::ast::Mutability; use syntax::codemap::Span; @@ -28,13 +29,16 @@ pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>( let param_env = tcx.param_env(instance.def_id()); let mut ecx = EvalContext::new(tcx.at(span), param_env, CompileTimeEvaluator, ()); // insert a stack frame so any queries have the correct substs - ecx.push_stack_frame( + ecx.stack.push(super::eval_context::Frame { + block: mir::START_BLOCK, + locals: IndexVec::new(), instance, span, mir, - Place::undef(), - StackPopCleanup::None, - )?; + return_place: Place::undef(), + return_to_block: StackPopCleanup::None, + stmt: 0, + }); Ok(ecx) } @@ -72,48 +76,30 @@ pub fn eval_promoted<'a, 'mir, 'tcx>( pub fn value_to_const_value<'tcx>( ecx: &EvalContext<'_, '_, 'tcx, CompileTimeEvaluator>, val: Value, - ty: Ty<'tcx>, -) -> &'tcx ty::Const<'tcx> { - let layout = ecx.layout_of(ty).unwrap(); + layout: TyLayout<'tcx>, +) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> { match (val, &layout.abi) { - (Value::Scalar(Scalar::Bits { defined: 0, ..}), _) if layout.is_zst() => {}, + (Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size: 0, ..})), _) if layout.is_zst() => {}, (Value::ByRef(..), _) | (Value::Scalar(_), &layout::Abi::Scalar(_)) | (Value::ScalarPair(..), &layout::Abi::ScalarPair(..)) => {}, _ => 
bug!("bad value/layout combo: {:#?}, {:#?}", val, layout), } - let val = (|| { - match val { - Value::Scalar(val) => Ok(ConstValue::Scalar(val)), - Value::ScalarPair(a, b) => Ok(ConstValue::ScalarPair(a, b)), - Value::ByRef(ptr, align) => { - let ptr = ptr.to_ptr().unwrap(); - let alloc = ecx.memory.get(ptr.alloc_id)?; - assert!(alloc.align.abi() >= align.abi()); - assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= layout.size.bytes()); - let mut alloc = alloc.clone(); - alloc.align = align; - let alloc = ecx.tcx.intern_const_alloc(alloc); - Ok(ConstValue::ByRef(alloc, ptr.offset)) - } - } - })(); - match val { - Ok(val) => ty::Const::from_const_value(ecx.tcx.tcx, val, ty), - Err(err) => { - let (frames, span) = ecx.generate_stacktrace(None); - let err = ConstEvalErr { - span, - error: err, - stacktrace: frames, - }; - err.report_as_error( - ecx.tcx, - "failed to convert Value to ConstValue, this is a bug", - ); - span_bug!(span, "miri error occured when converting Value to ConstValue") + let val = match val { + Value::Scalar(val) => ConstValue::Scalar(val.unwrap_or_err()?), + Value::ScalarPair(a, b) => ConstValue::ScalarPair(a.unwrap_or_err()?, b), + Value::ByRef(ptr, align) => { + let ptr = ptr.to_ptr().unwrap(); + let alloc = ecx.memory.get(ptr.alloc_id)?; + assert!(alloc.align.abi() >= align.abi()); + assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= layout.size.bytes()); + let mut alloc = alloc.clone(); + alloc.align = align; + let alloc = ecx.tcx.intern_const_alloc(alloc); + ConstValue::ByRef(alloc, ptr.offset) } - } + }; + Ok(ty::Const::from_const_value(ecx.tcx.tcx, val, layout.ty)) } fn eval_body_and_ecx<'a, 'mir, 'tcx>( @@ -307,7 +293,7 @@ fn call_intrinsic<'a>( let elem_align = ecx.layout_of(elem_ty)?.align.abi(); let align_val = Scalar::Bits { bits: elem_align as u128, - defined: dest_layout.size.bits() as u8, + size: dest_layout.size.bytes() as u8, }; ecx.write_scalar(dest, align_val, dest_layout.ty)?; } @@ -317,7 +303,7 @@ fn call_intrinsic<'a>( let size = ecx.layout_of(ty)?.size.bytes() as u128; let size_val = Scalar::Bits { bits: size, - defined: dest_layout.size.bits() as u8, + size: dest_layout.size.bytes() as u8, }; ecx.write_scalar(dest, size_val, dest_layout.ty)?; } @@ -327,7 +313,7 @@ fn call_intrinsic<'a>( let type_id = ecx.tcx.type_id_hash(ty) as u128; let id_val = Scalar::Bits { bits: type_id, - defined: dest_layout.size.bits() as u8, + size: dest_layout.size.bytes() as u8, }; ecx.write_scalar(dest, id_val, dest_layout.ty)?; } @@ -437,7 +423,7 @@ pub fn const_val_field<'a, 'tcx>( let place = ecx.allocate_place_for_value(value, layout, variant)?; let (place, layout) = ecx.place_field(place, field, layout)?; let (ptr, align) = place.to_ptr_align(); - let mut new_value = Value::ByRef(ptr, align); + let mut new_value = Value::ByRef(ptr.unwrap_or_err()?, align); new_value = ecx.try_read_by_ref(new_value, layout.ty)?; use rustc_data_structures::indexed_vec::Idx; match (value, new_value) { @@ -451,7 +437,7 @@ pub fn const_val_field<'a, 'tcx>( ), _ => {}, } - Ok(value_to_const_value(&ecx, new_value, layout.ty)) + value_to_const_value(&ecx, new_value, layout) })(); result.map_err(|err| { let (trace, span) = ecx.generate_stacktrace(None); @@ -481,7 +467,7 @@ pub fn const_variant_index<'a, 'tcx>( }, Value::ByRef(ptr, align) => (ptr, align), }; - let place = Place::from_scalar_ptr(ptr, align); + let place = Place::from_scalar_ptr(ptr.into(), align); ecx.read_discriminant_as_variant_index(place, layout) } @@ -552,7 +538,7 @@ pub fn const_eval_provider<'a, 'tcx>( if 
tcx.is_static(def_id).is_none() && cid.promoted.is_none() { val = ecx.try_read_by_ref(val, layout.ty)?; } - Ok(value_to_const_value(&ecx, val, layout.ty)) + value_to_const_value(&ecx, val, layout) }).map_err(|err| { let (trace, span) = ecx.generate_stacktrace(None); let err = ConstEvalErr { @@ -562,6 +548,9 @@ pub fn const_eval_provider<'a, 'tcx>( }; if tcx.is_static(def_id).is_some() { err.report_as_error(ecx.tcx, "could not evaluate static initializer"); + if tcx.sess.err_count() == 0 { + span_bug!(span, "static eval failure didn't emit an error: {:#?}", err); + } } err.into() }) @@ -572,11 +561,11 @@ fn numeric_intrinsic<'tcx>( bits: u128, kind: Primitive, ) -> EvalResult<'tcx, Scalar> { - let defined = match kind { - Primitive::Int(integer, _) => integer.size().bits() as u8, + let size = match kind { + Primitive::Int(integer, _) => integer.size(), _ => bug!("invalid `{}` argument: {:?}", name, bits), }; - let extra = 128 - defined as u128; + let extra = 128 - size.bits() as u128; let bits_out = match name { "ctpop" => bits.count_ones() as u128, "ctlz" => bits.leading_zeros() as u128 - extra, @@ -584,5 +573,5 @@ fn numeric_intrinsic<'tcx>( "bswap" => (bits << extra).swap_bytes(), _ => bug!("not a numeric intrinsic: {}", name), }; - Ok(Scalar::Bits { bits: bits_out, defined }) + Ok(Scalar::Bits { bits: bits_out, size: size.bytes() as u8 }) } diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs index c6c1a1d1ebb..52305be5fac 100644 --- a/src/librustc_mir/interpret/eval_context.rs +++ b/src/librustc_mir/interpret/eval_context.rs @@ -15,6 +15,7 @@ use rustc::mir::interpret::{ GlobalId, Value, Scalar, FrameInfo, AllocType, EvalResult, EvalErrorKind, Pointer, ConstValue, + ScalarMaybeUndef, }; use syntax::codemap::{self, Span}; @@ -105,9 +106,7 @@ pub struct Frame<'mir, 'tcx: 'mir> { /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option`s. /// `None` represents a local that is currently dead, while a live local /// can either directly contain `Scalar` or refer to some part of an `Allocation`. - /// - /// Before being initialized, arguments are `Value::Scalar(Scalar::undef())` and other locals are `None`. - pub locals: IndexVec>, + pub locals: IndexVec, //////////////////////////////////////////////////////////////////////////////// // Current position within the function @@ -120,6 +119,21 @@ pub struct Frame<'mir, 'tcx: 'mir> { pub stmt: usize, } +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum LocalValue { + Dead, + Live(Value), +} + +impl LocalValue { + pub fn access(self) -> EvalResult<'static, Value> { + match self { + LocalValue::Dead => err!(DeadLocal), + LocalValue::Live(val) => Ok(val), + } + } +} + impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {} impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> { @@ -395,8 +409,8 @@ pub fn const_to_value( let id = self.memory.allocate_value(alloc.clone(), MemoryKind::Stack)?; Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align)) }, - ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a, b)), - ConstValue::Scalar(val) => Ok(Value::Scalar(val)), + ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a.into(), b.into())), + ConstValue::Scalar(val) => Ok(Value::Scalar(val.into())), } } @@ -452,7 +466,7 @@ pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> /// Note that the value does not matter if the type is sized. 
For unsized types, /// the value has to be a fat pointer, and we only care about the "extra" data in it. pub fn size_and_align_of_dst( - &mut self, + &self, ty: Ty<'tcx>, value: Value, ) -> EvalResult<'tcx, (Size, Align)> { @@ -480,7 +494,7 @@ pub fn size_and_align_of_dst( // Recurse to get the size of the dynamically sized field (must be // the last field). - let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty; + let field_ty = layout.field(self, layout.fields.count() - 1)?.ty; let (unsized_size, unsized_align) = self.size_and_align_of_dst(field_ty, value)?; @@ -518,7 +532,7 @@ pub fn size_and_align_of_dst( } ty::TySlice(_) | ty::TyStr => { - let (elem_size, align) = layout.field(&self, 0)?.size_and_align(); + let (elem_size, align) = layout.field(self, 0)?.size_and_align(); let (_, len) = self.into_slice(value)?; Ok((elem_size * len, align)) } @@ -538,8 +552,26 @@ pub fn push_stack_frame( ) -> EvalResult<'tcx> { ::log_settings::settings().indentation += 1; - let locals = if mir.local_decls.len() > 1 { - let mut locals = IndexVec::from_elem(Some(Value::Scalar(Scalar::undef())), &mir.local_decls); + // first push a stack frame so we have access to the local substs + self.stack.push(Frame { + mir, + block: mir::START_BLOCK, + return_to_block, + return_place, + // empty local array, we fill it in below, after we are inside the stack frame and + // all methods actually know about the frame + locals: IndexVec::new(), + span, + instance, + stmt: 0, + }); + + // don't allocate at all for trivial constants + if mir.local_decls.len() > 1 { + let mut locals = IndexVec::from_elem(LocalValue::Dead, &mir.local_decls); + for (local, decl) in locals.iter_mut().zip(mir.local_decls.iter()) { + *local = LocalValue::Live(self.init_value(decl.ty)?); + } match self.tcx.describe_def(instance.def_id()) { // statics and constants don't have `Storage*` statements, no need to look for them Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {}, @@ -550,29 +582,15 @@ pub fn push_stack_frame( use rustc::mir::StatementKind::{StorageDead, StorageLive}; match stmt.kind { StorageLive(local) | - StorageDead(local) => locals[local] = None, + StorageDead(local) => locals[local] = LocalValue::Dead, _ => {} } } } }, } - locals - } else { - // don't allocate at all for trivial constants - IndexVec::new() - }; - - self.stack.push(Frame { - mir, - block: mir::START_BLOCK, - return_to_block, - return_place, - locals, - span, - instance, - stmt: 0, - }); + self.frame_mut().locals = locals; + } self.memory.cur_frame = self.cur_frame(); @@ -598,7 +616,7 @@ pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> { if let Place::Ptr { ptr, .. } = frame.return_place { // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions self.memory.mark_static_initialized( - ptr.to_ptr()?.alloc_id, + ptr.unwrap_or_err()?.to_ptr()?.alloc_id, mutable, )? } else { @@ -616,8 +634,9 @@ pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> { Ok(()) } - pub fn deallocate_local(&mut self, local: Option) -> EvalResult<'tcx> { - if let Some(Value::ByRef(ptr, _align)) = local { + pub fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> { + // FIXME: should we tell the user that there was a local which was never written to? 
+ if let LocalValue::Live(Value::ByRef(ptr, _align)) = local { trace!("deallocating local"); let ptr = ptr.to_ptr()?; self.memory.dump_alloc(ptr.alloc_id); @@ -637,6 +656,7 @@ pub(super) fn eval_rvalue_into_place( ) -> EvalResult<'tcx> { let dest = self.eval_place(place)?; let dest_ty = self.place_ty(place); + let dest_layout = self.layout_of(dest_ty)?; use rustc::mir::Rvalue::*; match *rvalue { @@ -675,7 +695,7 @@ pub(super) fn eval_rvalue_into_place( UnaryOp(un_op, ref operand) => { let val = self.eval_operand_to_scalar(operand)?; - let val = self.unary_op(un_op, val, dest_ty)?; + let val = self.unary_op(un_op, val, dest_layout)?; self.write_scalar( dest, val, @@ -724,6 +744,7 @@ pub(super) fn eval_rvalue_into_place( let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align(); if length > 0 { + let dest = dest.unwrap_or_err()?; //write the first value self.write_value_to_ptr(value, dest, dest_align, elem_ty)?; @@ -739,12 +760,12 @@ pub(super) fn eval_rvalue_into_place( let src = self.eval_place(place)?; let ty = self.place_ty(place); let (_, len) = src.elem_ty_and_len(ty, self.tcx.tcx); - let defined = self.memory.pointer_size().bits() as u8; + let size = self.memory.pointer_size().bytes() as u8; self.write_scalar( dest, Scalar::Bits { bits: len as u128, - defined, + size, }, dest_ty, )?; @@ -757,7 +778,7 @@ pub(super) fn eval_rvalue_into_place( let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra(); let val = match extra { - PlaceExtra::None => ptr.to_value(), + PlaceExtra::None => Value::Scalar(ptr), PlaceExtra::Length(len) => ptr.to_value_with_len(len, self.tcx.tcx), PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable), PlaceExtra::DowncastVariant(..) => { @@ -781,12 +802,12 @@ pub(super) fn eval_rvalue_into_place( let layout = self.layout_of(ty)?; assert!(!layout.is_unsized(), "SizeOf nullary MIR operator called for unsized type"); - let defined = self.memory.pointer_size().bits() as u8; + let size = self.memory.pointer_size().bytes() as u8; self.write_scalar( dest, Scalar::Bits { bits: layout.size.bytes() as u128, - defined, + size, }, dest_ty, )?; @@ -803,10 +824,10 @@ pub(super) fn eval_rvalue_into_place( let layout = self.layout_of(ty)?; let place = self.eval_place(place)?; let discr_val = self.read_discriminant_value(place, layout)?; - let defined = self.layout_of(dest_ty).unwrap().size.bits() as u8; + let size = self.layout_of(dest_ty).unwrap().size.bytes() as u8; self.write_scalar(dest, Scalar::Bits { bits: discr_val, - defined, + size, }, dest_ty)?; } } @@ -957,10 +978,8 @@ pub fn read_discriminant_value( assert!(variants_start == variants_end); dataful_variant as u128 }, - Scalar::Bits { bits: raw_discr, defined } => { - if defined < discr.size.bits() as u8 { - return err!(ReadUndefBytes); - } + Scalar::Bits { bits: raw_discr, size } => { + assert_eq!(size as u64, discr.size.bytes()); let discr = raw_discr.wrapping_sub(niche_start) .wrapping_add(variants_start); if variants_start <= discr && discr <= variants_end { @@ -1002,14 +1021,14 @@ pub fn write_discriminant_value( // raw discriminants for enums are isize or bigger during // their computation, but the in-memory tag is the smallest possible // representation - let size = tag.value.size(self.tcx.tcx).bits(); - let shift = 128 - size; + let size = tag.value.size(self.tcx.tcx); + let shift = 128 - size.bits(); let discr_val = (discr_val << shift) >> shift; let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?; self.write_scalar(discr_dest, Scalar::Bits { bits: 
discr_val, - defined: size as u8, + size: size.bytes() as u8, }, tag.ty)?; } layout::Variants::NicheFilling { @@ -1025,7 +1044,7 @@ pub fn write_discriminant_value( .wrapping_add(niche_start); self.write_scalar(niche_dest, Scalar::Bits { bits: niche_value, - defined: niche.size.bits() as u8, + size: niche.size.bytes() as u8, }, niche.ty)?; } } @@ -1063,7 +1082,7 @@ pub fn allocate_place_for_value( }, }; Ok(Place::Ptr { - ptr, + ptr: ptr.into(), align, extra: variant.map_or(PlaceExtra::None, PlaceExtra::DowncastVariant), }) @@ -1072,22 +1091,22 @@ pub fn allocate_place_for_value( pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> { let new_place = match place { Place::Local { frame, local } => { - match self.stack[frame].locals[local] { - None => return err!(DeadLocal), - Some(Value::ByRef(ptr, align)) => { + match self.stack[frame].locals[local].access()? { + Value::ByRef(ptr, align) => { Place::Ptr { - ptr, + ptr: ptr.into(), align, extra: PlaceExtra::None, } } - Some(val) => { + val => { let ty = self.stack[frame].mir.local_decls[local].ty; let ty = self.monomorphize(ty, self.stack[frame].instance.substs); let layout = self.layout_of(ty)?; let ptr = self.alloc_ptr(layout)?; self.stack[frame].locals[local] = - Some(Value::ByRef(ptr.into(), layout.align)); // it stays live + LocalValue::Live(Value::ByRef(ptr.into(), layout.align)); // it stays live + let place = Place::from_ptr(ptr, layout.align); self.write_value(ValTy { value: val, ty }, place)?; place @@ -1120,7 +1139,7 @@ pub fn value_to_scalar( match self.follow_by_ref_value(value, ty)? { Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"), - Value::Scalar(scalar) => Ok(scalar), + Value::Scalar(scalar) => scalar.unwrap_or_err(), Value::ScalarPair(..) => bug!("value_to_scalar can't work with fat pointers"), } @@ -1137,11 +1156,11 @@ pub fn write_ptr(&mut self, dest: Place, val: Scalar, dest_ty: Ty<'tcx>) -> Eval pub fn write_scalar( &mut self, dest: Place, - val: Scalar, + val: impl Into, dest_ty: Ty<'tcx>, ) -> EvalResult<'tcx> { let valty = ValTy { - value: Value::Scalar(val), + value: Value::Scalar(val.into()), ty: dest_ty, }; self.write_value(valty, dest) @@ -1160,15 +1179,15 @@ pub fn write_value( match dest { Place::Ptr { ptr, align, extra } => { assert_eq!(extra, PlaceExtra::None); - self.write_value_to_ptr(src_val, ptr, align, dest_ty) + self.write_value_to_ptr(src_val, ptr.unwrap_or_err()?, align, dest_ty) } Place::Local { frame, local } => { - let dest = self.stack[frame].get_local(local)?; + let old_val = self.stack[frame].locals[local].access()?; self.write_value_possibly_by_val( src_val, |this, val| this.stack[frame].set_local(local, val), - dest, + old_val, dest_ty, ) } @@ -1183,6 +1202,7 @@ fn write_value_possibly_by_val EvalResult<'tcx>>( old_dest_val: Value, dest_ty: Ty<'tcx>, ) -> EvalResult<'tcx> { + // FIXME: this should be a layout check, not underlying value if let Value::ByRef(dest_ptr, align) = old_dest_val { // If the value is already `ByRef` (that is, backed by an `Allocation`), // then we must write the new value into this allocation, because there may be @@ -1239,12 +1259,9 @@ pub fn write_value_to_ptr( layout::Primitive::Int(_, signed) => signed, _ => false, }, - _ => match scalar { - Scalar::Bits { defined: 0, .. 
} => false, - _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout), - } + _ => false, }; - self.memory.write_scalar(dest, dest_align, scalar, layout.size, signed) + self.memory.write_scalar(dest, dest_align, scalar, layout.size, layout.align, signed) } Value::ScalarPair(a_val, b_val) => { trace!("write_value_to_ptr valpair: {:#?}", layout); @@ -1253,12 +1270,13 @@ pub fn write_value_to_ptr( _ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout) }; let (a_size, b_size) = (a.size(&self), b.size(&self)); + let (a_align, b_align) = (a.align(&self), b.align(&self)); let a_ptr = dest; - let b_offset = a_size.abi_align(b.align(&self)); + let b_offset = a_size.abi_align(b_align); let b_ptr = dest.ptr_offset(b_offset, &self)?.into(); // TODO: What about signedess? - self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, false)?; - self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, false) + self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, a_align, false)?; + self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, b_align, false) } } } @@ -1271,38 +1289,9 @@ pub fn read_value(&self, ptr: Scalar, align: Align, ty: Ty<'tcx>) -> EvalResult< } } - pub(crate) fn read_ptr( - &self, - ptr: Pointer, - ptr_align: Align, - pointee_ty: Ty<'tcx>, - ) -> EvalResult<'tcx, Value> { - let ptr_size = self.memory.pointer_size(); - let p: Scalar = self.memory.read_ptr_sized(ptr, ptr_align)?.into(); - if self.type_is_sized(pointee_ty) { - Ok(p.to_value()) - } else { - trace!("reading fat pointer extra of type {}", pointee_ty); - let extra = ptr.offset(ptr_size, self)?; - match self.tcx.struct_tail(pointee_ty).sty { - ty::TyDynamic(..) => Ok(p.to_value_with_vtable( - self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?, - )), - ty::TySlice(..) | ty::TyStr => { - let len = self - .memory - .read_ptr_sized(extra, ptr_align)? 
- .to_bits(ptr_size)?; - Ok(p.to_value_with_len(len as u64, self.tcx.tcx)) - }, - _ => bug!("unsized scalar ptr read from {:?}", pointee_ty), - } - } - } - fn validate_scalar( &self, - value: Scalar, + value: ScalarMaybeUndef, size: Size, scalar: &layout::Scalar, path: &str, @@ -1311,8 +1300,16 @@ fn validate_scalar( trace!("validate scalar: {:#?}, {:#?}, {:#?}, {}", value, size, scalar, ty); let (lo, hi) = scalar.valid_range.clone().into_inner(); - let (bits, defined) = match value { - Scalar::Bits { bits, defined } => (bits, defined), + let value = match value { + ScalarMaybeUndef::Scalar(scalar) => scalar, + ScalarMaybeUndef::Undef => return validation_failure!("undefined bytes", path), + }; + + let bits = match value { + Scalar::Bits { bits, size: value_size } => { + assert_eq!(value_size as u64, size.bytes()); + bits + }, Scalar::Ptr(_) => { let ptr_size = self.memory.pointer_size(); let ptr_max = u128::max_value() >> (128 - ptr_size.bits()); @@ -1346,7 +1343,7 @@ fn validate_scalar( // has no special checks for chars match ty.sty { ty::TyChar => { - assert_eq!(size.bytes(), 4); + debug_assert_eq!(size.bytes(), 4); if ::std::char::from_u32(bits as u32).is_none() { return err!(InvalidChar(bits)); } @@ -1355,32 +1352,26 @@ fn validate_scalar( } use std::ops::RangeInclusive; - let in_range = |bound: RangeInclusive| { - defined as u64 >= size.bits() && bound.contains(&bits) - }; + let in_range = |bound: RangeInclusive| bound.contains(&bits); if lo > hi { if in_range(0..=hi) || in_range(lo..=u128::max_value()) { Ok(()) - } else if defined as u64 >= size.bits() { + } else { validation_failure!( bits, path, format!("something in the range {:?} or {:?}", ..=hi, lo..) ) - } else { - validation_failure!("undefined bytes", path) } } else { if in_range(scalar.valid_range.clone()) { Ok(()) - } else if defined as u64 >= size.bits() { + } else { validation_failure!( bits, path, format!("something in the range {:?}", scalar.valid_range) ) - } else { - validation_failure!("undefined bytes", path) } } } @@ -1410,10 +1401,10 @@ pub fn validate_ptr_target( mir::Field::new(0), layout, )?; - let tag_value = self.value_to_scalar(ValTy { - value: tag_value, - ty: tag_layout.ty, - })?; + let tag_value = match self.follow_by_ref_value(tag_value, tag_layout.ty)? { + Value::Scalar(val) => val, + _ => bug!("tag must be scalar"), + }; let path = format!("{}.TAG", path); self.validate_scalar(tag_value, size, tag, &path, tag_layout.ty)?; let variant_index = self.read_discriminant_as_variant_index( @@ -1440,7 +1431,7 @@ pub fn validate_ptr_target( self.validate_scalar(value, size, scalar, &path, layout.ty)?; if scalar.value == Primitive::Pointer { // ignore integer pointers, we can't reason about the final hardware - if let Scalar::Ptr(ptr) = value { + if let Scalar::Ptr(ptr) = value.unwrap_or_err()? 
{ let alloc_kind = self.tcx.alloc_map.lock().get(ptr.alloc_id); if let Some(AllocType::Static(did)) = alloc_kind { // statics from other crates are already checked @@ -1534,7 +1525,7 @@ pub fn try_read_value(&self, ptr: Scalar, ptr_align: Align, ty: Ty<'tcx>) -> Eva self.memory.check_align(ptr, ptr_align)?; if layout.size.bytes() == 0 { - return Ok(Some(Value::Scalar(Scalar::undef()))); + return Ok(Some(Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 })))); } let ptr = ptr.to_ptr()?; @@ -1670,7 +1661,7 @@ fn unsize_into_ptr( } let (src_f_value, src_field) = match src { Value::ByRef(ptr, align) => { - let src_place = Place::from_scalar_ptr(ptr, align); + let src_place = Place::from_scalar_ptr(ptr.into(), align); let (src_f_place, src_field) = self.place_field(src_place, mir::Field::new(i), src_layout)?; (self.read_place(src_f_place)?, src_field) @@ -1717,7 +1708,7 @@ pub fn dump_local(&self, place: Place) { } write!(msg, ":").unwrap(); - match self.stack[frame].get_local(local) { + match self.stack[frame].locals[local].access() { Err(err) => { if let EvalErrorKind::DeadLocal = err.kind { write!(msg, " is dead").unwrap(); @@ -1736,16 +1727,16 @@ pub fn dump_local(&self, place: Place) { } Ok(Value::Scalar(val)) => { write!(msg, " {:?}", val).unwrap(); - if let Scalar::Ptr(ptr) = val { + if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val { allocs.push(ptr.alloc_id); } } Ok(Value::ScalarPair(val1, val2)) => { write!(msg, " ({:?}, {:?})", val1, val2).unwrap(); - if let Scalar::Ptr(ptr) = val1 { + if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 { allocs.push(ptr.alloc_id); } - if let Scalar::Ptr(ptr) = val2 { + if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 { allocs.push(ptr.alloc_id); } } @@ -1756,7 +1747,7 @@ pub fn dump_local(&self, place: Place) { } Place::Ptr { ptr, align, .. } => { match ptr { - Scalar::Ptr(ptr) => { + ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) => { trace!("by align({}) ref:", align.abi()); self.memory.dump_alloc(ptr.alloc_id); } @@ -1766,21 +1757,6 @@ pub fn dump_local(&self, place: Place) { } } - /// Convenience function to ensure correct usage of locals - pub fn modify_local(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx> - where - F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>, - { - let val = self.stack[frame].get_local(local)?; - let new_val = f(self, val)?; - self.stack[frame].set_local(local, new_val)?; - // FIXME(solson): Run this when setting to Undef? (See previous version of this code.) 
- // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) { - // self.memory.deallocate(ptr)?; - // } - Ok(()) - } - pub fn generate_stacktrace(&self, explicit_span: Option) -> (Vec, Span) { let mut last_span = None; let mut frames = Vec::new(); @@ -1819,12 +1795,12 @@ pub fn generate_stacktrace(&self, explicit_span: Option) -> (Vec) -> EvalResult<'tcx, u128> { - super::sign_extend(self.tcx.tcx, value, ty) + pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 { + super::sign_extend(value, ty) } - pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> { - super::truncate(self.tcx.tcx, value, ty) + pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 { + super::truncate(value, ty) } fn write_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result { @@ -1893,34 +1869,45 @@ fn write_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usiz } } } -} -impl<'mir, 'tcx> Frame<'mir, 'tcx> { - pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> { - self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into()) + pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> { + trace!("{:?} is now live", local); + + let ty = self.frame().mir.local_decls[local].ty; + let init = self.init_value(ty)?; + // StorageLive *always* kills the value that's currently stored + Ok(mem::replace(&mut self.frame_mut().locals[local], LocalValue::Live(init))) } + fn init_value(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> { + let ty = self.monomorphize(ty, self.substs()); + let layout = self.layout_of(ty)?; + Ok(match layout.abi { + layout::Abi::Scalar(..) => Value::Scalar(ScalarMaybeUndef::Undef), + layout::Abi::ScalarPair(..) 
=> Value::ScalarPair( + ScalarMaybeUndef::Undef, + ScalarMaybeUndef::Undef, + ), + _ => Value::ByRef(self.alloc_ptr(layout)?.into(), layout.align), + }) + } +} + +impl<'mir, 'tcx> Frame<'mir, 'tcx> { fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> { match self.locals[local] { - None => err!(DeadLocal), - Some(ref mut local) => { + LocalValue::Dead => err!(DeadLocal), + LocalValue::Live(ref mut local) => { *local = value; Ok(()) } } } - pub fn storage_live(&mut self, local: mir::Local) -> Option { - trace!("{:?} is now live", local); - - // StorageLive *always* kills the value that's currently stored - mem::replace(&mut self.locals[local], Some(Value::Scalar(Scalar::undef()))) - } - /// Returns the old value of the local - pub fn storage_dead(&mut self, local: mir::Local) -> Option { + pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue { trace!("{:?} is now dead", local); - self.locals[local].take() + mem::replace(&mut self.locals[local], LocalValue::Dead) } } diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs index 6e5cfe3bb3e..b62add8a559 100644 --- a/src/librustc_mir/interpret/memory.rs +++ b/src/librustc_mir/interpret/memory.rs @@ -7,7 +7,7 @@ use rustc::ty::ParamEnv; use rustc::ty::query::TyCtxtAt; use rustc::ty::layout::{self, Align, TargetDataLayout, Size}; -use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value, +use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value, ScalarMaybeUndef, EvalResult, Scalar, EvalErrorKind, GlobalId, AllocType}; pub use rustc::mir::interpret::{write_target_uint, write_target_int, read_target_uint}; use rustc_data_structures::fx::{FxHashSet, FxHashMap, FxHasher}; @@ -272,10 +272,8 @@ pub fn check_align(&self, ptr: Scalar, required_align: Align) -> EvalResult<'tcx let alloc = self.get(ptr.alloc_id)?; (ptr.offset.bytes(), alloc.align) } - Scalar::Bits { bits, defined } => { - if (defined as u64) < self.pointer_size().bits() { - return err!(ReadUndefBytes); - } + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, self.pointer_size().bytes()); // FIXME: what on earth does this line do? docs or fix needed! let v = ((bits as u128) % (1 << self.pointer_size().bytes())) as u64; if v == 0 { @@ -756,7 +754,7 @@ pub fn write_repeat(&mut self, ptr: Scalar, val: u8, count: Size) -> EvalResult< Ok(()) } - pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalResult<'tcx, Scalar> { + pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalResult<'tcx, ScalarMaybeUndef> { self.check_relocation_edges(ptr, size)?; // Make sure we don't read part of a pointer as a pointer let endianness = self.endianness(); let bytes = self.get_bytes_unchecked(ptr, size, ptr_align.min(self.int_align(size)))?; @@ -764,7 +762,7 @@ pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalRes // We must not return Ok() for unaligned pointers! 
if self.check_defined(ptr, size).is_err() { // this inflates undefined bytes to the entire scalar, even if only a few bytes are undefined - return Ok(Scalar::undef().into()); + return Ok(ScalarMaybeUndef::Undef); } // Now we do the actual reading let bits = read_target_uint(endianness, bytes).unwrap(); @@ -776,44 +774,60 @@ pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalRes } else { let alloc = self.get(ptr.alloc_id)?; match alloc.relocations.get(&ptr.offset) { - Some(&alloc_id) => return Ok(Pointer::new(alloc_id, Size::from_bytes(bits as u64)).into()), + Some(&alloc_id) => return Ok(ScalarMaybeUndef::Scalar(Pointer::new(alloc_id, Size::from_bytes(bits as u64)).into())), None => {}, } } // We don't. Just return the bits. - Ok(Scalar::Bits { + Ok(ScalarMaybeUndef::Scalar(Scalar::Bits { bits, - defined: size.bits() as u8, - }) + size: size.bytes() as u8, + })) } - pub fn read_ptr_sized(&self, ptr: Pointer, ptr_align: Align) -> EvalResult<'tcx, Scalar> { + pub fn read_ptr_sized(&self, ptr: Pointer, ptr_align: Align) -> EvalResult<'tcx, ScalarMaybeUndef> { self.read_scalar(ptr, ptr_align, self.pointer_size()) } - pub fn write_scalar(&mut self, ptr: Scalar, ptr_align: Align, val: Scalar, size: Size, signed: bool) -> EvalResult<'tcx> { + pub fn write_scalar( + &mut self, + ptr: Scalar, + ptr_align: Align, + val: ScalarMaybeUndef, + type_size: Size, + type_align: Align, + signed: bool, + ) -> EvalResult<'tcx> { let endianness = self.endianness(); + self.check_align(ptr, ptr_align)?; + + let val = match val { + ScalarMaybeUndef::Scalar(scalar) => scalar, + ScalarMaybeUndef::Undef => return self.mark_definedness(ptr, type_size, false), + }; let bytes = match val { Scalar::Ptr(val) => { - assert_eq!(size, self.pointer_size()); + assert_eq!(type_size, self.pointer_size()); val.offset.bytes() as u128 } - Scalar::Bits { bits, defined } if defined as u64 >= size.bits() && size.bits() != 0 => bits, - - Scalar::Bits { .. } => { - self.check_align(ptr.into(), ptr_align)?; - self.mark_definedness(ptr, size, false)?; + Scalar::Bits { size: 0, .. } => { + // nothing to do for ZSTs + assert_eq!(type_size.bytes(), 0); return Ok(()); } + + Scalar::Bits { bits, size } => { + assert_eq!(size as u64, type_size.bytes()); + bits + }, }; let ptr = ptr.to_ptr()?; { - let align = self.int_align(size); - let dst = self.get_bytes_mut(ptr, size, ptr_align.min(align))?; + let dst = self.get_bytes_mut(ptr, type_size, ptr_align.min(type_align))?; if signed { write_target_int(endianness, dst, bytes as i128).unwrap(); } else { @@ -835,9 +849,9 @@ pub fn write_scalar(&mut self, ptr: Scalar, ptr_align: Align, val: Scalar, size: Ok(()) } - pub fn write_ptr_sized_unsigned(&mut self, ptr: Pointer, ptr_align: Align, val: Scalar) -> EvalResult<'tcx> { + pub fn write_ptr_sized_unsigned(&mut self, ptr: Pointer, ptr_align: Align, val: ScalarMaybeUndef) -> EvalResult<'tcx> { let ptr_size = self.pointer_size(); - self.write_scalar(ptr.into(), ptr_align, val, ptr_size, false) + self.write_scalar(ptr.into(), ptr_align, val, ptr_size, ptr_align, false) } fn int_align(&self, size: Size) -> Align { @@ -984,7 +998,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> { fn into_ptr( &self, value: Value, - ) -> EvalResult<'tcx, Scalar> { + ) -> EvalResult<'tcx, ScalarMaybeUndef> { Ok(match value { Value::ByRef(ptr, align) => { self.memory().read_ptr_sized(ptr.to_ptr()?, align)? 
@@ -997,7 +1011,7 @@ fn into_ptr(
     fn into_ptr_vtable_pair(
         &self,
         value: Value,
-    ) -> EvalResult<'tcx, (Scalar, Pointer)> {
+    ) -> EvalResult<'tcx, (ScalarMaybeUndef, Pointer)> {
         match value {
             Value::ByRef(ref_ptr, align) => {
                 let mem = self.memory();
@@ -1005,11 +1019,11 @@ fn into_ptr_vtable_pair(
                 let vtable = mem.read_ptr_sized(
                     ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?,
                     align
-                )?.to_ptr()?;
+                )?.unwrap_or_err()?.to_ptr()?;
                 Ok((ptr, vtable))
             }
-            Value::ScalarPair(ptr, vtable) => Ok((ptr.into(), vtable.to_ptr()?)),
+            Value::ScalarPair(ptr, vtable) => Ok((ptr, vtable.unwrap_or_err()?.to_ptr()?)),
             _ => bug!("expected ptr and vtable, got {:?}", value),
         }
     }
@@ -1017,7 +1031,7 @@ fn into_ptr_vtable_pair(
     fn into_slice(
         &self,
         value: Value,
-    ) -> EvalResult<'tcx, (Scalar, u64)> {
+    ) -> EvalResult<'tcx, (ScalarMaybeUndef, u64)> {
         match value {
             Value::ByRef(ref_ptr, align) => {
                 let mem = self.memory();
@@ -1025,12 +1039,12 @@ fn into_slice(
                 let len = mem.read_ptr_sized(
                     ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?,
                     align
-                )?.to_bits(mem.pointer_size())? as u64;
+                )?.unwrap_or_err()?.to_bits(mem.pointer_size())? as u64;
                 Ok((ptr, len))
             }
             Value::ScalarPair(ptr, val) => {
-                let len = val.to_bits(self.memory().pointer_size())?;
-                Ok((ptr.into(), len as u64))
+                let len = val.unwrap_or_err()?.to_bits(self.memory().pointer_size())?;
+                Ok((ptr, len as u64))
             }
             Value::Scalar(_) => bug!("expected ptr and length, got {:?}", value),
         }
diff --git a/src/librustc_mir/interpret/mod.rs b/src/librustc_mir/interpret/mod.rs
index 0c921f66198..bc77f6e29d2 100644
--- a/src/librustc_mir/interpret/mod.rs
+++ b/src/librustc_mir/interpret/mod.rs
@@ -11,8 +11,10 @@ mod terminator;
 mod traits;
-pub use self::eval_context::{EvalContext, Frame, StackPopCleanup,
-    TyAndPacked, ValTy};
+pub use self::eval_context::{
+    EvalContext, Frame, StackPopCleanup,
+    TyAndPacked, ValTy,
+};
 pub use self::place::{Place, PlaceExtra};
@@ -34,26 +36,21 @@ pub use self::memory::{write_target_uint, write_target_int, read_target_uint};
-use rustc::mir::interpret::{EvalResult, EvalErrorKind};
-use rustc::ty::{Ty, TyCtxt, ParamEnv};
+use rustc::ty::layout::TyLayout;
-pub fn sign_extend<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
-    let param_env = ParamEnv::empty();
-    let layout = tcx.layout_of(param_env.and(ty)).map_err(|layout| EvalErrorKind::Layout(layout))?;
+pub fn sign_extend(value: u128, layout: TyLayout<'_>) -> u128 {
     let size = layout.size.bits();
     assert!(layout.abi.is_signed());
     // sign extend
     let shift = 128 - size;
     // shift the unsigned value to the left
     // and back to the right as signed (essentially fills with FF on the left)
-    Ok((((value << shift) as i128) >> shift) as u128)
+    (((value << shift) as i128) >> shift) as u128
 }
-pub fn truncate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
-    let param_env = ParamEnv::empty();
-    let layout = tcx.layout_of(param_env.and(ty)).map_err(|layout| EvalErrorKind::Layout(layout))?;
+pub fn truncate(value: u128, layout: TyLayout<'_>) -> u128 {
     let size = layout.size.bits();
     let shift = 128 - size;
     // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
-    Ok((value << shift) >> shift)
+    (value << shift) >> shift
 }
diff --git a/src/librustc_mir/interpret/operator.rs b/src/librustc_mir/interpret/operator.rs
index 8320add7157..732c85bd014 100644
--- a/src/librustc_mir/interpret/operator.rs
+++ 
b/src/librustc_mir/interpret/operator.rs @@ -1,7 +1,7 @@ use rustc::mir; use rustc::ty::{self, Ty, layout}; use syntax::ast::FloatTy; -use rustc::ty::layout::LayoutOf; +use rustc::ty::layout::{LayoutOf, TyLayout}; use rustc_apfloat::ieee::{Double, Single}; use rustc_apfloat::Float; @@ -11,7 +11,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { fn binop_with_overflow( - &mut self, + &self, op: mir::BinOp, left: ValTy<'tcx>, right: ValTy<'tcx>, @@ -32,7 +32,7 @@ pub fn intrinsic_with_overflow( dest_ty: Ty<'tcx>, ) -> EvalResult<'tcx> { let (val, overflowed) = self.binop_with_overflow(op, left, right)?; - let val = Value::ScalarPair(val, Scalar::from_bool(overflowed)); + let val = Value::ScalarPair(val.into(), Scalar::from_bool(overflowed).into()); let valty = ValTy { value: val, ty: dest_ty, @@ -97,13 +97,13 @@ pub fn binary_op( let signed = left_layout.abi.is_signed(); let mut oflo = (r as u32 as u128) != r; let mut r = r as u32; - let size = left_layout.size.bits() as u32; - oflo |= r >= size; + let size = left_layout.size; + oflo |= r >= size.bits() as u32; if oflo { - r %= size; + r %= size.bits() as u32; } let result = if signed { - let l = self.sign_extend(l, left_ty)? as i128; + let l = self.sign_extend(l, left_layout) as i128; let result = match bin_op { Shl => l << r, Shr => l >> r, @@ -117,10 +117,10 @@ pub fn binary_op( _ => bug!("it has already been checked that this is a shift op"), } }; - let truncated = self.truncate(result, left_ty)?; + let truncated = self.truncate(result, left_layout); return Ok((Scalar::Bits { bits: truncated, - defined: size as u8, + size: size.bytes() as u8, }, oflo)); } @@ -145,8 +145,8 @@ pub fn binary_op( _ => None, }; if let Some(op) = op { - let l = self.sign_extend(l, left_ty)? as i128; - let r = self.sign_extend(r, right_ty)? as i128; + let l = self.sign_extend(l, left_layout) as i128; + let r = self.sign_extend(r, right_layout) as i128; return Ok((Scalar::from_bool(op(&l, &r)), false)); } let op: Option (i128, bool)> = match bin_op { @@ -160,14 +160,14 @@ pub fn binary_op( _ => None, }; if let Some(op) = op { - let l128 = self.sign_extend(l, left_ty)? as i128; - let r = self.sign_extend(r, right_ty)? as i128; - let size = left_layout.size.bits(); + let l128 = self.sign_extend(l, left_layout) as i128; + let r = self.sign_extend(r, right_layout) as i128; + let size = left_layout.size; match bin_op { Rem | Div => { // int_min / -1 - if r == -1 && l == (1 << (size - 1)) { - return Ok((Scalar::Bits { bits: l, defined: size as u8 }, true)); + if r == -1 && l == (1 << (size.bits() - 1)) { + return Ok((Scalar::Bits { bits: l, size: size.bytes() as u8 }, true)); } }, _ => {}, @@ -175,27 +175,27 @@ pub fn binary_op( trace!("{}, {}, {}", l, l128, r); let (result, mut oflo) = op(l128, r); trace!("{}, {}", result, oflo); - if !oflo && size != 128 { - let max = 1 << (size - 1); + if !oflo && size.bits() != 128 { + let max = 1 << (size.bits() - 1); oflo = result >= max || result < -max; } let result = result as u128; - let truncated = self.truncate(result, left_ty)?; + let truncated = self.truncate(result, left_layout); return Ok((Scalar::Bits { bits: truncated, - defined: size as u8, + size: size.bytes() as u8, }, oflo)); } } if let ty::TyFloat(fty) = left_ty.sty { macro_rules! 
float_math { - ($ty:path, $bitsize:expr) => {{ + ($ty:path, $size:expr) => {{ let l = <$ty>::from_bits(l); let r = <$ty>::from_bits(r); let bitify = |res: ::rustc_apfloat::StatusAnd<$ty>| Scalar::Bits { bits: res.value.to_bits(), - defined: $bitsize, + size: $size, }; let val = match bin_op { Eq => Scalar::from_bool(l == r), @@ -215,12 +215,12 @@ macro_rules! float_math { }}; } match fty { - FloatTy::F32 => float_math!(Single, 32), - FloatTy::F64 => float_math!(Double, 64), + FloatTy::F32 => float_math!(Single, 4), + FloatTy::F64 => float_math!(Double, 8), } } - let bit_width = self.layout_of(left_ty).unwrap().size.bits() as u8; + let size = self.layout_of(left_ty).unwrap().size.bytes() as u8; // only ints left let val = match bin_op { @@ -232,9 +232,9 @@ macro_rules! float_math { Gt => Scalar::from_bool(l > r), Ge => Scalar::from_bool(l >= r), - BitOr => Scalar::Bits { bits: l | r, defined: bit_width }, - BitAnd => Scalar::Bits { bits: l & r, defined: bit_width }, - BitXor => Scalar::Bits { bits: l ^ r, defined: bit_width }, + BitOr => Scalar::Bits { bits: l | r, size }, + BitAnd => Scalar::Bits { bits: l & r, size }, + BitXor => Scalar::Bits { bits: l ^ r, size }, Add | Sub | Mul | Rem | Div => { let op: fn(u128, u128) -> (u128, bool) = match bin_op { @@ -248,10 +248,10 @@ macro_rules! float_math { _ => bug!(), }; let (result, oflo) = op(l, r); - let truncated = self.truncate(result, left_ty)?; + let truncated = self.truncate(result, left_layout); return Ok((Scalar::Bits { bits: truncated, - defined: bit_width, + size, }, oflo || truncated != result)); } @@ -275,17 +275,16 @@ pub fn unary_op( &self, un_op: mir::UnOp, val: Scalar, - ty: Ty<'tcx>, + layout: TyLayout<'tcx>, ) -> EvalResult<'tcx, Scalar> { use rustc::mir::UnOp::*; use rustc_apfloat::ieee::{Single, Double}; use rustc_apfloat::Float; - let size = self.layout_of(ty)?.size; + let size = layout.size; let bytes = val.to_bits(size)?; - let size = size.bits(); - let result_bytes = match (un_op, &ty.sty) { + let result_bytes = match (un_op, &layout.ty.sty) { (Not, ty::TyBool) => !val.to_bool()? as u128, @@ -294,13 +293,13 @@ pub fn unary_op( (Neg, ty::TyFloat(FloatTy::F32)) => Single::to_bits(-Single::from_bits(bytes)), (Neg, ty::TyFloat(FloatTy::F64)) => Double::to_bits(-Double::from_bits(bytes)), - (Neg, _) if bytes == (1 << (size - 1)) => return err!(OverflowNeg), + (Neg, _) if bytes == (1 << (size.bits() - 1)) => return err!(OverflowNeg), (Neg, _) => (-(bytes as i128)) as u128, }; Ok(Scalar::Bits { - bits: self.truncate(result_bytes, ty)?, - defined: size as u8, + bits: self.truncate(result_bytes, layout), + size: size.bytes() as u8, }) } } diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index 59bf2ae6c0f..91c25192306 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -3,7 +3,7 @@ use rustc::ty::layout::{self, Align, LayoutOf, TyLayout}; use rustc_data_structures::indexed_vec::Idx; -use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer}; +use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer, ScalarMaybeUndef}; use super::{EvalContext, Machine, ValTy}; use interpret::memory::HasMemory; @@ -14,7 +14,7 @@ pub enum Place { /// A place may have an invalid (integral or undef) pointer, /// since it might be turned back into a reference /// before ever being dereferenced. 
- ptr: Scalar, + ptr: ScalarMaybeUndef, align: Align, extra: PlaceExtra, }, @@ -35,10 +35,10 @@ pub enum PlaceExtra { impl<'tcx> Place { /// Produces a Place that will error if attempted to be read from pub fn undef() -> Self { - Self::from_scalar_ptr(Scalar::undef().into(), Align::from_bytes(1, 1).unwrap()) + Self::from_scalar_ptr(ScalarMaybeUndef::Undef, Align::from_bytes(1, 1).unwrap()) } - pub fn from_scalar_ptr(ptr: Scalar, align: Align) -> Self { + pub fn from_scalar_ptr(ptr: ScalarMaybeUndef, align: Align) -> Self { Place::Ptr { ptr, align, @@ -47,10 +47,10 @@ pub fn from_scalar_ptr(ptr: Scalar, align: Align) -> Self { } pub fn from_ptr(ptr: Pointer, align: Align) -> Self { - Self::from_scalar_ptr(ptr.into(), align) + Self::from_scalar_ptr(ScalarMaybeUndef::Scalar(ptr.into()), align) } - pub fn to_ptr_align_extra(self) -> (Scalar, Align, PlaceExtra) { + pub fn to_ptr_align_extra(self) -> (ScalarMaybeUndef, Align, PlaceExtra) { match self { Place::Ptr { ptr, align, extra } => (ptr, align, extra), _ => bug!("to_ptr_and_extra: expected Place::Ptr, got {:?}", self), @@ -58,7 +58,7 @@ pub fn to_ptr_align_extra(self) -> (Scalar, Align, PlaceExtra) { } } - pub fn to_ptr_align(self) -> (Scalar, Align) { + pub fn to_ptr_align(self) -> (ScalarMaybeUndef, Align) { let (ptr, align, _extra) = self.to_ptr_align_extra(); (ptr, align) } @@ -66,7 +66,7 @@ pub fn to_ptr_align(self) -> (Scalar, Align) { pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> { // At this point, we forget about the alignment information -- the place has been turned into a reference, // and no matter where it came from, it now must be aligned. - self.to_ptr_align().0.to_ptr() + self.to_ptr_align().0.unwrap_or_err()?.to_ptr() } pub(super) fn elem_ty_and_len( @@ -106,7 +106,7 @@ pub fn try_read_place( // Might allow this in the future, right now there's no way to do this from Rust code anyway Local(mir::RETURN_PLACE) => err!(ReadFromReturnPointer), // Directly reading a local will always succeed - Local(local) => self.frame().get_local(local).map(Some), + Local(local) => self.frame().locals[local].access().map(Some), // No fast path for statics. Reading from statics is rare and would require another // Machine function to handle differently in miri. 
Promoted(_) | @@ -129,7 +129,7 @@ pub fn read_field( let field = base_layout.field(self, field_index)?; if field.size.bytes() == 0 { return Ok(( - Value::Scalar(Scalar::undef()), + Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 })), field, )); } @@ -197,9 +197,9 @@ pub fn read_place(&self, place: Place) -> EvalResult<'tcx, Value> { match place { Place::Ptr { ptr, align, extra } => { assert_eq!(extra, PlaceExtra::None); - Ok(Value::ByRef(ptr, align)) + Ok(Value::ByRef(ptr.unwrap_or_err()?, align)) } - Place::Local { frame, local } => self.stack[frame].get_local(local), + Place::Local { frame, local } => self.stack[frame].locals[local].access(), } } @@ -220,7 +220,7 @@ pub fn eval_place(&mut self, mir_place: &mir::Place<'tcx>) -> EvalResult<'tcx, P })?; if let Value::ByRef(ptr, align) = val { Place::Ptr { - ptr, + ptr: ptr.into(), align, extra: PlaceExtra::None, } @@ -238,7 +238,7 @@ pub fn eval_place(&mut self, mir_place: &mir::Place<'tcx>) -> EvalResult<'tcx, P }; let alloc = Machine::init_static(self, cid)?; Place::Ptr { - ptr: Scalar::Ptr(alloc.into()), + ptr: ScalarMaybeUndef::Scalar(Scalar::Ptr(alloc.into())), align: layout.align, extra: PlaceExtra::None, } @@ -276,14 +276,13 @@ pub fn place_field( let (base_ptr, base_align, base_extra) = match base { Place::Ptr { ptr, align, extra } => (ptr, align, extra), Place::Local { frame, local } => { - match (&self.stack[frame].get_local(local)?, &base_layout.abi) { + match (self.stack[frame].locals[local].access()?, &base_layout.abi) { // in case the field covers the entire type, just return the value - (&Value::Scalar(_), &layout::Abi::Scalar(_)) | - (&Value::ScalarPair(..), &layout::Abi::ScalarPair(..)) - if offset.bytes() == 0 && field.size == base_layout.size => - { - return Ok((base, field)); - } + (Value::Scalar(_), &layout::Abi::Scalar(_)) | + (Value::ScalarPair(..), &layout::Abi::ScalarPair(..)) + if offset.bytes() == 0 && field.size == base_layout.size => { + return Ok((base, field)) + }, _ => self.force_allocation(base)?.to_ptr_align_extra(), } } @@ -413,7 +412,7 @@ pub fn eval_place_projection( } Index(local) => { - let value = self.frame().get_local(local)?; + let value = self.frame().locals[local].access()?; let ty = self.tcx.types.usize; let n = self .value_to_scalar(ValTy { value, ty })? 
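
For reference, the pure `sign_extend`/`truncate` helpers added to src/librustc_mir/interpret/mod.rs (and used by the operator.rs hunks) compute the usual shift trick; the standalone sketch below illustrates it. Assumptions for the example only: it takes a plain bit width instead of the `TyLayout<'_>` the real functions receive, and it omits the `assert!(layout.abi.is_signed())` check.

    // Standalone illustration of the bit-width sign extension / truncation
    // performed by the new helpers. Not the rustc definitions: the width is
    // passed directly as a number of bits for simplicity.
    fn sign_extend(value: u128, size_bits: u64) -> u128 {
        let shift = 128 - size_bits;
        // shift the value to the top of the u128, then shift back down as
        // i128 so the sign bit is copied into the upper bits
        (((value << shift) as i128) >> shift) as u128
    }

    fn truncate(value: u128, size_bits: u64) -> u128 {
        let shift = 128 - size_bits;
        // drop any bits above `size_bits`, then shift back, filling with zeroes
        (value << shift) >> shift
    }

    fn main() {
        // 0xFF read as an 8-bit signed value is -1, i.e. all ones once extended
        assert_eq!(sign_extend(0xFF, 8), u128::max_value());
        // truncating 0x1FF to 8 bits keeps only the low byte
        assert_eq!(truncate(0x1FF, 8), 0xFF);
    }

Because the caller now supplies the layout directly, the helpers no longer need a `TyCtxt`, cannot fail, and therefore drop the `EvalResult` return type.
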
diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs index db90714d0e6..57b56db14bb 100644 --- a/src/librustc_mir/interpret/step.rs +++ b/src/librustc_mir/interpret/step.rs @@ -90,7 +90,7 @@ fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> EvalResult<'tcx> { // Mark locals as alive StorageLive(local) => { - let old_val = self.frame_mut().storage_live(local); + let old_val = self.storage_live(local)?; self.deallocate_local(old_val)?; } diff --git a/src/librustc_mir/interpret/terminator/drop.rs b/src/librustc_mir/interpret/terminator/drop.rs index d750c1f47a6..fe8071897c3 100644 --- a/src/librustc_mir/interpret/terminator/drop.rs +++ b/src/librustc_mir/interpret/terminator/drop.rs @@ -2,7 +2,7 @@ use rustc::ty::{self, Ty}; use syntax::codemap::Span; -use rustc::mir::interpret::{EvalResult, Scalar, Value}; +use rustc::mir::interpret::{EvalResult, Value}; use interpret::{Machine, ValTy, EvalContext, Place, PlaceExtra}; impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { @@ -33,7 +33,7 @@ pub(crate) fn drop_place( ptr, align: _, extra: PlaceExtra::None, - } => ptr.to_value(), + } => Value::Scalar(ptr), _ => bug!("force_allocation broken"), }; self.drop(val, instance, ty, span, target) @@ -51,17 +51,10 @@ fn drop( let instance = match ty.sty { ty::TyDynamic(..) => { - let vtable = match arg { - Value::ScalarPair(_, Scalar::Ptr(vtable)) => vtable, - _ => bug!("expected fat ptr, got {:?}", arg), - }; - match self.read_drop_type_from_vtable(vtable)? { - Some(func) => func, - // no drop fn -> bail out - None => { - self.goto_block(target); - return Ok(()) - }, + if let Value::ScalarPair(_, vtable) = arg { + self.read_drop_type_from_vtable(vtable.unwrap_or_err()?.to_ptr()?)? + } else { + bug!("expected fat ptr, got {:?}", arg); } } _ => instance, diff --git a/src/librustc_mir/interpret/terminator/mod.rs b/src/librustc_mir/interpret/terminator/mod.rs index 56dd3f603b6..682e384da39 100644 --- a/src/librustc_mir/interpret/terminator/mod.rs +++ b/src/librustc_mir/interpret/terminator/mod.rs @@ -4,7 +4,7 @@ use syntax::codemap::Span; use rustc_target::spec::abi::Abi; -use rustc::mir::interpret::{EvalResult, Scalar}; +use rustc::mir::interpret::{EvalResult, Scalar, Value}; use super::{EvalContext, Place, Machine, ValTy}; use rustc_data_structures::indexed_vec::Idx; @@ -47,7 +47,7 @@ pub(super) fn eval_terminator( for (index, &const_int) in values.iter().enumerate() { // Compare using binary_op - let const_int = Scalar::Bits { bits: const_int, defined: 128 }; + let const_int = Scalar::Bits { bits: const_int, size: discr_layout.size.bytes() as u8 }; let res = self.binary_op(mir::BinOp::Eq, discr_prim, discr_val.ty, const_int, discr_val.ty @@ -392,12 +392,12 @@ fn eval_fn_call( let fn_ptr = self.memory.read_ptr_sized( vtable.offset(ptr_size * (idx as u64 + 3), &self)?, ptr_align - )?.to_ptr()?; + )?.unwrap_or_err()?.to_ptr()?; let instance = self.memory.get_fn(fn_ptr)?; let mut args = args.to_vec(); let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty; args[0].ty = ty; - args[0].value = ptr.to_value(); + args[0].value = Value::Scalar(ptr); // recurse with concrete function self.eval_fn_call(instance, destination, &args, span, sig) } diff --git a/src/librustc_mir/interpret/traits.rs b/src/librustc_mir/interpret/traits.rs index b6c7feda19f..84583680988 100644 --- a/src/librustc_mir/interpret/traits.rs +++ b/src/librustc_mir/interpret/traits.rs @@ -1,6 +1,6 @@ use rustc::ty::{self, Ty}; use rustc::ty::layout::{Size, Align, LayoutOf}; -use 
rustc::mir::interpret::{Scalar, Value, Pointer, EvalResult}; +use rustc::mir::interpret::{Scalar, Pointer, EvalResult}; use syntax::ast::Mutability; @@ -36,25 +36,25 @@ pub fn get_vtable( let drop = ::monomorphize::resolve_drop_in_place(*self.tcx, ty); let drop = self.memory.create_fn_alloc(drop); - self.memory.write_ptr_sized_unsigned(vtable, ptr_align, drop.into())?; + self.memory.write_ptr_sized_unsigned(vtable, ptr_align, Scalar::Ptr(drop).into())?; let size_ptr = vtable.offset(ptr_size, &self)?; self.memory.write_ptr_sized_unsigned(size_ptr, ptr_align, Scalar::Bits { bits: size as u128, - defined: ptr_size.bits() as u8, - })?; + size: ptr_size.bytes() as u8, + }.into())?; let align_ptr = vtable.offset(ptr_size * 2, &self)?; self.memory.write_ptr_sized_unsigned(align_ptr, ptr_align, Scalar::Bits { bits: align as u128, - defined: ptr_size.bits() as u8, - })?; + size: ptr_size.bytes() as u8, + }.into())?; for (i, method) in methods.iter().enumerate() { if let Some((def_id, substs)) = *method { let instance = self.resolve(def_id, substs)?; let fn_ptr = self.memory.create_fn_alloc(instance); let method_ptr = vtable.offset(ptr_size * (3 + i as u64), &self)?; - self.memory.write_ptr_sized_unsigned(method_ptr, ptr_align, fn_ptr.into())?; + self.memory.write_ptr_sized_unsigned(method_ptr, ptr_align, Scalar::Ptr(fn_ptr).into())?; } } @@ -69,16 +69,11 @@ pub fn get_vtable( pub fn read_drop_type_from_vtable( &self, vtable: Pointer, - ) -> EvalResult<'tcx, Option>> { + ) -> EvalResult<'tcx, ty::Instance<'tcx>> { // we don't care about the pointee type, we just want a pointer let pointer_align = self.tcx.data_layout.pointer_align; - let pointer_size = self.tcx.data_layout.pointer_size.bits() as u8; - match self.read_ptr(vtable, pointer_align, self.tcx.mk_nil_ptr())? { - // some values don't need to call a drop impl, so the value is null - Value::Scalar(Scalar::Bits { bits: 0, defined} ) if defined == pointer_size => Ok(None), - Value::Scalar(Scalar::Ptr(drop_fn)) => self.memory.get_fn(drop_fn).map(Some), - _ => err!(ReadBytesAsPointer), - } + let drop_fn = self.memory.read_ptr_sized(vtable, pointer_align)?.unwrap_or_err()?.to_ptr()?; + self.memory.get_fn(drop_fn) } pub fn read_size_and_align_from_vtable( @@ -87,11 +82,11 @@ pub fn read_size_and_align_from_vtable( ) -> EvalResult<'tcx, (Size, Align)> { let pointer_size = self.memory.pointer_size(); let pointer_align = self.tcx.data_layout.pointer_align; - let size = self.memory.read_ptr_sized(vtable.offset(pointer_size, self)?, pointer_align)?.to_bits(pointer_size)? as u64; + let size = self.memory.read_ptr_sized(vtable.offset(pointer_size, self)?, pointer_align)?.unwrap_or_err()?.to_bits(pointer_size)? as u64; let align = self.memory.read_ptr_sized( vtable.offset(pointer_size * 2, self)?, pointer_align - )?.to_bits(pointer_size)? as u64; + )?.unwrap_or_err()?.to_bits(pointer_size)? 
as u64; Ok((Size::from_bytes(size), Align::from_bytes(align, align).unwrap())) } } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index 6283ee9cfe6..97f1f2a76ca 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -193,7 +193,7 @@ use rustc::hir::map as hir_map; use rustc::hir::def_id::DefId; -use rustc::mir::interpret::{AllocId, ConstValue}; +use rustc::mir::interpret::{AllocId, ConstValue, ScalarMaybeUndef}; use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem}; use rustc::ty::subst::Substs; use rustc::ty::{self, TypeFoldable, Ty, TyCtxt, GenericParamDefKind}; @@ -1264,11 +1264,11 @@ fn collect_const<'a, 'tcx>( }; match val { ConstValue::Unevaluated(..) => bug!("const eval yielded unevaluated const"), - ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b)) => { + ConstValue::ScalarPair(Scalar::Ptr(a), ScalarMaybeUndef::Scalar(Scalar::Ptr(b))) => { collect_miri(tcx, a.alloc_id, output); collect_miri(tcx, b.alloc_id, output); } - ConstValue::ScalarPair(_, Scalar::Ptr(ptr)) | + ConstValue::ScalarPair(_, ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr))) | ConstValue::ScalarPair(Scalar::Ptr(ptr), _) | ConstValue::Scalar(Scalar::Ptr(ptr)) => collect_miri(tcx, ptr.alloc_id, output), diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs index 9902fe98cc0..05e51c5430d 100644 --- a/src/librustc_mir/transform/const_prop.rs +++ b/src/librustc_mir/transform/const_prop.rs @@ -17,7 +17,7 @@ use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind}; use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem}; use rustc::mir::visit::{Visitor, PlaceContext}; -use rustc::mir::interpret::{ConstEvalErr, EvalErrorKind}; +use rustc::mir::interpret::{ConstEvalErr, EvalErrorKind, ScalarMaybeUndef}; use rustc::ty::{TyCtxt, self, Instance}; use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult}; use interpret::EvalContext; @@ -368,8 +368,8 @@ fn const_prop( type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some(( Value::Scalar(Scalar::Bits { bits: n as u128, - defined: self.tcx.data_layout.pointer_size.bits() as u8, - }), + size: self.tcx.data_layout.pointer_size.bytes() as u8, + }.into()), self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?, span, ))) @@ -390,8 +390,8 @@ fn const_prop( let prim = self.use_ecx(source_info, |this| { this.ecx.value_to_scalar(ValTy { value: val.0, ty: val.1.ty }) })?; - let val = self.use_ecx(source_info, |this| this.ecx.unary_op(op, prim, val.1.ty))?; - Some((Value::Scalar(val), place_layout, span)) + let val = self.use_ecx(source_info, |this| this.ecx.unary_op(op, prim, val.1))?; + Some((Value::Scalar(val.into()), place_layout, span)) } Rvalue::CheckedBinaryOp(op, ref left, ref right) | Rvalue::BinaryOp(op, ref left, ref right) => { @@ -449,8 +449,8 @@ fn const_prop( })?; let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue { Value::ScalarPair( - val, - Scalar::from_bool(overflow), + val.into(), + Scalar::from_bool(overflow).into(), ) } else { if overflow { @@ -458,7 +458,7 @@ fn const_prop( let _: Option<()> = self.use_ecx(source_info, |_| Err(err)); return None; } - Value::Scalar(val) + Value::Scalar(val.into()) }; Some((val, place_layout, span)) }, @@ -576,7 +576,7 @@ fn visit_terminator_kind( if let TerminatorKind::Assert { expected, msg, cond, .. 
} = kind { if let Some(value) = self.eval_operand(cond, source_info) { trace!("assertion on {:?} should be {:?}", value, expected); - if Value::Scalar(Scalar::from_bool(*expected)) != value.0 { + if Value::Scalar(Scalar::from_bool(*expected).into()) != value.0 { // poison all places this operand references so that further code // doesn't use the invalid value match cond { @@ -613,14 +613,18 @@ fn visit_terminator_kind( .eval_operand(len, source_info) .expect("len must be const"); let len = match len.0 { - Value::Scalar(Scalar::Bits { bits, ..}) => bits, + Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { + bits, .. + })) => bits, _ => bug!("const len not primitive: {:?}", len), }; let index = self .eval_operand(index, source_info) .expect("index must be const"); let index = match index.0 { - Value::Scalar(Scalar::Bits { bits, .. }) => bits, + Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { + bits, .. + })) => bits, _ => bug!("const index not primitive: {:?}", index), }; format!( diff --git a/src/test/compile-fail/const-err4.rs b/src/test/compile-fail/const-err4.rs index 09ebf1681c5..10376d57809 100644 --- a/src/test/compile-fail/const-err4.rs +++ b/src/test/compile-fail/const-err4.rs @@ -16,7 +16,7 @@ union Foo { enum Bar { Boo = [unsafe { Foo { b: () }.a }; 4][3], - //~^ ERROR constant evaluation of enum discriminant resulted in non-integer + //~^ ERROR could not evaluate enum discriminant } fn main() { diff --git a/src/test/ui/const-eval/simple_with_undef.rs b/src/test/ui/const-eval/simple_with_undef.rs new file mode 100644 index 00000000000..d18059fafd5 --- /dev/null +++ b/src/test/ui/const-eval/simple_with_undef.rs @@ -0,0 +1,16 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// compile-pass + +const PARSE_BOOL: Option<&'static str> = None; +static FOO: (Option<&str>, u32) = (PARSE_BOOL, 42); + +fn main() {} diff --git a/src/test/ui/const-eval/union-const-eval-field.rs b/src/test/ui/const-eval/union-const-eval-field.rs index 41981e12567..5723f4a4159 100644 --- a/src/test/ui/const-eval/union-const-eval-field.rs +++ b/src/test/ui/const-eval/union-const-eval-field.rs @@ -34,7 +34,7 @@ const fn read_field2() -> Field2 { } const fn read_field3() -> Field3 { - const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR exhibits undefined behavior + const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR cannot be used FIELD3 } diff --git a/src/test/ui/const-eval/union-const-eval-field.stderr b/src/test/ui/const-eval/union-const-eval-field.stderr index 94896d6c225..811450c8cba 100644 --- a/src/test/ui/const-eval/union-const-eval-field.stderr +++ b/src/test/ui/const-eval/union-const-eval-field.stderr @@ -1,11 +1,10 @@ -error[E0080]: this constant likely exhibits undefined behavior +error: this constant cannot be used --> $DIR/union-const-eval-field.rs:37:5 | -LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR exhibits undefined behavior - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered undefined bytes +LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR cannot be used + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ attempted to read undefined bytes | - = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior + = note: #[deny(const_err)] on by default error: aborting due to previous error -For more information about this error, try `rustc --explain E0080`. diff --git a/src/test/ui/const-eval/union-ice.rs b/src/test/ui/const-eval/union-ice.rs index 426710389eb..0cdb78c9780 100644 --- a/src/test/ui/const-eval/union-ice.rs +++ b/src/test/ui/const-eval/union-ice.rs @@ -20,9 +20,9 @@ union DummyUnion { const UNION: DummyUnion = DummyUnion { field1: 1065353216 }; -const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant likely exhibits undefined +const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant cannot be used -const FIELD_PATH: Struct = Struct { //~ ERROR this constant likely exhibits undefined behavior +const FIELD_PATH: Struct = Struct { //~ ERROR this constant cannot be used a: 42, b: unsafe { UNION.field3 }, }; diff --git a/src/test/ui/const-eval/union-ice.stderr b/src/test/ui/const-eval/union-ice.stderr index 58e9033a071..e8a7b2f5005 100644 --- a/src/test/ui/const-eval/union-ice.stderr +++ b/src/test/ui/const-eval/union-ice.stderr @@ -1,21 +1,19 @@ -error[E0080]: this constant likely exhibits undefined behavior +error: this constant cannot be used --> $DIR/union-ice.rs:23:1 | -LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant likely exhibits undefined - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered undefined bytes +LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant cannot be used + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ attempted to read undefined bytes | - = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. 
Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior + = note: #[deny(const_err)] on by default -error[E0080]: this constant likely exhibits undefined behavior +error: this constant cannot be used --> $DIR/union-ice.rs:25:1 | -LL | / const FIELD_PATH: Struct = Struct { //~ ERROR this constant likely exhibits undefined behavior +LL | / const FIELD_PATH: Struct = Struct { //~ ERROR this constant cannot be used LL | | a: 42, LL | | b: unsafe { UNION.field3 }, LL | | }; - | |__^ type validation failed: encountered undefined bytes at .b - | - = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior + | |__^ attempted to read undefined bytes error[E0080]: this constant likely exhibits undefined behavior --> $DIR/union-ice.rs:35:1
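
The patch threads `ScalarMaybeUndef` through most read paths instead of encoding definedness in a `defined` bit count on `Scalar::Bits`. The exact definition of the type and of the `unwrap_or_err` helper is not part of this excerpt; the simplified sketch below shows the rough shape. The `Scalar` enum and the string error are stand-ins for the real rustc::mir::interpret types, used here only so the example compiles on its own.

    // Simplified stand-ins; the real types live in rustc::mir::interpret.
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Scalar {
        Bits { bits: u128, size: u8 },
        Ptr,
    }

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum ScalarMaybeUndef {
        Scalar(Scalar),
        Undef,
    }

    impl From<Scalar> for ScalarMaybeUndef {
        fn from(s: Scalar) -> Self {
            ScalarMaybeUndef::Scalar(s)
        }
    }

    impl ScalarMaybeUndef {
        // callers that require a defined value turn `Undef` into a hard error,
        // mirroring the `unwrap_or_err()?` calls throughout the diff
        fn unwrap_or_err(self) -> Result<Scalar, &'static str> {
            match self {
                ScalarMaybeUndef::Scalar(s) => Ok(s),
                ScalarMaybeUndef::Undef => Err("read of undefined bytes"),
            }
        }
    }

    fn main() {
        let undef = ScalarMaybeUndef::Undef;
        assert!(undef.unwrap_or_err().is_err());

        let one: ScalarMaybeUndef = Scalar::Bits { bits: 1, size: 1 }.into();
        assert_eq!(one.unwrap_or_err(), Ok(Scalar::Bits { bits: 1, size: 1 }));
    }

With this split, reads that hit undefined memory return `ScalarMaybeUndef::Undef` outright, and each consumer decides whether that is acceptable or an error: validation reports "undefined bytes", while code paths that need a concrete pointer or integer call `unwrap_or_err` and propagate the failure, which is what the updated const-eval test output above reflects.
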