Auto merge of #63233 - RalfJung:get_unchecked, r=Centril
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index 1816171d7b1276ecbd6f87ac23a2f9f4bbf5cc44..7b75627ee140060014e5ccb3e2aaabc51b682fcf 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -11,8 +11,7 @@
 use rustc::mir::interpret::{
     GlobalId, AllocId,
     ConstValue, Pointer, Scalar,
-    InterpResult, InterpError,
-    sign_extend, truncate,
+    InterpResult, sign_extend, truncate,
 };
 use super::{
     InterpCx, Machine,
@@ -33,12 +32,21 @@ pub enum Immediate<Tag=(), Id=AllocId> {
     ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
 }
 
-impl<'tcx, Tag> Immediate<Tag> {
-    #[inline]
-    pub fn from_scalar(val: Scalar<Tag>) -> Self {
-        Immediate::Scalar(ScalarMaybeUndef::Scalar(val))
+impl<Tag> From<ScalarMaybeUndef<Tag>> for Immediate<Tag> {
+    #[inline(always)]
+    fn from(val: ScalarMaybeUndef<Tag>) -> Self {
+        Immediate::Scalar(val)
+    }
+}
+
+impl<Tag> From<Scalar<Tag>> for Immediate<Tag> {
+    #[inline(always)]
+    fn from(val: Scalar<Tag>) -> Self {
+        Immediate::Scalar(val.into())
     }
+}
 
+impl<'tcx, Tag> Immediate<Tag> {
     pub fn new_slice(
         val: Scalar<Tag>,
         len: u64,
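
For reference, the hunk above swaps the dedicated `Immediate::from_scalar` constructor for `From` impls, so call sites can simply write `val.into()`. Below is a minimal standalone sketch of that conversion pattern; the types are hypothetical, simplified stand-ins, not the real rustc definitions.

```rust
// Hypothetical stand-ins for rustc's Scalar / ScalarMaybeUndef / Immediate,
// only to illustrate the From/Into pattern introduced in the hunk above.
#[derive(Debug)]
struct Scalar(u128);

#[derive(Debug)]
enum ScalarMaybeUndef {
    Scalar(Scalar),
    Undef,
}

#[derive(Debug)]
enum Immediate {
    Scalar(ScalarMaybeUndef),
}

impl From<Scalar> for ScalarMaybeUndef {
    fn from(val: Scalar) -> Self {
        ScalarMaybeUndef::Scalar(val)
    }
}

impl From<ScalarMaybeUndef> for Immediate {
    fn from(val: ScalarMaybeUndef) -> Self {
        Immediate::Scalar(val)
    }
}

impl From<Scalar> for Immediate {
    fn from(val: Scalar) -> Self {
        // Chain through ScalarMaybeUndef, mirroring `Immediate::Scalar(val.into())`.
        Immediate::Scalar(val.into())
    }
}

fn main() {
    // Call sites write `.into()` instead of a dedicated `from_scalar` constructor.
    let defined: Immediate = Scalar(42).into();
    let undef: Immediate = ScalarMaybeUndef::Undef.into();
    println!("{:?} {:?}", defined, undef);
}
```

Routing the `Scalar` impl through `ScalarMaybeUndef` (the inner `val.into()`) keeps the wrapping into `Immediate::Scalar` funneled through a single place.
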
@@ -183,7 +191,7 @@ impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag>
 {
     #[inline]
     pub fn from_scalar(val: Scalar<Tag>, layout: TyLayout<'tcx>) -> Self {
-        ImmTy { imm: Immediate::from_scalar(val), layout }
+        ImmTy { imm: val.into(), layout }
     }
 
     #[inline]
@@ -241,7 +249,7 @@ fn try_read_immediate_from_mplace(
         let ptr = match self.check_mplace_access(mplace, None)? {
             Some(ptr) => ptr,
             None => return Ok(Some(ImmTy { // zero-sized type
-                imm: Immediate::Scalar(Scalar::zst().into()),
+                imm: Scalar::zst().into(),
                 layout: mplace.layout,
             })),
         };
@@ -252,7 +260,7 @@ fn try_read_immediate_from_mplace(
                     .get(ptr.alloc_id)?
                     .read_scalar(self, ptr, mplace.layout.size)?;
                 Ok(Some(ImmTy {
-                    imm: Immediate::Scalar(scalar),
+                    imm: scalar.into(),
                     layout: mplace.layout,
                 }))
             }
@@ -331,8 +339,9 @@ pub fn read_str(
     ) -> InterpResult<'tcx, &str> {
         let len = mplace.len(self)?;
         let bytes = self.memory.read_bytes(mplace.ptr, Size::from_bytes(len as u64))?;
-        let str = ::std::str::from_utf8(bytes)
-            .map_err(|err| InterpError::ValidationFailure(err.to_string()))?;
+        let str = ::std::str::from_utf8(bytes).map_err(|err| {
+            err_unsup!(ValidationFailure(err.to_string()))
+        })?;
         Ok(str)
     }
 
@@ -354,7 +363,7 @@ pub fn operand_field(
         let field = field.try_into().unwrap();
         let field_layout = op.layout.field(self, field)?;
         if field_layout.is_zst() {
-            let immediate = Immediate::Scalar(Scalar::zst().into());
+            let immediate = Scalar::zst().into();
             return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
         }
         let offset = op.layout.fields.offset(field);
@@ -364,7 +373,7 @@ pub fn operand_field(
             // extract fields from types with `ScalarPair` ABI
             Immediate::ScalarPair(a, b) => {
                 let val = if offset.bytes() == 0 { a } else { b };
-                Immediate::Scalar(val)
+                Immediate::from(val)
             },
             Immediate::Scalar(val) =>
                 bug!("field access on non aggregate {:#?}, {:#?}", val, op.layout),
@@ -401,7 +410,7 @@ pub fn operand_projection(
             Deref => self.deref_operand(base)?.into(),
             Subslice { .. } | ConstantIndex { .. } | Index(_) => if base.layout.is_zst() {
                 OpTy {
-                    op: Operand::Immediate(Immediate::Scalar(Scalar::zst().into())),
+                    op: Operand::Immediate(Scalar::zst().into()),
                     // the actual index doesn't matter, so we just pick a convenient one like 0
                     layout: base.layout.field(self, 0)?,
                 }
@@ -425,7 +434,7 @@ pub fn access_local(
         let layout = self.layout_of_local(frame, local, layout)?;
         let op = if layout.is_zst() {
             // Do not read from ZST, they might not be initialized
-            Operand::Immediate(Immediate::Scalar(Scalar::zst().into()))
+            Operand::Immediate(Scalar::zst().into())
         } else {
             frame.locals[local].access()?
         };
@@ -459,7 +468,8 @@ pub(super) fn eval_place_to_op(
 
         mir_place.iterate(|place_base, place_projection| {
             let mut op = match place_base {
-                PlaceBase::Local(mir::RETURN_PLACE) => return err!(ReadFromReturnPointer),
+                PlaceBase::Local(mir::RETURN_PLACE) =>
+                    throw_unsup!(ReadFromReturnPointer),
                 PlaceBase::Local(local) => {
                     // Do not use the layout passed in as argument if the base we are looking at
                     // here is not the entire place.
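
For reference, the hunks above and below migrate error handling from hand-built `InterpError::…` values and `return err!(…)` to the `err_unsup!`/`throw_unsup!`/`throw_inval!` macros: the `err_*` form builds an error value (handy inside `map_err`/`ok_or_else`), while the `throw_*` form returns it from the enclosing function. A standalone sketch of that split, using a hypothetical simplified error type rather than rustc's real `InterpError` machinery:

```rust
// Hypothetical, simplified error type; only meant to show the macro pattern.
#[derive(Debug)]
enum UnsupportedOpInfo {
    ValidationFailure(String),
    InvalidDiscriminant(u128),
}

#[derive(Debug)]
enum InterpError {
    Unsupported(UnsupportedOpInfo),
}

// `err_unsup!(Variant(..))` builds the error value ...
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        InterpError::Unsupported(UnsupportedOpInfo::$($tt)*)
    };
}

// ... while `throw_unsup!(Variant(..))` returns it from the enclosing function.
macro_rules! throw_unsup {
    ($($tt:tt)*) => {
        return Err(err_unsup!($($tt)*))
    };
}

// Mirrors the read_str hunk: map a UTF-8 error into a validation failure.
fn read_str(bytes: &[u8]) -> Result<&str, InterpError> {
    std::str::from_utf8(bytes).map_err(|err| err_unsup!(ValidationFailure(err.to_string())))
}

// Mirrors the read_discriminant hunks: bail out early via `throw_unsup!`.
// Pretend only 0 and 1 are valid discriminants here.
fn check_discr(raw: u128) -> Result<u128, InterpError> {
    if raw > 1 {
        throw_unsup!(InvalidDiscriminant(raw));
    }
    Ok(raw)
}

fn main() {
    println!("{:?}", read_str(b"hello"));
    println!("{:?}", check_discr(7));
}
```

The real rustc macros do more than this (they target the interpreter's `InterpResult` error type); the sketch only illustrates the build-versus-early-return distinction.
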
@@ -530,7 +540,9 @@ pub(super) fn eval_operands(
         };
         // Early-return cases.
         match val.val {
-            ConstValue::Param(_) => return err!(TooGeneric), // FIXME(oli-obk): try to monomorphize
+            ConstValue::Param(_) =>
+                // FIXME(oli-obk): try to monomorphize
+                throw_inval!(TooGeneric),
             ConstValue::Unevaluated(def_id, substs) => {
                 let instance = self.resolve(def_id, substs)?;
                 return Ok(OpTy::from(self.const_eval_raw(GlobalId {
@@ -553,7 +565,7 @@ pub(super) fn eval_operands(
                 Operand::Indirect(MemPlace::from_ptr(ptr, align))
             },
             ConstValue::Scalar(x) =>
-                Operand::Immediate(Immediate::Scalar(tag_scalar(x).into())),
+                Operand::Immediate(tag_scalar(x).into()),
             ConstValue::Slice { data, start, end } => {
                 // We rely on mutability being set correctly in `data` to prevent writes
                 // where none should happen.
@@ -604,7 +616,8 @@ pub fn read_discriminant(
             layout::DiscriminantKind::Tag => {
                 let bits_discr = match raw_discr.to_bits(discr_val.layout.size) {
                     Ok(raw_discr) => raw_discr,
-                    Err(_) => return err!(InvalidDiscriminant(raw_discr.erase_tag())),
+                    Err(_) =>
+                        throw_unsup!(InvalidDiscriminant(raw_discr.erase_tag())),
                 };
                 let real_discr = if discr_val.layout.ty.is_signed() {
                     // going from layout tag type to typeck discriminant type
@@ -630,7 +643,9 @@ pub fn read_discriminant(
                         .discriminants(*def_id, self.tcx.tcx)
                         .find(|(_, var)| var.val == real_discr),
                     _ => bug!("tagged layout for non-adt non-generator"),
-                }.ok_or_else(|| InterpError::InvalidDiscriminant(raw_discr.erase_tag()))?;
+                }.ok_or_else(
+                    || err_unsup!(InvalidDiscriminant(raw_discr.erase_tag()))
+                )?;
                 (real_discr, index.0)
             },
             layout::DiscriminantKind::Niche {
@@ -640,15 +655,16 @@ pub fn read_discriminant(
             } => {
                 let variants_start = niche_variants.start().as_u32() as u128;
                 let variants_end = niche_variants.end().as_u32() as u128;
-                let raw_discr = raw_discr.not_undef()
-                    .map_err(|_| InterpError::InvalidDiscriminant(ScalarMaybeUndef::Undef))?;
+                let raw_discr = raw_discr.not_undef().map_err(|_| {
+                    err_unsup!(InvalidDiscriminant(ScalarMaybeUndef::Undef))
+                })?;
                 match raw_discr.to_bits_or_ptr(discr_val.layout.size, self) {
                     Err(ptr) => {
                         // The niche must be just 0 (which an inbounds pointer value never is)
                         let ptr_valid = niche_start == 0 && variants_start == variants_end &&
                             !self.memory.ptr_may_be_null(ptr);
                         if !ptr_valid {
-                            return err!(InvalidDiscriminant(raw_discr.erase_tag().into()));
+                            throw_unsup!(InvalidDiscriminant(raw_discr.erase_tag().into()))
                         }
                         (dataful_variant.as_u32() as u128, dataful_variant)
                     },