Auto merge of #2430 - RalfJung:no-global-wrapper, r=RalfJung
diff --git a/src/operator.rs b/src/operator.rs
index 45bf1e453744cb26f723495b989b6934537889b9..758e747d2786760b8b8f75d6ea140f5674b846cc 100644
--- a/src/operator.rs
+++ b/src/operator.rs
-use rustc::ty::{Ty, layout::LayoutOf};
-use rustc::mir;
+use log::trace;
+
+use rustc_middle::{mir, ty::Ty};
+use rustc_target::abi::Size;
 
 use crate::*;
 
 pub trait EvalContextExt<'tcx> {
-    fn pointer_inbounds(
-        &self,
-        ptr: Pointer<Tag>
-    ) -> InterpResult<'tcx>;
-
     fn binary_ptr_op(
         &self,
         bin_op: mir::BinOp,
-        left: ImmTy<'tcx, Tag>,
-        right: ImmTy<'tcx, Tag>,
-    ) -> InterpResult<'tcx, (Scalar<Tag>, bool, Ty<'tcx>)>;
-
-    fn ptr_eq(
-        &self,
-        left: Scalar<Tag>,
-        right: Scalar<Tag>,
-    ) -> InterpResult<'tcx, bool>;
-
-    fn pointer_offset_inbounds(
-        &self,
-        ptr: Scalar<Tag>,
-        pointee_ty: Ty<'tcx>,
-        offset: i64,
-    ) -> InterpResult<'tcx, Scalar<Tag>>;
+        left: &ImmTy<'tcx, Provenance>,
+        right: &ImmTy<'tcx, Provenance>,
+    ) -> InterpResult<'tcx, (Scalar<Provenance>, bool, Ty<'tcx>)>;
 }
 
 impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
-    /// Test if the pointer is in-bounds of a live allocation.
-    #[inline]
-    fn pointer_inbounds(&self, ptr: Pointer<Tag>) -> InterpResult<'tcx> {
-        let (size, _align) = self.memory().get_size_and_align(ptr.alloc_id, AllocCheck::Live)?;
-        ptr.check_inbounds_alloc(size, CheckInAllocMsg::InboundsTest)
-    }
-
     fn binary_ptr_op(
         &self,
         bin_op: mir::BinOp,
-        left: ImmTy<'tcx, Tag>,
-        right: ImmTy<'tcx, Tag>,
-    ) -> InterpResult<'tcx, (Scalar<Tag>, bool, Ty<'tcx>)> {
-        use rustc::mir::BinOp::*;
+        left: &ImmTy<'tcx, Provenance>,
+        right: &ImmTy<'tcx, Provenance>,
+    ) -> InterpResult<'tcx, (Scalar<Provenance>, bool, Ty<'tcx>)> {
+        use rustc_middle::mir::BinOp::*;
 
         trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
 
         Ok(match bin_op {
-            Eq | Ne => {
-                // This supports fat pointers.
-                let eq = match (*left, *right) {
-                    (Immediate::Scalar(left), Immediate::Scalar(right)) =>
-                        self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
-                    (Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
-                        self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
-                        self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
-                    _ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
+            Eq | Ne | Lt | Le | Gt | Ge => {
+                assert_eq!(left.layout.abi, right.layout.abi); // types can differ, e.g. fn ptrs with different `for`
+                let size = self.pointer_size();
+                // Just compare the bits. ScalarPairs are compared lexicographically.
+                // We thus always compare pairs and simply fill scalars up with 0.
+                let left = match **left {
+                    Immediate::Scalar(l) => (l.check_init()?.to_bits(size)?, 0),
+                    Immediate::ScalarPair(l1, l2) =>
+                        (l1.check_init()?.to_bits(size)?, l2.check_init()?.to_bits(size)?),
+                    Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
+                };
+                let right = match **right {
+                    Immediate::Scalar(r) => (r.check_init()?.to_bits(size)?, 0),
+                    Immediate::ScalarPair(r1, r2) =>
+                        (r1.check_init()?.to_bits(size)?, r2.check_init()?.to_bits(size)?),
+                    Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
                 };
-                (Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false, self.tcx.types.bool)
-            }
-
-            Lt | Le | Gt | Ge => {
-                // Just compare the integers.
-                // TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
-                let left = self.force_bits(left.to_scalar()?, left.layout.size)?;
-                let right = self.force_bits(right.to_scalar()?, right.layout.size)?;
                 let res = match bin_op {
+                    Eq => left == right,
+                    Ne => left != right,
                     Lt => left < right,
                     Le => left <= right,
                     Gt => left > right,
                     Ge => left >= right,
-                    _ => bug!("We already established it has to be one of these operators."),
+                    _ => bug!(),
                 };
                 (Scalar::from_bool(res), false, self.tcx.types.bool)
             }
 
             Offset => {
-                let pointee_ty = left.layout.ty
-                    .builtin_deref(true)
-                    .expect("Offset called on non-ptr type")
-                    .ty;
-                let ptr = self.pointer_offset_inbounds(
-                    left.to_scalar()?,
-                    pointee_ty,
-                    right.to_scalar()?.to_isize(self)?,
-                )?;
-                (ptr, false, left.layout.ty)
-            }
-
-            _ => bug!("Invalid operator on pointers: {:?}", bin_op)
-        })
-    }
+                assert!(left.layout.ty.is_unsafe_ptr());
+                let ptr = self.scalar_to_ptr(left.to_scalar()?)?;
+                let offset = right.to_scalar()?.to_machine_isize(self)?;
 
-    fn ptr_eq(
-        &self,
-        left: Scalar<Tag>,
-        right: Scalar<Tag>,
-    ) -> InterpResult<'tcx, bool> {
-        let size = self.pointer_size();
-        // Just compare the integers.
-        // TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
-        let left = self.force_bits(left, size)?;
-        let right = self.force_bits(right, size)?;
-        Ok(left == right)
-    }
+                let pointee_ty =
+                    left.layout.ty.builtin_deref(true).expect("Offset called on non-ptr type").ty;
+                let ptr = self.ptr_offset_inbounds(ptr, pointee_ty, offset)?;
+                (Scalar::from_maybe_pointer(ptr, self), false, left.layout.ty)
+            }
 
-    /// Raises an error if the offset moves the pointer outside of its allocation.
-    /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
-    /// moves in there because the size is 0). We also consider the NULL pointer its own separate
-    /// allocation, and all the remaining integers pointers their own allocation.
-    fn pointer_offset_inbounds(
-        &self,
-        ptr: Scalar<Tag>,
-        pointee_ty: Ty<'tcx>,
-        offset: i64,
-    ) -> InterpResult<'tcx, Scalar<Tag>> {
-        // FIXME: assuming here that type size is less than `i64::max_value()`.
-        let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
-        let offset = offset
-            .checked_mul(pointee_size)
-            .ok_or_else(|| err_panic!(Overflow(mir::BinOp::Mul)))?;
-        // Now let's see what kind of pointer this is.
-        let ptr = if offset == 0 {
-            match ptr {
-                Scalar::Ptr(ptr) => ptr,
-                Scalar::Raw { .. } => {
-                    // Offset 0 on an integer. We accept that, pretending there is
-                    // a little zero-sized allocation here.
-                    return Ok(ptr);
-                }
+            // Some more operations are possible with atomics.
+            // The return value always has the provenance of the *left* operand.
+            Add | Sub | BitOr | BitAnd | BitXor => {
+                assert!(left.layout.ty.is_unsafe_ptr());
+                assert!(right.layout.ty.is_unsafe_ptr());
+                let ptr = self.scalar_to_ptr(left.to_scalar()?)?;
+                // We do the actual operation with usize-typed scalars.
+                let left = ImmTy::from_uint(ptr.addr().bytes(), self.machine.layouts.usize);
+                let right = ImmTy::from_uint(
+                    right.to_scalar()?.to_machine_usize(self)?,
+                    self.machine.layouts.usize,
+                );
+                let (result, overflowing, _ty) =
+                    self.overflowing_binary_op(bin_op, &left, &right)?;
+                // Construct a new pointer with the provenance of `ptr` (the LHS).
+                let result_ptr =
+                    Pointer::new(ptr.provenance, Size::from_bytes(result.to_machine_usize(self)?));
+                (Scalar::from_maybe_pointer(result_ptr, self), overflowing, left.layout.ty)
             }
-        } else {
-            // Offset > 0. We *require* a pointer.
-            self.force_ptr(ptr)?
-        };
-        // Both old and new pointer must be in-bounds of a *live* allocation.
-        // (Of the same allocation, but that part is trivial with our representation.)
-        self.pointer_inbounds(ptr)?;
-        let ptr = ptr.signed_offset(offset, self)?;
-        self.pointer_inbounds(ptr)?;
-        Ok(Scalar::Ptr(ptr))
+
+            _ => span_bug!(self.cur_span(), "Invalid operator on pointers: {:?}", bin_op),
+        })
     }
 }
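
For reference, below is a minimal sketch (not part of this diff; buffer and variable names are illustrative only) of guest code that, when executed under Miri, e.g. via `cargo miri run`, exercises the operator paths touched here: raw-pointer comparisons lower to MIR `Eq`/`Ne`/`Lt`/`Le`/`Gt`/`Ge` and are now compared bitwise (wide pointers as lexicographic pairs), while `<*const T>::add`/`offset` lowers to `BinOp::Offset`, which goes through `ptr_offset_inbounds`.

// Illustrative only -- not part of this diff. A tiny program whose MIR hits
// the operator cases handled above when executed under Miri.
fn main() {
    let buf = [0u8; 8];
    let base: *const u8 = buf.as_ptr();
    let second: *const u8 = buf[1..].as_ptr();

    // Eq/Ne/Lt/Le/Gt/Ge on raw pointers: compared bitwise, i.e. by address.
    assert!(base != second);
    assert!(base < second);

    // Wide pointers are ScalarPairs and compare lexicographically
    // (data pointer first, then metadata such as the slice length).
    let a: *const [u8] = &buf[..4];
    let b: *const [u8] = &buf[..8];
    assert!(a != b); // same data pointer, different length metadata

    // BinOp::Offset: `add`/`offset` must stay within the bounds of the
    // allocation, which is what `ptr_offset_inbounds` enforces.
    let last = unsafe { base.add(7) };
    assert_eq!(unsafe { *last }, 0);
}

The `Add | Sub | BitOr | BitAnd | BitXor` arm has no stable surface syntax in this sketch; per the comment in the diff, it is reached through atomic operations on pointers, and the result always carries the provenance of the left operand.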