add after_stack_push hook; add public ImmTy::from_immediate method, and make ImmTy...
author Ralf Jung <post@ralfj.de>
Mon, 13 Apr 2020 15:07:54 +0000 (17:07 +0200)
committer Ralf Jung <post@ralfj.de>
Mon, 13 Apr 2020 15:08:27 +0000 (17:08 +0200)
src/librustc_mir/const_eval/eval_queries.rs
src/librustc_mir/interpret/eval_context.rs
src/librustc_mir/interpret/machine.rs
src/librustc_mir/interpret/operand.rs
src/librustc_mir/interpret/place.rs
src/librustc_mir/interpret/terminator.rs

diff --git a/src/librustc_mir/const_eval/eval_queries.rs b/src/librustc_mir/const_eval/eval_queries.rs
index 97cdb32e2cdf783ed6836a8a728b408b67f77c73..33f8397887336ebff02aa03d9de98f14a30715a4 100644
--- a/src/librustc_mir/const_eval/eval_queries.rs
+++ b/src/librustc_mir/const_eval/eval_queries.rs
@@ -1,7 +1,7 @@
 use super::{error_to_const_error, CompileTimeEvalContext, CompileTimeInterpreter, MemoryExtra};
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
-    intern_const_alloc_recursive, Allocation, ConstValue, GlobalId, ImmTy, Immediate, InternKind,
+    intern_const_alloc_recursive, Allocation, ConstValue, GlobalId, Immediate, InternKind,
     InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RawConst, RefTracking, Scalar,
     ScalarMaybeUndef, StackPopCleanup,
 };
@@ -147,24 +147,26 @@ pub(super) fn op_to_const<'tcx>(
     match immediate {
         Ok(mplace) => to_const_value(mplace),
         // see comment on `let try_as_immediate` above
-        Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x {
-            ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
-            ScalarMaybeUndef::Undef => to_const_value(op.assert_mem_place(ecx)),
-        },
-        Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => {
-            let (data, start) = match a.not_undef().unwrap() {
-                Scalar::Ptr(ptr) => {
-                    (ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id), ptr.offset.bytes())
-                }
-                Scalar::Raw { .. } => (
-                    ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes(b"" as &[u8])),
-                    0,
-                ),
-            };
-            let len = b.to_machine_usize(&ecx.tcx.tcx).unwrap();
-            let start = start.try_into().unwrap();
-            let len: usize = len.try_into().unwrap();
-            ConstValue::Slice { data, start, end: start + len }
+        Err(imm) => match *imm {
+            Immediate::Scalar(x) => match x {
+                ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
+                ScalarMaybeUndef::Undef => to_const_value(op.assert_mem_place(ecx)),
+            },
+            Immediate::ScalarPair(a, b) => {
+                let (data, start) = match a.not_undef().unwrap() {
+                    Scalar::Ptr(ptr) => {
+                        (ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id), ptr.offset.bytes())
+                    }
+                    Scalar::Raw { .. } => (
+                        ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes(b"" as &[u8])),
+                        0,
+                    ),
+                };
+                let len = b.to_machine_usize(&ecx.tcx.tcx).unwrap();
+                let start = start.try_into().unwrap();
+                let len: usize = len.try_into().unwrap();
+                ConstValue::Slice { data, start, end: start + len }
+            }
         }
     }
 }
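
The rewritten arm matches on `*imm`, going through `ImmTy`'s `Deref` to the underlying `Immediate`, instead of destructuring the now-private `imm` field. Below is a minimal sketch of that pattern, using hypothetical `Wrapper`/`Value` stand-ins rather than the rustc types.

use std::ops::Deref;

// Hypothetical stand-ins for `Immediate` and `ImmTy` (not the rustc types).
#[derive(Copy, Clone, Debug)]
enum Value {
    Scalar(u64),
    Pair(u64, u64),
}

#[derive(Copy, Clone, Debug)]
struct Wrapper {
    value: Value,      // private payload: construction goes through `from_value`
    tag: &'static str, // stand-in for the `layout` field
}

impl Wrapper {
    fn from_value(value: Value, tag: &'static str) -> Self {
        Wrapper { value, tag }
    }
}

// `Deref` gives read access (`match *w { .. }`) without exposing the field
// for construction or mutation.
impl Deref for Wrapper {
    type Target = Value;
    fn deref(&self) -> &Value {
        &self.value
    }
}

fn describe(w: Wrapper) -> String {
    match *w {
        Value::Scalar(s) => format!("scalar {} ({})", s, w.tag),
        Value::Pair(a, b) => format!("pair ({}, {}) ({})", a, b, w.tag),
    }
}

fn main() {
    println!("{}", describe(Wrapper::from_value(Value::Pair(1, 2), "demo")));
}
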
diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs
index a0c6240a8a0ae444b4548014ff0e575ee2177ba4..f111eecb9450e33625f58147a24f01839b362a98 100644
--- a/src/librustc_mir/interpret/eval_context.rs
+++ b/src/librustc_mir/interpret/eval_context.rs
@@ -646,6 +646,7 @@ pub fn push_stack_frame(
             self.frame_mut().locals = locals;
         }
 
+        M::after_stack_push(self)?;
         info!("ENTERING({}) {}", self.cur_frame(), self.frame().instance);
 
         if self.stack.len() > *self.tcx.sess.recursion_limit.get() {
@@ -751,7 +752,7 @@ pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx>
             // We want to skip the `info!` below, hence early return.
             return Ok(());
         }
-        // Normal return.
+        // Normal return, figure out where to jump.
         if unwinding {
             // Follow the unwind edge.
             let unwind = next_block.expect("Encountered StackPopCleanup::None when unwinding!");
diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs
index ffda0334a508f323b6697c5fa881d19240a08328..3c7cb73610ea698536e81110c40ef41f84094439 100644
--- a/src/librustc_mir/interpret/machine.rs
+++ b/src/librustc_mir/interpret/machine.rs
@@ -285,6 +285,11 @@ fn init_frame_extra(
         frame: Frame<'mir, 'tcx, Self::PointerTag>,
     ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>>;
 
+    /// Called immediately after a stack frame got pushed and its locals got initialized.
+    fn after_stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
+        Ok(())
+    }
+
     /// Called immediately after a stack frame got popped, but before jumping back to the caller.
     fn after_stack_pop(
         _ecx: &mut InterpCx<'mir, 'tcx, Self>,
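
The new hook mirrors `after_stack_pop`: a defaulted no-op that individual machines may override, and that `push_stack_frame` invokes once the frame and its locals are set up (see the `eval_context.rs` hunk above). A rough, self-contained sketch of the shape follows, with hypothetical `Interp`/`Machine`/`LoggingMachine` types standing in for the much larger real trait.

// Hypothetical, stripped-down version of the hook pattern (not the real trait).
struct Interp<M: Machine> {
    frames: Vec<String>,
    machine: M,
}

trait Machine: Sized {
    /// Called right after a frame was pushed and its locals were initialized.
    fn after_stack_push(_interp: &mut Interp<Self>) -> Result<(), String> {
        Ok(()) // default: do nothing
    }
}

impl<M: Machine> Interp<M> {
    fn push_stack_frame(&mut self, name: &str) -> Result<(), String> {
        self.frames.push(name.to_string());
        // ... locals would be initialized here ...
        M::after_stack_push(self)?; // hook runs once the frame is fully set up
        Ok(())
    }
}

// A machine that uses the hook, e.g. for logging or extra per-frame setup.
struct LoggingMachine;

impl Machine for LoggingMachine {
    fn after_stack_push(interp: &mut Interp<Self>) -> Result<(), String> {
        println!("entered frame #{}", interp.frames.len());
        Ok(())
    }
}

fn main() -> Result<(), String> {
    let mut interp = Interp { frames: Vec::new(), machine: LoggingMachine };
    interp.push_stack_frame("main")
}

Since the default body lives in the trait, existing machines keep compiling unchanged; only machines that care about frame entry opt in.
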
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index 3741f31927e9428a25f593a4de2e804375bb6179..893f4c1db7e0aa20b24c9d82228d2e582d40e4cb 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -87,7 +87,7 @@ pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
 // as input for binary and cast operations.
 #[derive(Copy, Clone, Debug)]
 pub struct ImmTy<'tcx, Tag = ()> {
-    pub(crate) imm: Immediate<Tag>,
+    imm: Immediate<Tag>,
     pub layout: TyAndLayout<'tcx>,
 }
 
@@ -183,6 +183,11 @@ pub fn from_scalar(val: Scalar<Tag>, layout: TyAndLayout<'tcx>) -> Self {
         ImmTy { imm: val.into(), layout }
     }
 
+    #[inline]
+    pub fn from_immediate(imm: Immediate<Tag>, layout: TyAndLayout<'tcx>) -> Self {
+        ImmTy { imm, layout }
+    }
+
     #[inline]
     pub fn try_from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
         Some(Self::from_scalar(Scalar::try_from_uint(i, layout.size)?, layout))
@@ -424,7 +429,9 @@ pub fn access_local(
         Ok(OpTy { op, layout })
     }
 
-    /// Every place can be read from, so we can turn them into an operand
+    /// Every place can be read from, so we can turn them into an operand.
+    /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this
+    /// will never actually read from memory.
     #[inline(always)]
     pub fn place_to_op(
         &self,
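
With `imm` now private, an `ImmTy` can only be built from an arbitrary `Immediate` through the new constructor, which is why the call sites in `place.rs` and `terminator.rs` below switch from struct literals to `ImmTy::from_immediate(imm, layout)`. A small sketch of the encapsulation, with simplified stand-in types (a `&'static str` in place of `TyAndLayout<'tcx>`):

mod operand {
    #[derive(Copy, Clone, Debug)]
    pub enum Immediate {
        Scalar(u128),
        ScalarPair(u128, u128),
    }

    #[derive(Copy, Clone, Debug)]
    pub struct ImmTy {
        imm: Immediate,           // private: struct literals only work inside this module
        pub layout: &'static str, // stand-in for `TyAndLayout<'tcx>`
    }

    impl ImmTy {
        #[inline]
        pub fn from_immediate(imm: Immediate, layout: &'static str) -> Self {
            ImmTy { imm, layout }
        }
    }
}

use crate::operand::{ImmTy, Immediate};

fn main() {
    // let v = ImmTy { imm: Immediate::Scalar(0), layout: "u8" }; // error: field `imm` is private
    let v = ImmTy::from_immediate(Immediate::Scalar(0), "u8");
    println!("{:?} has layout {}", v, v.layout);
}

Funnelling construction through `from_immediate` leaves a single place to later enforce that the immediate actually matches its layout.
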
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index 828df9a0930f533531f78b10ac5fff246894e2c9..9ac4b3551fc4307c0ea01da5f11d5929ca4ff1d8 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -247,7 +247,7 @@ pub fn try_as_mplace(
             Operand::Immediate(_) if self.layout.is_zst() => {
                 Ok(MPlaceTy::dangling(self.layout, cx))
             }
-            Operand::Immediate(imm) => Err(ImmTy { imm, layout: self.layout }),
+            Operand::Immediate(imm) => Err(ImmTy::from_immediate(imm, self.layout)),
         }
     }
 
diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs
index 2d8551b2bbf1e3a911508f92d2bd23ac3a01911a..49fee1bddcb6db1b06cdc8343938ff2896ea8373 100644
--- a/src/librustc_mir/interpret/terminator.rs
+++ b/src/librustc_mir/interpret/terminator.rs
@@ -407,7 +407,7 @@ fn eval_fn_call(
                 let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                 // Adjust receiver argument.
                 args[0] =
-                    OpTy::from(ImmTy { layout: this_receiver_ptr, imm: receiver_place.ptr.into() });
+                    OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                 trace!("Patched self operand to {:#?}", args[0]);
                 // recurse with concrete function
                 self.eval_fn_call(drop_fn, caller_abi, &args, ret, unwind)
@@ -436,10 +436,10 @@ fn drop_in_place(
             _ => (instance, place),
         };
 
-        let arg = ImmTy {
-            imm: place.to_ref(),
-            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
-        };
+        let arg = ImmTy::from_immediate(
+            place.to_ref(),
+            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
+        );
 
         let ty = self.tcx.mk_unit(); // return type is ()
         let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);