From 19485cc10173cb72e24813ce95b65c8f3cf92326 Mon Sep 17 00:00:00 2001
From: Ralf Jung
Date: Mon, 15 Apr 2019 10:05:13 +0200
Subject: [PATCH] Miri: refactor new allocation tagging

---
 src/librustc/mir/interpret/allocation.rs | 73 +++++++-----------------
 src/librustc/mir/interpret/pointer.rs    |  8 ++-
 src/librustc/mir/interpret/value.rs      | 34 +++++------
 src/librustc_mir/const_eval.rs           | 22 +++----
 src/librustc_mir/interpret/machine.rs    | 40 ++++++-------
 src/librustc_mir/interpret/memory.rs     | 10 ++--
 src/librustc_mir/interpret/operand.rs    | 38 +++---------
 src/librustc_mir/interpret/place.rs      | 36 ++++--------
 src/librustc_mir/interpret/traits.rs     |  2 +-
 9 files changed, 99 insertions(+), 164 deletions(-)

diff --git a/src/librustc/mir/interpret/allocation.rs b/src/librustc/mir/interpret/allocation.rs
index 80fef910cc7..ca5feaee12e 100644
--- a/src/librustc/mir/interpret/allocation.rs
+++ b/src/librustc/mir/interpret/allocation.rs
@@ -45,12 +45,10 @@ pub struct Allocation {
 }
 
-pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone {
-    /// Hook to initialize the extra data when an allocation gets created.
-    fn memory_allocated(
-        _size: Size,
-        _memory_extra: &MemoryExtra
-    ) -> Self;
+pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Clone {
+    // There is no constructor in here because the constructor's type depends
+    // on `MemoryKind`, and making things sufficiently generic leads to painful
+    // inference failure.
 
     /// Hook for performing extra checks on a memory read access.
     ///
@@ -88,15 +86,8 @@ fn memory_deallocated(
     }
 }
 
-impl AllocationExtra<(), ()> for () {
-    #[inline(always)]
-    fn memory_allocated(
-        _size: Size,
-        _memory_extra: &()
-    ) -> Self {
-        ()
-    }
-}
+// For Tag=() and no extra state, we have a trivial implementation.
+impl AllocationExtra<()> for () { }
 
 impl<Tag, Extra> Allocation<Tag, Extra> {
     /// Creates a read-only allocation initialized by the given bytes
@@ -159,7 +150,7 @@ pub fn check_bounds(
 }
 
 /// Byte accessors
-impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
+impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
     /// The last argument controls whether we error out when there are undefined
     /// or pointer bytes. You should never call this, call `get_bytes` or
     /// `get_bytes_with_undef_and_ptr` instead,
@@ -167,15 +158,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation {
     /// This function also guarantees that the resulting pointer will remain stable
    /// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
     /// on that.
-    fn get_bytes_internal<MemoryExtra>(
+    fn get_bytes_internal(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
         check_defined_and_ptr: bool,
     ) -> EvalResult<'tcx, &[u8]>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         self.check_bounds(cx, ptr, size)?;
 
@@ -196,14 +185,12 @@ fn get_bytes_internal(
     }
 
     #[inline]
-    pub fn get_bytes<MemoryExtra>(
+    pub fn get_bytes(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
     ) -> EvalResult<'tcx, &[u8]>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         self.get_bytes_internal(cx, ptr, size, true)
     }
 
@@ -211,28 +198,24 @@ pub fn get_bytes(
     /// It is the caller's responsibility to handle undefined and pointer bytes.
     /// However, this still checks that there are no relocations on the *edges*.
     #[inline]
-    pub fn get_bytes_with_undef_and_ptr<MemoryExtra>(
+    pub fn get_bytes_with_undef_and_ptr(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
     ) -> EvalResult<'tcx, &[u8]>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         self.get_bytes_internal(cx, ptr, size, false)
     }
 
     /// Just calling this already marks everything as defined and removes relocations,
     /// so be sure to actually put data there!
-    pub fn get_bytes_mut<MemoryExtra>(
+    pub fn get_bytes_mut(
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
     ) -> EvalResult<'tcx, &mut [u8]>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
         self.check_bounds(cx, ptr, size)?;
@@ -250,16 +233,14 @@ pub fn get_bytes_mut(
 }
 
 /// Reading and writing
-impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
+impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
     /// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached
     /// before a `0` is found.
-    pub fn read_c_str<MemoryExtra>(
+    pub fn read_c_str(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
     ) -> EvalResult<'tcx, &[u8]>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
         let offset = ptr.offset.bytes() as usize;
@@ -278,15 +259,13 @@ pub fn read_c_str(
     /// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a
     /// relocation. If `allow_ptr_and_undef` is `false`, also enforces that the memory in the
     /// given range contains neither relocations nor undef bytes.
-    pub fn check_bytes<MemoryExtra>(
+    pub fn check_bytes(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
         allow_ptr_and_undef: bool,
     ) -> EvalResult<'tcx>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         // Check bounds and relocations on the edges
         self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@@ -301,14 +280,12 @@ pub fn check_bytes(
     /// Writes `src` to the memory starting at `ptr.offset`.
     ///
     /// Will do bounds checks on the allocation.
-    pub fn write_bytes<MemoryExtra>(
+    pub fn write_bytes(
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         src: &[u8],
     ) -> EvalResult<'tcx>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?;
         bytes.clone_from_slice(src);
@@ -316,15 +293,13 @@ pub fn write_bytes(
     }
 
     /// Sets `count` bytes starting at `ptr.offset` with `val`. Basically `memset`.
-    pub fn write_repeat<MemoryExtra>(
+    pub fn write_repeat(
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         val: u8,
         count: Size
     ) -> EvalResult<'tcx>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         let bytes = self.get_bytes_mut(cx, ptr, count)?;
         for b in bytes {
@@ -341,14 +316,12 @@ pub fn write_repeat(
     /// being valid for ZSTs
     ///
     /// Note: This function does not do *any* alignment checks, you need to do these before calling
-    pub fn read_scalar<MemoryExtra>(
+    pub fn read_scalar(
        &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size
     ) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         // get_bytes_unchecked tests relocation edges
         let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@@ -379,13 +352,11 @@ pub fn read_scalar(
     }
 
     /// Note: This function does not do *any* alignment checks, you need to do these before calling
-    pub fn read_ptr_sized<MemoryExtra>(
+    pub fn read_ptr_sized(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
     ) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
     }
 
@@ -398,15 +369,13 @@ pub fn read_ptr_sized(
     /// being valid for ZSTs
     ///
     /// Note: This function does not do *any* alignment checks, you need to do these before calling
-    pub fn write_scalar<MemoryExtra>(
+    pub fn write_scalar(
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         val: ScalarMaybeUndef<Tag>,
         type_size: Size,
     ) -> EvalResult<'tcx>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         let val = match val {
             ScalarMaybeUndef::Scalar(scalar) => scalar,
@@ -446,14 +415,12 @@ pub fn write_scalar(
     }
 
     /// Note: This function does not do *any* alignment checks, you need to do these before calling
-    pub fn write_ptr_sized<MemoryExtra>(
+    pub fn write_ptr_sized(
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         val: ScalarMaybeUndef<Tag>
     ) -> EvalResult<'tcx>
-        // FIXME: Working around https://github.com/rust-lang/rust/issues/56209
-        where Extra: AllocationExtra<Tag, MemoryExtra>
     {
         let ptr_size = cx.data_layout().pointer_size;
         self.write_scalar(cx, ptr.into(), val, ptr_size)
diff --git a/src/librustc/mir/interpret/pointer.rs b/src/librustc/mir/interpret/pointer.rs
index 9216cb494ce..75e0f704a58 100644
--- a/src/librustc/mir/interpret/pointer.rs
+++ b/src/librustc/mir/interpret/pointer.rs
@@ -94,11 +94,17 @@ pub fn new(alloc_id: AllocId, offset: Size) -> Self {
         Pointer { alloc_id, offset, tag: () }
     }
 
+    #[inline(always)]
+    pub fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag>
+    {
+        Pointer::new_with_tag(self.alloc_id, self.offset, tag)
+    }
+
     #[inline(always)]
     pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
         where Tag: Default
     {
-        Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
+        self.with_tag(Tag::default())
     }
 }
diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs
index 7b47c02de1b..18c82ecd38e 100644
--- a/src/librustc/mir/interpret/value.rs
+++ b/src/librustc/mir/interpret/value.rs
@@ -119,14 +119,19 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
 impl<'tcx> Scalar<()> {
     #[inline]
-    pub fn with_default_tag<Tag>(self) -> Scalar<Tag>
-        where Tag: Default
-    {
+    pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
         match self {
-            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_default_tag()),
+            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
             Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
         }
     }
+
+    #[inline(always)]
+    pub fn with_default_tag<Tag>(self) -> Scalar<Tag>
+        where Tag: Default
+    {
+        self.with_tag(Tag::default())
+    }
 }
 
 impl<'tcx, Tag> Scalar<Tag> {
@@ -138,14 +143,6 @@ pub fn erase_tag(self) -> Scalar {
         }
     }
 
-    #[inline]
-    pub fn with_tag(self, new_tag: Tag) -> Self {
-        match self {
-            Scalar::Ptr(ptr) => Scalar::Ptr(Pointer { tag: new_tag, ..ptr }),
-            Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
-        }
-    }
-
     #[inline]
     pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
         Scalar::Bits {
@@ -434,14 +431,19 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
 impl<'tcx> ScalarMaybeUndef<()> {
     #[inline]
-    pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
-        where Tag: Default
-    {
+    pub fn with_tag<Tag>(self, new_tag: Tag) -> ScalarMaybeUndef<Tag> {
         match self {
-            ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
+            ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_tag(new_tag)),
             ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
         }
     }
+
+    #[inline(always)]
+    pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
+        where Tag: Default
+    {
+        self.with_tag(Tag::default())
+    }
 }
 
 impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs
index 4c8ab361e04..09c50d4f81f 100644
--- a/src/librustc_mir/const_eval.rs
+++ b/src/librustc_mir/const_eval.rs
@@ -11,7 +11,7 @@
 use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
 use rustc::mir;
 use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
-use rustc::ty::layout::{self, LayoutOf, VariantIdx};
+use rustc::ty::layout::{self, LayoutOf, VariantIdx, Size};
 use rustc::ty::subst::Subst;
 use rustc::traits::Reveal;
 use rustc::util::common::ErrorReported;
@@ -21,7 +21,7 @@
 use syntax::source_map::{Span, DUMMY_SP};
 
 use crate::interpret::{self,
-    PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar, Pointer,
+    PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
     RawConst, ConstValue,
     EvalResult, EvalError, InterpError, GlobalId, InterpretCx, StackPopCleanup,
     Allocation, AllocId, MemoryKind,
@@ -406,6 +406,15 @@ fn adjust_static_allocation<'b>(
         Cow::Borrowed(alloc)
     }
 
+    #[inline(always)]
+    fn new_allocation(
+        _size: Size,
+        _extra: &Self::MemoryExtra,
+        _kind: MemoryKind<Self::MemoryKinds>,
+    ) -> (Self::AllocExtra, Self::PointerTag) {
+        ((), ())
+    }
+
     fn box_alloc(
         _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
         _dest: PlaceTy<'tcx>,
@@ -439,15 +448,6 @@ fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult<
         )
     }
 
-    #[inline(always)]
-    fn tag_new_allocation(
-        _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
-        ptr: Pointer,
-        _kind: MemoryKind<Self::MemoryKinds>,
-    ) -> Pointer {
-        ptr
-    }
-
     #[inline(always)]
     fn stack_push(
         _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs
index 09d403ab243..288ffbf3cd6 100644
--- a/src/librustc_mir/interpret/machine.rs
+++ b/src/librustc_mir/interpret/machine.rs
@@ -7,11 +7,11 @@
 use rustc::hir::{self, def_id::DefId};
 use rustc::mir;
-use rustc::ty::{self, query::TyCtxtAt};
+use rustc::ty::{self, query::TyCtxtAt, layout::Size};
 
 use super::{
     Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
-    InterpretCx, PlaceTy, MPlaceTy, OpTy, ImmTy, Pointer, MemoryKind,
+    InterpretCx, PlaceTy, MPlaceTy, OpTy, ImmTy, MemoryKind,
 };
 
 /// Whether this kind of memory is allowed to leak
@@ ... @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
     type MemoryExtra: Default;
 
     /// Extra data stored in every allocation.
-    type AllocExtra: AllocationExtra<Self::PointerTag, Self::MemoryExtra> + 'static;
+    type AllocExtra: AllocationExtra<Self::PointerTag> + 'static;
 
     /// Memory's allocation map
     type MemoryMap:
@@ -139,18 +139,6 @@ fn find_foreign_static(
         memory_extra: &Self::MemoryExtra,
     ) -> EvalResult<'tcx, Cow<'tcx, Allocation>>;
 
-    /// Called to turn an allocation obtained from the `tcx` into one that has
-    /// the right type for this machine.
-    ///
-    /// This should avoid copying if no work has to be done! If this returns an owned
-    /// allocation (because a copy had to be done to add tags or metadata), machine memory will
-    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
-    /// owned allocation to the map even when the map is shared.)
-    fn adjust_static_allocation<'b>(
-        alloc: &'b Allocation,
-        memory_extra: &Self::MemoryExtra,
-    ) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>;
-
     /// Called for all binary operations on integer(-like) types when one operand is a pointer
     /// value, and for the `Offset` operation that is inherently about pointers.
     ///
@@ -168,12 +156,24 @@ fn box_alloc(
         dest: PlaceTy<'tcx, Self::PointerTag>,
     ) -> EvalResult<'tcx>;
 
-    /// Adds the tag for a newly allocated pointer.
-    fn tag_new_allocation(
-        ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
-        ptr: Pointer,
+    /// Called to turn an allocation obtained from the `tcx` into one that has
+    /// the right type for this machine.
+    ///
+    /// This should avoid copying if no work has to be done! If this returns an owned
+    /// allocation (because a copy had to be done to add tags or metadata), machine memory will
+    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
+    /// owned allocation to the map even when the map is shared.)
+    fn adjust_static_allocation<'b>(
+        alloc: &'b Allocation,
+        memory_extra: &Self::MemoryExtra,
+    ) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>;
+
+    /// Computes the extra state and the tag for a new allocation.
+    fn new_allocation(
+        size: Size,
+        extra: &Self::MemoryExtra,
         kind: MemoryKind<Self::MemoryKinds>,
-    ) -> Pointer<Self::PointerTag>;
+    ) -> (Self::AllocExtra, Self::PointerTag);
 
     /// Executed when evaluating the `*` operator: Following a reference.
     /// This has the chance to adjust the tag. It should not change anything else!
diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs
index e5d8341dfcf..117bd15399c 100644
--- a/src/librustc_mir/interpret/memory.rs
+++ b/src/librustc_mir/interpret/memory.rs
@@ -132,9 +132,9 @@ pub fn allocate(
         size: Size,
         align: Align,
         kind: MemoryKind<M::MemoryKinds>,
-    ) -> Pointer {
-        let extra = AllocationExtra::memory_allocated(size, &self.extra);
-        Pointer::from(self.allocate_with(Allocation::undef(size, align, extra), kind))
+    ) -> Pointer<M::PointerTag> {
+        let (extra, tag) = M::new_allocation(size, &self.extra, kind);
+        Pointer::from(self.allocate_with(Allocation::undef(size, align, extra), kind)).with_tag(tag)
     }
 
     pub fn reallocate(
@@ -145,7 +145,7 @@ pub fn reallocate(
         new_size: Size,
         new_align: Align,
         kind: MemoryKind<M::MemoryKinds>,
-    ) -> EvalResult<'tcx, Pointer> {
+    ) -> EvalResult<'tcx, Pointer<M::PointerTag>> {
         if ptr.offset.bytes() != 0 {
             return err!(ReallocateNonBasePtr);
         }
@@ -156,7 +156,7 @@ pub fn reallocate(
         self.copy(
             ptr.into(),
             old_align,
-            new_ptr.with_default_tag().into(),
+            new_ptr.into(),
             new_align,
             old_size.min(new_size),
             /*nonoverlapping*/ true,
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index 1ce6d09d7a4..55c1bfb17de 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -31,19 +31,6 @@ pub enum Immediate {
     ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
 }
 
-impl Immediate {
-    #[inline]
-    pub fn with_default_tag<Tag>(self) -> Immediate<Tag>
-        where Tag: Default
-    {
-        match self {
-            Immediate::Scalar(x) => Immediate::Scalar(x.with_default_tag()),
-            Immediate::ScalarPair(x, y) =>
-                Immediate::ScalarPair(x.with_default_tag(), y.with_default_tag()),
-        }
-    }
-}
-
 impl<'tcx, Tag> Immediate<Tag> {
     #[inline]
     pub fn from_scalar(val: Scalar<Tag>) -> Self {
@@ -142,18 +129,6 @@ pub enum Operand {
     Indirect(MemPlace<Tag, Id>),
 }
 
-impl Operand {
-    #[inline]
-    pub fn with_default_tag<Tag>(self) -> Operand<Tag>
-        where Tag: Default
-    {
-        match self {
-            Operand::Immediate(x) => Operand::Immediate(x.with_default_tag()),
-            Operand::Indirect(x) => Operand::Indirect(x.with_default_tag()),
-        }
-    }
-}
-
 impl<Tag> Operand<Tag> {
     #[inline]
     pub fn erase_tag(self) -> Operand
@@ -554,16 +529,17 @@ pub(super) fn eval_operands(
                 // We rely on mutability being set correctly in that allocation to prevent writes
                 // where none should happen -- and for `static mut`, we copy on demand anyway.
                 Operand::Indirect(
-                    MemPlace::from_ptr(ptr, alloc.align)
-                ).with_default_tag()
+                    MemPlace::from_ptr(ptr.with_default_tag(), alloc.align)
+                )
             },
             ConstValue::Slice(a, b) =>
                 Operand::Immediate(Immediate::ScalarPair(
-                    a.into(),
-                    Scalar::from_uint(b, self.tcx.data_layout.pointer_size).into(),
-                )).with_default_tag(),
+                    a.with_default_tag().into(),
+                    Scalar::from_uint(b, self.tcx.data_layout.pointer_size)
+                        .with_default_tag().into(),
+                )),
             ConstValue::Scalar(x) =>
-                Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag(),
+                Operand::Immediate(Immediate::Scalar(x.with_default_tag().into())),
             ConstValue::Unevaluated(def_id, substs) => {
                 let instance = self.resolve(def_id, substs)?;
                 return Ok(OpTy::from(self.const_eval_raw(GlobalId {
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index 32ad5274689..8239337796e 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -83,23 +83,19 @@ fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
     }
 }
 
-impl MemPlace {
+impl<Tag> MemPlace<Tag> {
+    /// Replace ptr tag, maintain vtable tag (if any)
     #[inline]
-    pub fn with_default_tag<Tag>(self) -> MemPlace<Tag>
-        where Tag: Default
-    {
+    pub fn replace_tag(self, new_tag: Tag) -> Self {
         MemPlace {
-            ptr: self.ptr.with_default_tag(),
+            ptr: self.ptr.erase_tag().with_tag(new_tag),
             align: self.align,
-            meta: self.meta.map(Scalar::with_default_tag),
+            meta: self.meta,
         }
     }
-}
 
-impl<Tag> MemPlace<Tag> {
     #[inline]
-    pub fn erase_tag(self) -> MemPlace
-    {
+    pub fn erase_tag(self) -> MemPlace {
         MemPlace {
             ptr: self.ptr.erase_tag(),
             align: self.align,
@@ -107,16 +103,6 @@ pub fn erase_tag(self) -> MemPlace
         }
     }
 
-    #[inline]
-    pub fn with_tag(self, new_tag: Tag) -> Self
-    {
-        MemPlace {
-            ptr: self.ptr.with_tag(new_tag),
-            align: self.align,
-            meta: self.meta,
-        }
-    }
-
     #[inline(always)]
     pub fn from_scalar_ptr(ptr: Scalar<Tag>, align: Align) -> Self {
         MemPlace {
@@ -189,11 +175,11 @@ pub fn dangling(layout: TyLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
         }
     }
 
+    /// Replace ptr tag, maintain vtable tag (if any)
     #[inline]
-    pub fn with_tag(self, new_tag: Tag) -> Self
-    {
+    pub fn replace_tag(self, new_tag: Tag) -> Self {
         MPlaceTy {
-            mplace: self.mplace.with_tag(new_tag),
+            mplace: self.mplace.replace_tag(new_tag),
             layout: self.layout,
         }
     }
@@ -312,7 +298,7 @@ impl<'a, 'mir, 'tcx, Tag, M> InterpretCx<'a, 'mir, 'tcx, M>
     M: Machine<'a, 'mir, 'tcx, PointerTag=Tag>,
     // FIXME: Working around https://github.com/rust-lang/rust/issues/24159
     M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<Tag, M::AllocExtra>)>,
-    M::AllocExtra: AllocationExtra<Tag, M::MemoryExtra>,
+    M::AllocExtra: AllocationExtra<Tag>,
 {
     /// Take a value, which represents a (thin or fat) reference, and make it a place.
     /// Alignment is just based on the type. This is the inverse of `MemPlace::to_ref()`.
@@ -943,7 +929,6 @@ pub fn force_allocation_maybe_sized(
                 let (size, align) = self.size_and_align_of(meta, local_layout)?
                     .expect("Cannot allocate for non-dyn-sized type");
                 let ptr = self.memory.allocate(size, align, MemoryKind::Stack);
-                let ptr = M::tag_new_allocation(self, ptr, MemoryKind::Stack);
                 let mplace = MemPlace { ptr: ptr.into(), align, meta };
                 if let Some(value) = old_val {
                     // Preserve old value.
@@ -981,7 +966,6 @@ pub fn allocate(
         kind: MemoryKind<M::MemoryKinds>,
     ) -> MPlaceTy<'tcx, M::PointerTag> {
         let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
-        let ptr = M::tag_new_allocation(self, ptr, kind);
         MPlaceTy::from_aligned_ptr(ptr, layout)
     }
 
diff --git a/src/librustc_mir/interpret/traits.rs b/src/librustc_mir/interpret/traits.rs
index a451f2afb46..0bed62ccf50 100644
--- a/src/librustc_mir/interpret/traits.rs
+++ b/src/librustc_mir/interpret/traits.rs
@@ -52,7 +52,7 @@ pub fn get_vtable(
             ptr_size * (3 + methods.len() as u64),
             ptr_align,
             MemoryKind::Vtable,
-        ).with_default_tag();
+        );
 
         let tcx = &*self.tcx;
         let drop = crate::monomorphize::resolve_drop_in_place(*tcx, ty);
-- 
2.44.0
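
Below the patch: a minimal, self-contained Rust sketch of the hook shape this refactor introduces. With the old API, `Memory::allocate` returned an untagged pointer and every caller had to run it through `Machine::tag_new_allocation`; with the new API, the machine is asked once, up front, for both the allocation's extra state and the pointer tag (`Machine::new_allocation`), and `allocate` hands back an already-tagged pointer. The types below (`ToyTag`, `ToyExtra`, `ToyMachine`, `ToyMemory`, and the simplified `Allocation`/`Pointer`/`Machine`) are invented stand-ins for illustration only; they are not the real `librustc_mir` types, and a real machine such as Miri would compute its tag and per-allocation state from its own borrow-tracking data.

use std::cell::Cell;
use std::marker::PhantomData;

// Toy stand-ins for the interpreter types; every name here is invented.
#[derive(Debug, Clone, Copy)]
struct ToyTag(u64);

#[derive(Debug, Clone)]
struct ToyExtra {
    // Per-allocation bookkeeping a machine could track.
    created_with_tag: u64,
}

struct Allocation<Tag, Extra> {
    bytes: Vec<u8>,
    extra: Extra,
    _tag: PhantomData<Tag>,
}

#[derive(Debug, Clone, Copy)]
struct Pointer<Tag> {
    alloc_id: usize,
    offset: u64,
    tag: Tag,
}

// The shape of the new hook: one call computes both the allocation's extra
// state and the tag for the pointer to the fresh allocation.
trait Machine {
    type PointerTag: Copy;
    type AllocExtra;
    type MemoryExtra;

    fn new_allocation(size: u64, extra: &Self::MemoryExtra)
        -> (Self::AllocExtra, Self::PointerTag);
}

struct ToyMachine;

impl Machine for ToyMachine {
    type PointerTag = ToyTag;
    type AllocExtra = ToyExtra;
    // Global memory state used to hand out fresh tags.
    type MemoryExtra = Cell<u64>;

    fn new_allocation(_size: u64, extra: &Self::MemoryExtra)
        -> (Self::AllocExtra, Self::PointerTag)
    {
        let id = extra.get();
        extra.set(id + 1);
        (ToyExtra { created_with_tag: id }, ToyTag(id))
    }
}

struct ToyMemory<M: Machine> {
    allocations: Vec<Allocation<M::PointerTag, M::AllocExtra>>,
    extra: M::MemoryExtra,
}

impl<M: Machine> ToyMemory<M> {
    // Mirrors the new `Memory::allocate`: the returned pointer is already
    // tagged, so callers no longer need a separate `tag_new_allocation` step.
    fn allocate(&mut self, size: u64) -> Pointer<M::PointerTag> {
        let (alloc_extra, tag) = M::new_allocation(size, &self.extra);
        self.allocations.push(Allocation {
            bytes: vec![0; size as usize],
            extra: alloc_extra,
            _tag: PhantomData,
        });
        Pointer { alloc_id: self.allocations.len() - 1, offset: 0, tag }
    }
}

fn main() {
    let mut mem = ToyMemory::<ToyMachine> {
        allocations: Vec::new(),
        extra: Cell::new(0),
    };
    println!("first  allocation: {:?}", mem.allocate(16));
    println!("second allocation: {:?}", mem.allocate(32));
}

The design point the sketch models is the one visible in memory.rs above: because `new_allocation` is a static method on the machine that only needs `&MemoryExtra`, the tag and the allocation's extra data can be produced before the allocation is inserted, which removes the "allocate untagged, then retag" dance at every call site (place.rs and traits.rs in the patch).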