pub extra: Extra,
}
+/// Byte accessors
+// NOTE(review): the moved methods are now inherent to `Allocation`, so they
+// must be generic over `Tag` directly (there is no `M: Machine` in scope here),
+// and the `AllocationExtra` hooks need the corresponding bound on `Extra`.
+impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
+    /// The last argument controls whether we error out when there are undefined
+    /// or pointer bytes. You should never call this, call `get_bytes` or
+    /// `get_bytes_with_undef_and_ptr` instead.
+    ///
+    /// This function also guarantees that the resulting pointer will remain stable
+    /// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
+    /// on that.
+    fn get_bytes_internal(
+        &self,
+        ptr: Pointer<Tag>,
+        size: Size,
+        align: Align,
+        check_defined_and_ptr: bool,
+    ) -> EvalResult<'tcx, &[u8]> {
+        assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
+        self.check_align(ptr.into(), align)?;
+        // NOTE(review): assumes `check_bounds` is (or will be) available on
+        // `Allocation` with this signature — confirm against the rest of the move.
+        self.check_bounds(ptr, size, InboundsCheck::Live)?;
+
+        if check_defined_and_ptr {
+            self.check_defined(ptr, size)?;
+            self.check_relocations(ptr, size)?;
+        } else {
+            // We still don't want relocations on the *edges*
+            self.check_relocation_edges(ptr, size)?;
+        }
+
+        // `self` *is* the allocation now — no `Memory` to look it up in.
+        AllocationExtra::memory_read(self, ptr, size)?;
+
+        // Guard against 32-bit hosts: offsets/sizes are `u64`, slicing needs `usize`.
+        assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
+        assert_eq!(size.bytes() as usize as u64, size.bytes());
+        let offset = ptr.offset.bytes() as usize;
+        Ok(&self.bytes[offset..offset + size.bytes() as usize])
+    }
+
+    /// Checked read access: errors out on undefined bytes and relocations.
+    #[inline]
+    fn get_bytes(
+        &self,
+        ptr: Pointer<Tag>,
+        size: Size,
+        align: Align
+    ) -> EvalResult<'tcx, &[u8]> {
+        self.get_bytes_internal(ptr, size, align, true)
+    }
+
+    /// It is the caller's responsibility to handle undefined and pointer bytes.
+    /// However, this still checks that there are no relocations on the *edges*.
+    #[inline]
+    fn get_bytes_with_undef_and_ptr(
+        &self,
+        ptr: Pointer<Tag>,
+        size: Size,
+        align: Align
+    ) -> EvalResult<'tcx, &[u8]> {
+        self.get_bytes_internal(ptr, size, align, false)
+    }
+
+    /// Just calling this already marks everything as defined and removes relocations,
+    /// so be sure to actually put data there!
+    fn get_bytes_mut(
+        &mut self,
+        ptr: Pointer<Tag>,
+        size: Size,
+        align: Align,
+    ) -> EvalResult<'tcx, &mut [u8]> {
+        assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
+        self.check_align(ptr.into(), align)?;
+        self.check_bounds(ptr, size, InboundsCheck::Live)?;
+
+        self.mark_definedness(ptr, size, true)?;
+        self.clear_relocations(ptr, size)?;
+
+        // Write hook runs on `self` directly; the old `self.get_mut(alloc_id)`
+        // lookup belonged to `Memory` and is gone.
+        AllocationExtra::memory_written(self, ptr, size)?;
+
+        assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
+        assert_eq!(size.bytes() as usize as u64, size.bytes());
+        let offset = ptr.offset.bytes() as usize;
+        Ok(&mut self.bytes[offset..offset + size.bytes() as usize])
+    }
+}
+
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Default + Clone {
/// Hook for performing extra checks on a memory read access.
///
}
}
-/// Byte accessors
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
- /// The last argument controls whether we error out when there are undefined
- /// or pointer bytes. You should never call this, call `get_bytes` or
- /// `get_bytes_with_undef_and_ptr` instead,
- ///
- /// This function also guarantees that the resulting pointer will remain stable
- /// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
- /// on that.
- fn get_bytes_internal(
- &self,
- ptr: Pointer<M::PointerTag>,
- size: Size,
- align: Align,
- check_defined_and_ptr: bool,
- ) -> EvalResult<'tcx, &[u8]> {
- assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
- self.check_align(ptr.into(), align)?;
- self.check_bounds(ptr, size, InboundsCheck::Live)?;
-
- if check_defined_and_ptr {
- self.check_defined(ptr, size)?;
- self.check_relocations(ptr, size)?;
- } else {
- // We still don't want relocations on the *edges*
- self.check_relocation_edges(ptr, size)?;
- }
-
- let alloc = self.get(ptr.alloc_id)?;
- AllocationExtra::memory_read(alloc, ptr, size)?;
-
- assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
- assert_eq!(size.bytes() as usize as u64, size.bytes());
- let offset = ptr.offset.bytes() as usize;
- Ok(&alloc.bytes[offset..offset + size.bytes() as usize])
- }
-
- #[inline]
- fn get_bytes(
- &self,
- ptr: Pointer<M::PointerTag>,
- size: Size,
- align: Align
- ) -> EvalResult<'tcx, &[u8]> {
- self.get_bytes_internal(ptr, size, align, true)
- }
-
- /// It is the caller's responsibility to handle undefined and pointer bytes.
- /// However, this still checks that there are no relocations on the *edges*.
- #[inline]
- fn get_bytes_with_undef_and_ptr(
- &self,
- ptr: Pointer<M::PointerTag>,
- size: Size,
- align: Align
- ) -> EvalResult<'tcx, &[u8]> {
- self.get_bytes_internal(ptr, size, align, false)
- }
-
- /// Just calling this already marks everything as defined and removes relocations,
- /// so be sure to actually put data there!
- fn get_bytes_mut(
- &mut self,
- ptr: Pointer<M::PointerTag>,
- size: Size,
- align: Align,
- ) -> EvalResult<'tcx, &mut [u8]> {
- assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
- self.check_align(ptr.into(), align)?;
- self.check_bounds(ptr, size, InboundsCheck::Live)?;
-
- self.mark_definedness(ptr, size, true)?;
- self.clear_relocations(ptr, size)?;
-
- let alloc = self.get_mut(ptr.alloc_id)?;
- AllocationExtra::memory_written(alloc, ptr, size)?;
-
- assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
- assert_eq!(size.bytes() as usize as u64, size.bytes());
- let offset = ptr.offset.bytes() as usize;
- Ok(&mut alloc.bytes[offset..offset + size.bytes() as usize])
- }
-}
-
/// Interning (for CTFE)
impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
where