match *dest_layout {
Univariant { ref variant, .. } => {
let offsets = variant.offsets.iter().map(|s| s.bytes());
+ if variant.packed {
+ let ptr = self.force_allocation(dest)?.to_ptr_and_extra().0;
+ self.memory.mark_packed(ptr, variant.stride().bytes());
+ }
self.assign_fields(dest, offsets, operands)?;
}
if let mir::AggregateKind::Adt(adt_def, variant, _, _) = *kind {
let discr_val = adt_def.variants[variant].disr_val.to_u128_unchecked();
let discr_size = discr.size().bytes();
+ if variants[variant].packed {
+ let ptr = self.force_allocation(dest)?.to_ptr_and_extra().0;
+ self.memory.mark_packed(ptr, variants[variant].stride().bytes());
+ }
self.assign_discr_and_fields(
dest,
StructWrappedNullablePointer { nndiscr, ref nonnull, ref discrfield, .. } => {
if let mir::AggregateKind::Adt(_, variant, _, _) = *kind {
+ if nonnull.packed {
+ let ptr = self.force_allocation(dest)?.to_ptr_and_extra().0;
+ self.memory.mark_packed(ptr, nonnull.stride().bytes());
+ }
if nndiscr == variant as u64 {
let offsets = nonnull.offsets.iter().map(|s| s.bytes());
self.assign_fields(dest, offsets, operands)?;
i128_type,
pub_restricted,
rustc_private,
+ collections_bound,
)]
// From rustc.
let field = field.index();
use rustc::ty::layout::Layout::*;
- let offset = match *base_layout {
- Univariant { ref variant, .. } => variant.offsets[field],
+ let (offset, packed) = match *base_layout {
+ Univariant { ref variant, .. } => {
+ (variant.offsets[field], variant.packed)
+ },
General { ref variants, .. } => {
if let LvalueExtra::DowncastVariant(variant_idx) = base_extra {
// +1 for the discriminant, which is field 0
- variants[variant_idx].offsets[field + 1]
+ (variants[variant_idx].offsets[field + 1], variants[variant_idx].packed)
} else {
bug!("field access on enum had no variant index");
}
}
StructWrappedNullablePointer { ref nonnull, .. } => {
- nonnull.offsets[field]
+ (nonnull.offsets[field], nonnull.packed)
}
UntaggedUnion { .. } => return Ok(base),
let field = field as u64;
assert!(field < count);
let elem_size = element.size(&self.tcx.data_layout).bytes();
- Size::from_bytes(field * elem_size)
+ (Size::from_bytes(field * elem_size), false)
}
_ => bug!("field access on non-product type: {:?}", base_layout),
};
+ if packed {
+ let size = self.type_size(field_ty)?.expect("packed struct must be sized");
+ self.memory.mark_packed(base_ptr, size);
+ }
+
let ptr = base_ptr.offset(offset.bytes());
let extra = if self.type_is_sized(field_ty) {
LvalueExtra::None
use byteorder::{ReadBytesExt, WriteBytesExt, LittleEndian, BigEndian};
-use std::collections::{btree_map, BTreeMap, HashMap, HashSet, VecDeque};
+use std::collections::{btree_map, BTreeMap, HashMap, HashSet, VecDeque, BTreeSet};
use std::{fmt, iter, ptr, mem, io};
use rustc::hir::def_id::DefId;
function_alloc_cache: HashMap<FunctionDefinition<'tcx>, AllocId>,
next_id: AllocId,
pub layout: &'a TargetDataLayout,
+ /// List of memory regions containing packed structures
+ packed: BTreeSet<Entry>,
}
const ZST_ALLOC_ID: AllocId = AllocId(0);
layout,
memory_size: max_memory,
memory_usage: 0,
+ packed: BTreeSet::new(),
}
}
self.layout.endian
}
- pub fn check_align(&self, ptr: Pointer, align: u64) -> EvalResult<'tcx, ()> {
+ pub fn check_align(&self, ptr: Pointer, align: u64, len: u64) -> EvalResult<'tcx, ()> {
let alloc = self.get(ptr.alloc_id)?;
+ // check whether this region was marked as packed; if so, skip the alignment check
+ let start = Entry(ptr.alloc_id, 0, ptr.offset + len);
+ let end = Entry(ptr.alloc_id, ptr.offset + len, 0);
+ for &Entry(_, start, end) in self.packed.range(start..end) {
+ if start <= ptr.offset && (ptr.offset + len) <= end {
+ return Ok(());
+ }
+ }
if alloc.align < align {
return Err(EvalError::AlignmentCheckFailed {
has: alloc.align,
})
}
}
+
+ /// Record the byte range `[ptr.offset, ptr.offset + len)` of `ptr`'s
+ /// allocation as containing a packed (unaligned) value, so that a later
+ /// `check_align` covering a subrange of it returns `Ok` early instead of
+ /// failing the allocation-alignment check.
+ pub(crate) fn mark_packed(&mut self, ptr: Pointer, len: u64) {
+ self.packed.insert(Entry(ptr.alloc_id, ptr.offset, ptr.offset + len));
+ }
+
+ /// Drop all packed-region marks. Called at the start of every
+ /// interpreter step, so packed marks only live for a single step.
+ pub(crate) fn clear_packed(&mut self) {
+ self.packed.clear();
+ }
}
+/// A packed-memory region: `Entry(alloc_id, start_offset, end_offset)`.
+/// The derived lexicographic ordering is what lets `check_align` query
+/// candidate regions with `BTreeSet::range`.
+#[derive(Eq, PartialEq, Ord, PartialOrd)]
+struct Entry(AllocId, u64, u64);
+
/// Allocation accessors
impl<'a, 'tcx> Memory<'a, 'tcx> {
pub fn get(&self, id: AllocId) -> EvalResult<'tcx, &Allocation> {
if size == 0 {
return Ok(&[]);
}
- self.check_align(ptr, align)?;
+ self.check_align(ptr, align, size)?;
if self.relocations(ptr, size)?.count() != 0 {
return Err(EvalError::ReadPointerAsBytes);
}
if size == 0 {
return Ok(&mut []);
}
- self.check_align(ptr, align)?;
+ self.check_align(ptr, align, size)?;
self.clear_relocations(ptr, size)?;
self.mark_definedness(ptr, size, true)?;
self.get_bytes_unchecked_mut(ptr, size)
/// Returns true as long as there are more things to do.
pub fn step(&mut self) -> EvalResult<'tcx, bool> {
+ self.memory.clear_packed();
self.inc_step_counter_and_check_limit(1)?;
if self.stack.is_empty() {
return Ok(false);
--- /dev/null
/// Regression test: reads of fields in a `#[repr(packed)]` struct must be
/// performed as unaligned loads (here `b` sits at offset 4 but is an i64).
#[repr(packed)]
struct S {
    a: i32,
    // Only byte-aligned inside `S` because of the packed repr.
    b: i64,
}

fn main() {
    let x = S {
        a: 42,
        b: 99,
    };
    // Copy the fields into locals before asserting: `assert_eq!` takes
    // references to its arguments, and references to packed fields may be
    // unaligned (rejected as E0793 by modern rustc; UB if allowed).
    let a = x.a;
    let b = x.b;
    assert_eq!(a, 42);
    assert_eq!(b, 99);
}