}
}
+ #[inline]
pub fn get(&self, i: Size) -> bool {
let (block, bit) = bit_index(i);
(self.blocks[block] & 1 << bit) != 0
}
+ #[inline]
pub fn set(&mut self, i: Size, new_state: bool) {
let (block, bit) = bit_index(i);
if new_state {
}
}
+#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
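// The mask stores one definedness bit per byte of the allocation; split the byte
// index into the block that holds the bit and the bit's position within that block.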
let bits = bits.bytes();
let a = bits / BLOCK_SIZE;
let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
- // FIXME: speed up repeat filling
- for i in 0..length {
- let elem_dest = dest.ptr_offset(elem_size * i as u64, &self)?;
- self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
+ if length > 0 {
+ // write the first value
+ self.write_value_to_ptr(value, dest, dest_align, elem_ty)?;
+
+ if length > 1 {
+ let rest = dest.ptr_offset(elem_size, &self)?;
+ self.memory.copy_repeatedly(dest, dest_align, rest, dest_align, elem_size, length - 1, false)?;
+ }
}
}
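The repeat-initialization path above now writes the element once and lets the memory layer replicate it, instead of issuing one interpreter write per element. A minimal sketch of the same idea on a plain byte buffer (`fill_repeat` and its slice-based signature are illustrative, not the interpreter API):

fn fill_repeat(buf: &mut [u8], elem: &[u8]) {
    let n = elem.len();
    assert!(n > 0 && buf.len() % n == 0, "buffer must hold a whole number of elements");
    // Write the first value once...
    buf[..n].copy_from_slice(elem);
    // ...then replicate it into each remaining slot with one bulk copy per slot.
    for i in 1..buf.len() / n {
        buf.copy_within(0..n, i * n);
    }
}

For example, `fill_repeat(&mut buf, &[1, 2, 3])` turns a 12-byte buffer into four copies of `[1, 2, 3]`.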
dest_align: Align,
size: Size,
nonoverlapping: bool,
+ ) -> EvalResult<'tcx> {
+ self.copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping)
+ }
+
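+ /// Copies the `size` bytes at `src` into `length` adjacent copies starting at
+ /// `dest`, duplicating relocations and definedness information for every copy.
+ /// The wrapper above forwards to this with `length == 1`.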
+ pub fn copy_repeatedly(
+ &mut self,
+ src: Scalar,
+ src_align: Align,
+ dest: Scalar,
+ dest_align: Align,
+ size: Size,
+ length: u64,
+ nonoverlapping: bool,
) -> EvalResult<'tcx> {
// Empty accesses don't need to be valid pointers, but they should still be aligned
self.check_align(src, src_align)?;
// first copy the relocations to a temporary buffer, because
// `get_bytes_mut` will clear the relocations, which is correct,
// since we don't want to keep any relocations at the target.
- let relocations: Vec<_> = self.relocations(src, size)?
- .iter()
- .map(|&(offset, alloc_id)| {
- // Update relocation offsets for the new positions in the destination allocation.
- (offset + dest.offset - src.offset, alloc_id)
- })
- .collect();
+ let relocations = {
+ let relocations = self.relocations(src, size)?;
+ let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
+ for i in 0..length {
+ new_relocations.extend(
+ relocations
+ .iter()
+ .map(|&(offset, alloc_id)| {
// Each successive copy lands `size` bytes further into the destination,
// so shift the translated offset by `i * size` for copy `i`.
(offset + dest.offset - src.offset + i * size, alloc_id)
+ })
+ );
+ }
+
+ new_relocations
+ };
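// For example, with `src.offset == 0`, `dest.offset == 16` and `size == 8`, a single
// relocation at byte 4 of the source ends up at destination offsets 20, 28, 36, ...,
// one per copy.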
let src_bytes = self.get_bytes_unchecked(src, size, src_align)?.as_ptr();
- let dest_bytes = self.get_bytes_mut(dest, size, dest_align)?.as_mut_ptr();
+ let dest_bytes = self.get_bytes_mut(dest, size * length, dest_align)?.as_mut_ptr();
// SAFE: The above indexing would have panicked if there weren't at least `size` bytes
// behind `src` and `size * length` bytes behind `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and
));
}
}
- ptr::copy(src_bytes, dest_bytes, size.bytes() as usize);
+
+ for i in 0..length {
+ ptr::copy(src_bytes, dest_bytes.offset((size.bytes() * i) as isize), size.bytes() as usize);
+ }
} else {
- ptr::copy_nonoverlapping(src_bytes, dest_bytes, size.bytes() as usize);
+ for i in 0..length {
+ ptr::copy_nonoverlapping(src_bytes, dest_bytes.offset((size.bytes() * i) as isize), size.bytes() as usize);
+ }
}
}
- self.copy_undef_mask(src, dest, size)?;
+ self.copy_undef_mask(src, dest, size, length)?;
// copy back the relocations
self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
src: Pointer,
dest: Pointer,
size: Size,
+ repeat: u64,
) -> EvalResult<'tcx> {
// The bits have to be saved locally before writing to dest in case src and dest overlap.
assert_eq!(size.bytes() as usize as u64, size.bytes());
- let mut v = Vec::with_capacity(size.bytes() as usize);
+
+ let undef_mask = self.get(src.alloc_id)?.undef_mask.clone();
+ let dest_allocation = self.get_mut(dest.alloc_id)?;
+
for i in 0..size.bytes() {
- let defined = self.get(src.alloc_id)?.undef_mask.get(src.offset + Size::from_bytes(i));
- v.push(defined);
- }
- for (i, defined) in v.into_iter().enumerate() {
- self.get_mut(dest.alloc_id)?.undef_mask.set(
- dest.offset +
- Size::from_bytes(i as u64),
- defined,
- );
+ let defined = undef_mask.get(src.offset + Size::from_bytes(i));
+
+ for j in 0..repeat {
+ dest_allocation.undef_mask.set(
+ dest.offset + Size::from_bytes(i + (size.bytes() * j)),
+ defined
+ );
+ }
}
+
Ok(())
}
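The replication of the definedness bits above is easier to see on an unpacked mask. A sketch using plain `bool` slices in place of the packed bit blocks (`copy_mask_repeatedly` is a hypothetical helper, not part of this change):

fn copy_mask_repeatedly(src: &[bool], dest: &mut [bool], size: usize, repeat: usize) {
    assert!(src.len() >= size && dest.len() >= size * repeat);
    // Read each byte's definedness once, then stamp it into every copy.
    for i in 0..size {
        let defined = src[i];
        for j in 0..repeat {
            dest[i + size * j] = defined;
        }
    }
}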
impl Size {
pub const ZERO: Size = Self::from_bytes(0);
+ #[inline]
pub fn from_bits(bits: u64) -> Size {
// Avoid potential overflow from `bits + 7`.
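// This rounds the bit count up to whole bytes: e.g. 12 bits give 12/8 + ((12%8)+7)/8
// = 1 + 1 = 2 bytes, while 16 bits give 2 + 0 = 2 bytes exactly.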
Size::from_bytes(bits / 8 + ((bits % 8) + 7) / 8)
}
+ #[inline]
pub const fn from_bytes(bytes: u64) -> Size {
Size {
raw: bytes
}
}
+ #[inline]
pub fn bytes(self) -> u64 {
self.raw
}
+ #[inline]
pub fn bits(self) -> u64 {
self.bytes().checked_mul(8).unwrap_or_else(|| {
panic!("Size::bits: {} bytes in bits doesn't fit in u64", self.bytes())
})
}
+ #[inline]
pub fn abi_align(self, align: Align) -> Size {
let mask = align.abi() - 1;
Size::from_bytes((self.bytes() + mask) & !mask)
}
+ #[inline]
pub fn is_abi_aligned(self, align: Align) -> bool {
let mask = align.abi() - 1;
self.bytes() & mask == 0
}
+ #[inline]
pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: C) -> Option<Size> {
let dl = cx.data_layout();
}
}
+ #[inline]
pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: C) -> Option<Size> {
let dl = cx.data_layout();
impl Add for Size {
type Output = Size;
+ #[inline]
fn add(self, other: Size) -> Size {
Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
impl Sub for Size {
type Output = Size;
+ #[inline]
fn sub(self, other: Size) -> Size {
Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
impl Mul<Size> for u64 {
type Output = Size;
+ #[inline]
fn mul(self, size: Size) -> Size {
size * self
}
impl Mul<u64> for Size {
type Output = Size;
+ #[inline]
fn mul(self, count: u64) -> Size {
match self.bytes().checked_mul(count) {
Some(bytes) => Size::from_bytes(bytes),
}
impl AddAssign for Size {
+ #[inline]
fn add_assign(&mut self, other: Size) {
*self = *self + other;
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[derive(Copy, Clone)]
+union Foo {
+ a: isize,
+ b: (),
+}
+
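+// Reading `a` after initializing only the zero-sized field `b` yields undefined bytes.
+// The `[expr; 4]` repeat has to propagate that undefinedness into every element, so
+// indexing element 3 still produces a non-integer (undefined) discriminant.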
+enum Bar {
+ Boo = [unsafe { Foo { b: () }.a }; 4][3],
+ //~^ ERROR constant evaluation of enum discriminant resulted in non-integer
+}
+
+fn main() {
+ assert_ne!(Bar::Boo as isize, 0);
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const FOO: isize = 42;
+
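+// `[&FOO; 4]` repeats a pointer, so the relocation for `&FOO` must be duplicated into
+// every element; dereferencing element 3 only yields 42 if the copy at that offset
+// still carries its relocation.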
+enum Bar {
+ Boo = *[&FOO; 4][3],
+}
+
+fn main() {
+ assert_eq!(Bar::Boo as isize, 42);
+}