/// on MSVC it's `*mut [usize; 2]`. For more information see the compiler's
/// source as well as std's catch implementation.
pub fn try(f: fn(*mut u8), data: *mut u8, local_ptr: *mut u8) -> i32;
+
+ /// Computes the byte offset that needs to be applied to `ptr` in order to
+ /// make it aligned to `align`.
+ /// If it is not possible to align `ptr`, the implementation returns
+ /// `usize::max_value()`.
+ ///
+ /// There are no guarantees whatsoever that offsetting the pointer will not
+ /// overflow or go beyond the allocation that `ptr` points into.
+ /// It is up to the caller to ensure that the returned offset is correct
+ /// in all terms other than alignment.
+ ///
+ /// # Examples
+ ///
+ /// Accessing adjacent `u8` as `u16`
+ ///
+ /// ```
+ /// # #![feature(core_intrinsics)]
+ /// # fn foo(n: usize) {
+ /// # use std::intrinsics::align_offset;
+ /// # use std::mem::align_of;
+ /// # unsafe {
+ /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
+ /// let ptr = &x[n] as *const u8;
+ /// let offset = align_offset(ptr as *const (), align_of::<u16>());
+ /// if offset < x.len() - n - 1 {
+ /// let u16_ptr = ptr.offset(offset as isize) as *const u16;
+ /// assert_ne!(*u16_ptr, 500);
+ /// } else {
+ /// // while the pointer can be aligned via `offset`, it would point
+ /// // outside the allocation
+ /// }
+ /// # } }
+ /// ```
+ #[cfg(not(stage0))]
+ pub fn align_offset(ptr: *const (), align: usize) -> usize;
+}
+
+#[cfg(stage0)]
+/// Computes the byte offset that needs to be applied to `ptr` in order to
+/// make it aligned to `align`.
+/// If it is not possible to align `ptr`, the implementation returns
+/// `usize::max_value()`.
+///
+/// There are no guarantees whatsoever that offsetting the pointer will not
+/// overflow or go beyond the allocation that `ptr` points into.
+/// It is up to the caller to ensure that the returned offset is correct
+/// in all terms other than alignment.
+///
+/// # Examples
+///
+/// Accessing adjacent `u8` as `u16`
+///
+/// ```
+/// # #![feature(core_intrinsics)]
+/// # fn foo(n: usize) {
+/// # use std::intrinsics::align_offset;
+/// # use std::mem::align_of;
+/// # unsafe {
+/// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
+/// let ptr = &x[n] as *const u8;
+/// let offset = align_offset(ptr as *const (), align_of::<u16>());
+/// if offset < x.len() - n - 1 {
+/// let u16_ptr = ptr.offset(offset as isize) as *const u16;
+/// assert_ne!(*u16_ptr, 500);
+/// } else {
+/// // while the pointer can be aligned via `offset`, it would point
+/// // outside the allocation
+/// }
+/// # } }
+/// ```
+pub unsafe fn align_offset(ptr: *const (), align: usize) -> usize {
+ let offset = ptr as usize % align;
+ if offset == 0 {
+ 0
+ } else {
+ align - offset
+ }
}
use iter::{Map, Cloned, FusedIterator};
use slice::{self, SliceIndex};
use mem;
+use intrinsics::align_offset;
pub mod pattern;
// When the pointer is aligned, read 2 words of data per iteration
// until we find a word containing a non-ascii byte.
let ptr = v.as_ptr();
- let align = (ptr as usize + index) & (usize_bytes - 1);
+ let align = unsafe {
+ // the offset is safe, because `index` is guaranteed inbounds
+ align_offset(ptr.offset(index as isize) as *const (), usize_bytes)
+ };
if align == 0 {
while index < blocks_end {
unsafe {
_ => C_null(llret_ty)
}
}
+
+ "align_offset" => {
+ // Lower the intrinsic to straight-line integer arithmetic:
+ // `ptr as usize`
+ let ptr_val = bcx.ptrtoint(llargs[0], bcx.ccx.int_type());
+ // `ptr_val % align`
+ let offset = bcx.urem(ptr_val, llargs[1]);
+ let zero = C_null(bcx.ccx.int_type());
+ // `offset == 0`
+ let is_zero = bcx.icmp(llvm::IntPredicate::IntEQ, offset, zero);
+ // `if offset == 0 { 0 } else { align - offset }` — must match the
+ // stage0 fallback: the distance *forward* to the next aligned address,
+ // i.e. `align - offset`, not `offset - align` (which would wrap).
+ bcx.select(is_zero, zero, bcx.sub(llargs[1], offset))
+ }
name if name.starts_with("simd_") => {
generic_simd_intrinsic(bcx, name,
callee_ty,