mod sort;
#[repr(C)]
-struct Repr<T> {
- pub data: *const T,
- pub len: usize,
+union Repr<'a, T: 'a> {
+ rust: &'a [T],
+ rust_mut: &'a mut [T],
+ raw: FatPtr<T>,
}
-//
-// Extension traits
-//
-
-public_in_stage0! {
-{
-/// Extension methods for slices.
-#[unstable(feature = "core_slice_ext",
- reason = "stable interface provided by `impl [T]` in later crates",
- issue = "32110")]
-#[allow(missing_docs)] // documented elsewhere
-}
-trait SliceExt {
- type Item;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_at(&self, mid: usize) -> (&[Self::Item], &[Self::Item]);
-
- #[stable(feature = "core", since = "1.6.0")]
- fn iter(&self) -> Iter<Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split<P>(&self, pred: P) -> Split<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "slice_rsplit", since = "1.27.0")]
- fn rsplit<P>(&self, pred: P) -> RSplit<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn splitn<P>(&self, n: usize, pred: P) -> SplitN<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn windows(&self, size: usize) -> Windows<Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn chunks(&self, size: usize) -> Chunks<Self::Item>;
-
- #[unstable(feature = "exact_chunks", issue = "47115")]
- fn exact_chunks(&self, size: usize) -> ExactChunks<Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn get<I>(&self, index: I) -> Option<&I::Output>
- where I: SliceIndex<Self>;
- #[stable(feature = "core", since = "1.6.0")]
- fn first(&self) -> Option<&Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_first(&self) -> Option<(&Self::Item, &[Self::Item])>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_last(&self) -> Option<(&Self::Item, &[Self::Item])>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn last(&self) -> Option<&Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
- where I: SliceIndex<Self>;
- #[stable(feature = "core", since = "1.6.0")]
- fn as_ptr(&self) -> *const Self::Item;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn binary_search(&self, x: &Self::Item) -> Result<usize, usize>
- where Self::Item: Ord;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
- where F: FnMut(&'a Self::Item) -> Ordering;
-
- #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
- fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
- where F: FnMut(&'a Self::Item) -> B,
- B: Ord;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn len(&self) -> usize;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn is_empty(&self) -> bool { self.len() == 0 }
-
- #[stable(feature = "core", since = "1.6.0")]
- fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
- where I: SliceIndex<Self>;
- #[stable(feature = "core", since = "1.6.0")]
- fn iter_mut(&mut self) -> IterMut<Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn first_mut(&mut self) -> Option<&mut Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_first_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_last_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn last_mut(&mut self) -> Option<&mut Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_mut<P>(&mut self, pred: P) -> SplitMut<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "slice_rsplit", since = "1.27.0")]
- fn rsplit_mut<P>(&mut self, pred: P) -> RSplitMut<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P>
- where P: FnMut(&Self::Item) -> bool;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<Self::Item>;
-
- #[unstable(feature = "exact_chunks", issue = "47115")]
- fn exact_chunks_mut(&mut self, size: usize) -> ExactChunksMut<Self::Item>;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn swap(&mut self, a: usize, b: usize);
-
- #[stable(feature = "core", since = "1.6.0")]
- fn split_at_mut(&mut self, mid: usize) -> (&mut [Self::Item], &mut [Self::Item]);
-
- #[stable(feature = "core", since = "1.6.0")]
- fn reverse(&mut self);
-
- #[stable(feature = "core", since = "1.6.0")]
- unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
- where I: SliceIndex<Self>;
- #[stable(feature = "core", since = "1.6.0")]
- fn as_mut_ptr(&mut self) -> *mut Self::Item;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
-
- #[stable(feature = "core", since = "1.6.0")]
- fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
-
- #[stable(feature = "slice_rotate", since = "1.26.0")]
- fn rotate_left(&mut self, mid: usize);
-
- #[stable(feature = "slice_rotate", since = "1.26.0")]
- fn rotate_right(&mut self, k: usize);
-
- #[stable(feature = "clone_from_slice", since = "1.7.0")]
- fn clone_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Clone;
-
- #[stable(feature = "copy_from_slice", since = "1.9.0")]
- fn copy_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Copy;
-
- #[stable(feature = "swap_with_slice", since = "1.27.0")]
- fn swap_with_slice(&mut self, src: &mut [Self::Item]);
-
- #[stable(feature = "sort_unstable", since = "1.20.0")]
- fn sort_unstable(&mut self)
- where Self::Item: Ord;
-
- #[stable(feature = "sort_unstable", since = "1.20.0")]
- fn sort_unstable_by<F>(&mut self, compare: F)
- where F: FnMut(&Self::Item, &Self::Item) -> Ordering;
-
- #[stable(feature = "sort_unstable", since = "1.20.0")]
- fn sort_unstable_by_key<B, F>(&mut self, f: F)
- where F: FnMut(&Self::Item) -> B,
- B: Ord;
-}}
-
-// Use macros to be generic over const/mut
-macro_rules! slice_offset {
- ($ptr:expr, $by:expr) => {{
- let ptr = $ptr;
- if size_from_ptr(ptr) == 0 {
- (ptr as *mut i8).wrapping_offset($by) as _
- } else {
- ptr.offset($by)
- }
- }};
-}
-
-// make a &T from a *const T
-macro_rules! make_ref {
- ($ptr:expr) => {{
- let ptr = $ptr;
- if size_from_ptr(ptr) == 0 {
- // Use a non-null pointer value
- &*(1 as *mut _)
- } else {
- &*ptr
- }
- }};
-}
-
-// make a &mut T from a *mut T
-macro_rules! make_ref_mut {
- ($ptr:expr) => {{
- let ptr = $ptr;
- if size_from_ptr(ptr) == 0 {
- // Use a non-null pointer value
- &mut *(1 as *mut _)
- } else {
- &mut *ptr
- }
- }};
-}
-
-#[unstable(feature = "core_slice_ext",
- reason = "stable interface provided by `impl [T]` in later crates",
- issue = "32110")]
-impl<T> SliceExt for [T] {
- type Item = T;
-
- #[inline]
- fn split_at(&self, mid: usize) -> (&[T], &[T]) {
- (&self[..mid], &self[mid..])
- }
-
- #[inline]
- fn iter(&self) -> Iter<T> {
- unsafe {
- let p = if mem::size_of::<T>() == 0 {
- 1 as *const _
- } else {
- let p = self.as_ptr();
- assume(!p.is_null());
- p
- };
-
- Iter {
- ptr: p,
- end: slice_offset!(p, self.len() as isize),
- _marker: marker::PhantomData
- }
- }
- }
-
- #[inline]
- fn split<P>(&self, pred: P) -> Split<T, P>
- where P: FnMut(&T) -> bool
- {
- Split {
- v: self,
- pred,
- finished: false
- }
- }
-
- #[inline]
- fn rsplit<P>(&self, pred: P) -> RSplit<T, P>
- where P: FnMut(&T) -> bool
- {
- RSplit { inner: self.split(pred) }
- }
-
- #[inline]
- fn splitn<P>(&self, n: usize, pred: P) -> SplitN<T, P>
- where P: FnMut(&T) -> bool
- {
- SplitN {
- inner: GenericSplitN {
- iter: self.split(pred),
- count: n
- }
- }
- }
-
- #[inline]
- fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<T, P>
- where P: FnMut(&T) -> bool
- {
- RSplitN {
- inner: GenericSplitN {
- iter: self.rsplit(pred),
- count: n
- }
- }
- }
-
- #[inline]
- fn windows(&self, size: usize) -> Windows<T> {
- assert!(size != 0);
- Windows { v: self, size: size }
- }
-
- #[inline]
- fn chunks(&self, chunk_size: usize) -> Chunks<T> {
- assert!(chunk_size != 0);
- Chunks { v: self, chunk_size: chunk_size }
- }
-
- #[inline]
- fn exact_chunks(&self, chunk_size: usize) -> ExactChunks<T> {
- assert!(chunk_size != 0);
- let rem = self.len() % chunk_size;
- let len = self.len() - rem;
- ExactChunks { v: &self[..len], chunk_size: chunk_size}
- }
-
- #[inline]
- fn get<I>(&self, index: I) -> Option<&I::Output>
- where I: SliceIndex<[T]>
- {
- index.get(self)
- }
-
- #[inline]
- fn first(&self) -> Option<&T> {
- if self.is_empty() { None } else { Some(&self[0]) }
- }
-
- #[inline]
- fn split_first(&self) -> Option<(&T, &[T])> {
- if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
- }
-
- #[inline]
- fn split_last(&self) -> Option<(&T, &[T])> {
- let len = self.len();
- if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
- }
-
- #[inline]
- fn last(&self) -> Option<&T> {
- if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
- }
-
- #[inline]
- unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
- where I: SliceIndex<[T]>
- {
- index.get_unchecked(self)
- }
-
- #[inline]
- fn as_ptr(&self) -> *const T {
- self as *const [T] as *const T
- }
-
- fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
- where F: FnMut(&'a T) -> Ordering
- {
- let s = self;
- let mut size = s.len();
- if size == 0 {
- return Err(0);
- }
- let mut base = 0usize;
- while size > 1 {
- let half = size / 2;
- let mid = base + half;
- // mid is always in [0, size), that means mid is >= 0 and < size.
- // mid >= 0: by definition
- // mid < size: mid = size / 2 + size / 4 + size / 8 ...
- let cmp = f(unsafe { s.get_unchecked(mid) });
- base = if cmp == Greater { base } else { mid };
- size -= half;
- }
- // base is always in [0, size) because base <= mid.
- let cmp = f(unsafe { s.get_unchecked(base) });
- if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
- }
-
- #[inline]
- fn len(&self) -> usize {
- unsafe {
- mem::transmute::<&[T], Repr<T>>(self).len
- }
- }
-
- #[inline]
- fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
- where I: SliceIndex<[T]>
- {
- index.get_mut(self)
- }
-
- #[inline]
- fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
- let len = self.len();
- let ptr = self.as_mut_ptr();
-
- unsafe {
- assert!(mid <= len);
-
- (from_raw_parts_mut(ptr, mid),
- from_raw_parts_mut(ptr.offset(mid as isize), len - mid))
- }
- }
-
- #[inline]
- fn iter_mut(&mut self) -> IterMut<T> {
- unsafe {
- let p = if mem::size_of::<T>() == 0 {
- 1 as *mut _
- } else {
- let p = self.as_mut_ptr();
- assume(!p.is_null());
- p
- };
-
- IterMut {
- ptr: p,
- end: slice_offset!(p, self.len() as isize),
- _marker: marker::PhantomData
- }
- }
- }
-
- #[inline]
- fn last_mut(&mut self) -> Option<&mut T> {
- let len = self.len();
- if len == 0 { return None; }
- Some(&mut self[len - 1])
- }
-
- #[inline]
- fn first_mut(&mut self) -> Option<&mut T> {
- if self.is_empty() { None } else { Some(&mut self[0]) }
- }
-
- #[inline]
- fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
- if self.is_empty() { None } else {
- let split = self.split_at_mut(1);
- Some((&mut split.0[0], split.1))
- }
- }
-
- #[inline]
- fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
- let len = self.len();
- if len == 0 { None } else {
- let split = self.split_at_mut(len - 1);
- Some((&mut split.1[0], split.0))
- }
- }
-
- #[inline]
- fn split_mut<P>(&mut self, pred: P) -> SplitMut<T, P>
- where P: FnMut(&T) -> bool
- {
- SplitMut { v: self, pred: pred, finished: false }
- }
-
- #[inline]
- fn rsplit_mut<P>(&mut self, pred: P) -> RSplitMut<T, P>
- where P: FnMut(&T) -> bool
- {
- RSplitMut { inner: self.split_mut(pred) }
- }
-
- #[inline]
- fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<T, P>
- where P: FnMut(&T) -> bool
- {
- SplitNMut {
- inner: GenericSplitN {
- iter: self.split_mut(pred),
- count: n
- }
- }
- }
-
- #[inline]
- fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<T, P> where
- P: FnMut(&T) -> bool,
- {
- RSplitNMut {
- inner: GenericSplitN {
- iter: self.rsplit_mut(pred),
- count: n
- }
- }
- }
-
- #[inline]
- fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
- assert!(chunk_size != 0);
- ChunksMut { v: self, chunk_size: chunk_size }
- }
-
- #[inline]
- fn exact_chunks_mut(&mut self, chunk_size: usize) -> ExactChunksMut<T> {
- assert!(chunk_size != 0);
- let rem = self.len() % chunk_size;
- let len = self.len() - rem;
- ExactChunksMut { v: &mut self[..len], chunk_size: chunk_size}
- }
-
- #[inline]
- fn swap(&mut self, a: usize, b: usize) {
- unsafe {
- // Can't take two mutable loans from one vector, so instead just cast
- // them to their raw pointers to do the swap
- let pa: *mut T = &mut self[a];
- let pb: *mut T = &mut self[b];
- ptr::swap(pa, pb);
- }
- }
-
- fn reverse(&mut self) {
- let mut i: usize = 0;
- let ln = self.len();
-
- // For very small types, all the individual reads in the normal
- // path perform poorly. We can do better, given efficient unaligned
- // load/store, by loading a larger chunk and reversing a register.
-
- // Ideally LLVM would do this for us, as it knows better than we do
- // whether unaligned reads are efficient (since that changes between
- // different ARM versions, for example) and what the best chunk size
- // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
- // the loop, so we need to do this ourselves. (Hypothesis: reverse
- // is troublesome because the sides can be aligned differently --
- // will be, when the length is odd -- so there's no way of emitting
- // pre- and postludes to use fully-aligned SIMD in the middle.)
-
- let fast_unaligned =
- cfg!(any(target_arch = "x86", target_arch = "x86_64"));
-
- if fast_unaligned && mem::size_of::<T>() == 1 {
- // Use the llvm.bswap intrinsic to reverse u8s in a usize
- let chunk = mem::size_of::<usize>();
- while i + chunk - 1 < ln / 2 {
- unsafe {
- let pa: *mut T = self.get_unchecked_mut(i);
- let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
- let va = ptr::read_unaligned(pa as *mut usize);
- let vb = ptr::read_unaligned(pb as *mut usize);
- ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
- ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
- }
- i += chunk;
- }
- }
-
- if fast_unaligned && mem::size_of::<T>() == 2 {
- // Use rotate-by-16 to reverse u16s in a u32
- let chunk = mem::size_of::<u32>() / 2;
- while i + chunk - 1 < ln / 2 {
- unsafe {
- let pa: *mut T = self.get_unchecked_mut(i);
- let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
- let va = ptr::read_unaligned(pa as *mut u32);
- let vb = ptr::read_unaligned(pb as *mut u32);
- ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
- ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
- }
- i += chunk;
- }
- }
-
- while i < ln / 2 {
- // Unsafe swap to avoid the bounds check in safe swap.
- unsafe {
- let pa: *mut T = self.get_unchecked_mut(i);
- let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
- ptr::swap(pa, pb);
- }
- i += 1;
- }
- }
-
- #[inline]
- unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
- where I: SliceIndex<[T]>
- {
- index.get_unchecked_mut(self)
- }
-
- #[inline]
- fn as_mut_ptr(&mut self) -> *mut T {
- self as *mut [T] as *mut T
- }
-
- #[inline]
- fn contains(&self, x: &T) -> bool where T: PartialEq {
- x.slice_contains(self)
- }
-
- #[inline]
- fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
- let n = needle.len();
- self.len() >= n && needle == &self[..n]
- }
-
- #[inline]
- fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
- let (m, n) = (self.len(), needle.len());
- m >= n && needle == &self[m-n..]
- }
-
- fn binary_search(&self, x: &T) -> Result<usize, usize>
- where T: Ord
- {
- self.binary_search_by(|p| p.cmp(x))
- }
-
- fn rotate_left(&mut self, mid: usize) {
- assert!(mid <= self.len());
- let k = self.len() - mid;
-
- unsafe {
- let p = self.as_mut_ptr();
- rotate::ptr_rotate(mid, p.offset(mid as isize), k);
- }
- }
-
- fn rotate_right(&mut self, k: usize) {
- assert!(k <= self.len());
- let mid = self.len() - k;
+#[repr(C)]
+struct FatPtr<T> {
+ data: *const T,
+ len: usize,
+}
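+// Illustrative sketch: the union lets a `&[T]` be viewed as its raw
+// (data, len) fat-pointer parts and back, e.g.
+//
+//     let s: &[u8] = &[1, 2, 3];
+//     let raw = unsafe { Repr { rust: s }.raw };
+//     assert_eq!(raw.len, 3);
+//     assert!(raw.data == s.as_ptr());
+//
+// `len` and `from_raw_parts{,_mut}` below are exactly these two views.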
- unsafe {
- let p = self.as_mut_ptr();
- rotate::ptr_rotate(mid, p.offset(mid as isize), k);
- }
- }
+//
+// Extension traits
+//
- #[inline]
- fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
- assert!(self.len() == src.len(),
- "destination and source slices have different lengths");
- // NOTE: We need to explicitly slice them to the same length
- // for bounds checking to be elided, and the optimizer will
- // generate memcpy for simple cases (for example T = u8).
- let len = self.len();
- let src = &src[..len];
- for i in 0..len {
- self[i].clone_from(&src[i]);
+// Use macros to be generic over const/mut
+macro_rules! slice_offset {
+ ($ptr:expr, $by:expr) => {{
+ let ptr = $ptr;
+ if size_from_ptr(ptr) == 0 {
+ (ptr as *mut i8).wrapping_offset($by) as _
+ } else {
+ ptr.offset($by)
}
- }
+ }};
+}
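+// Illustrative note: for zero-sized `T` the `*mut i8` cast makes
+// `wrapping_offset` advance the raw address by `$by` bytes, so an iterator
+// over `n` ZSTs still satisfies `end as usize - start as usize == n` and can
+// count its remaining elements by that arithmetic difference.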
- #[inline]
- fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
- assert!(self.len() == src.len(),
- "destination and source slices have different lengths");
- unsafe {
- ptr::copy_nonoverlapping(
- src.as_ptr(), self.as_mut_ptr(), self.len());
+// make a &T from a *const T
+macro_rules! make_ref {
+ ($ptr:expr) => {{
+ let ptr = $ptr;
+ if size_from_ptr(ptr) == 0 {
+ // Use a non-null pointer value
+ &*(1 as *mut _)
+ } else {
+ &*ptr
}
- }
+ }};
+}
- #[inline]
- fn swap_with_slice(&mut self, src: &mut [T]) {
- assert!(self.len() == src.len(),
- "destination and source slices have different lengths");
- unsafe {
- ptr::swap_nonoverlapping(
- self.as_mut_ptr(), src.as_mut_ptr(), self.len());
+// make a &mut T from a *mut T
+macro_rules! make_ref_mut {
+ ($ptr:expr) => {{
+ let ptr = $ptr;
+ if size_from_ptr(ptr) == 0 {
+ // Use a non-null pointer value
+ &mut *(1 as *mut _)
+ } else {
+ &mut *ptr
}
- }
-
- #[inline]
- fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
- where F: FnMut(&'a Self::Item) -> B,
- B: Ord
- {
- self.binary_search_by(|k| f(k).cmp(b))
- }
-
- #[inline]
- fn sort_unstable(&mut self)
- where Self::Item: Ord
- {
- sort::quicksort(self, |a, b| a.lt(b));
- }
-
- #[inline]
- fn sort_unstable_by<F>(&mut self, mut compare: F)
- where F: FnMut(&Self::Item, &Self::Item) -> Ordering
- {
- sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
- }
-
- #[inline]
- fn sort_unstable_by_key<B, F>(&mut self, mut f: F)
- where F: FnMut(&Self::Item) -> B,
- B: Ord
- {
- sort::quicksort(self, |a, b| f(a).lt(&f(b)));
- }
+ }};
}
-// FIXME: remove (inline) this macro and the SliceExt trait
-// when updating to a bootstrap compiler that has the new lang items.
-#[cfg_attr(stage0, macro_export)]
-#[unstable(feature = "core_slice_ext", issue = "32110")]
-macro_rules! slice_core_methods { () => {
+#[lang = "slice"]
+#[cfg(not(test))]
+impl<T> [T] {
/// Returns the number of elements in the slice.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
- pub fn len(&self) -> usize {
- SliceExt::len(self)
+ #[rustc_const_unstable(feature = "const_slice_len")]
+ pub const fn len(&self) -> usize {
+ unsafe {
+ Repr { rust: self }.raw.len
+ }
}
/// Returns `true` if the slice has a length of 0.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
- pub fn is_empty(&self) -> bool {
- SliceExt::is_empty(self)
+ #[rustc_const_unstable(feature = "const_slice_len")]
+ pub const fn is_empty(&self) -> bool {
+ self.len() == 0
}
/// Returns the first element of the slice, or `None` if it is empty.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn first(&self) -> Option<&T> {
- SliceExt::first(self)
+ if self.is_empty() { None } else { Some(&self[0]) }
}
/// Returns a mutable pointer to the first element of the slice, or `None` if it is empty.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn first_mut(&mut self) -> Option<&mut T> {
- SliceExt::first_mut(self)
+ if self.is_empty() { None } else { Some(&mut self[0]) }
}
/// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_first(&self) -> Option<(&T, &[T])> {
- SliceExt::split_first(self)
+ if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
}
/// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
- SliceExt::split_first_mut(self)
+ if self.is_empty() { None } else {
+ let split = self.split_at_mut(1);
+ Some((&mut split.0[0], split.1))
+ }
}
/// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_last(&self) -> Option<(&T, &[T])> {
- SliceExt::split_last(self)
+ let len = self.len();
+ if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
}
/// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
- SliceExt::split_last_mut(self)
+ let len = self.len();
+ if len == 0 { None } else {
+ let split = self.split_at_mut(len - 1);
+ Some((&mut split.1[0], split.0))
+ }
}
/// Returns the last element of the slice, or `None` if it is empty.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn last(&self) -> Option<&T> {
- SliceExt::last(self)
+ if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
}
/// Returns a mutable pointer to the last item in the slice.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn last_mut(&mut self) -> Option<&mut T> {
- SliceExt::last_mut(self)
+ let len = self.len();
+ if len == 0 { return None; }
+ Some(&mut self[len - 1])
}
/// Returns a reference to an element or subslice depending on the type of index.
pub fn get<I>(&self, index: I) -> Option<&I::Output>
where I: SliceIndex<Self>
{
- SliceExt::get(self, index)
+ index.get(self)
}
/// Returns a mutable reference to an element or subslice depending on the type of index.
pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
where I: SliceIndex<Self>
{
- SliceExt::get_mut(self, index)
+ index.get_mut(self)
}
/// Returns a reference to an element or subslice, without doing bounds checking.
pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where I: SliceIndex<Self>
{
- SliceExt::get_unchecked(self, index)
+ index.get_unchecked(self)
}
/// Returns a mutable reference to an element or subslice, without doing bounds checking.
pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where I: SliceIndex<Self>
{
- SliceExt::get_unchecked_mut(self, index)
+ index.get_unchecked_mut(self)
}
/// Returns a raw pointer to the slice's buffer.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
- pub fn as_ptr(&self) -> *const T {
- SliceExt::as_ptr(self)
+ #[rustc_const_unstable(feature = "const_slice_as_ptr")]
+ pub const fn as_ptr(&self) -> *const T {
+ self as *const [T] as *const T
}
/// Returns an unsafe mutable pointer to the slice's buffer.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut T {
- SliceExt::as_mut_ptr(self)
+ self as *mut [T] as *mut T
}
/// Swaps two elements in the slice.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn swap(&mut self, a: usize, b: usize) {
- SliceExt::swap(self, a, b)
+ unsafe {
+ // Can't take two mutable loans from one vector, so instead just cast
+ // them to their raw pointers to do the swap
+ let pa: *mut T = &mut self[a];
+ let pb: *mut T = &mut self[b];
+ ptr::swap(pa, pb);
+ }
}
/// Reverses the order of elements in the slice, in place.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn reverse(&mut self) {
- SliceExt::reverse(self)
+ let mut i: usize = 0;
+ let ln = self.len();
+
+ // For very small types, all the individual reads in the normal
+ // path perform poorly. We can do better, given efficient unaligned
+ // load/store, by loading a larger chunk and reversing a register.
+
+ // Ideally LLVM would do this for us, as it knows better than we do
+ // whether unaligned reads are efficient (since that changes between
+ // different ARM versions, for example) and what the best chunk size
+ // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
+ // the loop, so we need to do this ourselves. (Hypothesis: reverse
+ // is troublesome because the sides can be aligned differently --
+ // will be, when the length is odd -- so there's no way of emitting
+ // pre- and postludes to use fully-aligned SIMD in the middle.)
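+ // Concretely (illustrative): for one-byte elements,
+ // 0x0102030405060708u64.swap_bytes() == 0x0807060504030201, so one
+ // unaligned load + bswap + store reverses eight elements at a time
+ // instead of swapping them one by one.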
+
+ let fast_unaligned =
+ cfg!(any(target_arch = "x86", target_arch = "x86_64"));
+
+ if fast_unaligned && mem::size_of::<T>() == 1 {
+ // Use the llvm.bswap intrinsic to reverse u8s in a usize
+ let chunk = mem::size_of::<usize>();
+ while i + chunk - 1 < ln / 2 {
+ unsafe {
+ let pa: *mut T = self.get_unchecked_mut(i);
+ let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
+ let va = ptr::read_unaligned(pa as *mut usize);
+ let vb = ptr::read_unaligned(pb as *mut usize);
+ ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
+ ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
+ }
+ i += chunk;
+ }
+ }
+
+ if fast_unaligned && mem::size_of::<T>() == 2 {
+ // Use rotate-by-16 to reverse u16s in a u32
+ let chunk = mem::size_of::<u32>() / 2;
+ while i + chunk - 1 < ln / 2 {
+ unsafe {
+ let pa: *mut T = self.get_unchecked_mut(i);
+ let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
+ let va = ptr::read_unaligned(pa as *mut u32);
+ let vb = ptr::read_unaligned(pb as *mut u32);
+ ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
+ ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
+ }
+ i += chunk;
+ }
+ }
+
+ while i < ln / 2 {
+ // Unsafe swap to avoid the bounds check in safe swap.
+ unsafe {
+ let pa: *mut T = self.get_unchecked_mut(i);
+ let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
+ ptr::swap(pa, pb);
+ }
+ i += 1;
+ }
}
/// Returns an iterator over the slice.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn iter(&self) -> Iter<T> {
- SliceExt::iter(self)
+ unsafe {
+ let p = if mem::size_of::<T>() == 0 {
+ 1 as *const _
+ } else {
+ let p = self.as_ptr();
+ assume(!p.is_null());
+ p
+ };
+
+ Iter {
+ ptr: p,
+ end: slice_offset!(p, self.len() as isize),
+ _marker: marker::PhantomData
+ }
+ }
}
/// Returns an iterator that allows modifying each value.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn iter_mut(&mut self) -> IterMut<T> {
- SliceExt::iter_mut(self)
+ unsafe {
+ let p = if mem::size_of::<T>() == 0 {
+ 1 as *mut _
+ } else {
+ let p = self.as_mut_ptr();
+ assume(!p.is_null());
+ p
+ };
+
+ IterMut {
+ ptr: p,
+ end: slice_offset!(p, self.len() as isize),
+ _marker: marker::PhantomData
+ }
+ }
}
/// Returns an iterator over all contiguous windows of length `size`.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn windows(&self, size: usize) -> Windows<T> {
- SliceExt::windows(self, size)
+ assert!(size != 0);
+ Windows { v: self, size: size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn chunks(&self, chunk_size: usize) -> Chunks<T> {
- SliceExt::chunks(self, chunk_size)
+ assert!(chunk_size != 0);
+ Chunks { v: self, chunk_size: chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time.
#[unstable(feature = "exact_chunks", issue = "47115")]
#[inline]
pub fn exact_chunks(&self, chunk_size: usize) -> ExactChunks<T> {
- SliceExt::exact_chunks(self, chunk_size)
+ assert!(chunk_size != 0);
+ let rem = self.len() % chunk_size;
+ let len = self.len() - rem;
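+ // E.g. (illustrative): for a 7-element slice with chunk_size == 3,
+ // rem == 1 and len == 6, so the iterator yields exactly two chunks
+ // and never touches the trailing element.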
+ ExactChunks { v: &self[..len], chunk_size: chunk_size}
}
/// Returns an iterator over `chunk_size` elements of the slice at a time.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
- SliceExt::chunks_mut(self, chunk_size)
+ assert!(chunk_size != 0);
+ ChunksMut { v: self, chunk_size: chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time.
#[unstable(feature = "exact_chunks", issue = "47115")]
#[inline]
pub fn exact_chunks_mut(&mut self, chunk_size: usize) -> ExactChunksMut<T> {
- SliceExt::exact_chunks_mut(self, chunk_size)
+ assert!(chunk_size != 0);
+ let rem = self.len() % chunk_size;
+ let len = self.len() - rem;
+ ExactChunksMut { v: &mut self[..len], chunk_size: chunk_size}
}
/// Divides one slice into two at an index.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
- SliceExt::split_at(self, mid)
+ (&self[..mid], &self[mid..])
}
/// Divides one mutable slice into two at an index.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
- SliceExt::split_at_mut(self, mid)
+ let len = self.len();
+ let ptr = self.as_mut_ptr();
+
+ unsafe {
+ assert!(mid <= len);
+
+ (from_raw_parts_mut(ptr, mid),
+ from_raw_parts_mut(ptr.offset(mid as isize), len - mid))
+ }
}
/// Returns an iterator over subslices separated by elements that match `pred`.
pub fn split<F>(&self, pred: F) -> Split<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::split(self, pred)
+ Split {
+ v: self,
+ pred,
+ finished: false
+ }
}
/// Returns an iterator over mutable subslices separated by elements that match `pred`.
pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::split_mut(self, pred)
+ SplitMut { v: self, pred: pred, finished: false }
}
/// Returns an iterator over subslices separated by elements that match `pred`, starting at the end of the slice and working backwards.
pub fn rsplit<F>(&self, pred: F) -> RSplit<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::rsplit(self, pred)
+ RSplit { inner: self.split(pred) }
}
/// Returns an iterator over mutable subslices separated by elements that match `pred`, starting at the end of the slice and working backwards.
pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::rsplit_mut(self, pred)
+ RSplitMut { inner: self.split_mut(pred) }
}
/// Returns an iterator over subslices separated by elements that match `pred`, limited to returning at most `n` items.
pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::splitn(self, n, pred)
+ SplitN {
+ inner: GenericSplitN {
+ iter: self.split(pred),
+ count: n
+ }
+ }
}
/// Returns an iterator over mutable subslices separated by elements that match `pred`, limited to returning at most `n` items.
pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::splitn_mut(self, n, pred)
+ SplitNMut {
+ inner: GenericSplitN {
+ iter: self.split_mut(pred),
+ count: n
+ }
+ }
}
/// Returns an iterator over subslices separated by elements that match `pred`, limited to returning at most `n` items, starting from the end of the slice.
pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::rsplitn(self, n, pred)
+ RSplitN {
+ inner: GenericSplitN {
+ iter: self.rsplit(pred),
+ count: n
+ }
+ }
}
/// Returns an iterator over mutable subslices separated by elements that match `pred`, limited to returning at most `n` items, starting from the end of the slice.
pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<T, F>
where F: FnMut(&T) -> bool
{
- SliceExt::rsplitn_mut(self, n, pred)
+ RSplitNMut {
+ inner: GenericSplitN {
+ iter: self.rsplit_mut(pred),
+ count: n
+ }
+ }
}
/// Returns `true` if the slice contains an element with the given value.
pub fn contains(&self, x: &T) -> bool
where T: PartialEq
{
- SliceExt::contains(self, x)
+ x.slice_contains(self)
}
/// Returns `true` if `needle` is a prefix of the slice.
pub fn starts_with(&self, needle: &[T]) -> bool
where T: PartialEq
{
- SliceExt::starts_with(self, needle)
+ let n = needle.len();
+ self.len() >= n && needle == &self[..n]
}
/// Returns `true` if `needle` is a suffix of the slice.
pub fn ends_with(&self, needle: &[T]) -> bool
where T: PartialEq
{
- SliceExt::ends_with(self, needle)
+ let (m, n) = (self.len(), needle.len());
+ m >= n && needle == &self[m-n..]
}
/// Binary searches this sorted slice for a given element.
pub fn binary_search(&self, x: &T) -> Result<usize, usize>
where T: Ord
{
- SliceExt::binary_search(self, x)
+ self.binary_search_by(|p| p.cmp(x))
}
/// Binary searches this sorted slice with a comparator function.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
- pub fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
+ pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
where F: FnMut(&'a T) -> Ordering
{
- SliceExt::binary_search_by(self, f)
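+ // Worked trace (illustrative): with f = |p| p.cmp(&4) over [1, 3, 4, 6]:
+ // base = 0, size = 4; mid = 2 compares Equal (not Greater) so base = 2,
+ // size = 2; mid = 3 compares Greater so base stays 2, size = 1; the
+ // final probe at index 2 is Equal, so the result is Ok(2).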
+ let s = self;
+ let mut size = s.len();
+ if size == 0 {
+ return Err(0);
+ }
+ let mut base = 0usize;
+ while size > 1 {
+ let half = size / 2;
+ let mid = base + half;
+ // mid is always in [0, size): mid >= 0 by construction, and mid < size
+ // because mid is at most the partial sum size/2 + size/4 + size/8 + ...,
+ // which is strictly less than size.
+ let cmp = f(unsafe { s.get_unchecked(mid) });
+ base = if cmp == Greater { base } else { mid };
+ size -= half;
+ }
+ // base is always in [0, size) because base <= mid.
+ let cmp = f(unsafe { s.get_unchecked(base) });
+ if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
}
/// Binary searches this sorted slice with a key extraction function.
#[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
#[inline]
- pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
+ pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
where F: FnMut(&'a T) -> B,
B: Ord
{
- SliceExt::binary_search_by_key(self, b, f)
+ self.binary_search_by(|k| f(k).cmp(b))
}
/// Sorts the slice, but may not preserve the order of equal elements.
pub fn sort_unstable(&mut self)
where T: Ord
{
- SliceExt::sort_unstable(self);
+ sort::quicksort(self, |a, b| a.lt(b));
}
/// Sorts the slice with a comparator function, but may not preserve the order of equal elements.
/// [pdqsort]: https://github.com/orlp/pdqsort
#[stable(feature = "sort_unstable", since = "1.20.0")]
#[inline]
- pub fn sort_unstable_by<F>(&mut self, compare: F)
+ pub fn sort_unstable_by<F>(&mut self, mut compare: F)
where F: FnMut(&T, &T) -> Ordering
{
- SliceExt::sort_unstable_by(self, compare);
+ sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
}
/// Sorts the slice with a key extraction function, but may not preserve the order of equal elements.
/// [pdqsort]: https://github.com/orlp/pdqsort
#[stable(feature = "sort_unstable", since = "1.20.0")]
#[inline]
- pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
+ pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
where F: FnMut(&T) -> K, K: Ord
{
- SliceExt::sort_unstable_by_key(self, f);
+ sort::quicksort(self, |a, b| f(a).lt(&f(b)));
}
/// Rotates the slice in-place such that the first `mid` elements of the
/// slice move to the end while the last `self.len() - mid` elements move to
/// the front.
#[stable(feature = "slice_rotate", since = "1.26.0")]
pub fn rotate_left(&mut self, mid: usize) {
- SliceExt::rotate_left(self, mid);
+ assert!(mid <= self.len());
+ let k = self.len() - mid;
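+ // E.g. (illustrative): [1, 2, 3, 4, 5].rotate_left(2) moves the first
+ // two elements to the end, giving [3, 4, 5, 1, 2]; here mid = 2, k = 3.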
+
+ unsafe {
+ let p = self.as_mut_ptr();
+ rotate::ptr_rotate(mid, p.offset(mid as isize), k);
+ }
}
/// Rotates the slice in-place such that the first `self.len() - k`
/// elements of the slice move to the end while the last `k` elements move to
/// the front.
#[stable(feature = "slice_rotate", since = "1.26.0")]
pub fn rotate_right(&mut self, k: usize) {
- SliceExt::rotate_right(self, k);
+ assert!(k <= self.len());
+ let mid = self.len() - k;
+
+ unsafe {
+ let p = self.as_mut_ptr();
+ rotate::ptr_rotate(mid, p.offset(mid as isize), k);
+ }
}
/// Copies the elements from `src` into `self`.
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "clone_from_slice", since = "1.7.0")]
pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
- SliceExt::clone_from_slice(self, src)
+ assert!(self.len() == src.len(),
+ "destination and source slices have different lengths");
+ // NOTE: We need to explicitly slice them to the same length
+ // for bounds checking to be elided, and the optimizer will
+ // generate memcpy for simple cases (for example T = u8).
+ let len = self.len();
+ let src = &src[..len];
+ for i in 0..len {
+ self[i].clone_from(&src[i]);
+ }
}
/// Copies all elements from `src` into `self`, using a memcpy.
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "copy_from_slice", since = "1.9.0")]
pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
- SliceExt::copy_from_slice(self, src)
+ assert!(self.len() == src.len(),
+ "destination and source slices have different lengths");
+ unsafe {
+ ptr::copy_nonoverlapping(
+ src.as_ptr(), self.as_mut_ptr(), self.len());
+ }
}
/// Swaps all elements in `self` with those in `other`.
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "swap_with_slice", since = "1.27.0")]
pub fn swap_with_slice(&mut self, other: &mut [T]) {
- SliceExt::swap_with_slice(self, other)
+ assert!(self.len() == other.len(),
+ "destination and source slices have different lengths");
+ unsafe {
+ ptr::swap_nonoverlapping(
+ self.as_mut_ptr(), other.as_mut_ptr(), self.len());
+ }
}
-}}
-#[lang = "slice"]
-#[cfg(not(test))]
-#[cfg(not(stage0))]
-impl<T> [T] {
- slice_core_methods!();
+ /// Calculates the lengths of the middle and trailing slices for `align_to{,_mut}`.
+ #[cfg(not(stage0))]
+ fn align_to_offsets<U>(&self) -> (usize, usize) {
+ // The goal with `rest` is to figure out how many `U`s fit in the smallest
+ // possible whole number of `T`s, and how many `T`s each such "block" takes.
+ //
+ // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
+ // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
+ // place of every 3 Ts in the `rest` slice. A bit more complicated.
+ //
+ // Formula to calculate this is:
+ //
+ // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
+ // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
+ //
+ // Expanded and simplified:
+ //
+ // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
+ // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
+ //
+ // Luckily since all this is constant-evaluated... performance here matters not!
+ #[inline]
+ fn gcd(a: usize, b: usize) -> usize {
+ // Iterative Stein's algorithm (binary GCD).
+ // We should still make this `const fn` (and revert to the recursive
+ // algorithm if we do), because relying on LLVM to const-evaluate all
+ // this is… well, it makes me uncomfortable.
+ let (ctz_a, mut ctz_b) = unsafe {
+ if a == 0 { return b; }
+ if b == 0 { return a; }
+ (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b))
+ };
+ let k = ctz_a.min(ctz_b);
+ let mut a = a >> ctz_a;
+ let mut b = b;
+ loop {
+ // remove all factors of 2 from b
+ b >>= ctz_b;
+ if a > b {
+ ::mem::swap(&mut a, &mut b);
+ }
+ b = b - a;
+ unsafe {
+ if b == 0 {
+ break;
+ }
+ ctz_b = ::intrinsics::cttz_nonzero(b);
+ }
+ }
+ return a << k;
+ }
+ let gcd: usize = gcd(::mem::size_of::<T>(), ::mem::size_of::<U>());
+ let ts: usize = ::mem::size_of::<U>() / gcd;
+ let us: usize = ::mem::size_of::<T>() / gcd;
+
+ // Armed with this knowledge, we can find how many `U`s we can fit!
+ let us_len = self.len() / ts * us;
+ // And how many `T`s will be in the trailing slice!
+ let ts_len = self.len() % ts;
+ return (us_len, ts_len);
+ }
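+ // Worked example of the formulas above (illustrative): with
+ // size_of::<T>() == 16 and size_of::<U>() == 24, gcd == 8, so
+ // ts == 24 / 8 == 3 and us == 16 / 8 == 2: every 3 `T`s hold exactly
+ // 2 `U`s (48 bytes). A 10-element `rest` slice then gives
+ // us_len == 10 / 3 * 2 == 6 and ts_len == 10 % 3 == 1.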
+
+ /// Transmute the slice to a slice of another type, ensuring alignment of the types is
+ /// maintained.
+ ///
+ /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
+ /// slice of a new type, and the suffix slice. The middle slice will have the greatest length
+ /// possible for a given type and input slice.
+ ///
+ /// This method has no purpose when either input element `T` or output element `U` is
+ /// zero-sized and will return the original slice without splitting anything.
+ ///
+ /// # Unsafety
+ ///
+ /// This method is essentially a `transmute` with respect to the elements in the returned
+ /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// # #![feature(slice_align_to)]
+ /// unsafe {
+ /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
+ /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
+ /// // less_efficient_algorithm_for_bytes(prefix);
+ /// // more_efficient_algorithm_for_aligned_shorts(shorts);
+ /// // less_efficient_algorithm_for_bytes(suffix);
+ /// }
+ /// ```
+ #[unstable(feature = "slice_align_to", issue = "44488")]
+ #[cfg(not(stage0))]
+ pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
+ // Note that most of this function will be constant-evaluated.
+ if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
+ // handle ZSTs specially, which is – don't handle them at all.
+ return (self, &[], &[]);
+ }
+
+ // First, find the point at which to split between the first and second
+ // slice. This is easy with `ptr.align_offset`.
+ let ptr = self.as_ptr();
+ let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
+ if offset > self.len() {
+ return (self, &[], &[]);
+ } else {
+ let (left, rest) = self.split_at(offset);
+ let (us_len, ts_len) = rest.align_to_offsets::<U>();
+ return (left,
+ from_raw_parts(rest.as_ptr() as *const U, us_len),
+ from_raw_parts(rest.as_ptr().offset((rest.len() - ts_len) as isize), ts_len))
+ }
+ }
+
+ /// Transmute the slice to a slice of another type, ensuring alignment of the types is
+ /// maintained.
+ ///
+ /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
+ /// slice of a new type, and the suffix slice. The middle slice will have the greatest length
+ /// possible for a given type and input slice.
+ ///
+ /// This method has no purpose when either input element `T` or output element `U` is
+ /// zero-sized and will return the original slice without splitting anything.
+ ///
+ /// # Unsafety
+ ///
+ /// This method is essentially a `transmute` with respect to the elements in the returned
+ /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// # #![feature(slice_align_to)]
+ /// unsafe {
+ /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
+ /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
+ /// // less_efficient_algorithm_for_bytes(prefix);
+ /// // more_efficient_algorithm_for_aligned_shorts(shorts);
+ /// // less_efficient_algorithm_for_bytes(suffix);
+ /// }
+ /// ```
+ #[unstable(feature = "slice_align_to", issue = "44488")]
+ #[cfg(not(stage0))]
+ pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
+ // Note that most of this function will be constant-evaluated.
+ if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
+ // handle ZSTs specially, which is – don't handle them at all.
+ return (self, &mut [], &mut []);
+ }
+
+ // First, find the point at which to split between the first and second
+ // slice. This is easy with `ptr.align_offset`.
+ let ptr = self.as_ptr();
+ let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
+ if offset > self.len() {
+ return (self, &mut [], &mut []);
+ } else {
+ let (left, rest) = self.split_at_mut(offset);
+ let (us_len, ts_len) = rest.align_to_offsets::<U>();
+ let mut_ptr = rest.as_mut_ptr();
+ return (left,
+ from_raw_parts_mut(mut_ptr as *mut U, us_len),
+ from_raw_parts_mut(mut_ptr.offset((rest.len() - ts_len) as isize), ts_len))
+ }
+ }
}
-// FIXME: remove (inline) this macro
-// when updating to a bootstrap compiler that has the new lang items.
-#[cfg_attr(stage0, macro_export)]
-#[unstable(feature = "core_slice_ext", issue = "32110")]
-macro_rules! slice_u8_core_methods { () => {
+#[lang = "slice_u8"]
+#[cfg(not(test))]
+impl [u8] {
/// Checks if all bytes in this slice are within the ASCII range.
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn is_ascii(&self) -> bool {
self.iter().all(|b| b.is_ascii())
}
-}}
-#[lang = "slice_u8"]
-#[cfg(not(test))]
-#[cfg(not(stage0))]
-impl [u8] {
- slice_u8_core_methods!();
}
#[stable(feature = "rust1", since = "1.0.0")]
panic!("slice index starts at {} but ends at {}", index, end);
}
+#[inline(never)]
+#[cold]
+fn slice_index_overflow_fail() -> ! {
+ panic!("attempted to index slice up to maximum usize");
+}
+
+mod private_slice_index {
+ use super::ops;
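+
+ // `Sealed` is intentionally empty: since it lives in a private module,
+ // only the impls below can exist, which prevents downstream crates from
+ // implementing `SliceIndex` for their own types.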
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ pub trait Sealed {}
+
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for usize {}
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for ops::Range<usize> {}
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for ops::RangeTo<usize> {}
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for ops::RangeFrom<usize> {}
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for ops::RangeFull {}
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for ops::RangeInclusive<usize> {}
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
+ impl Sealed for ops::RangeToInclusive<usize> {}
+}
+
/// A helper trait used for indexing operations.
-#[unstable(feature = "slice_get_slice", issue = "35729")]
+#[stable(feature = "slice_get_slice", since = "1.28.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
-pub trait SliceIndex<T: ?Sized> {
+pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
/// The output type returned by methods.
+ #[stable(feature = "slice_get_slice", since = "1.28.0")]
type Output: ?Sized;
/// Returns a shared reference to the output at this location, if in
/// bounds.
+ #[unstable(feature = "slice_index_methods", issue = "0")]
fn get(self, slice: &T) -> Option<&Self::Output>;
/// Returns a mutable reference to the output at this location, if in
/// bounds.
+ #[unstable(feature = "slice_index_methods", issue = "0")]
fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
/// Returns a shared reference to the output at this location, without
/// performing any bounds checking.
+ #[unstable(feature = "slice_index_methods", issue = "0")]
unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
/// Returns a mutable reference to the output at this location, without
/// performing any bounds checking.
+ #[unstable(feature = "slice_index_methods", issue = "0")]
unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
/// Returns a shared reference to the output at this location, panicking
/// if out of bounds.
+ #[unstable(feature = "slice_index_methods", issue = "0")]
fn index(self, slice: &T) -> &Self::Output;
/// Returns a mutable reference to the output at this location, panicking
/// if out of bounds.
+ #[unstable(feature = "slice_index_methods", issue = "0")]
fn index_mut(self, slice: &mut T) -> &mut Self::Output;
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
- assert!(self.end != usize::max_value(),
- "attempted to index slice up to maximum usize");
+ if self.end == usize::max_value() { slice_index_overflow_fail(); }
(self.start..self.end + 1).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
- assert!(self.end != usize::max_value(),
- "attempted to index slice up to maximum usize");
+ if self.end == usize::max_value() { slice_index_overflow_fail(); }
(self.start..self.end + 1).index_mut(slice)
}
}
accum
}
}
+
+ #[stable(feature = "fused", since = "1.26.0")]
+ impl<'a, T> FusedIterator for $name<'a, T> {}
+
+ #[unstable(feature = "trusted_len", issue = "37572")]
+ unsafe impl<'a, T> TrustedLen for $name<'a, T> {}
}
}
}
}
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Iter<'a, T> {}
-
-#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<'a, T> TrustedLen for Iter<'a, T> {}
-
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Iter<'a, T> {
fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
/// View the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut` references that alias, this is forced
- /// to consume the iterator. Consider using the `Slice` and
- /// `SliceMut` implementations for obtaining slices with more
- /// restricted lifetimes that do not consume the iterator.
+ /// to consume the iterator.
///
/// # Examples
///
}
}
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for IterMut<'a, T> {}
-
-#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<'a, T> TrustedLen for IterMut<'a, T> {}
-
-
// Return the number of elements of `T` from `start` to `end`.
// Return the arithmetic difference if `T` is zero size.
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, T> TrustedLen for Windows<'a, T> {}
+
#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Windows<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, T> TrustedLen for Chunks<'a, T> {}
+
#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Chunks<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, T> TrustedLen for ChunksMut<'a, T> {}
+
#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for ChunksMut<'a, T> {}
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, T> TrustedLen for ExactChunks<'a, T> {}
+
#[unstable(feature = "exact_chunks", issue = "47115")]
impl<'a, T> FusedIterator for ExactChunks<'a, T> {}
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, T> TrustedLen for ExactChunksMut<'a, T> {}
+
#[unstable(feature = "exact_chunks", issue = "47115")]
impl<'a, T> FusedIterator for ExactChunksMut<'a, T> {}
/// valid for `len` elements, nor whether the lifetime inferred is a suitable
/// lifetime for the returned slice.
///
-/// `p` must be non-null, even for zero-length slices, because non-zero bits
-/// are required to distinguish between a zero-length slice within `Some()`
-/// from `None`. `p` can be a bogus non-dereferencable pointer, such as `0x1`,
-/// for zero-length slices, though.
+/// `data` must be non-null and aligned, even for zero-length slices, as is
+/// required for all references. However, for zero-length slices, `data` can
+/// be a bogus non-dereferenceable pointer such as [`NonNull::dangling()`].
///
/// # Caveat
///
/// let slice = slice::from_raw_parts(ptr, amt);
/// }
/// ```
+///
+/// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
-pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] {
- mem::transmute(Repr { data: p, len: len })
+pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
+ Repr { raw: FatPtr { data, len } }.rust
}
/// Performs the same functionality as `from_raw_parts`, except that a mutable
/// slice is returned.
///
/// This function is unsafe for the same reasons as `from_raw_parts`, as well
/// as not being able to provide a non-aliasing guarantee of the returned
-/// mutable slice. `p` must be non-null even for zero-length slices as with
+/// mutable slice. `data` must be non-null and aligned even for zero-length slices as with
/// `from_raw_parts`.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
-pub unsafe fn from_raw_parts_mut<'a, T>(p: *mut T, len: usize) -> &'a mut [T] {
- mem::transmute(Repr { data: p, len: len })
+pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
+ Repr { raw: FatPtr { data, len} }.rust_mut
}
/// Converts a reference to T into a slice of length 1 (without copying).
-#[unstable(feature = "from_ref", issue = "45703")]
+#[stable(feature = "from_ref", since = "1.28.0")]
pub fn from_ref<T>(s: &T) -> &[T] {
unsafe {
from_raw_parts(s, 1)
}
/// Converts a mutable reference to T into a slice of length 1 (without copying).
-#[unstable(feature = "from_ref", issue = "45703")]
-pub fn from_ref_mut<T>(s: &mut T) -> &mut [T] {
+#[stable(feature = "from_ref", since = "1.28.0")]
+pub fn from_mut<T>(s: &mut T) -> &mut [T] {
unsafe {
from_raw_parts_mut(s, 1)
}