1 // Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Slice management and manipulation
13 //! For more details see [`std::slice`].
15 //! [`std::slice`]: ../../std/slice/index.html
17 #![stable(feature = "rust1", since = "1.0.0")]
19 // How this module is organized.
21 // The library infrastructure for slices is fairly messy. There's
22 // a lot of stuff defined here. Let's keep it clean.
24 // Since slices don't support inherent methods, all operations
25 // on them are defined on traits, which are then re-exported from
26 // the prelude for convenience. So there are a lot of traits here.
28 // The layout of this file is thus:
30 // * Slice-specific 'extension' traits and their implementations. This
31 // is where most of the slice API resides.
32 // * Implementations of a few common traits with important slice ops.
33 // * Definitions of a bunch of iterators.
35 // * The `raw` and `bytes` submodules.
36 // * Boilerplate trait implementations.
38 use cmp::Ordering::{self, Less, Equal, Greater};
41 use intrinsics::assume;
43 use ops::{FnMut, Try, self};
45 use option::Option::{None, Some};
47 use result::Result::{Ok, Err};
50 use marker::{Copy, Send, Sync, Sized, self};
51 use iter_private::TrustedRandomAccess;
53 #[unstable(feature = "slice_internals", issue = "0",
54 reason = "exposed from core to be reused in std; use the memchr crate")]
55 /// Pure rust memchr implementation, taken from rust-memchr
71 /// Extension methods for slices.
72 #[unstable(feature = "core_slice_ext",
73 reason = "stable interface provided by `impl [T]` in later crates",
75 #[allow(missing_docs)] // documented elsewhere
79 #[stable(feature = "core", since = "1.6.0")]
80 fn split_at(&self, mid: usize) -> (&[Self::Item], &[Self::Item]);
82 #[stable(feature = "core", since = "1.6.0")]
83 fn iter(&self) -> Iter<Self::Item>;
85 #[stable(feature = "core", since = "1.6.0")]
86 fn split<P>(&self, pred: P) -> Split<Self::Item, P>
87 where P: FnMut(&Self::Item) -> bool;
89 #[unstable(feature = "slice_rsplit", issue = "41020")]
90 fn rsplit<P>(&self, pred: P) -> RSplit<Self::Item, P>
91 where P: FnMut(&Self::Item) -> bool;
93 #[stable(feature = "core", since = "1.6.0")]
94 fn splitn<P>(&self, n: usize, pred: P) -> SplitN<Self::Item, P>
95 where P: FnMut(&Self::Item) -> bool;
97 #[stable(feature = "core", since = "1.6.0")]
98 fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<Self::Item, P>
99 where P: FnMut(&Self::Item) -> bool;
101 #[stable(feature = "core", since = "1.6.0")]
102 fn windows(&self, size: usize) -> Windows<Self::Item>;
104 #[stable(feature = "core", since = "1.6.0")]
105 fn chunks(&self, size: usize) -> Chunks<Self::Item>;
107 #[unstable(feature = "exact_chunks", issue = "47115")]
108 fn exact_chunks(&self, size: usize) -> ExactChunks<Self::Item>;
110 #[stable(feature = "core", since = "1.6.0")]
111 fn get<I>(&self, index: I) -> Option<&I::Output>
112 where I: SliceIndex<Self>;
113 #[stable(feature = "core", since = "1.6.0")]
114 fn first(&self) -> Option<&Self::Item>;
116 #[stable(feature = "core", since = "1.6.0")]
117 fn split_first(&self) -> Option<(&Self::Item, &[Self::Item])>;
119 #[stable(feature = "core", since = "1.6.0")]
120 fn split_last(&self) -> Option<(&Self::Item, &[Self::Item])>;
122 #[stable(feature = "core", since = "1.6.0")]
123 fn last(&self) -> Option<&Self::Item>;
125 #[stable(feature = "core", since = "1.6.0")]
126 unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
127 where I: SliceIndex<Self>;
128 #[stable(feature = "core", since = "1.6.0")]
129 fn as_ptr(&self) -> *const Self::Item;
131 #[stable(feature = "core", since = "1.6.0")]
132 fn binary_search(&self, x: &Self::Item) -> Result<usize, usize>
133 where Self::Item: Ord;
135 #[stable(feature = "core", since = "1.6.0")]
136 fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
137 where F: FnMut(&'a Self::Item) -> Ordering;
139 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
140 fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
141 where F: FnMut(&'a Self::Item) -> B,
144 #[stable(feature = "core", since = "1.6.0")]
145 fn len(&self) -> usize;
147 #[stable(feature = "core", since = "1.6.0")]
148 fn is_empty(&self) -> bool { self.len() == 0 }
150 #[stable(feature = "core", since = "1.6.0")]
151 fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
152 where I: SliceIndex<Self>;
153 #[stable(feature = "core", since = "1.6.0")]
154 fn iter_mut(&mut self) -> IterMut<Self::Item>;
156 #[stable(feature = "core", since = "1.6.0")]
157 fn first_mut(&mut self) -> Option<&mut Self::Item>;
159 #[stable(feature = "core", since = "1.6.0")]
160 fn split_first_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
162 #[stable(feature = "core", since = "1.6.0")]
163 fn split_last_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
165 #[stable(feature = "core", since = "1.6.0")]
166 fn last_mut(&mut self) -> Option<&mut Self::Item>;
168 #[stable(feature = "core", since = "1.6.0")]
169 fn split_mut<P>(&mut self, pred: P) -> SplitMut<Self::Item, P>
170 where P: FnMut(&Self::Item) -> bool;
172 #[unstable(feature = "slice_rsplit", issue = "41020")]
173 fn rsplit_mut<P>(&mut self, pred: P) -> RSplitMut<Self::Item, P>
174 where P: FnMut(&Self::Item) -> bool;
176 #[stable(feature = "core", since = "1.6.0")]
177 fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<Self::Item, P>
178 where P: FnMut(&Self::Item) -> bool;
180 #[stable(feature = "core", since = "1.6.0")]
181 fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P>
182 where P: FnMut(&Self::Item) -> bool;
184 #[stable(feature = "core", since = "1.6.0")]
185 fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<Self::Item>;
187 #[unstable(feature = "exact_chunks", issue = "47115")]
188 fn exact_chunks_mut(&mut self, size: usize) -> ExactChunksMut<Self::Item>;
190 #[stable(feature = "core", since = "1.6.0")]
191 fn swap(&mut self, a: usize, b: usize);
193 #[stable(feature = "core", since = "1.6.0")]
194 fn split_at_mut(&mut self, mid: usize) -> (&mut [Self::Item], &mut [Self::Item]);
196 #[stable(feature = "core", since = "1.6.0")]
197 fn reverse(&mut self);
199 #[stable(feature = "core", since = "1.6.0")]
200 unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
201 where I: SliceIndex<Self>;
202 #[stable(feature = "core", since = "1.6.0")]
203 fn as_mut_ptr(&mut self) -> *mut Self::Item;
205 #[stable(feature = "core", since = "1.6.0")]
206 fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq;
208 #[stable(feature = "core", since = "1.6.0")]
209 fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
211 #[stable(feature = "core", since = "1.6.0")]
212 fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
214 #[stable(feature = "slice_rotate", since = "1.26.0")]
215 fn rotate_left(&mut self, mid: usize);
217 #[stable(feature = "slice_rotate", since = "1.26.0")]
218 fn rotate_right(&mut self, k: usize);
220 #[stable(feature = "clone_from_slice", since = "1.7.0")]
221 fn clone_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Clone;
223 #[stable(feature = "copy_from_slice", since = "1.9.0")]
224 fn copy_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Copy;
226 #[unstable(feature = "swap_with_slice", issue = "44030")]
227 fn swap_with_slice(&mut self, src: &mut [Self::Item]);
229 #[stable(feature = "sort_unstable", since = "1.20.0")]
230 fn sort_unstable(&mut self)
231 where Self::Item: Ord;
233 #[stable(feature = "sort_unstable", since = "1.20.0")]
234 fn sort_unstable_by<F>(&mut self, compare: F)
235 where F: FnMut(&Self::Item, &Self::Item) -> Ordering;
237 #[stable(feature = "sort_unstable", since = "1.20.0")]
238 fn sort_unstable_by_key<B, F>(&mut self, f: F)
239 where F: FnMut(&Self::Item) -> B,
243 // Use macros to be generic over const/mut
244 macro_rules! slice_offset {
245 ($ptr:expr, $by:expr) => {{
247 if size_from_ptr(ptr) == 0 {
248 (ptr as *mut i8).wrapping_offset($by) as _
255 // make a &T from a *const T
256 macro_rules! make_ref {
259 if size_from_ptr(ptr) == 0 {
260 // Use a non-null pointer value
268 // make a &mut T from a *mut T
269 macro_rules! make_ref_mut {
272 if size_from_ptr(ptr) == 0 {
273 // Use a non-null pointer value
281 #[unstable(feature = "core_slice_ext",
282 reason = "stable interface provided by `impl [T]` in later crates",
284 impl<T> SliceExt for [T] {
// Divides the slice into two shared borrows at `mid`: `[0, mid)` and
// `[mid, len)`. Both halves come from safe range indexing, so an
// out-of-bounds `mid` panics through the normal slice-index path.
288 fn split_at(&self, mid: usize) -> (&[T], &[T]) {
289 (&self[..mid], &self[mid..])
293 fn iter(&self) -> Iter<T> {
295 let p = if mem::size_of::<T>() == 0 {
298 let p = self.as_ptr();
299 assume(!p.is_null());
305 end: slice_offset!(p, self.len() as isize),
306 _marker: marker::PhantomData
312 fn split<P>(&self, pred: P) -> Split<T, P>
313 where P: FnMut(&T) -> bool
323 fn rsplit<P>(&self, pred: P) -> RSplit<T, P>
324 where P: FnMut(&T) -> bool
326 RSplit { inner: self.split(pred) }
330 fn splitn<P>(&self, n: usize, pred: P) -> SplitN<T, P>
331 where P: FnMut(&T) -> bool
334 inner: GenericSplitN {
335 iter: self.split(pred),
342 fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<T, P>
343 where P: FnMut(&T) -> bool
346 inner: GenericSplitN {
347 iter: self.rsplit(pred),
354 fn windows(&self, size: usize) -> Windows<T> {
356 Windows { v: self, size: size }
360 fn chunks(&self, chunk_size: usize) -> Chunks<T> {
361 assert!(chunk_size != 0);
362 Chunks { v: self, chunk_size: chunk_size }
366 fn exact_chunks(&self, chunk_size: usize) -> ExactChunks<T> {
367 assert!(chunk_size != 0);
368 let rem = self.len() % chunk_size;
369 let len = self.len() - rem;
370 ExactChunks { v: &self[..len], chunk_size: chunk_size}
374 fn get<I>(&self, index: I) -> Option<&I::Output>
375 where I: SliceIndex<[T]>
// Returns a reference to the first element, or `None` for an empty
// slice; the emptiness check makes the `self[0]` index infallible.
381 fn first(&self) -> Option<&T> {
382 if self.is_empty() { None } else { Some(&self[0]) }
// Returns the first element paired with the remainder of the slice,
// or `None` when the slice is empty.
386 fn split_first(&self) -> Option<(&T, &[T])> {
387 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
// Returns the last element paired with everything before it, or `None`
// when the slice is empty. `len` is read once so both indexings below
// use the same, already-validated length.
391 fn split_last(&self) -> Option<(&T, &[T])> {
392 let len = self.len();
393 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
// Returns a reference to the last element, or `None` for an empty slice.
397 fn last(&self) -> Option<&T> {
398 if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
402 unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
403 where I: SliceIndex<[T]>
405 index.get_unchecked(self)
409 fn as_ptr(&self) -> *const T {
410 self as *const [T] as *const T
// Binary search driven by a caller-supplied comparator `f`.
// Returns `Ok(pos)` of a matching element, or `Err(pos)` giving the
// index where a matching element could be inserted to keep the slice
// sorted. NOTE(review): the halving-loop header is elided from this
// view (`s` is presumably a rebinding of `self`, with `half = size / 2`
// computed each iteration) — confirm against the full source.
413 fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
414 where F: FnMut(&'a T) -> Ordering
417 let mut size = s.len();
421 let mut base = 0usize;
// Branchless probe: `base` either stays or jumps to `mid`, so the
// loop body has no data-dependent branches for the optimizer to miss.
424 let mid = base + half;
425 // mid is always in [0, size), that means mid is >= 0 and < size.
426 // mid >= 0: by definition
427 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
428 let cmp = f(unsafe { s.get_unchecked(mid) });
429 base = if cmp == Greater { base } else { mid };
// Final probe: the loop narrows to one candidate; decide hit vs.
// insertion point. `(cmp == Less) as usize` is 1 exactly when the
// candidate sorts before the target, placing the insertion after it.
432 // base is always in [0, size) because base <= mid.
433 let cmp = f(unsafe { s.get_unchecked(base) });
434 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
// Reads the slice length out of the fat pointer by reinterpreting the
// reference as the internal `Repr<T>` layout; relies on `Repr<T>`
// exactly matching the (data pointer, length) slice representation.
438 fn len(&self) -> usize {
440 mem::transmute::<&[T], Repr<T>>(self).len
445 fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
446 where I: SliceIndex<[T]>
// Divides the slice into two non-overlapping mutable borrows at `mid`.
// Built from raw parts because two `&mut` loans cannot be taken from
// one slice safely; sound because `[0, mid)` and `[mid, len)` are
// disjoint. NOTE(review): the bounds assertion on `mid <= len` is
// elided from this view — confirm it precedes the raw-pointer math.
452 fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
453 let len = self.len();
454 let ptr = self.as_mut_ptr();
459 (from_raw_parts_mut(ptr, mid),
460 from_raw_parts_mut(ptr.offset(mid as isize), len - mid))
465 fn iter_mut(&mut self) -> IterMut<T> {
467 let p = if mem::size_of::<T>() == 0 {
470 let p = self.as_mut_ptr();
471 assume(!p.is_null());
477 end: slice_offset!(p, self.len() as isize),
478 _marker: marker::PhantomData
484 fn last_mut(&mut self) -> Option<&mut T> {
485 let len = self.len();
486 if len == 0 { return None; }
487 Some(&mut self[len - 1])
491 fn first_mut(&mut self) -> Option<&mut T> {
492 if self.is_empty() { None } else { Some(&mut self[0]) }
496 fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
497 if self.is_empty() { None } else {
498 let split = self.split_at_mut(1);
499 Some((&mut split.0[0], split.1))
504 fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
505 let len = self.len();
506 if len == 0 { None } else {
507 let split = self.split_at_mut(len - 1);
508 Some((&mut split.1[0], split.0))
513 fn split_mut<P>(&mut self, pred: P) -> SplitMut<T, P>
514 where P: FnMut(&T) -> bool
516 SplitMut { v: self, pred: pred, finished: false }
520 fn rsplit_mut<P>(&mut self, pred: P) -> RSplitMut<T, P>
521 where P: FnMut(&T) -> bool
523 RSplitMut { inner: self.split_mut(pred) }
527 fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<T, P>
528 where P: FnMut(&T) -> bool
531 inner: GenericSplitN {
532 iter: self.split_mut(pred),
539 fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<T, P> where
540 P: FnMut(&T) -> bool,
543 inner: GenericSplitN {
544 iter: self.rsplit_mut(pred),
551 fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
552 assert!(chunk_size != 0);
553 ChunksMut { v: self, chunk_size: chunk_size }
557 fn exact_chunks_mut(&mut self, chunk_size: usize) -> ExactChunksMut<T> {
558 assert!(chunk_size != 0);
559 let rem = self.len() % chunk_size;
560 let len = self.len() - rem;
561 ExactChunksMut { v: &mut self[..len], chunk_size: chunk_size}
// Swaps the elements at indices `a` and `b`. Safe indexing (`self[a]`,
// `self[b]`) performs the bounds checks; the raw pointers only exist to
// sidestep the two-simultaneous-`&mut`-loans restriction. The actual
// `ptr::swap` call is elided from this view.
565 fn swap(&mut self, a: usize, b: usize) {
567 // Can't take two mutable loans from one vector, so instead just cast
568 // them to their raw pointers to do the swap
569 let pa: *mut T = &mut self[a];
570 let pb: *mut T = &mut self[b];
575 fn reverse(&mut self) {
576 let mut i: usize = 0;
579 // For very small types, all the individual reads in the normal
580 // path perform poorly. We can do better, given efficient unaligned
581 // load/store, by loading a larger chunk and reversing a register.
583 // Ideally LLVM would do this for us, as it knows better than we do
584 // whether unaligned reads are efficient (since that changes between
585 // different ARM versions, for example) and what the best chunk size
586 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
587 // the loop, so we need to do this ourselves. (Hypothesis: reverse
588 // is troublesome because the sides can be aligned differently --
589 // will be, when the length is odd -- so there's no way of emitting
590 // pre- and postludes to use fully-aligned SIMD in the middle.)
593 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
595 if fast_unaligned && mem::size_of::<T>() == 1 {
596 // Use the llvm.bswap intrinsic to reverse u8s in a usize
597 let chunk = mem::size_of::<usize>();
598 while i + chunk - 1 < ln / 2 {
600 let pa: *mut T = self.get_unchecked_mut(i);
601 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
602 let va = ptr::read_unaligned(pa as *mut usize);
603 let vb = ptr::read_unaligned(pb as *mut usize);
604 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
605 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
611 if fast_unaligned && mem::size_of::<T>() == 2 {
612 // Use rotate-by-16 to reverse u16s in a u32
613 let chunk = mem::size_of::<u32>() / 2;
614 while i + chunk - 1 < ln / 2 {
616 let pa: *mut T = self.get_unchecked_mut(i);
617 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
618 let va = ptr::read_unaligned(pa as *mut u32);
619 let vb = ptr::read_unaligned(pb as *mut u32);
620 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
621 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
628 // Unsafe swap to avoid the bounds check in safe swap.
630 let pa: *mut T = self.get_unchecked_mut(i);
631 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
639 unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
640 where I: SliceIndex<[T]>
642 index.get_unchecked_mut(self)
646 fn as_mut_ptr(&mut self) -> *mut T {
647 self as *mut [T] as *mut T
// Membership test; delegates to `slice_contains` on the element type,
// a helper defined elsewhere in this file (presumably specialized,
// e.g. a memchr fast path for bytes — confirm against the full source).
651 fn contains(&self, x: &T) -> bool where T: PartialEq {
652 x.slice_contains(self)
// Prefix test: length guard first (so the `..n` index cannot panic and
// short-circuits cheaply), then element-wise slice equality. An empty
// needle always matches.
656 fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
657 let n = needle.len();
658 self.len() >= n && needle == &self[..n]
// Suffix test: the `m >= n` guard both rejects longer needles and keeps
// `m - n` from underflowing before the tail comparison. An empty needle
// always matches.
662 fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
663 let (m, n) = (self.len(), needle.len());
664 m >= n && needle == &self[m-n..]
667 fn binary_search(&self, x: &T) -> Result<usize, usize>
670 self.binary_search_by(|p| p.cmp(x))
// Rotates the slice left by `mid` places: element `mid` moves to index
// 0. Panics if `mid > len`. Delegates to `rotate::ptr_rotate`, which
// takes the split point as (left run length, pointer to split, right
// run length). The `unsafe` block wrapping the raw calls is elided
// from this view.
673 fn rotate_left(&mut self, mid: usize) {
674 assert!(mid <= self.len());
675 let k = self.len() - mid;
678 let p = self.as_mut_ptr();
679 rotate::ptr_rotate(mid, p.offset(mid as isize), k);
// Rotates the slice right by `k` places; expressed as the equivalent
// left rotation by `len - k`, so it reuses the same `ptr_rotate`
// primitive as `rotate_left`. Panics if `k > len`.
683 fn rotate_right(&mut self, k: usize) {
684 assert!(k <= self.len());
685 let mid = self.len() - k;
688 let p = self.as_mut_ptr();
689 rotate::ptr_rotate(mid, p.offset(mid as isize), k);
// Clones every element of `src` into `self`, element by element via
// `clone_from` (which may reuse existing allocations in each element).
// Panics if the lengths differ. The `for i in 0..len` loop header is
// elided from this view.
694 fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
695 assert!(self.len() == src.len(),
696 "destination and source slices have different lengths");
697 // NOTE: We need to explicitly slice them to the same length
698 // for bounds checking to be elided, and the optimizer will
699 // generate memcpy for simple cases (for example T = u8).
700 let len = self.len();
701 let src = &src[..len];
703 self[i].clone_from(&src[i]);
// Bulk-copies `src` into `self` with a single `memcpy`-style
// `copy_nonoverlapping`; valid because `T: Copy` needs no clone logic
// and `&mut self` / `&src` cannot alias. Panics if the lengths differ.
708 fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
709 assert!(self.len() == src.len(),
710 "destination and source slices have different lengths");
712 ptr::copy_nonoverlapping(
713 src.as_ptr(), self.as_mut_ptr(), self.len());
// Exchanges the contents of `self` and `src` in one pass via
// `swap_nonoverlapping`; the two `&mut` borrows guarantee the regions
// are disjoint. Panics if the lengths differ.
718 fn swap_with_slice(&mut self, src: &mut [T]) {
719 assert!(self.len() == src.len(),
720 "destination and source slices have different lengths");
722 ptr::swap_nonoverlapping(
723 self.as_mut_ptr(), src.as_mut_ptr(), self.len());
728 fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
729 where F: FnMut(&'a Self::Item) -> B,
732 self.binary_search_by(|k| f(k).cmp(b))
736 fn sort_unstable(&mut self)
737 where Self::Item: Ord
739 sort::quicksort(self, |a, b| a.lt(b));
743 fn sort_unstable_by<F>(&mut self, mut compare: F)
744 where F: FnMut(&Self::Item, &Self::Item) -> Ordering
746 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
750 fn sort_unstable_by_key<B, F>(&mut self, mut f: F)
751 where F: FnMut(&Self::Item) -> B,
754 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
758 #[stable(feature = "rust1", since = "1.0.0")]
759 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
760 impl<T, I> ops::Index<I> for [T]
761 where I: SliceIndex<[T]>
763 type Output = I::Output;
766 fn index(&self, index: I) -> &I::Output {
771 #[stable(feature = "rust1", since = "1.0.0")]
772 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
773 impl<T, I> ops::IndexMut<I> for [T]
774 where I: SliceIndex<[T]>
777 fn index_mut(&mut self, index: I) -> &mut I::Output {
778 index.index_mut(self)
// Out-of-line panic for an index past the end of a slice; never
// returns (`-> !`). Keeping the `panic!` formatting machinery out of
// the indexing fast paths lets those paths stay small and inlinable.
784 fn slice_index_len_fail(index: usize, len: usize) -> ! {
785 panic!("index {} out of range for slice of length {}", index, len);
// Out-of-line panic for an inverted range (`start > end`); never
// returns (`-> !`). Shared by the range-based `SliceIndex` impls below.
790 fn slice_index_order_fail(index: usize, end: usize) -> ! {
791 panic!("slice index starts at {} but ends at {}", index, end);
794 /// A helper trait used for indexing operations.
795 #[unstable(feature = "slice_get_slice", issue = "35729")]
796 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
797 pub trait SliceIndex<T: ?Sized> {
798 /// The output type returned by methods.
801 /// Returns a shared reference to the output at this location, if in
803 fn get(self, slice: &T) -> Option<&Self::Output>;
805 /// Returns a mutable reference to the output at this location, if in
807 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
809 /// Returns a shared reference to the output at this location, without
810 /// performing any bounds checking.
811 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
813 /// Returns a mutable reference to the output at this location, without
814 /// performing any bounds checking.
815 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
817 /// Returns a shared reference to the output at this location, panicking
818 /// if out of bounds.
819 fn index(self, slice: &T) -> &Self::Output;
821 /// Returns a mutable reference to the output at this location, panicking
822 /// if out of bounds.
823 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
826 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
827 impl<T> SliceIndex<[T]> for usize {
831 fn get(self, slice: &[T]) -> Option<&T> {
832 if self < slice.len() {
834 Some(self.get_unchecked(slice))
842 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
843 if self < slice.len() {
845 Some(self.get_unchecked_mut(slice))
853 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
854 &*slice.as_ptr().offset(self as isize)
858 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
859 &mut *slice.as_mut_ptr().offset(self as isize)
863 fn index(self, slice: &[T]) -> &T {
864 // NB: use intrinsic indexing
869 fn index_mut(self, slice: &mut [T]) -> &mut T {
870 // NB: use intrinsic indexing
875 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
876 impl<T> SliceIndex<[T]> for ops::Range<usize> {
880 fn get(self, slice: &[T]) -> Option<&[T]> {
881 if self.start > self.end || self.end > slice.len() {
885 Some(self.get_unchecked(slice))
891 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
892 if self.start > self.end || self.end > slice.len() {
896 Some(self.get_unchecked_mut(slice))
902 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
903 from_raw_parts(slice.as_ptr().offset(self.start as isize), self.end - self.start)
907 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
908 from_raw_parts_mut(slice.as_mut_ptr().offset(self.start as isize), self.end - self.start)
912 fn index(self, slice: &[T]) -> &[T] {
913 if self.start > self.end {
914 slice_index_order_fail(self.start, self.end);
915 } else if self.end > slice.len() {
916 slice_index_len_fail(self.end, slice.len());
919 self.get_unchecked(slice)
924 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
925 if self.start > self.end {
926 slice_index_order_fail(self.start, self.end);
927 } else if self.end > slice.len() {
928 slice_index_len_fail(self.end, slice.len());
931 self.get_unchecked_mut(slice)
936 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
937 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
941 fn get(self, slice: &[T]) -> Option<&[T]> {
942 (0..self.end).get(slice)
946 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
947 (0..self.end).get_mut(slice)
951 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
952 (0..self.end).get_unchecked(slice)
956 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
957 (0..self.end).get_unchecked_mut(slice)
961 fn index(self, slice: &[T]) -> &[T] {
962 (0..self.end).index(slice)
966 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
967 (0..self.end).index_mut(slice)
971 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
972 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
976 fn get(self, slice: &[T]) -> Option<&[T]> {
977 (self.start..slice.len()).get(slice)
981 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
982 (self.start..slice.len()).get_mut(slice)
986 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
987 (self.start..slice.len()).get_unchecked(slice)
991 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
992 (self.start..slice.len()).get_unchecked_mut(slice)
996 fn index(self, slice: &[T]) -> &[T] {
997 (self.start..slice.len()).index(slice)
1001 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1002 (self.start..slice.len()).index_mut(slice)
1006 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
1007 impl<T> SliceIndex<[T]> for ops::RangeFull {
1011 fn get(self, slice: &[T]) -> Option<&[T]> {
1016 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
1021 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
1026 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
1031 fn index(self, slice: &[T]) -> &[T] {
1036 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
// `a..=b` indexing, implemented by delegating to the half-open
// `a..b + 1` impl above. `end == usize::MAX` is handled specially in
// the checked paths because `end + 1` would overflow:
// `get`/`get_mut` return `None`, `index`/`index_mut` panic via the
// assert. NOTE(review): the unchecked paths perform `self.end + 1`
// without that guard — callers must uphold `end < usize::MAX` as part
// of the safety contract.
1042 #[stable(feature = "inclusive_range", since = "1.26.0")]
1043 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
1047 fn get(self, slice: &[T]) -> Option<&[T]> {
1048 if self.end == usize::max_value() { None }
1049 else { (self.start..self.end + 1).get(slice) }
1053 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
1054 if self.end == usize::max_value() { None }
1055 else { (self.start..self.end + 1).get_mut(slice) }
1059 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
1060 (self.start..self.end + 1).get_unchecked(slice)
1064 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
1065 (self.start..self.end + 1).get_unchecked_mut(slice)
1069 fn index(self, slice: &[T]) -> &[T] {
1070 assert!(self.end != usize::max_value(),
1071 "attempted to index slice up to maximum usize");
1072 (self.start..self.end + 1).index(slice)
1076 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1077 assert!(self.end != usize::max_value(),
1078 "attempted to index slice up to maximum usize");
1079 (self.start..self.end + 1).index_mut(slice)
1083 #[stable(feature = "inclusive_range", since = "1.26.0")]
1084 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
1088 fn get(self, slice: &[T]) -> Option<&[T]> {
1089 (0..=self.end).get(slice)
1093 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
1094 (0..=self.end).get_mut(slice)
1098 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
1099 (0..=self.end).get_unchecked(slice)
1103 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
1104 (0..=self.end).get_unchecked_mut(slice)
1108 fn index(self, slice: &[T]) -> &[T] {
1109 (0..=self.end).index(slice)
1113 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1114 (0..=self.end).index_mut(slice)
1118 ////////////////////////////////////////////////////////////////////////////////
1120 ////////////////////////////////////////////////////////////////////////////////
// The default shared slice is the empty slice: a `&'static [T]` literal
// coerces to any caller-chosen lifetime `'a` because it borrows no data.
1122 #[stable(feature = "rust1", since = "1.0.0")]
1123 impl<'a, T> Default for &'a [T] {
1124 /// Creates an empty slice.
1125 fn default() -> &'a [T] { &[] }
// Mutable counterpart of the impl above: an empty `&mut []` is sound at
// any lifetime since there are no elements to alias.
1128 #[stable(feature = "mut_slice_default", since = "1.5.0")]
1129 impl<'a, T> Default for &'a mut [T] {
1130 /// Creates a mutable empty slice.
1131 fn default() -> &'a mut [T] { &mut [] }
1138 #[stable(feature = "rust1", since = "1.0.0")]
1139 impl<'a, T> IntoIterator for &'a [T] {
1141 type IntoIter = Iter<'a, T>;
1143 fn into_iter(self) -> Iter<'a, T> {
1148 #[stable(feature = "rust1", since = "1.0.0")]
1149 impl<'a, T> IntoIterator for &'a mut [T] {
1150 type Item = &'a mut T;
1151 type IntoIter = IterMut<'a, T>;
1153 fn into_iter(self) -> IterMut<'a, T> {
1159 fn size_from_ptr<T>(_: *const T) -> usize {
1163 // The shared definition of the `Iter` and `IterMut` iterators
1164 macro_rules! iterator {
1165 (struct $name:ident -> $ptr:ty, $elem:ty, $mkref:ident) => {
1166 #[stable(feature = "rust1", since = "1.0.0")]
1167 impl<'a, T> Iterator for $name<'a, T> {
1171 fn next(&mut self) -> Option<$elem> {
1172 // could be implemented with slices, but this avoids bounds checks
1174 if mem::size_of::<T>() != 0 {
1175 assume(!self.ptr.is_null());
1176 assume(!self.end.is_null());
1178 if self.ptr == self.end {
1181 Some($mkref!(self.ptr.post_inc()))
1187 fn size_hint(&self) -> (usize, Option<usize>) {
1188 let exact = ptrdistance(self.ptr, self.end);
1189 (exact, Some(exact))
1193 fn count(self) -> usize {
1198 fn nth(&mut self, n: usize) -> Option<$elem> {
1199 // Call helper method. Can't put the definition here because mut versus const.
1204 fn last(mut self) -> Option<$elem> {
1209 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
1210 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
1212 // manual unrolling is needed when there are conditional exits from the loop
1213 let mut accum = init;
1215 while ptrdistance(self.ptr, self.end) >= 4 {
1216 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1217 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1218 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1219 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1221 while self.ptr != self.end {
1222 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1229 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
1230 where Fold: FnMut(Acc, Self::Item) -> Acc,
1232 // Let LLVM unroll this, rather than using the default
1233 // impl that would force the manual unrolling above
1234 let mut accum = init;
1235 while let Some(x) = self.next() {
1236 accum = f(accum, x);
1242 #[rustc_inherit_overflow_checks]
1243 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
1245 P: FnMut(Self::Item) -> bool,
1247 // The addition might panic on overflow
1248 // Use the len of the slice to hint optimizer to remove result index bounds check.
1249 let n = make_slice!(self.ptr, self.end).len();
1250 self.try_fold(0, move |i, x| {
1251 if predicate(x) { Err(i) }
1255 unsafe { assume(i < n) };
1261 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
1262 P: FnMut(Self::Item) -> bool,
1263 Self: Sized + ExactSizeIterator + DoubleEndedIterator
1265 // No need for an overflow check here, because `ExactSizeIterator`
1266 // implies that the number of elements fits into a `usize`.
1267 // Use the len of the slice to hint optimizer to remove result index bounds check.
1268 let n = make_slice!(self.ptr, self.end).len();
1269 self.try_rfold(n, move |i, x| {
1271 if predicate(x) { Err(i) }
1275 unsafe { assume(i < n) };
1281 #[stable(feature = "rust1", since = "1.0.0")]
1282 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
1284 fn next_back(&mut self) -> Option<$elem> {
1285 // could be implemented with slices, but this avoids bounds checks
1287 if mem::size_of::<T>() != 0 {
1288 assume(!self.ptr.is_null());
1289 assume(!self.end.is_null());
1291 if self.end == self.ptr {
1294 Some($mkref!(self.end.pre_dec()))
1300 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
1301 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
1303 // manual unrolling is needed when there are conditional exits from the loop
1304 let mut accum = init;
1306 while ptrdistance(self.ptr, self.end) >= 4 {
1307 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1308 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1309 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1310 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1312 while self.ptr != self.end {
1313 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1320 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
1321 where Fold: FnMut(Acc, Self::Item) -> Acc,
1323 // Let LLVM unroll this, rather than using the default
1324 // impl that would force the manual unrolling above
1325 let mut accum = init;
1326 while let Some(x) = self.next_back() {
1327 accum = f(accum, x);
1335 macro_rules! make_slice {
1336 ($start: expr, $end: expr) => {{
1338 let diff = ($end as usize).wrapping_sub(start as usize);
1339 if size_from_ptr(start) == 0 {
1340 // use a non-null pointer value
1341 unsafe { from_raw_parts(1 as *const _, diff) }
1343 let len = diff / size_from_ptr(start);
1344 unsafe { from_raw_parts(start, len) }
1349 macro_rules! make_mut_slice {
1350 ($start: expr, $end: expr) => {{
1352 let diff = ($end as usize).wrapping_sub(start as usize);
1353 if size_from_ptr(start) == 0 {
1354 // use a non-null pointer value
1355 unsafe { from_raw_parts_mut(1 as *mut _, diff) }
1357 let len = diff / size_from_ptr(start);
1358 unsafe { from_raw_parts_mut(start, len) }
1363 /// Immutable slice iterator
1365 /// This struct is created by the [`iter`] method on [slices].
1372 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
1373 /// let slice = &[1, 2, 3];
1375 /// // Then, we iterate over it:
1376 /// for element in slice.iter() {
1377 /// println!("{}", element);
1381 /// [`iter`]: ../../std/primitive.slice.html#method.iter
1382 /// [slices]: ../../std/primitive.slice.html
1383 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): the `ptr` and `end` pointer fields are elided from this view;
// only the variance/lifetime marker is visible.
1384 pub struct Iter<'a, T: 'a> {
// PhantomData ties the iterator to the borrowed slice's lifetime.
1387 _marker: marker::PhantomData<&'a T>,
1390 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1391 impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
1392 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Debug-print as the remaining subslice, e.g. `Iter([1, 2, 3])`.
1393 f.debug_tuple("Iter")
1394 .field(&self.as_slice())
1399 #[stable(feature = "rust1", since = "1.0.0")]
1400 unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
// `Iter` hands out `&T`, so it is Send exactly when `&T` is — i.e. `T: Sync`.
1401 #[stable(feature = "rust1", since = "1.0.0")]
1402 unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
1404 impl<'a, T> Iter<'a, T> {
1405 /// View the underlying data as a subslice of the original data.
1407 /// This has the same lifetime as the original slice, and so the
1408 /// iterator can continue to be used while this exists.
1415 /// // First, we declare a type which has the `iter` method to get the `Iter`
1416 /// // struct (&[usize here]):
1417 /// let slice = &[1, 2, 3];
1419 /// // Then, we get the iterator:
1420 /// let mut iter = slice.iter();
1421 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
1422 /// println!("{:?}", iter.as_slice());
1424 /// // Next, we move to the second element of the slice:
1426 /// // Now `as_slice` returns "[2, 3]":
1427 /// println!("{:?}", iter.as_slice());
1429 #[stable(feature = "iter_to_slice", since = "1.4.0")]
1430 pub fn as_slice(&self) -> &'a [T] {
1431 make_slice!(self.ptr, self.end)
1434 // Helper function for Iter::nth
1435 fn iter_nth(&mut self, n: usize) -> Option<&'a T> {
1436 match self.as_slice().get(n) {
1437 Some(elem_ref) => unsafe {
// Advance past the returned element (n + 1 steps).
1438 self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
// n is out of range: exhaust the iterator (None arm; braces elided).
1442 self.ptr = self.end;
// Generate next/size_hint/fold etc. for Iter via the shared macro.
1449 iterator!{struct Iter -> *const T, &'a T, make_ref}
1451 #[stable(feature = "rust1", since = "1.0.0")]
1452 impl<'a, T> ExactSizeIterator for Iter<'a, T> {
1453 fn is_empty(&self) -> bool {
1454 self.ptr == self.end
1458 #[stable(feature = "fused", since = "1.26.0")]
1459 impl<'a, T> FusedIterator for Iter<'a, T> {}
1461 #[unstable(feature = "trusted_len", issue = "37572")]
1462 unsafe impl<'a, T> TrustedLen for Iter<'a, T> {}
// Manual Clone: cloning the iterator is just copying its two pointers —
// the underlying data is only borrowed, so no `T: Clone` bound is needed.
1464 #[stable(feature = "rust1", since = "1.0.0")]
1465 impl<'a, T> Clone for Iter<'a, T> {
1466 fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
1469 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
1470 impl<'a, T> AsRef<[T]> for Iter<'a, T> {
1471 fn as_ref(&self) -> &[T] {
1476 /// Mutable slice iterator.
1478 /// This struct is created by the [`iter_mut`] method on [slices].
1485 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
1486 /// // struct (&[usize here]):
1487 /// let mut slice = &mut [1, 2, 3];
1489 /// // Then, we iterate over it and increment each element value:
1490 /// for element in slice.iter_mut() {
1494 /// // We now have "[2, 3, 4]":
1495 /// println!("{:?}", slice);
1498 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
1499 /// [slices]: ../../std/primitive.slice.html
1500 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): as with `Iter`, the `ptr`/`end` fields are elided here.
1501 pub struct IterMut<'a, T: 'a> {
// `&'a mut T` marker makes IterMut invariant over T, as required for
// handing out unique references.
1504 _marker: marker::PhantomData<&'a mut T>,
1507 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1508 impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
1509 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1510 f.debug_tuple("IterMut")
// Use the read-only slice macro: Debug must not require `&mut self`.
1511 .field(&make_slice!(self.ptr, self.end))
1516 #[stable(feature = "rust1", since = "1.0.0")]
1517 unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
// Unlike `Iter`, Send needs `T: Send`: this iterator yields `&mut T`.
1518 #[stable(feature = "rust1", since = "1.0.0")]
1519 unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
1521 impl<'a, T> IterMut<'a, T> {
1522 /// View the underlying data as a subslice of the original data.
1524 /// To avoid creating `&mut` references that alias, this is forced
1525 /// to consume the iterator. Consider using the `Slice` and
1526 /// `SliceMut` implementations for obtaining slices with more
1527 /// restricted lifetimes that do not consume the iterator.
1534 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
1535 /// // struct (&[usize here]):
1536 /// let mut slice = &mut [1, 2, 3];
1539 /// // Then, we get the iterator:
1540 /// let mut iter = slice.iter_mut();
1541 /// // We move to next element:
1543 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
1544 /// println!("{:?}", iter.into_slice());
1547 /// // Now let's modify a value of the slice:
1549 /// // First we get back the iterator:
1550 /// let mut iter = slice.iter_mut();
1551 /// // We change the value of the first element of the slice returned by the `next` method:
1552 /// *iter.next().unwrap() += 1;
1554 /// // Now slice is "[2, 2, 3]":
1555 /// println!("{:?}", slice);
1557 #[stable(feature = "iter_to_slice", since = "1.4.0")]
1558 pub fn into_slice(self) -> &'a mut [T] {
1559 make_mut_slice!(self.ptr, self.end)
1562 // Helper function for IterMut::nth
1563 fn iter_nth(&mut self, n: usize) -> Option<&'a mut T> {
1564 match make_mut_slice!(self.ptr, self.end).get_mut(n) {
1565 Some(elem_ref) => unsafe {
// Advance past the returned element (n + 1 steps).
1566 self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
// n is out of range: exhaust the iterator (None arm; braces elided).
1570 self.ptr = self.end;
// Generate the Iterator machinery for IterMut via the shared macro.
1577 iterator!{struct IterMut -> *mut T, &'a mut T, make_ref_mut}
1579 #[stable(feature = "rust1", since = "1.0.0")]
1580 impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
1581 fn is_empty(&self) -> bool {
1582 self.ptr == self.end
1586 #[stable(feature = "fused", since = "1.26.0")]
1587 impl<'a, T> FusedIterator for IterMut<'a, T> {}
1589 #[unstable(feature = "trusted_len", issue = "37572")]
1590 unsafe impl<'a, T> TrustedLen for IterMut<'a, T> {}
1593 // Return the number of elements of `T` from `start` to `end`.
1594 // Return the arithmetic difference if `T` is zero size.
1596 fn ptrdistance<T>(start: *const T, end: *const T) -> usize {
// `offset_to` returns None for zero-sized T (pre-1.26 unstable API);
// in that case fall back to the raw address difference.
1597 match start.offset_to(end) {
1598 Some(x) => x as usize,
1599 None => (end as usize).wrapping_sub(start as usize),
1603 // Extension methods for raw pointers, used by the iterators
1604 trait PointerExt : Copy {
1605 unsafe fn slice_offset(self, i: isize) -> Self;
1607 /// Increments `self` by 1, but returns the old value.
1609 unsafe fn post_inc(&mut self) -> Self {
1610 let current = *self;
1611 *self = self.slice_offset(1);
// (returns `current` — the `return` line is elided from this view)
1615 /// Decrements `self` by 1, and returns the new value.
1617 unsafe fn pre_dec(&mut self) -> Self {
1618 *self = self.slice_offset(-1);
// Forward `slice_offset` to the macro for both pointer mutabilities.
1623 impl<T> PointerExt for *const T {
1625 unsafe fn slice_offset(self, i: isize) -> Self {
1626 slice_offset!(self, i)
1630 impl<T> PointerExt for *mut T {
1632 unsafe fn slice_offset(self, i: isize) -> Self {
1633 slice_offset!(self, i)
1637 /// An internal abstraction over the splitting iterators, so that
1638 /// splitn, splitn_mut etc can be implemented once.
1640 trait SplitIter: DoubleEndedIterator {
1641 /// Marks the underlying iterator as complete, extracting the remaining
1642 /// portion of the slice.
1643 fn finish(&mut self) -> Option<Self::Item>;
1646 /// An iterator over subslices separated by elements that match a predicate
1649 /// This struct is created by the [`split`] method on [slices].
1651 /// [`split`]: ../../std/primitive.slice.html#method.split
1652 /// [slices]: ../../std/primitive.slice.html
1653 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): the `v`, `pred`, `finished` fields are elided from this view;
// the Debug/Clone impls below show they exist.
1654 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
1660 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1661 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for Split<'a, T, P> where P: FnMut(&T) -> bool {
1662 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// The predicate `P` has no Debug bound, so only `v` and `finished`
// are printed.
1663 f.debug_struct("Split")
1664 .field("v", &self.v)
1665 .field("finished", &self.finished)
1670 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
1671 #[stable(feature = "rust1", since = "1.0.0")]
1672 impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool {
1673 fn clone(&self) -> Split<'a, T, P> {
1676 pred: self.pred.clone(),
1677 finished: self.finished,
1682 #[stable(feature = "rust1", since = "1.0.0")]
1683 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
1684 type Item = &'a [T];
1687 fn next(&mut self) -> Option<&'a [T]> {
1688 if self.finished { return None; }
// Find the next separator; yield everything before it and skip it.
1690 match self.v.iter().position(|x| (self.pred)(x)) {
1691 None => self.finish(),
1693 let ret = Some(&self.v[..idx]);
1694 self.v = &self.v[idx + 1..];
1701 fn size_hint(&self) -> (usize, Option<usize>) {
// At least one subslice remains; at most len+1 (every element a separator).
1705 (1, Some(self.v.len() + 1))
1710 #[stable(feature = "rust1", since = "1.0.0")]
1711 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
1713 fn next_back(&mut self) -> Option<&'a [T]> {
1714 if self.finished { return None; }
// Mirror of `next`: search from the back with rposition.
1716 match self.v.iter().rposition(|x| (self.pred)(x)) {
1717 None => self.finish(),
1719 let ret = Some(&self.v[idx + 1..]);
1720 self.v = &self.v[..idx];
1727 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
1729 fn finish(&mut self) -> Option<&'a [T]> {
// Yield the tail exactly once, then report exhaustion forever.
1730 if self.finished { None } else { self.finished = true; Some(self.v) }
1734 #[stable(feature = "fused", since = "1.26.0")]
1735 impl<'a, T, P> FusedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {}
1737 /// An iterator over the subslices of the vector which are separated
1738 /// by elements that match `pred`.
1740 /// This struct is created by the [`split_mut`] method on [slices].
1742 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
1743 /// [slices]: ../../std/primitive.slice.html
1744 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): fields (`v: &'a mut [T]`, `pred`, `finished`) elided here.
1745 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
1751 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1752 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1753 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1754 f.debug_struct("SplitMut")
1755 .field("v", &self.v)
1756 .field("finished", &self.finished)
1761 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1763 fn finish(&mut self) -> Option<&'a mut [T]> {
1767 self.finished = true;
// Move the remaining subslice out, leaving an empty slice behind, so
// no aliasing `&mut` can be produced afterwards.
1768 Some(mem::replace(&mut self.v, &mut []))
1773 #[stable(feature = "rust1", since = "1.0.0")]
1774 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1775 type Item = &'a mut [T];
1778 fn next(&mut self) -> Option<&'a mut [T]> {
1779 if self.finished { return None; }
1781 let idx_opt = { // work around borrowck limitations
1782 let pred = &mut self.pred;
1783 self.v.iter().position(|x| (*pred)(x))
1786 None => self.finish(),
// Take the slice out of `self.v` so it can be split by-value.
1788 let tmp = mem::replace(&mut self.v, &mut []);
1789 let (head, tail) = tmp.split_at_mut(idx);
// Skip the separator element itself.
1790 self.v = &mut tail[1..];
1797 fn size_hint(&self) -> (usize, Option<usize>) {
1801 // if the predicate doesn't match anything, we yield one slice
1802 // if it matches every element, we yield len+1 empty slices.
1803 (1, Some(self.v.len() + 1))
1808 #[stable(feature = "rust1", since = "1.0.0")]
1809 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
1810 P: FnMut(&T) -> bool,
1813 fn next_back(&mut self) -> Option<&'a mut [T]> {
1814 if self.finished { return None; }
1816 let idx_opt = { // work around borrowck limitations
1817 let pred = &mut self.pred;
1818 self.v.iter().rposition(|x| (*pred)(x))
1821 None => self.finish(),
1823 let tmp = mem::replace(&mut self.v, &mut []);
1824 let (head, tail) = tmp.split_at_mut(idx);
// Yield the part after the separator; `head` becomes the new `v`
// (assignment line elided from this view).
1826 Some(&mut tail[1..])
1832 #[stable(feature = "fused", since = "1.26.0")]
1833 impl<'a, T, P> FusedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
// `RSplit`/`RSplitMut` simply wrap `Split`/`SplitMut` and swap the direction
// of every operation (front calls forward to the inner iterator's back).
1835 /// An iterator over subslices separated by elements that match a predicate
1836 /// function, starting from the end of the slice.
1838 /// This struct is created by the [`rsplit`] method on [slices].
1840 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
1841 /// [slices]: ../../std/primitive.slice.html
1842 #[unstable(feature = "slice_rsplit", issue = "41020")]
1843 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
1844 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
1845 inner: Split<'a, T, P>
1848 #[unstable(feature = "slice_rsplit", issue = "41020")]
1849 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1850 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1851 f.debug_struct("RSplit")
1852 .field("v", &self.inner.v)
1853 .field("finished", &self.inner.finished)
1858 #[unstable(feature = "slice_rsplit", issue = "41020")]
1859 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1860 type Item = &'a [T];
1863 fn next(&mut self) -> Option<&'a [T]> {
// Reversed: the front of RSplit is the back of the inner Split.
1864 self.inner.next_back()
1868 fn size_hint(&self) -> (usize, Option<usize>) {
1869 self.inner.size_hint()
1873 #[unstable(feature = "slice_rsplit", issue = "41020")]
1874 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1876 fn next_back(&mut self) -> Option<&'a [T]> {
1881 #[unstable(feature = "slice_rsplit", issue = "41020")]
1882 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1884 fn finish(&mut self) -> Option<&'a [T]> {
1889 #[unstable(feature = "slice_rsplit", issue = "41020")]
1890 impl<'a, T, P> FusedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {}
1892 /// An iterator over the subslices of the vector which are separated
1893 /// by elements that match `pred`, starting from the end of the slice.
1895 /// This struct is created by the [`rsplit_mut`] method on [slices].
1897 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
1898 /// [slices]: ../../std/primitive.slice.html
1899 #[unstable(feature = "slice_rsplit", issue = "41020")]
1900 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
1901 inner: SplitMut<'a, T, P>
1904 #[unstable(feature = "slice_rsplit", issue = "41020")]
1905 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1906 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1907 f.debug_struct("RSplitMut")
1908 .field("v", &self.inner.v)
1909 .field("finished", &self.inner.finished)
1914 #[unstable(feature = "slice_rsplit", issue = "41020")]
1915 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1917 fn finish(&mut self) -> Option<&'a mut [T]> {
1922 #[unstable(feature = "slice_rsplit", issue = "41020")]
1923 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1924 type Item = &'a mut [T];
1927 fn next(&mut self) -> Option<&'a mut [T]> {
1928 self.inner.next_back()
1932 fn size_hint(&self) -> (usize, Option<usize>) {
1933 self.inner.size_hint()
1937 #[unstable(feature = "slice_rsplit", issue = "41020")]
1938 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
1939 P: FnMut(&T) -> bool,
1942 fn next_back(&mut self) -> Option<&'a mut [T]> {
1947 #[unstable(feature = "slice_rsplit", issue = "41020")]
1948 impl<'a, T, P> FusedIterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
1950 /// An private iterator over subslices separated by elements that
1951 /// match a predicate function, splitting at most a fixed number of
// NOTE(review): the `iter` and `count` fields are elided from this view.
1954 struct GenericSplitN<I> {
1959 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
1963 fn next(&mut self) -> Option<T> {
// `match self.count` (line elided): 0 => None; 1 => yield the whole
// remainder via finish(); otherwise keep splitting normally.
1966 1 => { self.count -= 1; self.iter.finish() }
1967 _ => { self.count -= 1; self.iter.next() }
1972 fn size_hint(&self) -> (usize, Option<usize>) {
1973 let (lower, upper_opt) = self.iter.size_hint();
// The split budget caps the upper bound.
1974 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
// The four public SplitN-family iterators are thin newtypes over
// GenericSplitN with the appropriate inner splitting iterator.
1978 /// An iterator over subslices separated by elements that match a predicate
1979 /// function, limited to a given number of splits.
1981 /// This struct is created by the [`splitn`] method on [slices].
1983 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
1984 /// [slices]: ../../std/primitive.slice.html
1985 #[stable(feature = "rust1", since = "1.0.0")]
1986 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
1987 inner: GenericSplitN<Split<'a, T, P>>
1990 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1991 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitN<'a, T, P> where P: FnMut(&T) -> bool {
1992 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1993 f.debug_struct("SplitN")
1994 .field("inner", &self.inner)
1999 /// An iterator over subslices separated by elements that match a
2000 /// predicate function, limited to a given number of splits, starting
2001 /// from the end of the slice.
2003 /// This struct is created by the [`rsplitn`] method on [slices].
2005 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
2006 /// [slices]: ../../std/primitive.slice.html
2007 #[stable(feature = "rust1", since = "1.0.0")]
2008 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
2009 inner: GenericSplitN<RSplit<'a, T, P>>
2012 #[stable(feature = "core_impl_debug", since = "1.9.0")]
2013 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitN<'a, T, P> where P: FnMut(&T) -> bool {
2014 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2015 f.debug_struct("RSplitN")
2016 .field("inner", &self.inner)
2021 /// An iterator over subslices separated by elements that match a predicate
2022 /// function, limited to a given number of splits.
2024 /// This struct is created by the [`splitn_mut`] method on [slices].
2026 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
2027 /// [slices]: ../../std/primitive.slice.html
2028 #[stable(feature = "rust1", since = "1.0.0")]
2029 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
2030 inner: GenericSplitN<SplitMut<'a, T, P>>
2033 #[stable(feature = "core_impl_debug", since = "1.9.0")]
2034 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
2035 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2036 f.debug_struct("SplitNMut")
2037 .field("inner", &self.inner)
2042 /// An iterator over subslices separated by elements that match a
2043 /// predicate function, limited to a given number of splits, starting
2044 /// from the end of the slice.
2046 /// This struct is created by the [`rsplitn_mut`] method on [slices].
2048 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
2049 /// [slices]: ../../std/primitive.slice.html
2050 #[stable(feature = "rust1", since = "1.0.0")]
2051 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
2052 inner: GenericSplitN<RSplitMut<'a, T, P>>
2055 #[stable(feature = "core_impl_debug", since = "1.9.0")]
2056 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
2057 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2058 f.debug_struct("RSplitNMut")
2059 .field("inner", &self.inner)
// Generate an Iterator (and FusedIterator) impl for a SplitN-family wrapper
// by delegating every call to its `inner` GenericSplitN.
2064 macro_rules! forward_iterator {
2065 ($name:ident: $elem:ident, $iter_of:ty) => {
2066 #[stable(feature = "rust1", since = "1.0.0")]
2067 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
2068 P: FnMut(&T) -> bool
2070 type Item = $iter_of;
2073 fn next(&mut self) -> Option<$iter_of> {
// (delegation body `self.inner.next()` elided from this view)
2078 fn size_hint(&self) -> (usize, Option<usize>) {
2079 self.inner.size_hint()
2083 #[stable(feature = "fused", since = "1.26.0")]
2084 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
2085 where P: FnMut(&T) -> bool {}
// Instantiate the forwarding impls for all four bounded-split iterators.
2089 forward_iterator! { SplitN: T, &'a [T] }
2090 forward_iterator! { RSplitN: T, &'a [T] }
2091 forward_iterator! { SplitNMut: T, &'a mut [T] }
2092 forward_iterator! { RSplitNMut: T, &'a mut [T] }
2094 /// An iterator over overlapping subslices of length `size`.
2096 /// This struct is created by the [`windows`] method on [slices].
2098 /// [`windows`]: ../../std/primitive.slice.html#method.windows
2099 /// [slices]: ../../std/primitive.slice.html
2101 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): fields (`v: &'a [T]`, `size: usize`) elided from this view.
2102 pub struct Windows<'a, T:'a> {
2107 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
2108 #[stable(feature = "rust1", since = "1.0.0")]
2109 impl<'a, T> Clone for Windows<'a, T> {
2110 fn clone(&self) -> Windows<'a, T> {
2118 #[stable(feature = "rust1", since = "1.0.0")]
2119 impl<'a, T> Iterator for Windows<'a, T> {
2120 type Item = &'a [T];
2123 fn next(&mut self) -> Option<&'a [T]> {
// Not enough elements left for a full window (None arm elided).
2124 if self.size > self.v.len() {
2127 let ret = Some(&self.v[..self.size]);
// Slide the window forward by one element; windows overlap.
2128 self.v = &self.v[1..];
2134 fn size_hint(&self) -> (usize, Option<usize>) {
2135 if self.size > self.v.len() {
// Exact remaining count: len - size + 1 windows.
2138 let size = self.v.len() - self.size + 1;
2144 fn count(self) -> usize {
2149 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// overflowing_add guards against `size + n` wrapping around.
2150 let (end, overflow) = self.size.overflowing_add(n);
2151 if end > self.v.len() || overflow {
2155 let nth = &self.v[n..end];
2156 self.v = &self.v[n+1..];
2162 fn last(self) -> Option<Self::Item> {
2163 if self.size > self.v.len() {
2166 let start = self.v.len() - self.size;
2167 Some(&self.v[start..])
2172 #[stable(feature = "rust1", since = "1.0.0")]
2173 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
2175 fn next_back(&mut self) -> Option<&'a [T]> {
2176 if self.size > self.v.len() {
// Yield the rightmost window, then shrink the view by one from the right.
2179 let ret = Some(&self.v[self.v.len()-self.size..]);
2180 self.v = &self.v[..self.v.len()-1];
2186 #[stable(feature = "rust1", since = "1.0.0")]
2187 impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
2189 #[stable(feature = "fused", since = "1.26.0")]
2190 impl<'a, T> FusedIterator for Windows<'a, T> {}
// SAFETY relies on `i` being in-bounds per the TrustedRandomAccess contract.
2193 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
2194 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
2195 from_raw_parts(self.v.as_ptr().offset(i as isize), self.size)
2197 fn may_have_side_effect() -> bool { false }
2200 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
2203 /// When the slice len is not evenly divided by the chunk size, the last slice
2204 /// of the iteration will be the remainder.
2206 /// This struct is created by the [`chunks`] method on [slices].
2208 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
2209 /// [slices]: ../../std/primitive.slice.html
2211 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): fields (`v: &'a [T]`, `chunk_size: usize`) elided from this view.
2212 pub struct Chunks<'a, T:'a> {
2217 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
2218 #[stable(feature = "rust1", since = "1.0.0")]
2219 impl<'a, T> Clone for Chunks<'a, T> {
2220 fn clone(&self) -> Chunks<'a, T> {
2223 chunk_size: self.chunk_size,
2228 #[stable(feature = "rust1", since = "1.0.0")]
2229 impl<'a, T> Iterator for Chunks<'a, T> {
2230 type Item = &'a [T];
2233 fn next(&mut self) -> Option<&'a [T]> {
2234 if self.v.is_empty() {
// min() lets the final chunk be shorter than chunk_size.
2237 let chunksz = cmp::min(self.v.len(), self.chunk_size);
2238 let (fst, snd) = self.v.split_at(chunksz);
2245 fn size_hint(&self) -> (usize, Option<usize>) {
2246 if self.v.is_empty() {
// Round up: a partial trailing chunk still counts as one chunk.
2249 let n = self.v.len() / self.chunk_size;
2250 let rem = self.v.len() % self.chunk_size;
2251 let n = if rem > 0 { n+1 } else { n };
2257 fn count(self) -> usize {
2262 fn nth(&mut self, n: usize) -> Option<Self::Item> {
2263 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2264 if start >= self.v.len() || overflow {
// checked_add caps the chunk end at the slice length.
2268 let end = match start.checked_add(self.chunk_size) {
2269 Some(sum) => cmp::min(self.v.len(), sum),
2270 None => self.v.len(),
2272 let nth = &self.v[start..end];
2273 self.v = &self.v[end..];
2279 fn last(self) -> Option<Self::Item> {
2280 if self.v.is_empty() {
// Start index of the final (possibly short) chunk.
2283 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
2284 Some(&self.v[start..])
2289 #[stable(feature = "rust1", since = "1.0.0")]
2290 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
2292 fn next_back(&mut self) -> Option<&'a [T]> {
2293 if self.v.is_empty() {
// The trailing partial chunk (if any) is yielded first from the back.
2296 let remainder = self.v.len() % self.chunk_size;
2297 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
2298 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
2305 #[stable(feature = "rust1", since = "1.0.0")]
2306 impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
2308 #[stable(feature = "fused", since = "1.26.0")]
2309 impl<'a, T> FusedIterator for Chunks<'a, T> {}
2312 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
2313 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
2314 let start = i * self.chunk_size;
2315 let end = match start.checked_add(self.chunk_size) {
2316 None => self.v.len(),
2317 Some(end) => cmp::min(end, self.v.len()),
2319 from_raw_parts(self.v.as_ptr().offset(start as isize), end - start)
2321 fn may_have_side_effect() -> bool { false }
2324 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
2325 /// elements at a time). When the slice len is not evenly divided by the chunk
2326 /// size, the last slice of the iteration will be the remainder.
2328 /// This struct is created by the [`chunks_mut`] method on [slices].
2330 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
2331 /// [slices]: ../../std/primitive.slice.html
2333 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): fields (`v: &'a mut [T]`, `chunk_size`) elided from this view.
// No Clone impl — cloning would alias the `&mut` slice.
2334 pub struct ChunksMut<'a, T:'a> {
2339 #[stable(feature = "rust1", since = "1.0.0")]
2340 impl<'a, T> Iterator for ChunksMut<'a, T> {
2341 type Item = &'a mut [T];
2344 fn next(&mut self) -> Option<&'a mut [T]> {
2345 if self.v.is_empty() {
2348 let sz = cmp::min(self.v.len(), self.chunk_size);
// Take the slice out of `self.v` so it can be split by-value without
// creating aliasing mutable borrows.
2349 let tmp = mem::replace(&mut self.v, &mut []);
2350 let (head, tail) = tmp.split_at_mut(sz);
2357 fn size_hint(&self) -> (usize, Option<usize>) {
2358 if self.v.is_empty() {
// Round up for the partial trailing chunk.
2361 let n = self.v.len() / self.chunk_size;
2362 let rem = self.v.len() % self.chunk_size;
2363 let n = if rem > 0 { n + 1 } else { n };
2369 fn count(self) -> usize {
2374 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
2375 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2376 if start >= self.v.len() || overflow {
2380 let end = match start.checked_add(self.chunk_size) {
2381 Some(sum) => cmp::min(self.v.len(), sum),
2382 None => self.v.len(),
// Two splits carve out exactly the n-th chunk: [.. start][start..end][end ..].
2384 let tmp = mem::replace(&mut self.v, &mut []);
2385 let (head, tail) = tmp.split_at_mut(end);
2386 let (_, nth) = head.split_at_mut(start);
2393 fn last(self) -> Option<Self::Item> {
2394 if self.v.is_empty() {
2397 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
2398 Some(&mut self.v[start..])
2403 #[stable(feature = "rust1", since = "1.0.0")]
2404 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
2406 fn next_back(&mut self) -> Option<&'a mut [T]> {
2407 if self.v.is_empty() {
// Back iteration yields the (possibly short) trailing chunk first.
2410 let remainder = self.v.len() % self.chunk_size;
2411 let sz = if remainder != 0 { remainder } else { self.chunk_size };
2412 let tmp = mem::replace(&mut self.v, &mut []);
2413 let tmp_len = tmp.len();
2414 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
2421 #[stable(feature = "rust1", since = "1.0.0")]
2422 impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
2424 #[stable(feature = "fused", since = "1.26.0")]
2425 impl<'a, T> FusedIterator for ChunksMut<'a, T> {}
2428 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
2429 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
2430 let start = i * self.chunk_size;
2431 let end = match start.checked_add(self.chunk_size) {
2432 None => self.v.len(),
2433 Some(end) => cmp::min(end, self.v.len()),
2435 from_raw_parts_mut(self.v.as_mut_ptr().offset(start as isize), end - start)
2437 fn may_have_side_effect() -> bool { false }
2440 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
2443 /// When the slice len is not evenly divided by the chunk size, the last
2444 /// up to `chunk_size-1` elements will be omitted.
2446 /// This struct is created by the [`exact_chunks`] method on [slices].
2448 /// [`exact_chunks`]: ../../std/primitive.slice.html#method.exact_chunks
2449 /// [slices]: ../../std/primitive.slice.html
2451 #[unstable(feature = "exact_chunks", issue = "47115")]
// NOTE(review): fields (`v`, `chunk_size`) elided; unlike `Chunks`, every
// yielded chunk is exactly `chunk_size` long — the tail is dropped, which
// lets LLVM elide bounds checks on the chunks.
2452 pub struct ExactChunks<'a, T:'a> {
2457 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
2458 #[unstable(feature = "exact_chunks", issue = "47115")]
2459 impl<'a, T> Clone for ExactChunks<'a, T> {
2460 fn clone(&self) -> ExactChunks<'a, T> {
2463 chunk_size: self.chunk_size,
2468 #[unstable(feature = "exact_chunks", issue = "47115")]
2469 impl<'a, T> Iterator for ExactChunks<'a, T> {
2470 type Item = &'a [T];
2473 fn next(&mut self) -> Option<&'a [T]> {
// Fewer than chunk_size elements left: done (tail is discarded).
2474 if self.v.len() < self.chunk_size {
2477 let (fst, snd) = self.v.split_at(self.chunk_size)
2484 fn size_hint(&self) -> (usize, Option<usize>) {
// Integer division: only full chunks count.
2485 let n = self.v.len() / self.chunk_size;
2490 fn count(self) -> usize {
2495 fn nth(&mut self, n: usize) -> Option<Self::Item> {
2496 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2497 if start >= self.v.len() || overflow {
2501 let (_, snd) = self.v.split_at(start);
2508 fn last(mut self) -> Option<Self::Item> {
2513 #[unstable(feature = "exact_chunks", issue = "47115")]
2514 impl<'a, T> DoubleEndedIterator for ExactChunks<'a, T> {
2516 fn next_back(&mut self) -> Option<&'a [T]> {
2517 if self.v.len() < self.chunk_size {
2520 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
2527 #[unstable(feature = "exact_chunks", issue = "47115")]
2528 impl<'a, T> ExactSizeIterator for ExactChunks<'a, T> {
2529 fn is_empty(&self) -> bool {
2534 #[unstable(feature = "exact_chunks", issue = "47115")]
2535 impl<'a, T> FusedIterator for ExactChunks<'a, T> {}
// SAFETY: caller guarantees `i` is a valid chunk index, so start + chunk_size
// is within the slice.
2538 unsafe impl<'a, T> TrustedRandomAccess for ExactChunks<'a, T> {
2539 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
2540 let start = i * self.chunk_size;
2541 from_raw_parts(self.v.as_ptr().offset(start as isize), self.chunk_size)
2543 fn may_have_side_effect() -> bool { false }
2546 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
2547 /// elements at a time). When the slice len is not evenly divided by the chunk
2548 /// size, the last up to `chunk_size-1` elements will be omitted.
2550 /// This struct is created by the [`exact_chunks_mut`] method on [slices].
2552 /// [`exact_chunks_mut`]: ../../std/primitive.slice.html#method.exact_chunks_mut
2553 /// [slices]: ../../std/primitive.slice.html
2555 #[unstable(feature = "exact_chunks", issue = "47115")]
// NOTE(review): fields elided; mutable twin of `ExactChunks`, no Clone (would
// alias the `&mut` slice).
2556 pub struct ExactChunksMut<'a, T:'a> {
2561 #[unstable(feature = "exact_chunks", issue = "47115")]
2562 impl<'a, T> Iterator for ExactChunksMut<'a, T> {
2563 type Item = &'a mut [T];
2566 fn next(&mut self) -> Option<&'a mut [T]> {
2567 if self.v.len() < self.chunk_size {
// Move the slice out of `self.v` before splitting to avoid aliasing
// mutable borrows.
2570 let tmp = mem::replace(&mut self.v, &mut []);
2571 let (head, tail) = tmp.split_at_mut(self.chunk_size);
2578 fn size_hint(&self) -> (usize, Option<usize>) {
// Only full chunks count; the tail is never yielded.
2579 let n = self.v.len() / self.chunk_size;
2584 fn count(self) -> usize {
2589 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
2590 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2591 if start >= self.v.len() || overflow {
2595 let tmp = mem::replace(&mut self.v, &mut []);
2596 let (_, snd) = tmp.split_at_mut(start);
2603 fn last(mut self) -> Option<Self::Item> {
2608 #[unstable(feature = "exact_chunks", issue = "47115")]
2609 impl<'a, T> DoubleEndedIterator for ExactChunksMut<'a, T> {
2611 fn next_back(&mut self) -> Option<&'a mut [T]> {
2612 if self.v.len() < self.chunk_size {
2615 let tmp = mem::replace(&mut self.v, &mut []);
2616 let tmp_len = tmp.len();
2617 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
2624 #[unstable(feature = "exact_chunks", issue = "47115")]
2625 impl<'a, T> ExactSizeIterator for ExactChunksMut<'a, T> {
2626 fn is_empty(&self) -> bool {
2631 #[unstable(feature = "exact_chunks", issue = "47115")]
2632 impl<'a, T> FusedIterator for ExactChunksMut<'a, T> {}
2635 unsafe impl<'a, T> TrustedRandomAccess for ExactChunksMut<'a, T> {
2636 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
2637 let start = i * self.chunk_size;
2638 from_raw_parts_mut(self.v.as_mut_ptr().offset(start as isize), self.chunk_size)
2640 fn may_have_side_effect() -> bool { false }
2647 /// Forms a slice from a pointer and a length.
2649 /// The `len` argument is the number of **elements**, not the number of bytes.
2653 /// This function is unsafe as there is no guarantee that the given pointer is
2654 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
2655 /// lifetime for the returned slice.
2657 /// `p` must be non-null, even for zero-length slices, because non-zero bits
2658 /// are required to distinguish between a zero-length slice within `Some()`
2659 /// from `None`. `p` can be a bogus non-dereferencable pointer, such as `0x1`,
2660 /// for zero-length slices, though.
2664 /// The lifetime for the returned slice is inferred from its usage. To
2665 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
2666 /// source lifetime is safe in the context, such as by providing a helper
2667 /// function taking the lifetime of a host value for the slice, or by explicit
2675 /// // manifest a slice out of thin air!
2676 /// let ptr = 0x1234 as *const usize;
2679 /// let slice = slice::from_raw_parts(ptr, amt);
2683 #[stable(feature = "rust1", since = "1.0.0")]
2684 pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] {
2685 mem::transmute(Repr { data: p, len: len })
2688 /// Performs the same functionality as `from_raw_parts`, except that a mutable
2689 /// slice is returned.
2691 /// This function is unsafe for the same reasons as `from_raw_parts`, as well
2692 /// as not being able to provide a non-aliasing guarantee of the returned
2693 /// mutable slice. `p` must be non-null even for zero-length slices as with
2694 /// `from_raw_parts`.
2696 #[stable(feature = "rust1", since = "1.0.0")]
2697 pub unsafe fn from_raw_parts_mut<'a, T>(p: *mut T, len: usize) -> &'a mut [T] {
2698 mem::transmute(Repr { data: p, len: len })
2701 /// Converts a reference to T into a slice of length 1 (without copying).
2702 #[unstable(feature = "from_ref", issue = "45703")]
2703 pub fn from_ref<T>(s: &T) -> &[T] {
2705 from_raw_parts(s, 1)
2709 /// Converts a reference to T into a slice of length 1 (without copying).
2710 #[unstable(feature = "from_ref", issue = "45703")]
2711 pub fn from_ref_mut<T>(s: &mut T) -> &mut [T] {
2713 from_raw_parts_mut(s, 1)
2717 // This function is public only because there is no other way to unit test heapsort.
2718 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
2720 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
2721 where F: FnMut(&T, &T) -> bool
2723 sort::heapsort(v, &mut is_less);
// Comparison traits

extern {
    /// Calls implementation provided memcmp.
    ///
    /// Interprets the data as u8.
    ///
    /// Returns 0 for equal, < 0 for less than and > 0 for greater
    /// than.
    // FIXME(#32610): Return type should be c_int
    fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
2741 #[stable(feature = "rust1", since = "1.0.0")]
2742 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
2743 fn eq(&self, other: &[B]) -> bool {
2744 SlicePartialEq::equal(self, other)
2747 fn ne(&self, other: &[B]) -> bool {
2748 SlicePartialEq::not_equal(self, other)
// `Eq` has no methods; element-wise equality is an equivalence relation
// whenever `T: Eq`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
2755 /// Implements comparison of vectors lexicographically.
2756 #[stable(feature = "rust1", since = "1.0.0")]
2757 impl<T: Ord> Ord for [T] {
2758 fn cmp(&self, other: &[T]) -> Ordering {
2759 SliceOrd::compare(self, other)
2763 /// Implements comparison of vectors lexicographically.
2764 #[stable(feature = "rust1", since = "1.0.0")]
2765 impl<T: PartialOrd> PartialOrd for [T] {
2766 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
2767 SlicePartialOrd::partial_compare(self, other)
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    // Default `ne` in terms of `eq`; specialized impls only provide `equal`.
    fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
}
2779 // Generic slice equality
2780 impl<A, B> SlicePartialEq<B> for [A]
2781 where A: PartialEq<B>
2783 default fn equal(&self, other: &[B]) -> bool {
2784 if self.len() != other.len() {
2788 for i in 0..self.len() {
2789 if !self[i].eq(&other[i]) {
2798 // Use memcmp for bytewise equality when the types allow
2799 impl<A> SlicePartialEq<A> for [A]
2800 where A: PartialEq<A> + BytewiseEquality
2802 fn equal(&self, other: &[A]) -> bool {
2803 if self.len() != other.len() {
2806 if self.as_ptr() == other.as_ptr() {
2810 let size = mem::size_of_val(self);
2811 memcmp(self.as_ptr() as *const u8,
2812 other.as_ptr() as *const u8, size) == 0
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd<B> {
    fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
}
2823 impl<A> SlicePartialOrd<A> for [A]
2826 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
2827 let l = cmp::min(self.len(), other.len());
2829 // Slice to the loop iteration range to enable bound check
2830 // elimination in the compiler
2831 let lhs = &self[..l];
2832 let rhs = &other[..l];
2835 match lhs[i].partial_cmp(&rhs[i]) {
2836 Some(Ordering::Equal) => (),
2837 non_eq => return non_eq,
2841 self.len().partial_cmp(&other.len())
2845 impl<A> SlicePartialOrd<A> for [A]
2848 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
2849 Some(SliceOrd::compare(self, other))
// intermediate trait for specialization of slice's Ord
trait SliceOrd<B> {
    fn compare(&self, other: &[B]) -> Ordering;
}
2859 impl<A> SliceOrd<A> for [A]
2862 default fn compare(&self, other: &[A]) -> Ordering {
2863 let l = cmp::min(self.len(), other.len());
2865 // Slice to the loop iteration range to enable bound check
2866 // elimination in the compiler
2867 let lhs = &self[..l];
2868 let rhs = &other[..l];
2871 match lhs[i].cmp(&rhs[i]) {
2872 Ordering::Equal => (),
2873 non_eq => return non_eq,
2877 self.len().cmp(&other.len())
2881 // memcmp compares a sequence of unsigned bytes lexicographically.
2882 // this matches the order we want for [u8], but no others (not even [i8]).
2883 impl SliceOrd<u8> for [u8] {
2885 fn compare(&self, other: &[u8]) -> Ordering {
2886 let order = unsafe {
2887 memcmp(self.as_ptr(), other.as_ptr(),
2888 cmp::min(self.len(), other.len()))
2891 self.len().cmp(&other.len())
2892 } else if order < 0 {
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
// Marker trait only (no methods); used above to route slice equality for
// such element types through a single `memcmp` call.
trait BytewiseEquality { }
// Emits an empty `impl $traitname for $ty { }` for every listed type.
macro_rules! impl_marker_for {
    ($traitname:ident, $($ty:ty)*) => {
        $(
            impl $traitname for $ty { }
        )*
    }
}
// All primitive integer types plus `char` and `bool` have a canonical byte
// representation, so their equality is bytewise. Floats are deliberately
// absent: IEEE-754 equality (NaN != NaN, 0.0 == -0.0) is not bytewise.
impl_marker_for!(BytewiseEquality,
                 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
2917 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
2918 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
2919 &*self.ptr.offset(i as isize)
2921 fn may_have_side_effect() -> bool { false }
2925 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
2926 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
2927 &mut *self.ptr.offset(i as isize)
2929 fn may_have_side_effect() -> bool { false }
// intermediate trait for specialization of `[T]::contains`; lets byte-sized
// element types use the optimized memchr search below.
trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
2936 impl<T> SliceContains for T where T: PartialEq {
2937 default fn slice_contains(&self, x: &[Self]) -> bool {
2938 x.iter().any(|y| *y == *self)
2942 impl SliceContains for u8 {
2943 fn slice_contains(&self, x: &[Self]) -> bool {
2944 memchr::memchr(*self, x).is_some()
// Specialization: forward the `i8` search to the `u8` memchr path by
// reinterpreting the haystack's bytes.
impl SliceContains for i8 {
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY(review): `i8` and `u8` have identical size and alignment,
        // so viewing the same memory as `&[u8]` of the same element count is
        // sound; the cast preserves the byte pattern being searched for.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()