1 // Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Slice management and manipulation
13 //! For more details see [`std::slice`].
15 //! [`std::slice`]: ../../std/slice/index.html
17 #![stable(feature = "rust1", since = "1.0.0")]
19 // How this module is organized.
21 // The library infrastructure for slices is fairly messy. There's
22 // a lot of stuff defined here. Let's keep it clean.
24 // Since slices don't support inherent methods, all operations
25 // on them are defined on traits, which are then re-exported from
26 // the prelude for convenience. So there are a lot of traits here.
28 // The layout of this file is thus:
30 // * Slice-specific 'extension' traits and their implementations. This
31 // is where most of the slice API resides.
32 // * Implementations of a few common traits with important slice ops.
33 // * Definitions of a bunch of iterators.
35 // * The `raw` and `bytes` submodules.
36 // * Boilerplate trait implementations.
38 use cmp::Ordering::{self, Less, Equal, Greater};
41 use intrinsics::assume;
43 use ops::{FnMut, Try, self};
45 use option::Option::{None, Some};
47 use result::Result::{Ok, Err};
50 use marker::{Copy, Send, Sync, Sized, self};
51 use iter_private::TrustedRandomAccess;
53 #[unstable(feature = "slice_internals", issue = "0",
54 reason = "exposed from core to be reused in std; use the memchr crate")]
55 /// Pure rust memchr implementation, taken from rust-memchr
71 /// Extension methods for slices.
72 #[unstable(feature = "core_slice_ext",
73 reason = "stable interface provided by `impl [T]` in later crates",
75 #[allow(missing_docs)] // documented elsewhere
79 #[stable(feature = "core", since = "1.6.0")]
80 fn split_at(&self, mid: usize) -> (&[Self::Item], &[Self::Item]);
82 #[stable(feature = "core", since = "1.6.0")]
83 fn iter(&self) -> Iter<Self::Item>;
85 #[stable(feature = "core", since = "1.6.0")]
86 fn split<P>(&self, pred: P) -> Split<Self::Item, P>
87 where P: FnMut(&Self::Item) -> bool;
89 #[unstable(feature = "slice_rsplit", issue = "41020")]
90 fn rsplit<P>(&self, pred: P) -> RSplit<Self::Item, P>
91 where P: FnMut(&Self::Item) -> bool;
93 #[stable(feature = "core", since = "1.6.0")]
94 fn splitn<P>(&self, n: usize, pred: P) -> SplitN<Self::Item, P>
95 where P: FnMut(&Self::Item) -> bool;
97 #[stable(feature = "core", since = "1.6.0")]
98 fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<Self::Item, P>
99 where P: FnMut(&Self::Item) -> bool;
101 #[stable(feature = "core", since = "1.6.0")]
102 fn windows(&self, size: usize) -> Windows<Self::Item>;
104 #[stable(feature = "core", since = "1.6.0")]
105 fn chunks(&self, size: usize) -> Chunks<Self::Item>;
107 #[unstable(feature = "exact_chunks", issue = "47115")]
108 fn exact_chunks(&self, size: usize) -> ExactChunks<Self::Item>;
110 #[stable(feature = "core", since = "1.6.0")]
111 fn get<I>(&self, index: I) -> Option<&I::Output>
112 where I: SliceIndex<Self>;
113 #[stable(feature = "core", since = "1.6.0")]
114 fn first(&self) -> Option<&Self::Item>;
116 #[stable(feature = "core", since = "1.6.0")]
117 fn split_first(&self) -> Option<(&Self::Item, &[Self::Item])>;
119 #[stable(feature = "core", since = "1.6.0")]
120 fn split_last(&self) -> Option<(&Self::Item, &[Self::Item])>;
122 #[stable(feature = "core", since = "1.6.0")]
123 fn last(&self) -> Option<&Self::Item>;
125 #[stable(feature = "core", since = "1.6.0")]
126 unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
127 where I: SliceIndex<Self>;
128 #[stable(feature = "core", since = "1.6.0")]
129 fn as_ptr(&self) -> *const Self::Item;
131 #[stable(feature = "core", since = "1.6.0")]
132 fn binary_search(&self, x: &Self::Item) -> Result<usize, usize>
133 where Self::Item: Ord;
135 #[stable(feature = "core", since = "1.6.0")]
136 fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
137 where F: FnMut(&'a Self::Item) -> Ordering;
139 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
140 fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
141 where F: FnMut(&'a Self::Item) -> B,
144 #[stable(feature = "core", since = "1.6.0")]
145 fn len(&self) -> usize;
147 #[stable(feature = "core", since = "1.6.0")]
148 fn is_empty(&self) -> bool { self.len() == 0 }
150 #[stable(feature = "core", since = "1.6.0")]
151 fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
152 where I: SliceIndex<Self>;
153 #[stable(feature = "core", since = "1.6.0")]
154 fn iter_mut(&mut self) -> IterMut<Self::Item>;
156 #[stable(feature = "core", since = "1.6.0")]
157 fn first_mut(&mut self) -> Option<&mut Self::Item>;
159 #[stable(feature = "core", since = "1.6.0")]
160 fn split_first_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
162 #[stable(feature = "core", since = "1.6.0")]
163 fn split_last_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
165 #[stable(feature = "core", since = "1.6.0")]
166 fn last_mut(&mut self) -> Option<&mut Self::Item>;
168 #[stable(feature = "core", since = "1.6.0")]
169 fn split_mut<P>(&mut self, pred: P) -> SplitMut<Self::Item, P>
170 where P: FnMut(&Self::Item) -> bool;
172 #[unstable(feature = "slice_rsplit", issue = "41020")]
173 fn rsplit_mut<P>(&mut self, pred: P) -> RSplitMut<Self::Item, P>
174 where P: FnMut(&Self::Item) -> bool;
176 #[stable(feature = "core", since = "1.6.0")]
177 fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<Self::Item, P>
178 where P: FnMut(&Self::Item) -> bool;
180 #[stable(feature = "core", since = "1.6.0")]
181 fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P>
182 where P: FnMut(&Self::Item) -> bool;
184 #[stable(feature = "core", since = "1.6.0")]
185 fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<Self::Item>;
187 #[unstable(feature = "exact_chunks", issue = "47115")]
188 fn exact_chunks_mut(&mut self, size: usize) -> ExactChunksMut<Self::Item>;
190 #[stable(feature = "core", since = "1.6.0")]
191 fn swap(&mut self, a: usize, b: usize);
193 #[stable(feature = "core", since = "1.6.0")]
194 fn split_at_mut(&mut self, mid: usize) -> (&mut [Self::Item], &mut [Self::Item]);
196 #[stable(feature = "core", since = "1.6.0")]
197 fn reverse(&mut self);
199 #[stable(feature = "core", since = "1.6.0")]
200 unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
201 where I: SliceIndex<Self>;
202 #[stable(feature = "core", since = "1.6.0")]
203 fn as_mut_ptr(&mut self) -> *mut Self::Item;
205 #[stable(feature = "core", since = "1.6.0")]
206 fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq;
208 #[stable(feature = "core", since = "1.6.0")]
209 fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
211 #[stable(feature = "core", since = "1.6.0")]
212 fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
214 #[stable(feature = "slice_rotate", since = "1.26.0")]
215 fn rotate_left(&mut self, mid: usize);
217 #[stable(feature = "slice_rotate", since = "1.26.0")]
218 fn rotate_right(&mut self, k: usize);
220 #[stable(feature = "clone_from_slice", since = "1.7.0")]
221 fn clone_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Clone;
223 #[stable(feature = "copy_from_slice", since = "1.9.0")]
224 fn copy_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Copy;
226 #[unstable(feature = "swap_with_slice", issue = "44030")]
227 fn swap_with_slice(&mut self, src: &mut [Self::Item]);
229 #[stable(feature = "sort_unstable", since = "1.20.0")]
230 fn sort_unstable(&mut self)
231 where Self::Item: Ord;
233 #[stable(feature = "sort_unstable", since = "1.20.0")]
234 fn sort_unstable_by<F>(&mut self, compare: F)
235 where F: FnMut(&Self::Item, &Self::Item) -> Ordering;
237 #[stable(feature = "sort_unstable", since = "1.20.0")]
238 fn sort_unstable_by_key<B, F>(&mut self, f: F)
239 where F: FnMut(&Self::Item) -> B,
243 // Use macros to be generic over const/mut
244 macro_rules! slice_offset {
245 ($ptr:expr, $by:expr) => {{
247 if size_from_ptr(ptr) == 0 {
248 (ptr as *mut i8).wrapping_offset($by) as _
255 // make a &T from a *const T
256 macro_rules! make_ref {
259 if size_from_ptr(ptr) == 0 {
260 // Use a non-null pointer value
268 // make a &mut T from a *mut T
269 macro_rules! make_ref_mut {
272 if size_from_ptr(ptr) == 0 {
273 // Use a non-null pointer value
281 #[unstable(feature = "core_slice_ext",
282 reason = "stable interface provided by `impl [T]` in later crates",
284 impl<T> SliceExt for [T] {
288 fn split_at(&self, mid: usize) -> (&[T], &[T]) {
289 (&self[..mid], &self[mid..])
293 fn iter(&self) -> Iter<T> {
295 let p = if mem::size_of::<T>() == 0 {
298 let p = self.as_ptr();
299 assume(!p.is_null());
305 end: slice_offset!(p, self.len() as isize),
306 _marker: marker::PhantomData
312 fn split<P>(&self, pred: P) -> Split<T, P>
313 where P: FnMut(&T) -> bool
323 fn rsplit<P>(&self, pred: P) -> RSplit<T, P>
324 where P: FnMut(&T) -> bool
326 RSplit { inner: self.split(pred) }
330 fn splitn<P>(&self, n: usize, pred: P) -> SplitN<T, P>
331 where P: FnMut(&T) -> bool
334 inner: GenericSplitN {
335 iter: self.split(pred),
342 fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<T, P>
343 where P: FnMut(&T) -> bool
346 inner: GenericSplitN {
347 iter: self.rsplit(pred),
354 fn windows(&self, size: usize) -> Windows<T> {
356 Windows { v: self, size: size }
360 fn chunks(&self, chunk_size: usize) -> Chunks<T> {
361 assert!(chunk_size != 0);
362 Chunks { v: self, chunk_size: chunk_size }
366 fn exact_chunks(&self, chunk_size: usize) -> ExactChunks<T> {
367 assert!(chunk_size != 0);
368 let rem = self.len() % chunk_size;
369 let len = self.len() - rem;
370 ExactChunks { v: &self[..len], chunk_size: chunk_size}
374 fn get<I>(&self, index: I) -> Option<&I::Output>
375 where I: SliceIndex<[T]>
381 fn first(&self) -> Option<&T> {
382 if self.is_empty() { None } else { Some(&self[0]) }
386 fn split_first(&self) -> Option<(&T, &[T])> {
387 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
391 fn split_last(&self) -> Option<(&T, &[T])> {
392 let len = self.len();
393 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
397 fn last(&self) -> Option<&T> {
398 if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
402 unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
403 where I: SliceIndex<[T]>
405 index.get_unchecked(self)
409 fn as_ptr(&self) -> *const T {
410 self as *const [T] as *const T
413 fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
414 where F: FnMut(&'a T) -> Ordering
417 let mut size = s.len();
421 let mut base = 0usize;
424 let mid = base + half;
425 // mid is always in [0, size), that means mid is >= 0 and < size.
426 // mid >= 0: by definition
427 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
428 let cmp = f(unsafe { s.get_unchecked(mid) });
429 base = if cmp == Greater { base } else { mid };
432 // base is always in [0, size) because base <= mid.
433 let cmp = f(unsafe { s.get_unchecked(base) });
434 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
438 fn len(&self) -> usize {
440 mem::transmute::<&[T], Repr<T>>(self).len
445 fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
446 where I: SliceIndex<[T]>
452 fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
453 let len = self.len();
454 let ptr = self.as_mut_ptr();
459 (from_raw_parts_mut(ptr, mid),
460 from_raw_parts_mut(ptr.offset(mid as isize), len - mid))
465 fn iter_mut(&mut self) -> IterMut<T> {
467 let p = if mem::size_of::<T>() == 0 {
470 let p = self.as_mut_ptr();
471 assume(!p.is_null());
477 end: slice_offset!(p, self.len() as isize),
478 _marker: marker::PhantomData
484 fn last_mut(&mut self) -> Option<&mut T> {
485 let len = self.len();
486 if len == 0 { return None; }
487 Some(&mut self[len - 1])
491 fn first_mut(&mut self) -> Option<&mut T> {
492 if self.is_empty() { None } else { Some(&mut self[0]) }
496 fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
497 if self.is_empty() { None } else {
498 let split = self.split_at_mut(1);
499 Some((&mut split.0[0], split.1))
504 fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
505 let len = self.len();
506 if len == 0 { None } else {
507 let split = self.split_at_mut(len - 1);
508 Some((&mut split.1[0], split.0))
513 fn split_mut<P>(&mut self, pred: P) -> SplitMut<T, P>
514 where P: FnMut(&T) -> bool
516 SplitMut { v: self, pred: pred, finished: false }
520 fn rsplit_mut<P>(&mut self, pred: P) -> RSplitMut<T, P>
521 where P: FnMut(&T) -> bool
523 RSplitMut { inner: self.split_mut(pred) }
527 fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<T, P>
528 where P: FnMut(&T) -> bool
531 inner: GenericSplitN {
532 iter: self.split_mut(pred),
539 fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<T, P> where
540 P: FnMut(&T) -> bool,
543 inner: GenericSplitN {
544 iter: self.rsplit_mut(pred),
551 fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
552 assert!(chunk_size != 0);
553 ChunksMut { v: self, chunk_size: chunk_size }
557 fn exact_chunks_mut(&mut self, chunk_size: usize) -> ExactChunksMut<T> {
558 assert!(chunk_size != 0);
559 let rem = self.len() % chunk_size;
560 let len = self.len() - rem;
561 ExactChunksMut { v: &mut self[..len], chunk_size: chunk_size}
565 fn swap(&mut self, a: usize, b: usize) {
567 // Can't take two mutable loans from one vector, so instead just cast
568 // them to their raw pointers to do the swap
569 let pa: *mut T = &mut self[a];
570 let pb: *mut T = &mut self[b];
575 fn reverse(&mut self) {
576 let mut i: usize = 0;
579 // For very small types, all the individual reads in the normal
580 // path perform poorly. We can do better, given efficient unaligned
581 // load/store, by loading a larger chunk and reversing a register.
583 // Ideally LLVM would do this for us, as it knows better than we do
584 // whether unaligned reads are efficient (since that changes between
585 // different ARM versions, for example) and what the best chunk size
586 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
587 // the loop, so we need to do this ourselves. (Hypothesis: reverse
588 // is troublesome because the sides can be aligned differently --
589 // will be, when the length is odd -- so there's no way of emitting
590 // pre- and postludes to use fully-aligned SIMD in the middle.)
593 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
595 if fast_unaligned && mem::size_of::<T>() == 1 {
596 // Use the llvm.bswap intrinsic to reverse u8s in a usize
597 let chunk = mem::size_of::<usize>();
598 while i + chunk - 1 < ln / 2 {
600 let pa: *mut T = self.get_unchecked_mut(i);
601 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
602 let va = ptr::read_unaligned(pa as *mut usize);
603 let vb = ptr::read_unaligned(pb as *mut usize);
604 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
605 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
611 if fast_unaligned && mem::size_of::<T>() == 2 {
612 // Use rotate-by-16 to reverse u16s in a u32
613 let chunk = mem::size_of::<u32>() / 2;
614 while i + chunk - 1 < ln / 2 {
616 let pa: *mut T = self.get_unchecked_mut(i);
617 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
618 let va = ptr::read_unaligned(pa as *mut u32);
619 let vb = ptr::read_unaligned(pb as *mut u32);
620 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
621 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
628 // Unsafe swap to avoid the bounds check in safe swap.
630 let pa: *mut T = self.get_unchecked_mut(i);
631 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
639 unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
640 where I: SliceIndex<[T]>
642 index.get_unchecked_mut(self)
646 fn as_mut_ptr(&mut self) -> *mut T {
647 self as *mut [T] as *mut T
651 fn contains(&self, x: &T) -> bool where T: PartialEq {
652 x.slice_contains(self)
656 fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
657 let n = needle.len();
658 self.len() >= n && needle == &self[..n]
662 fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
663 let (m, n) = (self.len(), needle.len());
664 m >= n && needle == &self[m-n..]
667 fn binary_search(&self, x: &T) -> Result<usize, usize>
670 self.binary_search_by(|p| p.cmp(x))
673 fn rotate_left(&mut self, mid: usize) {
674 assert!(mid <= self.len());
675 let k = self.len() - mid;
678 let p = self.as_mut_ptr();
679 rotate::ptr_rotate(mid, p.offset(mid as isize), k);
683 fn rotate_right(&mut self, k: usize) {
684 assert!(k <= self.len());
685 let mid = self.len() - k;
688 let p = self.as_mut_ptr();
689 rotate::ptr_rotate(mid, p.offset(mid as isize), k);
694 fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
695 assert!(self.len() == src.len(),
696 "destination and source slices have different lengths");
697 // NOTE: We need to explicitly slice them to the same length
698 // for bounds checking to be elided, and the optimizer will
699 // generate memcpy for simple cases (for example T = u8).
700 let len = self.len();
701 let src = &src[..len];
703 self[i].clone_from(&src[i]);
708 fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
709 assert!(self.len() == src.len(),
710 "destination and source slices have different lengths");
712 ptr::copy_nonoverlapping(
713 src.as_ptr(), self.as_mut_ptr(), self.len());
718 fn swap_with_slice(&mut self, src: &mut [T]) {
719 assert!(self.len() == src.len(),
720 "destination and source slices have different lengths");
722 ptr::swap_nonoverlapping(
723 self.as_mut_ptr(), src.as_mut_ptr(), self.len());
728 fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
729 where F: FnMut(&'a Self::Item) -> B,
732 self.binary_search_by(|k| f(k).cmp(b))
736 fn sort_unstable(&mut self)
737 where Self::Item: Ord
739 sort::quicksort(self, |a, b| a.lt(b));
743 fn sort_unstable_by<F>(&mut self, mut compare: F)
744 where F: FnMut(&Self::Item, &Self::Item) -> Ordering
746 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
750 fn sort_unstable_by_key<B, F>(&mut self, mut f: F)
751 where F: FnMut(&Self::Item) -> B,
754 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
758 #[stable(feature = "rust1", since = "1.0.0")]
759 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
760 impl<T, I> ops::Index<I> for [T]
761 where I: SliceIndex<[T]>
763 type Output = I::Output;
766 fn index(&self, index: I) -> &I::Output {
771 #[stable(feature = "rust1", since = "1.0.0")]
772 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
773 impl<T, I> ops::IndexMut<I> for [T]
774 where I: SliceIndex<[T]>
777 fn index_mut(&mut self, index: I) -> &mut I::Output {
778 index.index_mut(self)
/// Diverges with the canonical out-of-range panic message for `index`
/// applied to a slice of length `len`.
fn slice_index_len_fail(index: usize, len: usize) -> ! {
    panic!("index {} out of range for slice of length {}", index, len)
}
/// Diverges with the canonical panic message for a range whose start
/// (`index`) exceeds its end (`end`).
fn slice_index_order_fail(index: usize, end: usize) -> ! {
    panic!("slice index starts at {} but ends at {}", index, end)
}
794 /// A helper trait used for indexing operations.
795 #[unstable(feature = "slice_get_slice", issue = "35729")]
796 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
797 pub trait SliceIndex<T: ?Sized> {
798 /// The output type returned by methods.
801 /// Returns a shared reference to the output at this location, if in
803 fn get(self, slice: &T) -> Option<&Self::Output>;
805 /// Returns a mutable reference to the output at this location, if in
807 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
809 /// Returns a shared reference to the output at this location, without
810 /// performing any bounds checking.
811 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
813 /// Returns a mutable reference to the output at this location, without
814 /// performing any bounds checking.
815 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
817 /// Returns a shared reference to the output at this location, panicking
818 /// if out of bounds.
819 fn index(self, slice: &T) -> &Self::Output;
821 /// Returns a mutable reference to the output at this location, panicking
822 /// if out of bounds.
823 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
826 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
827 impl<T> SliceIndex<[T]> for usize {
831 fn get(self, slice: &[T]) -> Option<&T> {
832 if self < slice.len() {
834 Some(self.get_unchecked(slice))
842 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
843 if self < slice.len() {
845 Some(self.get_unchecked_mut(slice))
853 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
854 &*slice.as_ptr().offset(self as isize)
858 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
859 &mut *slice.as_mut_ptr().offset(self as isize)
863 fn index(self, slice: &[T]) -> &T {
864 // NB: use intrinsic indexing
869 fn index_mut(self, slice: &mut [T]) -> &mut T {
870 // NB: use intrinsic indexing
875 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
876 impl<T> SliceIndex<[T]> for ops::Range<usize> {
880 fn get(self, slice: &[T]) -> Option<&[T]> {
881 if self.start > self.end || self.end > slice.len() {
885 Some(self.get_unchecked(slice))
891 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
892 if self.start > self.end || self.end > slice.len() {
896 Some(self.get_unchecked_mut(slice))
902 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
903 from_raw_parts(slice.as_ptr().offset(self.start as isize), self.end - self.start)
907 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
908 from_raw_parts_mut(slice.as_mut_ptr().offset(self.start as isize), self.end - self.start)
912 fn index(self, slice: &[T]) -> &[T] {
913 if self.start > self.end {
914 slice_index_order_fail(self.start, self.end);
915 } else if self.end > slice.len() {
916 slice_index_len_fail(self.end, slice.len());
919 self.get_unchecked(slice)
924 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
925 if self.start > self.end {
926 slice_index_order_fail(self.start, self.end);
927 } else if self.end > slice.len() {
928 slice_index_len_fail(self.end, slice.len());
931 self.get_unchecked_mut(slice)
936 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
937 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
941 fn get(self, slice: &[T]) -> Option<&[T]> {
942 (0..self.end).get(slice)
946 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
947 (0..self.end).get_mut(slice)
951 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
952 (0..self.end).get_unchecked(slice)
956 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
957 (0..self.end).get_unchecked_mut(slice)
961 fn index(self, slice: &[T]) -> &[T] {
962 (0..self.end).index(slice)
966 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
967 (0..self.end).index_mut(slice)
971 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
972 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
976 fn get(self, slice: &[T]) -> Option<&[T]> {
977 (self.start..slice.len()).get(slice)
981 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
982 (self.start..slice.len()).get_mut(slice)
986 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
987 (self.start..slice.len()).get_unchecked(slice)
991 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
992 (self.start..slice.len()).get_unchecked_mut(slice)
996 fn index(self, slice: &[T]) -> &[T] {
997 (self.start..slice.len()).index(slice)
1001 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1002 (self.start..slice.len()).index_mut(slice)
1006 #[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
1007 impl<T> SliceIndex<[T]> for ops::RangeFull {
1011 fn get(self, slice: &[T]) -> Option<&[T]> {
1016 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
1021 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
1026 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
1031 fn index(self, slice: &[T]) -> &[T] {
1036 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1042 #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
1043 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
1047 fn get(self, slice: &[T]) -> Option<&[T]> {
1048 if self.end == usize::max_value() { None }
1049 else { (self.start..self.end + 1).get(slice) }
1053 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
1054 if self.end == usize::max_value() { None }
1055 else { (self.start..self.end + 1).get_mut(slice) }
1059 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
1060 (self.start..self.end + 1).get_unchecked(slice)
1064 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
1065 (self.start..self.end + 1).get_unchecked_mut(slice)
1069 fn index(self, slice: &[T]) -> &[T] {
1070 assert!(self.end != usize::max_value(),
1071 "attempted to index slice up to maximum usize");
1072 (self.start..self.end + 1).index(slice)
1076 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1077 assert!(self.end != usize::max_value(),
1078 "attempted to index slice up to maximum usize");
1079 (self.start..self.end + 1).index_mut(slice)
1083 #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
1084 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
1088 fn get(self, slice: &[T]) -> Option<&[T]> {
1089 (0..=self.end).get(slice)
1093 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
1094 (0..=self.end).get_mut(slice)
1098 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
1099 (0..=self.end).get_unchecked(slice)
1103 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
1104 (0..=self.end).get_unchecked_mut(slice)
1108 fn index(self, slice: &[T]) -> &[T] {
1109 (0..=self.end).index(slice)
1113 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
1114 (0..=self.end).index_mut(slice)
1118 ////////////////////////////////////////////////////////////////////////////////
1120 ////////////////////////////////////////////////////////////////////////////////
1122 #[stable(feature = "rust1", since = "1.0.0")]
1123 impl<'a, T> Default for &'a [T] {
1124 /// Creates an empty slice.
1125 fn default() -> &'a [T] { &[] }
1128 #[stable(feature = "mut_slice_default", since = "1.5.0")]
1129 impl<'a, T> Default for &'a mut [T] {
1130 /// Creates a mutable empty slice.
1131 fn default() -> &'a mut [T] { &mut [] }
1138 #[stable(feature = "rust1", since = "1.0.0")]
1139 impl<'a, T> IntoIterator for &'a [T] {
1141 type IntoIter = Iter<'a, T>;
1143 fn into_iter(self) -> Iter<'a, T> {
1148 #[stable(feature = "rust1", since = "1.0.0")]
1149 impl<'a, T> IntoIterator for &'a mut [T] {
1150 type Item = &'a mut T;
1151 type IntoIter = IterMut<'a, T>;
1153 fn into_iter(self) -> IterMut<'a, T> {
// Size of the pointee type; the pointer argument only drives inference
// and its value is never read (so it may be null or dangling).
fn size_from_ptr<T>(_: *const T) -> usize {
    mem::size_of::<T>()
}
1163 // The shared definition of the `Iter` and `IterMut` iterators
1164 macro_rules! iterator {
1165 (struct $name:ident -> $ptr:ty, $elem:ty, $mkref:ident) => {
1166 #[stable(feature = "rust1", since = "1.0.0")]
1167 impl<'a, T> Iterator for $name<'a, T> {
1171 fn next(&mut self) -> Option<$elem> {
1172 // could be implemented with slices, but this avoids bounds checks
1174 if mem::size_of::<T>() != 0 {
1175 assume(!self.ptr.is_null());
1176 assume(!self.end.is_null());
1178 if self.ptr == self.end {
1181 Some($mkref!(self.ptr.post_inc()))
1187 fn size_hint(&self) -> (usize, Option<usize>) {
1188 let exact = ptrdistance(self.ptr, self.end);
1189 (exact, Some(exact))
1193 fn count(self) -> usize {
1198 fn nth(&mut self, n: usize) -> Option<$elem> {
1199 // Call helper method. Can't put the definition here because mut versus const.
1204 fn last(mut self) -> Option<$elem> {
1209 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
1210 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
1212 // manual unrolling is needed when there are conditional exits from the loop
1213 let mut accum = init;
1215 while ptrdistance(self.ptr, self.end) >= 4 {
1216 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1217 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1218 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1219 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1221 while self.ptr != self.end {
1222 accum = f(accum, $mkref!(self.ptr.post_inc()))?;
1229 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
1230 where Fold: FnMut(Acc, Self::Item) -> Acc,
1232 // Let LLVM unroll this, rather than using the default
1233 // impl that would force the manual unrolling above
1234 let mut accum = init;
1235 while let Some(x) = self.next() {
1236 accum = f(accum, x);
1242 #[rustc_inherit_overflow_checks]
1243 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
1245 P: FnMut(Self::Item) -> bool,
1247 // The addition might panic on overflow
1248 // Use the len of the slice to hint optimizer to remove result index bounds check.
1249 let _n = make_slice!(self.ptr, self.end).len();
1250 self.try_fold(0, move |i, x| {
1251 if predicate(x) { Err(i) }
1254 // // FIXME(#48116/#45964):
1255 // // This assume() causes misoptimization on LLVM 6.
1256 // // Commented out until it is fixed again.
1258 // unsafe { assume(i < n) };
1264 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
1265 P: FnMut(Self::Item) -> bool,
1266 Self: Sized + ExactSizeIterator + DoubleEndedIterator
1268 // No need for an overflow check here, because `ExactSizeIterator`
1269 // implies that the number of elements fits into a `usize`.
1270 // Use the len of the slice to hint optimizer to remove result index bounds check.
1271 let n = make_slice!(self.ptr, self.end).len();
1272 self.try_rfold(n, move |i, x| {
1274 if predicate(x) { Err(i) }
1277 // // FIXME(#48116/#45964):
1278 // // This assume() causes misoptimization on LLVM 6.
1279 // // Commented out until it is fixed again.
1281 // unsafe { assume(i < n) };
1287 #[stable(feature = "rust1", since = "1.0.0")]
1288 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
1290 fn next_back(&mut self) -> Option<$elem> {
1291 // could be implemented with slices, but this avoids bounds checks
1293 if mem::size_of::<T>() != 0 {
1294 assume(!self.ptr.is_null());
1295 assume(!self.end.is_null());
1297 if self.end == self.ptr {
1300 Some($mkref!(self.end.pre_dec()))
1306 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
1307 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
1309 // manual unrolling is needed when there are conditional exits from the loop
1310 let mut accum = init;
1312 while ptrdistance(self.ptr, self.end) >= 4 {
1313 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1314 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1315 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1316 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1318 while self.ptr != self.end {
1319 accum = f(accum, $mkref!(self.end.pre_dec()))?;
1326 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
1327 where Fold: FnMut(Acc, Self::Item) -> Acc,
1329 // Let LLVM unroll this, rather than using the default
1330 // impl that would force the manual unrolling above
1331 let mut accum = init;
1332 while let Some(x) = self.next_back() {
1333 accum = f(accum, x);
1341 macro_rules! make_slice {
1342 ($start: expr, $end: expr) => {{
1344 let diff = ($end as usize).wrapping_sub(start as usize);
1345 if size_from_ptr(start) == 0 {
1346 // use a non-null pointer value
1347 unsafe { from_raw_parts(1 as *const _, diff) }
1349 let len = diff / size_from_ptr(start);
1350 unsafe { from_raw_parts(start, len) }
// Mutable counterpart of `make_slice!`: build a `&mut [T]` from the
// `$start..$end` raw-pointer pair.
//
// Fix: the visible expansion referenced `start` without ever binding it
// and the zero-sized/else arms were unbalanced; restore the
// `let start = $start;` binding and the full `if`/`else` structure.
macro_rules! make_mut_slice {
    ($start: expr, $end: expr) => {{
        // Evaluate the pointer expression exactly once.
        let start = $start;
        // Byte distance between the two pointers; for zero-sized `T` the
        // iterators encode the element count directly in `end`.
        let diff = ($end as usize).wrapping_sub(start as usize);
        if size_from_ptr(start) == 0 {
            // use a non-null pointer value
            unsafe { from_raw_parts_mut(1 as *mut _, diff) }
        } else {
            let len = diff / size_from_ptr(start);
            unsafe { from_raw_parts_mut(start, len) }
        }
    }}
}
1369 /// Immutable slice iterator
1371 /// This struct is created by the [`iter`] method on [slices].
1378 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
1379 /// let slice = &[1, 2, 3];
1381 /// // Then, we iterate over it:
1382 /// for element in slice.iter() {
1383 /// println!("{}", element);
1387 /// [`iter`]: ../../std/primitive.slice.html#method.iter
1388 /// [slices]: ../../std/primitive.slice.html
1389 #[stable(feature = "rust1", since = "1.0.0")]
// Pointer-pair iterator: the `ptr`/`end` fields are elided in this listing
// but are used throughout (see `make_slice!(self.ptr, self.end)` below).
1390 pub struct Iter<'a, T: 'a> {
// PhantomData ties the raw pointers to the lifetime of the borrowed slice.
1393 _marker: marker::PhantomData<&'a T>,
1396 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1397 impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
1398 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Debug-print the not-yet-consumed remainder of the slice.
1399 f.debug_tuple("Iter")
1400 .field(&self.as_slice())
1405 #[stable(feature = "rust1", since = "1.0.0")]
1406 unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
// `Iter` yields `&T`, so sending the iterator to another thread shares
// `T` across threads; hence `Send` requires `T: Sync`, not `T: Send`.
1407 #[stable(feature = "rust1", since = "1.0.0")]
1408 unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
1410 impl<'a, T> Iter<'a, T> {
1411 /// View the underlying data as a subslice of the original data.
1413 /// This has the same lifetime as the original slice, and so the
1414 /// iterator can continue to be used while this exists.
1421 /// // First, we declare a type which has the `iter` method to get the `Iter`
1422 /// // struct (&[usize here]):
1423 /// let slice = &[1, 2, 3];
1425 /// // Then, we get the iterator:
1426 /// let mut iter = slice.iter();
1427 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
1428 /// println!("{:?}", iter.as_slice());
1430 /// // Next, we move to the second element of the slice:
1432 /// // Now `as_slice` returns "[2, 3]":
1433 /// println!("{:?}", iter.as_slice());
1435 #[stable(feature = "iter_to_slice", since = "1.4.0")]
1436 pub fn as_slice(&self) -> &'a [T] {
// Reconstruct the remaining slice from the ptr/end pair.
1437 make_slice!(self.ptr, self.end)
1440 // Helper function for Iter::nth
1441 fn iter_nth(&mut self, n: usize) -> Option<&'a T> {
1442 match self.as_slice().get(n) {
1443 Some(elem_ref) => unsafe {
// Advance past the returned element: skip `n` elements plus the
// element itself (hence the wrapping `+ 1`).
1444 self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
// Fewer than `n + 1` elements remain: exhaust the iterator.
1448 self.ptr = self.end;
// Generate the Iterator/DoubleEndedIterator machinery for `Iter`.
1455 iterator!{struct Iter -> *const T, &'a T, make_ref}
1457 #[stable(feature = "rust1", since = "1.0.0")]
1458 impl<'a, T> ExactSizeIterator for Iter<'a, T> {
// Exhausted exactly when the cursor has caught up with `end`.
1459 fn is_empty(&self) -> bool {
1460 self.ptr == self.end
1464 #[stable(feature = "fused", since = "1.25.0")]
1465 impl<'a, T> FusedIterator for Iter<'a, T> {}
1467 #[unstable(feature = "trusted_len", issue = "37572")]
1468 unsafe impl<'a, T> TrustedLen for Iter<'a, T> {}
// Cloning a shared-slice iterator only copies the two cursors; it does
// not require `T: Clone`, so this is hand-written rather than derived.
1470 #[stable(feature = "rust1", since = "1.0.0")]
1471 impl<'a, T> Clone for Iter<'a, T> {
1472 fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
1475 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
1476 impl<'a, T> AsRef<[T]> for Iter<'a, T> {
1477 fn as_ref(&self) -> &[T] {
1482 /// Mutable slice iterator.
1484 /// This struct is created by the [`iter_mut`] method on [slices].
1491 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
1492 /// // struct (&[usize here]):
1493 /// let mut slice = &mut [1, 2, 3];
1495 /// // Then, we iterate over it and increment each element value:
1496 /// for element in slice.iter_mut() {
1500 /// // We now have "[2, 3, 4]":
1501 /// println!("{:?}", slice);
1504 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
1505 /// [slices]: ../../std/primitive.slice.html
1506 #[stable(feature = "rust1", since = "1.0.0")]
// Pointer-pair iterator like `Iter`, but over `*mut T`.
1507 pub struct IterMut<'a, T: 'a> {
// PhantomData records the unique (&mut) borrow of the source slice.
1510 _marker: marker::PhantomData<&'a mut T>,
1513 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1514 impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
1515 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Only a shared view is needed for printing, so `make_slice!` suffices.
1516 f.debug_tuple("IterMut")
1517 .field(&make_slice!(self.ptr, self.end))
1522 #[stable(feature = "rust1", since = "1.0.0")]
1523 unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
// `IterMut` yields `&mut T`, i.e. it can move `T` values across threads,
// so `Send` requires `T: Send` (contrast with `Iter`, which needs `T: Sync`).
1524 #[stable(feature = "rust1", since = "1.0.0")]
1525 unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
1527 impl<'a, T> IterMut<'a, T> {
1528 /// View the underlying data as a subslice of the original data.
1530 /// To avoid creating `&mut` references that alias, this is forced
1531 /// to consume the iterator. Consider using the `Slice` and
1532 /// `SliceMut` implementations for obtaining slices with more
1533 /// restricted lifetimes that do not consume the iterator.
1540 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
1541 /// // struct (&[usize here]):
1542 /// let mut slice = &mut [1, 2, 3];
1545 /// // Then, we get the iterator:
1546 /// let mut iter = slice.iter_mut();
1547 /// // We move to next element:
1549 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
1550 /// println!("{:?}", iter.into_slice());
1553 /// // Now let's modify a value of the slice:
1555 /// // First we get back the iterator:
1556 /// let mut iter = slice.iter_mut();
1557 /// // We change the value of the first element of the slice returned by the `next` method:
1558 /// *iter.next().unwrap() += 1;
1560 /// // Now slice is "[2, 2, 3]":
1561 /// println!("{:?}", slice);
1563 #[stable(feature = "iter_to_slice", since = "1.4.0")]
// Takes `self` by value: consuming the iterator guarantees no other
// `&mut` into the same data can still be produced from it.
1564 pub fn into_slice(self) -> &'a mut [T] {
1565 make_mut_slice!(self.ptr, self.end)
1568 // Helper function for IterMut::nth
1569 fn iter_nth(&mut self, n: usize) -> Option<&'a mut T> {
1570 match make_mut_slice!(self.ptr, self.end).get_mut(n) {
1571 Some(elem_ref) => unsafe {
// Advance past the returned element (`n` skipped + 1 yielded).
1572 self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
// Fewer than `n + 1` elements remain: exhaust the iterator.
1576 self.ptr = self.end;
// Generate the Iterator/DoubleEndedIterator machinery for `IterMut`.
1583 iterator!{struct IterMut -> *mut T, &'a mut T, make_ref_mut}
1585 #[stable(feature = "rust1", since = "1.0.0")]
1586 impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
// Exhausted exactly when the cursor has caught up with `end`.
1587 fn is_empty(&self) -> bool {
1588 self.ptr == self.end
1592 #[stable(feature = "fused", since = "1.25.0")]
1593 impl<'a, T> FusedIterator for IterMut<'a, T> {}
1595 #[unstable(feature = "trusted_len", issue = "37572")]
1596 unsafe impl<'a, T> TrustedLen for IterMut<'a, T> {}
1599 // Return the number of elements of `T` from `start` to `end`.
1600 // Return the arithmetic difference if `T` is zero size.
1602 fn ptrdistance<T>(start: *const T, end: *const T) -> usize {
// `offset_to` yields the element distance; it returns `None` for
// zero-sized `T`, in which case the raw byte/address difference IS the
// element count (the iterators encode ZST length in `end`).
1603 match start.offset_to(end) {
1604 Some(x) => x as usize,
1605 None => (end as usize).wrapping_sub(start as usize),
1609 // Extension methods for raw pointers, used by the iterators
1610 trait PointerExt : Copy {
// Element-wise offset; each implementor delegates to `slice_offset!`.
1611 unsafe fn slice_offset(self, i: isize) -> Self;
1613 /// Increments `self` by 1, but returns the old value.
1615 unsafe fn post_inc(&mut self) -> Self {
1616 let current = *self;
1617 *self = self.slice_offset(1);
1621 /// Decrements `self` by 1, and returns the new value.
1623 unsafe fn pre_dec(&mut self) -> Self {
1624 *self = self.slice_offset(-1);
// Shared-pointer implementation.
1629 impl<T> PointerExt for *const T {
1631 unsafe fn slice_offset(self, i: isize) -> Self {
1632 slice_offset!(self, i)
// Mutable-pointer implementation; identical logic via the same macro.
1636 impl<T> PointerExt for *mut T {
1638 unsafe fn slice_offset(self, i: isize) -> Self {
1639 slice_offset!(self, i)
1643 /// An internal abstraction over the splitting iterators, so that
1644 /// splitn, splitn_mut etc can be implemented once.
1646 trait SplitIter: DoubleEndedIterator {
1647 /// Marks the underlying iterator as complete, extracting the remaining
1648 /// portion of the slice.
// Used by `GenericSplitN` to emit the unsplit tail as the final item.
1649 fn finish(&mut self) -> Option<Self::Item>;
1652 /// An iterator over subslices separated by elements that match a predicate
1655 /// This struct is created by the [`split`] method on [slices].
1657 /// [`split`]: ../../std/primitive.slice.html#method.split
1658 /// [slices]: ../../std/primitive.slice.html
1659 #[stable(feature = "rust1", since = "1.0.0")]
1660 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
1666 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// Manual Debug: only `v` and `finished` are printed; the predicate
// closure has no Debug impl.
1667 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for Split<'a, T, P> where P: FnMut(&T) -> bool {
1668 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1669 f.debug_struct("Split")
1670 .field("v", &self.v)
1671 .field("finished", &self.finished)
1676 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
1677 #[stable(feature = "rust1", since = "1.0.0")]
1678 impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool {
1679 fn clone(&self) -> Split<'a, T, P> {
1682 pred: self.pred.clone(),
1683 finished: self.finished,
1688 #[stable(feature = "rust1", since = "1.0.0")]
1689 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
1690 type Item = &'a [T];
1693 fn next(&mut self) -> Option<&'a [T]> {
1694 if self.finished { return None; }
// Scan for the next separator; if none remains, yield the tail once.
1696 match self.v.iter().position(|x| (self.pred)(x)) {
1697 None => self.finish(),
// Yield everything before the separator; the matched element itself
// is consumed (`idx + 1`) and never yielded.
1699 let ret = Some(&self.v[..idx]);
1700 self.v = &self.v[idx + 1..];
1707 fn size_hint(&self) -> (usize, Option<usize>) {
// At least the tail is yielded; at most one subslice per element + 1.
1711 (1, Some(self.v.len() + 1))
1716 #[stable(feature = "rust1", since = "1.0.0")]
1717 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
1719 fn next_back(&mut self) -> Option<&'a [T]> {
1720 if self.finished { return None; }
// Mirror of `next`: search from the back with `rposition`.
1722 match self.v.iter().rposition(|x| (self.pred)(x)) {
1723 None => self.finish(),
1725 let ret = Some(&self.v[idx + 1..]);
1726 self.v = &self.v[..idx];
1733 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
// Emit the remaining slice exactly once, then stay exhausted.
1735 fn finish(&mut self) -> Option<&'a [T]> {
1736 if self.finished { None } else { self.finished = true; Some(self.v) }
1740 #[stable(feature = "fused", since = "1.25.0")]
1741 impl<'a, T, P> FusedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {}
1743 /// An iterator over the subslices of the vector which are separated
1744 /// by elements that match `pred`.
1746 /// This struct is created by the [`split_mut`] method on [slices].
1748 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
1749 /// [slices]: ../../std/primitive.slice.html
1750 #[stable(feature = "rust1", since = "1.0.0")]
1751 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
1757 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1758 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1759 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1760 f.debug_struct("SplitMut")
1761 .field("v", &self.v)
1762 .field("finished", &self.finished)
1767 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1769 fn finish(&mut self) -> Option<&'a mut [T]> {
1773 self.finished = true;
// `mem::replace` moves the stored `&mut [T]` out (leaving an empty
// slice) so the full lifetime `'a` can be returned without aliasing.
1774 Some(mem::replace(&mut self.v, &mut []))
1779 #[stable(feature = "rust1", since = "1.0.0")]
1780 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1781 type Item = &'a mut [T];
1784 fn next(&mut self) -> Option<&'a mut [T]> {
1785 if self.finished { return None; }
1787 let idx_opt = { // work around borrowck limitations
// Re-borrow the predicate separately from `self.v` so the closure
// does not capture all of `self`.
1788 let pred = &mut self.pred;
1789 self.v.iter().position(|x| (*pred)(x))
1792 None => self.finish(),
// Take ownership of the slice, split at the separator, yield the
// head, and keep everything after the separator (`tail[1..]`).
1794 let tmp = mem::replace(&mut self.v, &mut []);
1795 let (head, tail) = tmp.split_at_mut(idx);
1796 self.v = &mut tail[1..];
1803 fn size_hint(&self) -> (usize, Option<usize>) {
1807 // if the predicate doesn't match anything, we yield one slice
1808 // if it matches every element, we yield len+1 empty slices.
1809 (1, Some(self.v.len() + 1))
1814 #[stable(feature = "rust1", since = "1.0.0")]
1815 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
1816 P: FnMut(&T) -> bool,
1819 fn next_back(&mut self) -> Option<&'a mut [T]> {
1820 if self.finished { return None; }
1822 let idx_opt = { // work around borrowck limitations
1823 let pred = &mut self.pred;
// Mirror of `next`: search from the back.
1824 self.v.iter().rposition(|x| (*pred)(x))
1827 None => self.finish(),
1829 let tmp = mem::replace(&mut self.v, &mut []);
1830 let (head, tail) = tmp.split_at_mut(idx);
1832 Some(&mut tail[1..])
1838 #[stable(feature = "fused", since = "1.25.0")]
1839 impl<'a, T, P> FusedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
1841 /// An iterator over subslices separated by elements that match a predicate
1842 /// function, starting from the end of the slice.
1844 /// This struct is created by the [`rsplit`] method on [slices].
1846 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
1847 /// [slices]: ../../std/primitive.slice.html
1848 #[unstable(feature = "slice_rsplit", issue = "41020")]
1849 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
// Thin adapter over `Split` with the two ends swapped: `next` delegates
// to `inner.next_back` and vice versa.
1850 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
1851 inner: Split<'a, T, P>
1854 #[unstable(feature = "slice_rsplit", issue = "41020")]
1855 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1856 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1857 f.debug_struct("RSplit")
1858 .field("v", &self.inner.v)
1859 .field("finished", &self.inner.finished)
1864 #[unstable(feature = "slice_rsplit", issue = "41020")]
1865 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1866 type Item = &'a [T];
1869 fn next(&mut self) -> Option<&'a [T]> {
// Forward iteration of RSplit is backward iteration of Split.
1870 self.inner.next_back()
1874 fn size_hint(&self) -> (usize, Option<usize>) {
1875 self.inner.size_hint()
1879 #[unstable(feature = "slice_rsplit", issue = "41020")]
1880 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1882 fn next_back(&mut self) -> Option<&'a [T]> {
1887 #[unstable(feature = "slice_rsplit", issue = "41020")]
1888 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
1890 fn finish(&mut self) -> Option<&'a [T]> {
1895 //#[stable(feature = "fused", since = "1.25.0")]
1896 #[unstable(feature = "slice_rsplit", issue = "41020")]
1897 impl<'a, T, P> FusedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {}
1899 /// An iterator over the subslices of the vector which are separated
1900 /// by elements that match `pred`, starting from the end of the slice.
1902 /// This struct is created by the [`rsplit_mut`] method on [slices].
1904 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
1905 /// [slices]: ../../std/primitive.slice.html
1906 #[unstable(feature = "slice_rsplit", issue = "41020")]
// Thin adapter over `SplitMut` with the two ends swapped, mirroring
// the `RSplit`/`Split` pair above.
1907 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
1908 inner: SplitMut<'a, T, P>
1911 #[unstable(feature = "slice_rsplit", issue = "41020")]
1912 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1913 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1914 f.debug_struct("RSplitMut")
1915 .field("v", &self.inner.v)
1916 .field("finished", &self.inner.finished)
1921 #[unstable(feature = "slice_rsplit", issue = "41020")]
1922 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1924 fn finish(&mut self) -> Option<&'a mut [T]> {
1929 #[unstable(feature = "slice_rsplit", issue = "41020")]
1930 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1931 type Item = &'a mut [T];
1934 fn next(&mut self) -> Option<&'a mut [T]> {
// Forward iteration of RSplitMut is backward iteration of SplitMut.
1935 self.inner.next_back()
1939 fn size_hint(&self) -> (usize, Option<usize>) {
1940 self.inner.size_hint()
1944 #[unstable(feature = "slice_rsplit", issue = "41020")]
1945 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
1946 P: FnMut(&T) -> bool,
1949 fn next_back(&mut self) -> Option<&'a mut [T]> {
1954 //#[stable(feature = "fused", since = "1.25.0")]
1955 #[unstable(feature = "slice_rsplit", issue = "41020")]
1956 impl<'a, T, P> FusedIterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
1958 /// A private iterator over subslices separated by elements that
1959 /// match a predicate function, splitting at most a fixed number of
// Shared engine behind SplitN/RSplitN/SplitNMut/RSplitNMut; wraps any
// `SplitIter` together with a remaining-piece counter.
1962 struct GenericSplitN<I> {
1967 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
1971 fn next(&mut self) -> Option<T> {
// With one piece left, stop splitting and emit the unsplit remainder
// via `finish`; otherwise delegate one split to the inner iterator.
1974 1 => { self.count -= 1; self.iter.finish() }
1975 _ => { self.count -= 1; self.iter.next() }
1980 fn size_hint(&self) -> (usize, Option<usize>) {
1981 let (lower, upper_opt) = self.iter.size_hint();
// The piece budget caps the inner iterator's upper bound.
1982 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
1986 /// An iterator over subslices separated by elements that match a predicate
1987 /// function, limited to a given number of splits.
1989 /// This struct is created by the [`splitn`] method on [slices].
1991 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
1992 /// [slices]: ../../std/primitive.slice.html
1993 #[stable(feature = "rust1", since = "1.0.0")]
// Each of the four types below is a newtype over `GenericSplitN` with the
// matching base splitter; iteration is generated by `forward_iterator!`.
1994 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
1995 inner: GenericSplitN<Split<'a, T, P>>
1998 #[stable(feature = "core_impl_debug", since = "1.9.0")]
1999 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitN<'a, T, P> where P: FnMut(&T) -> bool {
2000 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2001 f.debug_struct("SplitN")
2002 .field("inner", &self.inner)
2007 /// An iterator over subslices separated by elements that match a
2008 /// predicate function, limited to a given number of splits, starting
2009 /// from the end of the slice.
2011 /// This struct is created by the [`rsplitn`] method on [slices].
2013 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
2014 /// [slices]: ../../std/primitive.slice.html
2015 #[stable(feature = "rust1", since = "1.0.0")]
2016 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
2017 inner: GenericSplitN<RSplit<'a, T, P>>
2020 #[stable(feature = "core_impl_debug", since = "1.9.0")]
2021 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitN<'a, T, P> where P: FnMut(&T) -> bool {
2022 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2023 f.debug_struct("RSplitN")
2024 .field("inner", &self.inner)
2029 /// An iterator over subslices separated by elements that match a predicate
2030 /// function, limited to a given number of splits.
2032 /// This struct is created by the [`splitn_mut`] method on [slices].
2034 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
2035 /// [slices]: ../../std/primitive.slice.html
2036 #[stable(feature = "rust1", since = "1.0.0")]
2037 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
2038 inner: GenericSplitN<SplitMut<'a, T, P>>
2041 #[stable(feature = "core_impl_debug", since = "1.9.0")]
2042 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
2043 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2044 f.debug_struct("SplitNMut")
2045 .field("inner", &self.inner)
2050 /// An iterator over subslices separated by elements that match a
2051 /// predicate function, limited to a given number of splits, starting
2052 /// from the end of the slice.
2054 /// This struct is created by the [`rsplitn_mut`] method on [slices].
2056 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
2057 /// [slices]: ../../std/primitive.slice.html
2058 #[stable(feature = "rust1", since = "1.0.0")]
2059 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
2060 inner: GenericSplitN<RSplitMut<'a, T, P>>
2063 #[stable(feature = "core_impl_debug", since = "1.9.0")]
2064 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
2065 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2066 f.debug_struct("RSplitNMut")
2067 .field("inner", &self.inner)
// Generates `Iterator` + `FusedIterator` impls that simply delegate to
// the `inner: GenericSplitN<_>` field of the named wrapper type.
2072 macro_rules! forward_iterator {
2073 ($name:ident: $elem:ident, $iter_of:ty) => {
2074 #[stable(feature = "rust1", since = "1.0.0")]
2075 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
2076 P: FnMut(&T) -> bool
2078 type Item = $iter_of;
2081 fn next(&mut self) -> Option<$iter_of> {
2086 fn size_hint(&self) -> (usize, Option<usize>) {
2087 self.inner.size_hint()
2091 #[stable(feature = "fused", since = "1.25.0")]
2092 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
2093 where P: FnMut(&T) -> bool {}
// One invocation per `splitn`-style wrapper defined above.
2097 forward_iterator! { SplitN: T, &'a [T] }
2098 forward_iterator! { RSplitN: T, &'a [T] }
2099 forward_iterator! { SplitNMut: T, &'a mut [T] }
2100 forward_iterator! { RSplitNMut: T, &'a mut [T] }
2102 /// An iterator over overlapping subslices of length `size`.
2104 /// This struct is created by the [`windows`] method on [slices].
2106 /// [`windows`]: ../../std/primitive.slice.html#method.windows
2107 /// [slices]: ../../std/primitive.slice.html
2109 #[stable(feature = "rust1", since = "1.0.0")]
2110 pub struct Windows<'a, T:'a> {
2115 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
2116 #[stable(feature = "rust1", since = "1.0.0")]
2117 impl<'a, T> Clone for Windows<'a, T> {
2118 fn clone(&self) -> Windows<'a, T> {
2126 #[stable(feature = "rust1", since = "1.0.0")]
2127 impl<'a, T> Iterator for Windows<'a, T> {
2128 type Item = &'a [T];
2131 fn next(&mut self) -> Option<&'a [T]> {
// Not enough elements left for a full window.
2132 if self.size > self.v.len() {
// Yield the leading window, then advance the base slice by ONE
// element — windows overlap.
2135 let ret = Some(&self.v[..self.size]);
2136 self.v = &self.v[1..];
2142 fn size_hint(&self) -> (usize, Option<usize>) {
2143 if self.size > self.v.len() {
// Exactly `len - size + 1` windows remain.
2146 let size = self.v.len() - self.size + 1;
2152 fn count(self) -> usize {
2157 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `end` is the exclusive end of the n-th window; `overflowing_add`
// guards against `size + n` wrapping around.
2158 let (end, overflow) = self.size.overflowing_add(n);
2159 if end > self.v.len() || overflow {
2163 let nth = &self.v[n..end];
2164 self.v = &self.v[n+1..];
2170 fn last(self) -> Option<Self::Item> {
2171 if self.size > self.v.len() {
2174 let start = self.v.len() - self.size;
2175 Some(&self.v[start..])
2180 #[stable(feature = "rust1", since = "1.0.0")]
2181 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
2183 fn next_back(&mut self) -> Option<&'a [T]> {
2184 if self.size > self.v.len() {
// Yield the trailing window, then shrink the base slice by one
// element from the back.
2187 let ret = Some(&self.v[self.v.len()-self.size..]);
2188 self.v = &self.v[..self.v.len()-1];
2194 #[stable(feature = "rust1", since = "1.0.0")]
2195 impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
2197 #[stable(feature = "fused", since = "1.25.0")]
2198 impl<'a, T> FusedIterator for Windows<'a, T> {}
2201 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
// Caller contract (TrustedRandomAccess): `i` is in bounds, so the
// window starting at `i` lies entirely within `v`.
2202 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
2203 from_raw_parts(self.v.as_ptr().offset(i as isize), self.size)
2205 fn may_have_side_effect() -> bool { false }
2208 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
2211 /// When the slice len is not evenly divided by the chunk size, the last slice
2212 /// of the iteration will be the remainder.
2214 /// This struct is created by the [`chunks`] method on [slices].
2216 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
2217 /// [slices]: ../../std/primitive.slice.html
2219 #[stable(feature = "rust1", since = "1.0.0")]
2220 pub struct Chunks<'a, T:'a> {
2225 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
2226 #[stable(feature = "rust1", since = "1.0.0")]
2227 impl<'a, T> Clone for Chunks<'a, T> {
2228 fn clone(&self) -> Chunks<'a, T> {
2231 chunk_size: self.chunk_size,
2236 #[stable(feature = "rust1", since = "1.0.0")]
2237 impl<'a, T> Iterator for Chunks<'a, T> {
2238 type Item = &'a [T];
2241 fn next(&mut self) -> Option<&'a [T]> {
2242 if self.v.is_empty() {
// The final chunk may be shorter than `chunk_size`, hence the `min`.
2245 let chunksz = cmp::min(self.v.len(), self.chunk_size);
2246 let (fst, snd) = self.v.split_at(chunksz);
2253 fn size_hint(&self) -> (usize, Option<usize>) {
2254 if self.v.is_empty() {
// Ceiling division: a non-zero remainder adds one short final chunk.
2257 let n = self.v.len() / self.chunk_size;
2258 let rem = self.v.len() % self.chunk_size;
2259 let n = if rem > 0 { n+1 } else { n };
2265 fn count(self) -> usize {
2270 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `overflowing_mul` guards against `n * chunk_size` wrapping around.
2271 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2272 if start >= self.v.len() || overflow {
2276 let end = match start.checked_add(self.chunk_size) {
2277 Some(sum) => cmp::min(self.v.len(), sum),
2278 None => self.v.len(),
2280 let nth = &self.v[start..end];
2281 self.v = &self.v[end..];
2287 fn last(self) -> Option<Self::Item> {
2288 if self.v.is_empty() {
// Round the last index down to the nearest chunk boundary.
2291 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
2292 Some(&self.v[start..])
2297 #[stable(feature = "rust1", since = "1.0.0")]
2298 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
2300 fn next_back(&mut self) -> Option<&'a [T]> {
2301 if self.v.is_empty() {
// From the back the FIRST chunk yielded is the (possibly short)
// remainder; every later one is a full `chunk_size` chunk.
2304 let remainder = self.v.len() % self.chunk_size;
2305 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
2306 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
2313 #[stable(feature = "rust1", since = "1.0.0")]
2314 impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
2316 #[stable(feature = "fused", since = "1.25.0")]
2317 impl<'a, T> FusedIterator for Chunks<'a, T> {}
2320 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
// Caller contract (TrustedRandomAccess): chunk index `i` is in bounds;
// the length is clamped so the final chunk may be short.
2321 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
2322 let start = i * self.chunk_size;
2323 let end = match start.checked_add(self.chunk_size) {
2324 None => self.v.len(),
2325 Some(end) => cmp::min(end, self.v.len()),
2327 from_raw_parts(self.v.as_ptr().offset(start as isize), end - start)
2329 fn may_have_side_effect() -> bool { false }
2332 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
2333 /// elements at a time). When the slice len is not evenly divided by the chunk
2334 /// size, the last slice of the iteration will be the remainder.
2336 /// This struct is created by the [`chunks_mut`] method on [slices].
2338 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
2339 /// [slices]: ../../std/primitive.slice.html
2341 #[stable(feature = "rust1", since = "1.0.0")]
2342 pub struct ChunksMut<'a, T:'a> {
2347 #[stable(feature = "rust1", since = "1.0.0")]
2348 impl<'a, T> Iterator for ChunksMut<'a, T> {
2349 type Item = &'a mut [T];
2352 fn next(&mut self) -> Option<&'a mut [T]> {
2353 if self.v.is_empty() {
2356 let sz = cmp::min(self.v.len(), self.chunk_size);
// Move the stored slice out (leaving an empty one) so head and tail
// can carry the full `'a` lifetime without aliasing `self`.
2357 let tmp = mem::replace(&mut self.v, &mut []);
2358 let (head, tail) = tmp.split_at_mut(sz);
2365 fn size_hint(&self) -> (usize, Option<usize>) {
2366 if self.v.is_empty() {
// Ceiling division, same as the shared `Chunks` iterator.
2369 let n = self.v.len() / self.chunk_size;
2370 let rem = self.v.len() % self.chunk_size;
2371 let n = if rem > 0 { n + 1 } else { n };
2377 fn count(self) -> usize {
2382 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
// `overflowing_mul` guards against `n * chunk_size` wrapping around.
2383 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2384 if start >= self.v.len() || overflow {
2388 let end = match start.checked_add(self.chunk_size) {
2389 Some(sum) => cmp::min(self.v.len(), sum),
2390 None => self.v.len(),
// Two splits carve out exactly the n-th chunk: [..start][start..end][end..].
2392 let tmp = mem::replace(&mut self.v, &mut []);
2393 let (head, tail) = tmp.split_at_mut(end);
2394 let (_, nth) = head.split_at_mut(start);
2401 fn last(self) -> Option<Self::Item> {
2402 if self.v.is_empty() {
2405 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
2406 Some(&mut self.v[start..])
2411 #[stable(feature = "rust1", since = "1.0.0")]
2412 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
2414 fn next_back(&mut self) -> Option<&'a mut [T]> {
2415 if self.v.is_empty() {
// The first chunk from the back is the (possibly short) remainder.
2418 let remainder = self.v.len() % self.chunk_size;
2419 let sz = if remainder != 0 { remainder } else { self.chunk_size };
2420 let tmp = mem::replace(&mut self.v, &mut []);
// Length captured before `split_at_mut` consumes `tmp`.
2421 let tmp_len = tmp.len();
2422 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
2429 #[stable(feature = "rust1", since = "1.0.0")]
2430 impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
2432 #[stable(feature = "fused", since = "1.25.0")]
2433 impl<'a, T> FusedIterator for ChunksMut<'a, T> {}
2436 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
// Caller contract (TrustedRandomAccess): chunk index `i` is in bounds.
2437 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
2438 let start = i * self.chunk_size;
2439 let end = match start.checked_add(self.chunk_size) {
2440 None => self.v.len(),
2441 Some(end) => cmp::min(end, self.v.len()),
2443 from_raw_parts_mut(self.v.as_mut_ptr().offset(start as isize), end - start)
2445 fn may_have_side_effect() -> bool { false }
2448 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
2451 /// When the slice len is not evenly divided by the chunk size, the last
2452 /// up to `chunk_size-1` elements will be omitted.
2454 /// This struct is created by the [`exact_chunks`] method on [slices].
2456 /// [`exact_chunks`]: ../../std/primitive.slice.html#method.exact_chunks
2457 /// [slices]: ../../std/primitive.slice.html
2459 #[unstable(feature = "exact_chunks", issue = "47115")]
// Unlike `Chunks`, every yielded chunk is exactly `chunk_size` long;
// any trailing remainder is never yielded.
2460 pub struct ExactChunks<'a, T:'a> {
2465 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
2466 #[unstable(feature = "exact_chunks", issue = "47115")]
2467 impl<'a, T> Clone for ExactChunks<'a, T> {
2468 fn clone(&self) -> ExactChunks<'a, T> {
2471 chunk_size: self.chunk_size,
2476 #[unstable(feature = "exact_chunks", issue = "47115")]
2477 impl<'a, T> Iterator for ExactChunks<'a, T> {
2478 type Item = &'a [T];
2481 fn next(&mut self) -> Option<&'a [T]> {
// Stop as soon as a full chunk no longer fits; the remainder is dropped.
2482 if self.v.len() < self.chunk_size {
2485 let (fst, snd) = self.v.split_at(self.chunk_size);
2492 fn size_hint(&self) -> (usize, Option<usize>) {
// Floor division — only whole chunks count.
2493 let n = self.v.len() / self.chunk_size;
2498 fn count(self) -> usize {
2503 fn nth(&mut self, n: usize) -> Option<Self::Item> {
2504 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2505 if start >= self.v.len() || overflow {
2509 let (_, snd) = self.v.split_at(start);
2516 fn last(mut self) -> Option<Self::Item> {
2521 #[unstable(feature = "exact_chunks", issue = "47115")]
2522 impl<'a, T> DoubleEndedIterator for ExactChunks<'a, T> {
2524 fn next_back(&mut self) -> Option<&'a [T]> {
2525 if self.v.len() < self.chunk_size {
// NOTE(review): splits a full chunk off the current back; assumes the
// stored `v` has already been trimmed to a multiple of `chunk_size`
// by the constructor — confirm against `exact_chunks`'s definition.
2528 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
2535 #[unstable(feature = "exact_chunks", issue = "47115")]
2536 impl<'a, T> ExactSizeIterator for ExactChunks<'a, T> {
2537 fn is_empty(&self) -> bool {
2542 #[stable(feature = "fused", since = "1.25.0")]
2543 impl<'a, T> FusedIterator for ExactChunks<'a, T> {}
2546 unsafe impl<'a, T> TrustedRandomAccess for ExactChunks<'a, T> {
// Caller contract (TrustedRandomAccess): `i` indexes a whole chunk, so
// no length clamping is needed here.
2547 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
2548 let start = i * self.chunk_size;
2549 from_raw_parts(self.v.as_ptr().offset(start as isize), self.chunk_size)
2551 fn may_have_side_effect() -> bool { false }
2554 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
2555 /// elements at a time). When the slice len is not evenly divided by the chunk
2556 /// size, the last up to `chunk_size-1` elements will be omitted.
2558 /// This struct is created by the [`exact_chunks_mut`] method on [slices].
2560 /// [`exact_chunks_mut`]: ../../std/primitive.slice.html#method.exact_chunks_mut
2561 /// [slices]: ../../std/primitive.slice.html
2563 #[unstable(feature = "exact_chunks", issue = "47115")]
2564 pub struct ExactChunksMut<'a, T:'a> {
2569 #[unstable(feature = "exact_chunks", issue = "47115")]
2570 impl<'a, T> Iterator for ExactChunksMut<'a, T> {
2571 type Item = &'a mut [T];
2574 fn next(&mut self) -> Option<&'a mut [T]> {
2575 if self.v.len() < self.chunk_size {
2578 let tmp = mem::replace(&mut self.v, &mut []);
2579 let (head, tail) = tmp.split_at_mut(self.chunk_size);
2586 fn size_hint(&self) -> (usize, Option<usize>) {
2587 let n = self.v.len() / self.chunk_size;
2592 fn count(self) -> usize {
2597 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
2598 let (start, overflow) = n.overflowing_mul(self.chunk_size);
2599 if start >= self.v.len() || overflow {
2603 let tmp = mem::replace(&mut self.v, &mut []);
2604 let (_, snd) = tmp.split_at_mut(start);
2611 fn last(mut self) -> Option<Self::Item> {
2616 #[unstable(feature = "exact_chunks", issue = "47115")]
2617 impl<'a, T> DoubleEndedIterator for ExactChunksMut<'a, T> {
2619 fn next_back(&mut self) -> Option<&'a mut [T]> {
2620 if self.v.len() < self.chunk_size {
2623 let tmp = mem::replace(&mut self.v, &mut []);
2624 let tmp_len = tmp.len();
2625 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
2632 #[unstable(feature = "exact_chunks", issue = "47115")]
2633 impl<'a, T> ExactSizeIterator for ExactChunksMut<'a, T> {
2634 fn is_empty(&self) -> bool {
// Marker impl: `next` returns `None` exactly while fewer than `chunk_size`
// elements remain, and the remainder only shrinks, so `None` is permanent.
#[stable(feature = "fused", since = "1.25.0")]
impl<'a, T> FusedIterator for ExactChunksMut<'a, T> {}
2643 unsafe impl<'a, T> TrustedRandomAccess for ExactChunksMut<'a, T> {
2644 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
2645 let start = i * self.chunk_size;
2646 from_raw_parts_mut(self.v.as_mut_ptr().offset(start as isize), self.chunk_size)
2648 fn may_have_side_effect() -> bool { false }
2655 /// Forms a slice from a pointer and a length.
2657 /// The `len` argument is the number of **elements**, not the number of bytes.
2661 /// This function is unsafe as there is no guarantee that the given pointer is
2662 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
2663 /// lifetime for the returned slice.
2665 /// `p` must be non-null, even for zero-length slices, because non-zero bits
2666 /// are required to distinguish between a zero-length slice within `Some()`
2667 /// from `None`. `p` can be a bogus non-dereferencable pointer, such as `0x1`,
2668 /// for zero-length slices, though.
2672 /// The lifetime for the returned slice is inferred from its usage. To
2673 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
2674 /// source lifetime is safe in the context, such as by providing a helper
2675 /// function taking the lifetime of a host value for the slice, or by explicit
2683 /// // manifest a slice out of thin air!
2684 /// let ptr = 0x1234 as *const usize;
2687 /// let slice = slice::from_raw_parts(ptr, amt);
2691 #[stable(feature = "rust1", since = "1.0.0")]
2692 pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] {
2693 mem::transmute(Repr { data: p, len: len })
2696 /// Performs the same functionality as `from_raw_parts`, except that a mutable
2697 /// slice is returned.
2699 /// This function is unsafe for the same reasons as `from_raw_parts`, as well
2700 /// as not being able to provide a non-aliasing guarantee of the returned
2701 /// mutable slice. `p` must be non-null even for zero-length slices as with
2702 /// `from_raw_parts`.
2704 #[stable(feature = "rust1", since = "1.0.0")]
2705 pub unsafe fn from_raw_parts_mut<'a, T>(p: *mut T, len: usize) -> &'a mut [T] {
2706 mem::transmute(Repr { data: p, len: len })
2709 /// Converts a reference to T into a slice of length 1 (without copying).
2710 #[unstable(feature = "from_ref", issue = "45703")]
pub fn from_ref<T>(s: &T) -> &[T] {
    // SAFETY: a shared reference is non-null, aligned, and valid for reads
    // of exactly one `T` for the duration of its lifetime.
    unsafe {
        from_raw_parts(s, 1)
    }
}
/// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
2718 #[unstable(feature = "from_ref", issue = "45703")]
pub fn from_ref_mut<T>(s: &mut T) -> &mut [T] {
    // SAFETY: a unique reference is non-null, aligned, valid for reads and
    // writes of exactly one `T`, and unaliased for its lifetime.
    unsafe {
        from_raw_parts_mut(s, 1)
    }
}
2725 // This function is public only because there is no other way to unit test heapsort.
2726 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
2728 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
2729 where F: FnMut(&T, &T) -> bool
2731 sort::heapsort(v, &mut is_less);
2735 // Comparison traits
extern {
    /// Calls implementation provided memcmp.
    ///
    /// Interprets the data as u8.
    ///
    /// Returns 0 for equal, < 0 for less than and > 0 for greater
    /// than.
    // FIXME(#32610): Return type should be c_int
    fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
2749 #[stable(feature = "rust1", since = "1.0.0")]
2750 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
2751 fn eq(&self, other: &[B]) -> bool {
2752 SlicePartialEq::equal(self, other)
2755 fn ne(&self, other: &[B]) -> bool {
2756 SlicePartialEq::not_equal(self, other)
// `Eq` is a marker with no methods: slices of totally-equatable elements are
// themselves totally equatable, so an empty impl suffices.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
2763 /// Implements comparison of vectors lexicographically.
2764 #[stable(feature = "rust1", since = "1.0.0")]
2765 impl<T: Ord> Ord for [T] {
2766 fn cmp(&self, other: &[T]) -> Ordering {
2767 SliceOrd::compare(self, other)
2771 /// Implements comparison of vectors lexicographically.
2772 #[stable(feature = "rust1", since = "1.0.0")]
2773 impl<T: PartialOrd> PartialOrd for [T] {
2774 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
2775 SlicePartialOrd::partial_compare(self, other)
// Intermediate trait for specialization of slice's PartialEq.
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    // Default negation: specializations only need to override `equal`.
    fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
}
2787 // Generic slice equality
2788 impl<A, B> SlicePartialEq<B> for [A]
2789 where A: PartialEq<B>
2791 default fn equal(&self, other: &[B]) -> bool {
2792 if self.len() != other.len() {
2796 for i in 0..self.len() {
2797 if !self[i].eq(&other[i]) {
2806 // Use memcmp for bytewise equality when the types allow
2807 impl<A> SlicePartialEq<A> for [A]
2808 where A: PartialEq<A> + BytewiseEquality
2810 fn equal(&self, other: &[A]) -> bool {
2811 if self.len() != other.len() {
2814 if self.as_ptr() == other.as_ptr() {
2818 let size = mem::size_of_val(self);
2819 memcmp(self.as_ptr() as *const u8,
2820 other.as_ptr() as *const u8, size) == 0
// Intermediate trait for specialization of slice's PartialOrd.
trait SlicePartialOrd<B> {
    fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
}
2831 impl<A> SlicePartialOrd<A> for [A]
2834 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
2835 let l = cmp::min(self.len(), other.len());
2837 // Slice to the loop iteration range to enable bound check
2838 // elimination in the compiler
2839 let lhs = &self[..l];
2840 let rhs = &other[..l];
2843 match lhs[i].partial_cmp(&rhs[i]) {
2844 Some(Ordering::Equal) => (),
2845 non_eq => return non_eq,
2849 self.len().partial_cmp(&other.len())
2853 impl<A> SlicePartialOrd<A> for [A]
2856 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
2857 Some(SliceOrd::compare(self, other))
// Intermediate trait for specialization of slice's Ord.
trait SliceOrd<B> {
    fn compare(&self, other: &[B]) -> Ordering;
}
2867 impl<A> SliceOrd<A> for [A]
2870 default fn compare(&self, other: &[A]) -> Ordering {
2871 let l = cmp::min(self.len(), other.len());
2873 // Slice to the loop iteration range to enable bound check
2874 // elimination in the compiler
2875 let lhs = &self[..l];
2876 let rhs = &other[..l];
2879 match lhs[i].cmp(&rhs[i]) {
2880 Ordering::Equal => (),
2881 non_eq => return non_eq,
2885 self.len().cmp(&other.len())
2889 // memcmp compares a sequence of unsigned bytes lexicographically.
2890 // this matches the order we want for [u8], but no others (not even [i8]).
2891 impl SliceOrd<u8> for [u8] {
2893 fn compare(&self, other: &[u8]) -> Ordering {
2894 let order = unsafe {
2895 memcmp(self.as_ptr(), other.as_ptr(),
2896 cmp::min(self.len(), other.len()))
2899 self.len().cmp(&other.len())
2900 } else if order < 0 {
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
trait BytewiseEquality { }

// Implements a marker trait for each type in the list; used to tag every
// primitive whose `==` coincides with plain byte equality.
macro_rules! impl_marker_for {
    ($traitname:ident, $($ty:ty)*) => {
        $(
            impl $traitname for $ty { }
        )*
    }
}

impl_marker_for!(BytewiseEquality,
                 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
2925 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
2926 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
2927 &*self.ptr.offset(i as isize)
2929 fn may_have_side_effect() -> bool { false }
2933 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
2934 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
2935 &mut *self.ptr.offset(i as isize)
2937 fn may_have_side_effect() -> bool { false }
// Internal membership-test trait: asks whether `self` occurs in slice `x`.
// Specialized below so byte-sized elements can use `memchr`.
trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
2944 impl<T> SliceContains for T where T: PartialEq {
2945 default fn slice_contains(&self, x: &[Self]) -> bool {
2946 x.iter().any(|y| *y == *self)
2950 impl SliceContains for u8 {
2951 fn slice_contains(&self, x: &[Self]) -> bool {
2952 memchr::memchr(*self, x).is_some()
2956 impl SliceContains for i8 {
2957 fn slice_contains(&self, x: &[Self]) -> bool {
2958 let byte = *self as u8;
2959 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
2960 memchr::memchr(byte, bytes).is_some()