1 // ignore-tidy-filelength
3 //! Slice management and manipulation.
5 //! For more details see [`std::slice`].
7 //! [`std::slice`]: ../../std/slice/index.html
9 #![stable(feature = "rust1", since = "1.0.0")]
11 // How this module is organized.
13 // The library infrastructure for slices is fairly messy. There's
14 // a lot of stuff defined here. Let's keep it clean.
16 // The layout of this file is thus:
18 // * Inherent methods. This is where most of the slice API resides.
19 // * Implementations of a few common traits with important slice ops.
20 // * Definitions of a bunch of iterators.
22 // * The `raw` and `bytes` submodules.
23 // * Boilerplate trait implementations.
25 use crate::cmp::Ordering::{self, Less, Equal, Greater};
28 use crate::intrinsics::{assume, exact_div, unchecked_sub, is_aligned_and_not_null};
31 use crate::ops::{FnMut, Try, self};
32 use crate::option::Option;
33 use crate::option::Option::{None, Some};
34 use crate::result::Result;
35 use crate::result::Result::{Ok, Err};
38 use crate::marker::{Copy, Send, Sync, Sized, self};
40 #[unstable(feature = "slice_internals", issue = "0",
41 reason = "exposed from core to be reused in std; use the memchr crate")]
42 /// Pure rust memchr implementation, taken from rust-memchr
55 /// Returns the number of elements in the slice.
60 /// let a = [1, 2, 3];
61 /// assert_eq!(a.len(), 3);
63 #[stable(feature = "rust1", since = "1.0.0")]
65 #[rustc_const_unstable(feature = "const_slice_len")]
66 pub const fn len(&self) -> usize {
// Reads the length field straight out of the raw slice representation —
// no pointer dereference, which is what lets this be a `const fn`.
68 crate::ptr::Repr { rust: self }.raw.len
72 /// Returns `true` if the slice has a length of 0.
77 /// let a = [1, 2, 3];
78 /// assert!(!a.is_empty());
80 #[stable(feature = "rust1", since = "1.0.0")]
82 #[rustc_const_unstable(feature = "const_slice_len")]
83 pub const fn is_empty(&self) -> bool {
// NOTE(review): body elided in this view — presumably `self.len() == 0`;
// confirm against the full source.
87 /// Returns the first element of the slice, or `None` if it is empty.
92 /// let v = [10, 40, 30];
93 /// assert_eq!(Some(&10), v.first());
95 /// let w: &[i32] = &[];
96 /// assert_eq!(None, w.first());
98 #[stable(feature = "rust1", since = "1.0.0")]
100 pub fn first(&self) -> Option<&T> {
// NOTE(review): body elided in this view; per the docs above it yields the
// first element, or `None` when the slice is empty — confirm against full source.
104 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
109 /// let x = &mut [0, 1, 2];
111 /// if let Some(first) = x.first_mut() {
114 /// assert_eq!(x, &[5, 1, 2]);
116 #[stable(feature = "rust1", since = "1.0.0")]
118 pub fn first_mut(&mut self) -> Option<&mut T> {
// NOTE(review): body elided in this view; per the docs above it yields a
// mutable borrow of the first element, or `None` when empty — confirm.
122 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
127 /// let x = &[0, 1, 2];
129 /// if let Some((first, elements)) = x.split_first() {
130 /// assert_eq!(first, &0);
131 /// assert_eq!(elements, &[1, 2]);
134 #[stable(feature = "slice_splits", since = "1.5.0")]
136 pub fn split_first(&self) -> Option<(&T, &[T])> {
// An empty slice has no head; otherwise pair the first element with the tail.
137 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
140 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
145 /// let x = &mut [0, 1, 2];
147 /// if let Some((first, elements)) = x.split_first_mut() {
152 /// assert_eq!(x, &[3, 4, 5]);
154 #[stable(feature = "slice_splits", since = "1.5.0")]
156 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
157 if self.is_empty() { None } else {
// split_at_mut(1) yields two *disjoint* mutable subslices, which is what
// makes handing out both borrows at once sound.
158 let split = self.split_at_mut(1);
159 Some((&mut split.0[0], split.1))
163 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
168 /// let x = &[0, 1, 2];
170 /// if let Some((last, elements)) = x.split_last() {
171 /// assert_eq!(last, &2);
172 /// assert_eq!(elements, &[0, 1]);
175 #[stable(feature = "slice_splits", since = "1.5.0")]
177 pub fn split_last(&self) -> Option<(&T, &[T])> {
178 let len = self.len();
// Pair the final element with everything before it; empty slice → None.
179 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
182 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
187 /// let x = &mut [0, 1, 2];
189 /// if let Some((last, elements)) = x.split_last_mut() {
194 /// assert_eq!(x, &[4, 5, 3]);
196 #[stable(feature = "slice_splits", since = "1.5.0")]
198 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
199 let len = self.len();
200 if len == 0 { None } else {
// Split just before the last element: split.1 is the one-element tail,
// split.0 the (disjoint) rest — so both mutable borrows are sound.
201 let split = self.split_at_mut(len - 1);
202 Some((&mut split.1[0], split.0))
207 /// Returns the last element of the slice, or `None` if it is empty.
212 /// let v = [10, 40, 30];
213 /// assert_eq!(Some(&30), v.last());
215 /// let w: &[i32] = &[];
216 /// assert_eq!(None, w.last());
218 #[stable(feature = "rust1", since = "1.0.0")]
220 pub fn last(&self) -> Option<&T> {
// checked_sub yields None for an empty slice, short-circuiting through `?`.
221 let last_idx = self.len().checked_sub(1)?;
225 /// Returns a mutable reference to the last item in the slice, or `None` if it is empty.
230 /// let x = &mut [0, 1, 2];
232 /// if let Some(last) = x.last_mut() {
235 /// assert_eq!(x, &[0, 1, 10]);
237 #[stable(feature = "rust1", since = "1.0.0")]
239 pub fn last_mut(&mut self) -> Option<&mut T> {
// checked_sub yields None for an empty slice, short-circuiting through `?`;
// get_mut then cannot fail, but stays in safe code.
240 let last_idx = self.len().checked_sub(1)?;
241 self.get_mut(last_idx)
244 /// Returns a reference to an element or subslice depending on the type of
247 /// - If given a position, returns a reference to the element at that
248 /// position or `None` if out of bounds.
249 /// - If given a range, returns the subslice corresponding to that range,
250 /// or `None` if out of bounds.
255 /// let v = [10, 40, 30];
256 /// assert_eq!(Some(&40), v.get(1));
257 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
258 /// assert_eq!(None, v.get(3));
259 /// assert_eq!(None, v.get(0..4));
261 #[stable(feature = "rust1", since = "1.0.0")]
263 pub fn get<I>(&self, index: I) -> Option<&I::Output>
264 where I: SliceIndex<Self>
// NOTE(review): body elided in this view — presumably delegates to
// `index.get(self)`; SliceIndex is what lets one method accept both a
// usize position and the range types. Confirm against full source.
269 /// Returns a mutable reference to an element or subslice depending on the
270 /// type of index (see [`get`]) or `None` if the index is out of bounds.
272 /// [`get`]: #method.get
277 /// let x = &mut [0, 1, 2];
279 /// if let Some(elem) = x.get_mut(1) {
282 /// assert_eq!(x, &[0, 42, 2]);
284 #[stable(feature = "rust1", since = "1.0.0")]
286 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
287 where I: SliceIndex<Self>
// NOTE(review): body elided in this view — presumably delegates to
// `index.get_mut(self)`, mirroring `get` above. Confirm against full source.
292 /// Returns a reference to an element or subslice, without doing bounds
295 /// This is generally not recommended, use with caution! For a safe
296 /// alternative see [`get`].
298 /// [`get`]: #method.get
303 /// let x = &[1, 2, 4];
306 /// assert_eq!(x.get_unchecked(1), &2);
309 #[stable(feature = "rust1", since = "1.0.0")]
311 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
312 where I: SliceIndex<Self>
// Caller contract: `index` must be in bounds — no check is performed here.
314 index.get_unchecked(self)
317 /// Returns a mutable reference to an element or subslice, without doing
320 /// This is generally not recommended, use with caution! For a safe
321 /// alternative see [`get_mut`].
323 /// [`get_mut`]: #method.get_mut
328 /// let x = &mut [1, 2, 4];
331 /// let elem = x.get_unchecked_mut(1);
334 /// assert_eq!(x, &[1, 13, 4]);
336 #[stable(feature = "rust1", since = "1.0.0")]
338 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
339 where I: SliceIndex<Self>
// Caller contract: `index` must be in bounds — no check is performed here.
341 index.get_unchecked_mut(self)
344 /// Returns a raw pointer to the slice's buffer.
346 /// The caller must ensure that the slice outlives the pointer this
347 /// function returns, or else it will end up pointing to garbage.
349 /// The caller must also ensure that the memory the pointer (non-transitively) points to
350 /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
351 /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
353 /// Modifying the container referenced by this slice may cause its buffer
354 /// to be reallocated, which would also make any pointers to it invalid.
359 /// let x = &[1, 2, 4];
360 /// let x_ptr = x.as_ptr();
363 /// for i in 0..x.len() {
364 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
369 /// [`as_mut_ptr`]: #method.as_mut_ptr
370 #[stable(feature = "rust1", since = "1.0.0")]
372 pub const fn as_ptr(&self) -> *const T {
// A slice pointer and a pointer to its first element share the same
// address, so a plain double cast suffices (and is `const`-compatible).
373 self as *const [T] as *const T
376 /// Returns an unsafe mutable pointer to the slice's buffer.
378 /// The caller must ensure that the slice outlives the pointer this
379 /// function returns, or else it will end up pointing to garbage.
381 /// Modifying the container referenced by this slice may cause its buffer
382 /// to be reallocated, which would also make any pointers to it invalid.
387 /// let x = &mut [1, 2, 4];
388 /// let x_ptr = x.as_mut_ptr();
391 /// for i in 0..x.len() {
392 /// *x_ptr.add(i) += 2;
395 /// assert_eq!(x, &[3, 4, 6]);
397 #[stable(feature = "rust1", since = "1.0.0")]
399 pub fn as_mut_ptr(&mut self) -> *mut T {
// Same representation trick as `as_ptr`, through the mutable raw-pointer types.
400 self as *mut [T] as *mut T
403 /// Swaps two elements in the slice.
407 /// * a - The index of the first element
408 /// * b - The index of the second element
412 /// Panics if `a` or `b` are out of bounds.
417 /// let mut v = ["a", "b", "c", "d"];
419 /// assert!(v == ["a", "d", "c", "b"]);
421 #[stable(feature = "rust1", since = "1.0.0")]
423 pub fn swap(&mut self, a: usize, b: usize) {
425 // Can't take two mutable loans from one vector, so instead just cast
426 // them to their raw pointers to do the swap
// The safe indexing here is also what performs the bounds checks promised
// by the `Panics` section above.
427 let pa: *mut T = &mut self[a];
428 let pb: *mut T = &mut self[b];
// NOTE(review): the actual unsafe ptr::swap call is elided in this view.
433 /// Reverses the order of elements in the slice, in place.
438 /// let mut v = [1, 2, 3];
440 /// assert!(v == [3, 2, 1]);
442 #[stable(feature = "rust1", since = "1.0.0")]
444 pub fn reverse(&mut self) {
445 let mut i: usize = 0;
// NOTE(review): a binding elided here presumably defines `ln` (the slice
// length, used in the loop bounds below) — confirm against full source.
448 // For very small types, all the individual reads in the normal
449 // path perform poorly. We can do better, given efficient unaligned
450 // load/store, by loading a larger chunk and reversing a register.
452 // Ideally LLVM would do this for us, as it knows better than we do
453 // whether unaligned reads are efficient (since that changes between
454 // different ARM versions, for example) and what the best chunk size
455 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
456 // the loop, so we need to do this ourselves. (Hypothesis: reverse
457 // is troublesome because the sides can be aligned differently --
458 // will be, when the length is odd -- so there's no way of emitting
459 // pre- and postludes to use fully-aligned SIMD in the middle.)
// NOTE(review): the `let fast_unaligned =` binding for this cfg! test is
// elided in this view.
462 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
464 if fast_unaligned && mem::size_of::<T>() == 1 {
465 // Use the llvm.bswap intrinsic to reverse u8s in a usize
466 let chunk = mem::size_of::<usize>();
// One usize-wide chunk from each end per iteration; the bytes inside each
// chunk are reversed by swap_bytes before being written to the other side.
467 while i + chunk - 1 < ln / 2 {
469 let pa: *mut T = self.get_unchecked_mut(i);
470 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
471 let va = ptr::read_unaligned(pa as *mut usize);
472 let vb = ptr::read_unaligned(pb as *mut usize);
473 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
474 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
// NOTE(review): the `i += chunk` advance and closing braces are elided here.
480 if fast_unaligned && mem::size_of::<T>() == 2 {
481 // Use rotate-by-16 to reverse u16s in a u32
482 let chunk = mem::size_of::<u32>() / 2;
// Same chunked scheme as above; rotate_left(16) swaps the two u16 halves.
483 while i + chunk - 1 < ln / 2 {
485 let pa: *mut T = self.get_unchecked_mut(i);
486 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
487 let va = ptr::read_unaligned(pa as *mut u32);
488 let vb = ptr::read_unaligned(pb as *mut u32);
489 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
490 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
497 // Unsafe swap to avoid the bounds check in safe swap.
499 let pa: *mut T = self.get_unchecked_mut(i);
500 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
// NOTE(review): the scalar-tail swap (ptr::swap) and loop increment are
// elided in this view.
507 /// Returns an iterator over the slice.
512 /// let x = &[1, 2, 4];
513 /// let mut iterator = x.iter();
515 /// assert_eq!(iterator.next(), Some(&1));
516 /// assert_eq!(iterator.next(), Some(&2));
517 /// assert_eq!(iterator.next(), Some(&4));
518 /// assert_eq!(iterator.next(), None);
520 #[stable(feature = "rust1", since = "1.0.0")]
522 pub fn iter(&self) -> Iter<'_, T> {
524 let ptr = self.as_ptr();
// Slice data pointers are never null; this hint lets codegen drop null checks
// in the iterator's hot path.
525 assume(!ptr.is_null());
527 let end = if mem::size_of::<T>() == 0 {
// ZST case: `add` on a zero-sized T would not advance the address, so the
// length is encoded into the end pointer via u8 address arithmetic instead.
528 (ptr as *const u8).wrapping_add(self.len()) as *const T
536 _marker: marker::PhantomData
541 /// Returns an iterator that allows modifying each value.
546 /// let x = &mut [1, 2, 4];
547 /// for elem in x.iter_mut() {
550 /// assert_eq!(x, &[3, 4, 6]);
552 #[stable(feature = "rust1", since = "1.0.0")]
554 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
556 let ptr = self.as_mut_ptr();
// Slice data pointers are never null; same codegen hint as in `iter`.
557 assume(!ptr.is_null());
559 let end = if mem::size_of::<T>() == 0 {
// ZST case: encode the length into the end pointer's address, since `add`
// on a zero-sized T would not advance it.
560 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
568 _marker: marker::PhantomData
573 /// Returns an iterator over all contiguous windows of length
574 /// `size`. The windows overlap. If the slice is shorter than
575 /// `size`, the iterator returns no values.
579 /// Panics if `size` is 0.
584 /// let slice = ['r', 'u', 's', 't'];
585 /// let mut iter = slice.windows(2);
586 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
587 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
588 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
589 /// assert!(iter.next().is_none());
592 /// If the slice is shorter than `size`:
595 /// let slice = ['f', 'o', 'o'];
596 /// let mut iter = slice.windows(4);
597 /// assert!(iter.next().is_none());
599 #[stable(feature = "rust1", since = "1.0.0")]
601 pub fn windows(&self, size: usize) -> Windows<'_, T> {
// NOTE(review): the `assert!(size != 0)` backing the documented panic is
// elided in this view — confirm against full source.
603 Windows { v: self, size }
606 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
607 /// beginning of the slice.
609 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
610 /// slice, then the last chunk will not have length `chunk_size`.
612 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
613 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
618 /// Panics if `chunk_size` is 0.
623 /// let slice = ['l', 'o', 'r', 'e', 'm'];
624 /// let mut iter = slice.chunks(2);
625 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
626 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
627 /// assert_eq!(iter.next().unwrap(), &['m']);
628 /// assert!(iter.next().is_none());
631 /// [`chunks_exact`]: #method.chunks_exact
632 /// [`rchunks`]: #method.rchunks
633 #[stable(feature = "rust1", since = "1.0.0")]
635 pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
// A zero chunk size could never make progress; reject it up front
// (this is the panic documented above).
636 assert!(chunk_size != 0);
637 Chunks { v: self, chunk_size }
640 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
641 /// beginning of the slice.
643 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
644 /// length of the slice, then the last chunk will not have length `chunk_size`.
646 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
647 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
648 /// the end of the slice.
652 /// Panics if `chunk_size` is 0.
657 /// let v = &mut [0, 0, 0, 0, 0];
658 /// let mut count = 1;
660 /// for chunk in v.chunks_mut(2) {
661 /// for elem in chunk.iter_mut() {
666 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
669 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
670 /// [`rchunks_mut`]: #method.rchunks_mut
671 #[stable(feature = "rust1", since = "1.0.0")]
673 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> {
// Zero chunk size could never make progress; documented panic.
674 assert!(chunk_size != 0);
675 ChunksMut { v: self, chunk_size }
678 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
679 /// beginning of the slice.
681 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
682 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
683 /// from the `remainder` function of the iterator.
685 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
686 /// resulting code better than in the case of [`chunks`].
688 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
689 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
693 /// Panics if `chunk_size` is 0.
698 /// let slice = ['l', 'o', 'r', 'e', 'm'];
699 /// let mut iter = slice.chunks_exact(2);
700 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
701 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
702 /// assert!(iter.next().is_none());
703 /// assert_eq!(iter.remainder(), &['m']);
706 /// [`chunks`]: #method.chunks
707 /// [`rchunks_exact`]: #method.rchunks_exact
708 #[stable(feature = "chunks_exact", since = "1.31.0")]
710 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
711 assert!(chunk_size != 0);
// Round the length down to a multiple of chunk_size up front; the tail
// (`snd`) is stored separately and surfaced via `remainder()`.
712 let rem = self.len() % chunk_size;
713 let len = self.len() - rem;
714 let (fst, snd) = self.split_at(len);
715 ChunksExact { v: fst, rem: snd, chunk_size }
718 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
719 /// beginning of the slice.
721 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
722 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
723 /// retrieved from the `into_remainder` function of the iterator.
725 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
726 /// resulting code better than in the case of [`chunks_mut`].
728 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
729 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
734 /// Panics if `chunk_size` is 0.
739 /// let v = &mut [0, 0, 0, 0, 0];
740 /// let mut count = 1;
742 /// for chunk in v.chunks_exact_mut(2) {
743 /// for elem in chunk.iter_mut() {
748 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
751 /// [`chunks_mut`]: #method.chunks_mut
752 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
753 #[stable(feature = "chunks_exact", since = "1.31.0")]
755 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> {
756 assert!(chunk_size != 0);
// Mutable mirror of `chunks_exact`: the rounded-down prefix is iterated,
// the tail is kept for `into_remainder()`.
757 let rem = self.len() % chunk_size;
758 let len = self.len() - rem;
759 let (fst, snd) = self.split_at_mut(len);
760 ChunksExactMut { v: fst, rem: snd, chunk_size }
763 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
766 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
767 /// slice, then the last chunk will not have length `chunk_size`.
769 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
770 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
775 /// Panics if `chunk_size` is 0.
780 /// let slice = ['l', 'o', 'r', 'e', 'm'];
781 /// let mut iter = slice.rchunks(2);
782 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
783 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
784 /// assert_eq!(iter.next().unwrap(), &['l']);
785 /// assert!(iter.next().is_none());
788 /// [`rchunks_exact`]: #method.rchunks_exact
789 /// [`chunks`]: #method.chunks
790 #[stable(feature = "rchunks", since = "1.31.0")]
792 pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
// Zero chunk size could never make progress; documented panic.
793 assert!(chunk_size != 0);
794 RChunks { v: self, chunk_size }
797 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
800 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
801 /// length of the slice, then the last chunk will not have length `chunk_size`.
803 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
804 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
805 /// beginning of the slice.
809 /// Panics if `chunk_size` is 0.
814 /// let v = &mut [0, 0, 0, 0, 0];
815 /// let mut count = 1;
817 /// for chunk in v.rchunks_mut(2) {
818 /// for elem in chunk.iter_mut() {
823 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
826 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
827 /// [`chunks_mut`]: #method.chunks_mut
828 #[stable(feature = "rchunks", since = "1.31.0")]
830 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> {
// Zero chunk size could never make progress; documented panic.
831 assert!(chunk_size != 0);
832 RChunksMut { v: self, chunk_size }
835 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
836 /// end of the slice.
838 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
839 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
840 /// from the `remainder` function of the iterator.
842 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
843 /// resulting code better than in the case of [`chunks`].
845 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
846 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
851 /// Panics if `chunk_size` is 0.
856 /// let slice = ['l', 'o', 'r', 'e', 'm'];
857 /// let mut iter = slice.rchunks_exact(2);
858 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
859 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
860 /// assert!(iter.next().is_none());
861 /// assert_eq!(iter.remainder(), &['l']);
864 /// [`chunks`]: #method.chunks
865 /// [`rchunks`]: #method.rchunks
866 /// [`chunks_exact`]: #method.chunks_exact
867 #[stable(feature = "rchunks", since = "1.31.0")]
869 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> {
870 assert!(chunk_size != 0);
// Iterating from the back means the leftover `rem` elements sit at the
// *front*; split there and keep the front slice as `remainder()`.
871 let rem = self.len() % chunk_size;
872 let (fst, snd) = self.split_at(rem);
873 RChunksExact { v: snd, rem: fst, chunk_size }
876 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
879 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
880 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
881 /// retrieved from the `into_remainder` function of the iterator.
883 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
884 /// resulting code better than in the case of [`chunks_mut`].
886 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
887 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
892 /// Panics if `chunk_size` is 0.
897 /// let v = &mut [0, 0, 0, 0, 0];
898 /// let mut count = 1;
900 /// for chunk in v.rchunks_exact_mut(2) {
901 /// for elem in chunk.iter_mut() {
906 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
909 /// [`chunks_mut`]: #method.chunks_mut
910 /// [`rchunks_mut`]: #method.rchunks_mut
911 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
912 #[stable(feature = "rchunks", since = "1.31.0")]
914 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> {
915 assert!(chunk_size != 0);
// Mutable mirror of `rchunks_exact`: the leftover front elements become
// `into_remainder()`, the back portion is chunked.
916 let rem = self.len() % chunk_size;
917 let (fst, snd) = self.split_at_mut(rem);
918 RChunksExactMut { v: snd, rem: fst, chunk_size }
921 /// Divides one slice into two at an index.
923 /// The first will contain all indices from `[0, mid)` (excluding
924 /// the index `mid` itself) and the second will contain all
925 /// indices from `[mid, len)` (excluding the index `len` itself).
929 /// Panics if `mid > len`.
934 /// let v = [1, 2, 3, 4, 5, 6];
937 /// let (left, right) = v.split_at(0);
938 /// assert!(left == []);
939 /// assert!(right == [1, 2, 3, 4, 5, 6]);
943 /// let (left, right) = v.split_at(2);
944 /// assert!(left == [1, 2]);
945 /// assert!(right == [3, 4, 5, 6]);
949 /// let (left, right) = v.split_at(6);
950 /// assert!(left == [1, 2, 3, 4, 5, 6]);
951 /// assert!(right == []);
954 #[stable(feature = "rust1", since = "1.0.0")]
956 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
// Range indexing bounds-checks `mid`, producing the documented panic when
// `mid > len`.
957 (&self[..mid], &self[mid..])
960 /// Divides one mutable slice into two at an index.
962 /// The first will contain all indices from `[0, mid)` (excluding
963 /// the index `mid` itself) and the second will contain all
964 /// indices from `[mid, len)` (excluding the index `len` itself).
968 /// Panics if `mid > len`.
973 /// let mut v = [1, 0, 3, 0, 5, 6];
974 /// // scoped to restrict the lifetime of the borrows
976 /// let (left, right) = v.split_at_mut(2);
977 /// assert!(left == [1, 0]);
978 /// assert!(right == [3, 0, 5, 6]);
982 /// assert!(v == [1, 2, 3, 4, 5, 6]);
984 #[stable(feature = "rust1", since = "1.0.0")]
986 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
987 let len = self.len();
988 let ptr = self.as_mut_ptr();
// The two raw-parts slices cover the disjoint ranges [0, mid) and
// [mid, len), which is what makes returning two `&mut` borrows sound.
// NOTE(review): the enclosing `unsafe` block and the `mid <= len` check
// backing the documented panic are elided in this view — confirm.
993 (from_raw_parts_mut(ptr, mid),
994 from_raw_parts_mut(ptr.add(mid), len - mid))
998 /// Returns an iterator over subslices separated by elements that match
999 /// `pred`. The matched element is not contained in the subslices.
1004 /// let slice = [10, 40, 33, 20];
1005 /// let mut iter = slice.split(|num| num % 3 == 0);
1007 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1008 /// assert_eq!(iter.next().unwrap(), &[20]);
1009 /// assert!(iter.next().is_none());
1012 /// If the first element is matched, an empty slice will be the first item
1013 /// returned by the iterator. Similarly, if the last element in the slice
1014 /// is matched, an empty slice will be the last item returned by the
1018 /// let slice = [10, 40, 33];
1019 /// let mut iter = slice.split(|num| num % 3 == 0);
1021 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1022 /// assert_eq!(iter.next().unwrap(), &[]);
1023 /// assert!(iter.next().is_none());
1026 /// If two matched elements are directly adjacent, an empty slice will be
1027 /// present between them:
1030 /// let slice = [10, 6, 33, 20];
1031 /// let mut iter = slice.split(|num| num % 3 == 0);
1033 /// assert_eq!(iter.next().unwrap(), &[10]);
1034 /// assert_eq!(iter.next().unwrap(), &[]);
1035 /// assert_eq!(iter.next().unwrap(), &[20]);
1036 /// assert!(iter.next().is_none());
1038 #[stable(feature = "rust1", since = "1.0.0")]
1040 pub fn split<F>(&self, pred: F) -> Split<'_, T, F>
1041 where F: FnMut(&T) -> bool
// NOTE(review): constructor body elided in this view — by symmetry with
// `split_mut` below it presumably builds
// `Split { v: self, pred, finished: false }`; confirm against full source.
1050 /// Returns an iterator over mutable subslices separated by elements that
1051 /// match `pred`. The matched element is not contained in the subslices.
1056 /// let mut v = [10, 40, 30, 20, 60, 50];
1058 /// for group in v.split_mut(|num| *num % 3 == 0) {
1061 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1063 #[stable(feature = "rust1", since = "1.0.0")]
1065 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<'_, T, F>
1066 where F: FnMut(&T) -> bool
// Lazy adapter: just captures the slice and predicate; `finished: false`
// marks the iterator as not yet exhausted.
1068 SplitMut { v: self, pred, finished: false }
1071 /// Returns an iterator over subslices separated by elements that match
1072 /// `pred`, starting at the end of the slice and working backwards.
1073 /// The matched element is not contained in the subslices.
1078 /// let slice = [11, 22, 33, 0, 44, 55];
1079 /// let mut iter = slice.rsplit(|num| *num == 0);
1081 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1082 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1083 /// assert_eq!(iter.next(), None);
1086 /// As with `split()`, if the first or last element is matched, an empty
1087 /// slice will be the first (or last) item returned by the iterator.
1090 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1091 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1092 /// assert_eq!(it.next().unwrap(), &[]);
1093 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1094 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1095 /// assert_eq!(it.next().unwrap(), &[]);
1096 /// assert_eq!(it.next(), None);
1098 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1100 pub fn rsplit<F>(&self, pred: F) -> RSplit<'_, T, F>
1101 where F: FnMut(&T) -> bool
// Reuses the forward splitter; RSplit's job is only to reverse the
// iteration direction (see the doc comment above).
1103 RSplit { inner: self.split(pred) }
1106 /// Returns an iterator over mutable subslices separated by elements that
1107 /// match `pred`, starting at the end of the slice and working
1108 /// backwards. The matched element is not contained in the subslices.
1113 /// let mut v = [100, 400, 300, 200, 600, 500];
1115 /// let mut count = 0;
1116 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1118 /// group[0] = count;
1120 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1123 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1125 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<'_, T, F>
1126 where F: FnMut(&T) -> bool
// Mutable mirror of `rsplit`: wraps the forward mutable splitter.
1128 RSplitMut { inner: self.split_mut(pred) }
1131 /// Returns an iterator over subslices separated by elements that match
1132 /// `pred`, limited to returning at most `n` items. The matched element is
1133 /// not contained in the subslices.
1135 /// The last element returned, if any, will contain the remainder of the
1140 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1141 /// `[20, 60, 50]`):
1144 /// let v = [10, 40, 30, 20, 60, 50];
1146 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1147 /// println!("{:?}", group);
1150 #[stable(feature = "rust1", since = "1.0.0")]
1152 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<'_, T, F>
1153 where F: FnMut(&T) -> bool
// Wraps the unlimited splitter in a generic countdown adapter so `n` caps
// the number of items yielded.
// NOTE(review): the enclosing `SplitN {` line and the `count: n` field are
// elided in this view.
1156 inner: GenericSplitN {
1157 iter: self.split(pred),
1163 /// Returns an iterator over subslices separated by elements that match
1164 /// `pred`, limited to returning at most `n` items. The matched element is
1165 /// not contained in the subslices.
1167 /// The last element returned, if any, will contain the remainder of the
1173 /// let mut v = [10, 40, 30, 20, 60, 50];
1175 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1178 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1180 #[stable(feature = "rust1", since = "1.0.0")]
1182 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F>
1183 where F: FnMut(&T) -> bool
// Mutable mirror of `splitn`: countdown adapter over the mutable splitter.
// NOTE(review): the enclosing struct line and `count: n` field are elided here.
1186 inner: GenericSplitN {
1187 iter: self.split_mut(pred),
1193 /// Returns an iterator over subslices separated by elements that match
1194 /// `pred` limited to returning at most `n` items. This starts at the end of
1195 /// the slice and works backwards. The matched element is not contained in
1198 /// The last element returned, if any, will contain the remainder of the
1203 /// Print the slice split once, starting from the end, by numbers divisible
1204 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1207 /// let v = [10, 40, 30, 20, 60, 50];
1209 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1210 /// println!("{:?}", group);
1213 #[stable(feature = "rust1", since = "1.0.0")]
1215 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<'_, T, F>
1216 where F: FnMut(&T) -> bool
// Countdown adapter over the reverse splitter: at most `n` items, taken
// from the end of the slice.
// NOTE(review): the enclosing struct line and `count: n` field are elided here.
1219 inner: GenericSplitN {
1220 iter: self.rsplit(pred),
1226 /// Returns an iterator over subslices separated by elements that match
1227 /// `pred` limited to returning at most `n` items. This starts at the end of
1228 /// the slice and works backwards. The matched element is not contained in
1231 /// The last element returned, if any, will contain the remainder of the
1237 /// let mut s = [10, 40, 30, 20, 60, 50];
1239 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1242 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1244 #[stable(feature = "rust1", since = "1.0.0")]
1246 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F>
1247 where F: FnMut(&T) -> bool
// Mutable mirror of `rsplitn`.
// NOTE(review): the enclosing struct line and `count: n` field are elided here.
1250 inner: GenericSplitN {
1251 iter: self.rsplit_mut(pred),
1257 /// Returns `true` if the slice contains an element with the given value.
1262 /// let v = [10, 40, 30];
1263 /// assert!(v.contains(&30));
1264 /// assert!(!v.contains(&50));
1267 /// If you do not have an `&T`, but just an `&U` such that `T: Borrow<U>`
1268 /// (e.g. `String: Borrow<str>`), you can use `iter().any`:
1271 /// let v = [String::from("hello"), String::from("world")]; // slice of `String`
1272 /// assert!(v.iter().any(|e| e == "hello")); // search with `&str`
1273 /// assert!(!v.iter().any(|e| e == "hi"));
1275 #[stable(feature = "rust1", since = "1.0.0")]
1276 pub fn contains(&self, x: &T) -> bool
// Dispatches through `slice_contains` — presumably a specialization hook
// (e.g. byte-wise search for u8); TODO confirm against the trait definition
// elsewhere in this file.
1279 x.slice_contains(self)
1282 /// Returns `true` if `needle` is a prefix of the slice.
1287 /// let v = [10, 40, 30];
1288 /// assert!(v.starts_with(&[10]));
1289 /// assert!(v.starts_with(&[10, 40]));
1290 /// assert!(!v.starts_with(&[50]));
1291 /// assert!(!v.starts_with(&[10, 50]));
1294 /// Always returns `true` if `needle` is an empty slice:
1297 /// let v = &[10, 40, 30];
1298 /// assert!(v.starts_with(&[]));
1299 /// let v: &[u8] = &[];
1300 /// assert!(v.starts_with(&[]));
1302 #[stable(feature = "rust1", since = "1.0.0")]
1303 pub fn starts_with(&self, needle: &[T]) -> bool
1306 let n = needle.len();
// The length check short-circuits first, so the `..n` index below cannot panic.
1307 self.len() >= n && needle == &self[..n]
1310 /// Returns `true` if `needle` is a suffix of the slice.
1315 /// let v = [10, 40, 30];
1316 /// assert!(v.ends_with(&[30]));
1317 /// assert!(v.ends_with(&[40, 30]));
1318 /// assert!(!v.ends_with(&[50]));
1319 /// assert!(!v.ends_with(&[50, 30]));
1322 /// Always returns `true` if `needle` is an empty slice:
1325 /// let v = &[10, 40, 30];
1326 /// assert!(v.ends_with(&[]));
1327 /// let v: &[u8] = &[];
1328 /// assert!(v.ends_with(&[]));
1330 #[stable(feature = "rust1", since = "1.0.0")]
1331 pub fn ends_with(&self, needle: &[T]) -> bool
1334 let (m, n) = (self.len(), needle.len());
// `m >= n` guards the `m - n` subtraction below against underflow.
1335 m >= n && needle == &self[m-n..]
1338 /// Binary searches this sorted slice for a given element.
1340 /// If the value is found then [`Result::Ok`] is returned, containing the
1341 /// index of the matching element. If there are multiple matches, then any
1342 /// one of the matches could be returned. If the value is not found then
1343 /// [`Result::Err`] is returned, containing the index where a matching
1344 /// element could be inserted while maintaining sorted order.
1348 /// Looks up a series of four elements. The first is found, with a
1349 /// uniquely determined position; the second and third are not
1350 /// found; the fourth could match any position in `[1, 4]`.
1353 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1355 /// assert_eq!(s.binary_search(&13), Ok(9));
1356 /// assert_eq!(s.binary_search(&4), Err(7));
1357 /// assert_eq!(s.binary_search(&100), Err(13));
1358 /// let r = s.binary_search(&1);
1359 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1361 #[stable(feature = "rust1", since = "1.0.0")]
1362 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
// Defers to `binary_search_by` using the element type's total order.
1365 self.binary_search_by(|p| p.cmp(x))
1368 /// Binary searches this sorted slice with a comparator function.
1370 /// The comparator function should implement an order consistent
1371 /// with the sort order of the underlying slice, returning an
1372 /// order code that indicates whether its argument is `Less`,
1373 /// `Equal` or `Greater` than the desired target.
1375 /// If the value is found then [`Result::Ok`] is returned, containing the
1376 /// index of the matching element. If there are multiple matches, then any
1377 /// one of the matches could be returned. If the value is not found then
1378 /// [`Result::Err`] is returned, containing the index where a matching
1379 /// element could be inserted while maintaining sorted order.
1383 /// Looks up a series of four elements. The first is found, with a
1384 /// uniquely determined position; the second and third are not
1385 /// found; the fourth could match any position in `[1, 4]`.
1388 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1391 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1393 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1395 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1397 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1398 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1400 #[stable(feature = "rust1", since = "1.0.0")]
1402 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1403 where F: FnMut(&'a T) -> Ordering
// NOTE(review): this extract is missing lines here — e.g. the binding of `s`
// used below and the header of the halving loop. Do not edit logic from this view.
1406 let mut size = s.len();
1410 let mut base = 0usize;
1412 let half = size / 2;
1413 let mid = base + half;
1414 // mid is always in [0, size), that means mid is >= 0 and < size.
1415 // mid >= 0: by definition
1416 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
1417 let cmp = f(unsafe { s.get_unchecked(mid) });
// Keep the left half when the probe is greater than the target; otherwise
// move the base up to the midpoint.
1418 base = if cmp == Greater { base } else { mid };
1421 // base is always in [0, size) because base <= mid.
1422 let cmp = f(unsafe { s.get_unchecked(base) });
// On a miss, the insertion point is one past `base` when the final probe
// compared `Less` (the `as usize` cast of the bool supplies the +1).
1423 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1427 /// Binary searches this sorted slice with a key extraction function.
1429 /// Assumes that the slice is sorted by the key, for instance with
1430 /// [`sort_by_key`] using the same key extraction function.
1432 /// If the value is found then [`Result::Ok`] is returned, containing the
1433 /// index of the matching element. If there are multiple matches, then any
1434 /// one of the matches could be returned. If the value is not found then
1435 /// [`Result::Err`] is returned, containing the index where a matching
1436 /// element could be inserted while maintaining sorted order.
1438 /// [`sort_by_key`]: #method.sort_by_key
1442 /// Looks up a series of four elements in a slice of pairs sorted by
1443 /// their second elements. The first is found, with a uniquely
1444 /// determined position; the second and third are not found; the
1445 /// fourth could match any position in `[1, 4]`.
1448 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1449 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1450 /// (1, 21), (2, 34), (4, 55)];
1452 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1453 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1454 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1455 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1456 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1458 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1460 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1461 where F: FnMut(&'a T) -> B,
// Compares the extracted key of each probed element against `b`.
1464 self.binary_search_by(|k| f(k).cmp(b))
1467 /// Sorts the slice, but may not preserve the order of equal elements.
1469 /// This sort is unstable (i.e., may reorder equal elements), in-place
1470 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1472 /// # Current implementation
1474 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1475 /// which combines the fast average case of randomized quicksort with the fast worst case of
1476 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1477 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1478 /// deterministic behavior.
1480 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1481 /// slice consists of several concatenated sorted sequences.
1486 /// let mut v = [-5, 4, 1, -3, 2];
1488 /// v.sort_unstable();
1489 /// assert!(v == [-5, -3, 1, 2, 4]);
1492 /// [pdqsort]: https://github.com/orlp/pdqsort
1493 #[stable(feature = "sort_unstable", since = "1.20.0")]
1495 pub fn sort_unstable(&mut self)
// `a.lt(b)` supplies the strict less-than test the quicksort routine takes.
1498 sort::quicksort(self, |a, b| a.lt(b));
1501 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1504 /// This sort is unstable (i.e., may reorder equal elements), in-place
1505 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1507 /// The comparator function must define a total ordering for the elements in the slice. If
1508 /// the ordering is not total, the order of the elements is unspecified. An order is a
1509 /// total order if it is (for all a, b and c):
1511 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1512 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1514 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1515 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1518 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1519 /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
1520 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1523 /// # Current implementation
1525 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1526 /// which combines the fast average case of randomized quicksort with the fast worst case of
1527 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1528 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1529 /// deterministic behavior.
1531 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1532 /// slice consists of several concatenated sorted sequences.
1537 /// let mut v = [5, 4, 1, 3, 2];
1538 /// v.sort_unstable_by(|a, b| a.cmp(b));
1539 /// assert!(v == [1, 2, 3, 4, 5]);
1541 /// // reverse sorting
1542 /// v.sort_unstable_by(|a, b| b.cmp(a));
1543 /// assert!(v == [5, 4, 3, 2, 1]);
1546 /// [pdqsort]: https://github.com/orlp/pdqsort
1547 #[stable(feature = "sort_unstable", since = "1.20.0")]
1549 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1550 where F: FnMut(&T, &T) -> Ordering
// Adapt the `Ordering` comparator into the boolean is-less test the sort uses.
1552 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1555 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1558 /// This sort is unstable (i.e., may reorder equal elements), in-place
1559 /// (i.e., does not allocate), and `O(m n log(m n))` worst-case, where the key function is
1562 /// # Current implementation
1564 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1565 /// which combines the fast average case of randomized quicksort with the fast worst case of
1566 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1567 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1568 /// deterministic behavior.
1570 /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
1571 /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
1572 /// cases where the key function is expensive.
1577 /// let mut v = [-5i32, 4, 1, -3, 2];
1579 /// v.sort_unstable_by_key(|k| k.abs());
1580 /// assert!(v == [1, 2, -3, 4, -5]);
1583 /// [pdqsort]: https://github.com/orlp/pdqsort
1584 #[stable(feature = "sort_unstable", since = "1.20.0")]
1586 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1587 where F: FnMut(&T) -> K, K: Ord
// `f` runs on both operands at every comparison — this is the "key calling
// strategy" the doc note above contrasts with `sort_by_cached_key`.
1589 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1592 /// Reorder the slice such that the element at `index` is at its final sorted position.
1594 /// This reordering has the additional property that any value at position `i < index` will be
1595 /// less than or equal to any value at a position `j > index`. Additionally, this reordering is
1596 /// unstable (i.e. any number of equal elements may end up at position `index`), in-place
1597 /// (i.e. does not allocate), and `O(n)` worst-case. This function is also known as "kth
1598 /// element" in other libraries. It returns a triplet of the following values: all elements less
1599 /// than the one at the given index, the value at the given index, and all elements greater than
1600 /// the one at the given index.
1602 /// # Current implementation
1604 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1605 /// used for [`sort_unstable`].
1607 /// [`sort_unstable`]: #method.sort_unstable
1611 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1616 /// #![feature(slice_partition_at_index)]
1618 /// let mut v = [-5i32, 4, 1, -3, 2];
1620 /// // Find the median
1621 /// v.partition_at_index(2);
1623 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1624 /// // about the specified index.
1625 /// assert!(v == [-3, -5, 1, 2, 4] ||
1626 /// v == [-5, -3, 1, 2, 4] ||
1627 /// v == [-3, -5, 1, 4, 2] ||
1628 /// v == [-5, -3, 1, 4, 2]);
1630 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1632 pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T])
// The natural `Ord` less-than drives the quickselect.
1635 let mut f = |a: &T, b: &T| a.lt(b);
1636 sort::partition_at_index(self, index, &mut f)
1639 /// Reorder the slice with a comparator function such that the element at `index` is at its
1640 /// final sorted position.
1642 /// This reordering has the additional property that any value at position `i < index` will be
1643 /// less than or equal to any value at a position `j > index` using the comparator function.
1644 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1645 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1646 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1647 /// values: all elements less than the one at the given index, the value at the given index,
1648 /// and all elements greater than the one at the given index, using the provided comparator
1651 /// # Current implementation
1653 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1654 /// used for [`sort_unstable`].
1656 /// [`sort_unstable`]: #method.sort_unstable
1660 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1665 /// #![feature(slice_partition_at_index)]
1667 /// let mut v = [-5i32, 4, 1, -3, 2];
1669 /// // Find the median as if the slice were sorted in descending order.
1670 /// v.partition_at_index_by(2, |a, b| b.cmp(a));
1672 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1673 /// // about the specified index.
1674 /// assert!(v == [2, 4, 1, -5, -3] ||
1675 /// v == [2, 4, 1, -3, -5] ||
1676 /// v == [4, 2, 1, -5, -3] ||
1677 /// v == [4, 2, 1, -3, -5]);
1679 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1681 pub fn partition_at_index_by<F>(&mut self, index: usize, mut compare: F)
1682 -> (&mut [T], &mut T, &mut [T])
1683 where F: FnMut(&T, &T) -> Ordering
// Adapt the `Ordering` comparator into the boolean less-than quickselect expects.
1685 let mut f = |a: &T, b: &T| compare(a, b) == Less;
1686 sort::partition_at_index(self, index, &mut f)
1689 /// Reorder the slice with a key extraction function such that the element at `index` is at its
1690 /// final sorted position.
1692 /// This reordering has the additional property that any value at position `i < index` will be
1693 /// less than or equal to any value at a position `j > index` using the key extraction function.
1694 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1695 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1696 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1697 /// values: all elements less than the one at the given index, the value at the given index, and
1698 /// all elements greater than the one at the given index, using the provided key extraction
1701 /// # Current implementation
1703 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1704 /// used for [`sort_unstable`].
1706 /// [`sort_unstable`]: #method.sort_unstable
1710 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1715 /// #![feature(slice_partition_at_index)]
1717 /// let mut v = [-5i32, 4, 1, -3, 2];
1719 /// // Return the median as if the array were sorted according to absolute value.
1720 /// v.partition_at_index_by_key(2, |a| a.abs());
1722 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1723 /// // about the specified index.
1724 /// assert!(v == [1, 2, -3, 4, -5] ||
1725 /// v == [1, 2, -3, -5, 4] ||
1726 /// v == [2, 1, -3, 4, -5] ||
1727 /// v == [2, 1, -3, -5, 4]);
1729 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1731 pub fn partition_at_index_by_key<K, F>(&mut self, index: usize, mut f: F)
1732 -> (&mut [T], &mut T, &mut [T])
1733 where F: FnMut(&T) -> K, K: Ord
// The key function runs on both operands at every comparison.
1735 let mut g = |a: &T, b: &T| f(a).lt(&f(b));
1736 sort::partition_at_index(self, index, &mut g)
1739 /// Moves all consecutive repeated elements to the end of the slice according to the
1740 /// [`PartialEq`] trait implementation.
1742 /// Returns two slices. The first contains no consecutive repeated elements.
1743 /// The second contains all the duplicates in no specified order.
1745 /// If the slice is sorted, the first returned slice contains no duplicates.
1750 /// #![feature(slice_partition_dedup)]
1752 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1754 /// let (dedup, duplicates) = slice.partition_dedup();
1756 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1757 /// assert_eq!(duplicates, [2, 3, 1]);
1759 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1761 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
// `PartialEq` equality is the bucketing relation.
1764 self.partition_dedup_by(|a, b| a == b)
1767 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1768 /// a given equality relation.
1770 /// Returns two slices. The first contains no consecutive repeated elements.
1771 /// The second contains all the duplicates in no specified order.
1773 /// The `same_bucket` function is passed references to two elements from the slice and
1774 /// must determine if the elements compare equal. The elements are passed in opposite order
1775 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1776 /// at the end of the slice.
1778 /// If the slice is sorted, the first returned slice contains no duplicates.
1783 /// #![feature(slice_partition_dedup)]
1785 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1787 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1789 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1790 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1792 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1794 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1795 where F: FnMut(&mut T, &mut T) -> bool
1797 // Although we have a mutable reference to `self`, we cannot make
1798 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1799 // must ensure that the slice is in a valid state at all times.
1801 // The way that we handle this is by using swaps; we iterate
1802 // over all the elements, swapping as we go so that at the end
1803 // the elements we wish to keep are in the front, and those we
1804 // wish to reject are at the back. We can then split the slice.
1805 // This operation is still O(n).
1807 // Example: We start in this state, where `r` represents "next
1808 // read" and `w` represents "next write".
1811 // +---+---+---+---+---+---+
1812 // | 0 | 1 | 1 | 2 | 3 | 3 |
1813 // +---+---+---+---+---+---+
1816 // Comparing self[r] against self[w-1], this is not a duplicate, so
1817 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1818 // r and w, leaving us with:
1821 // +---+---+---+---+---+---+
1822 // | 0 | 1 | 1 | 2 | 3 | 3 |
1823 // +---+---+---+---+---+---+
1826 // Comparing self[r] against self[w-1], this value is a duplicate,
1827 // so we increment `r` but leave everything else unchanged:
1830 // +---+---+---+---+---+---+
1831 // | 0 | 1 | 1 | 2 | 3 | 3 |
1832 // +---+---+---+---+---+---+
1835 // Comparing self[r] against self[w-1], this is not a duplicate,
1836 // so swap self[r] and self[w] and advance r and w:
1839 // +---+---+---+---+---+---+
1840 // | 0 | 1 | 2 | 1 | 3 | 3 |
1841 // +---+---+---+---+---+---+
1844 // Not a duplicate, repeat:
1847 // +---+---+---+---+---+---+
1848 // | 0 | 1 | 2 | 3 | 1 | 3 |
1849 // +---+---+---+---+---+---+
1852 // Duplicate, advance r. End of slice. Split at w.
1854 let len = self.len();
// NOTE(review): the guard condition for this early return (the short-slice
// check preceding it) is missing from this extract.
1856 return (self, &mut [])
1859 let ptr = self.as_mut_ptr();
1860 let mut next_read: usize = 1;
1861 let mut next_write: usize = 1;
1864 // Avoid bounds checks by using raw pointers.
1865 while next_read < len {
1866 let ptr_read = ptr.add(next_read);
1867 let prev_ptr_write = ptr.add(next_write - 1);
1868 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1869 if next_read != next_write {
1870 let ptr_write = prev_ptr_write.offset(1);
1871 mem::swap(&mut *ptr_read, &mut *ptr_write);
// NOTE(review): the loop-closing braces and the `next_read`/`next_write`
// increments are missing from this extract — do not edit this logic here.
1879 self.split_at_mut(next_write)
1882 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1883 /// to the same key.
1885 /// Returns two slices. The first contains no consecutive repeated elements.
1886 /// The second contains all the duplicates in no specified order.
1888 /// If the slice is sorted, the first returned slice contains no duplicates.
1893 /// #![feature(slice_partition_dedup)]
1895 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1897 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1899 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1900 /// assert_eq!(duplicates, [21, 30, 13]);
1902 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1904 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1905 where F: FnMut(&mut T) -> K,
// Reuses the general routine with key equality as the bucket test.
1908 self.partition_dedup_by(|a, b| key(a) == key(b))
1911 /// Rotates the slice in-place such that the first `mid` elements of the
1912 /// slice move to the end while the last `self.len() - mid` elements move to
1913 /// the front. After calling `rotate_left`, the element previously at index
1914 /// `mid` will become the first element in the slice.
1918 /// This function will panic if `mid` is greater than the length of the
1919 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1924 /// Takes linear (in `self.len()`) time.
1929 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1930 /// a.rotate_left(2);
1931 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1934 /// Rotating a subslice:
1937 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1938 /// a[1..5].rotate_left(1);
1939 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1941 #[stable(feature = "slice_rotate", since = "1.26.0")]
1942 pub fn rotate_left(&mut self, mid: usize) {
1943 assert!(mid <= self.len());
1944 let k = self.len() - mid;
// `mid <= len` was asserted above, so `p.add(mid)` stays in bounds.
// NOTE(review): the `unsafe` block around this raw-pointer call appears to be
// missing from this extract.
1947 let p = self.as_mut_ptr();
1948 rotate::ptr_rotate(mid, p.add(mid), k);
1952 /// Rotates the slice in-place such that the first `self.len() - k`
1953 /// elements of the slice move to the end while the last `k` elements move
1954 /// to the front. After calling `rotate_right`, the element previously at
1955 /// index `self.len() - k` will become the first element in the slice.
1959 /// This function will panic if `k` is greater than the length of the
1960 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1965 /// Takes linear (in `self.len()`) time.
1970 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1971 /// a.rotate_right(2);
1972 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1975 /// Rotate a subslice:
1978 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1979 /// a[1..5].rotate_right(1);
1980 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1982 #[stable(feature = "slice_rotate", since = "1.26.0")]
1983 pub fn rotate_right(&mut self, k: usize) {
1984 assert!(k <= self.len());
// A right rotation by `k` is a left rotation about `len - k`; `k <= len` was
// asserted above so the subtraction cannot underflow.
1985 let mid = self.len() - k;
// NOTE(review): the `unsafe` block around this raw-pointer call appears to be
// missing from this extract.
1988 let p = self.as_mut_ptr();
1989 rotate::ptr_rotate(mid, p.add(mid), k);
1993 /// Copies the elements from `src` into `self`.
1995 /// The length of `src` must be the same as `self`.
1997 /// If `src` implements `Copy`, it can be more performant to use
1998 /// [`copy_from_slice`].
2002 /// This function will panic if the two slices have different lengths.
2006 /// Cloning two elements from a slice into another:
2009 /// let src = [1, 2, 3, 4];
2010 /// let mut dst = [0, 0];
2012 /// // Because the slices have to be the same length,
2013 /// // we slice the source slice from four elements
2014 /// // to two. It will panic if we don't do this.
2015 /// dst.clone_from_slice(&src[2..]);
2017 /// assert_eq!(src, [1, 2, 3, 4]);
2018 /// assert_eq!(dst, [3, 4]);
2021 /// Rust enforces that there can only be one mutable reference with no
2022 /// immutable references to a particular piece of data in a particular
2023 /// scope. Because of this, attempting to use `clone_from_slice` on a
2024 /// single slice will result in a compile failure:
2027 /// let mut slice = [1, 2, 3, 4, 5];
2029 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
2032 /// To work around this, we can use [`split_at_mut`] to create two distinct
2033 /// sub-slices from a slice:
2036 /// let mut slice = [1, 2, 3, 4, 5];
2039 /// let (left, right) = slice.split_at_mut(2);
2040 /// left.clone_from_slice(&right[1..]);
2043 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2046 /// [`copy_from_slice`]: #method.copy_from_slice
2047 /// [`split_at_mut`]: #method.split_at_mut
2048 #[stable(feature = "clone_from_slice", since = "1.7.0")]
2049 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
2050 assert!(self.len() == src.len(),
2051 "destination and source slices have different lengths");
2052 // NOTE: We need to explicitly slice them to the same length
2053 // for bounds checking to be elided, and the optimizer will
2054 // generate memcpy for simple cases (for example T = u8).
2055 let len = self.len();
2056 let src = &src[..len];
// NOTE(review): the loop header for this element-wise clone (introducing `i`)
// is missing from this extract.
2058 self[i].clone_from(&src[i]);
2063 /// Copies all elements from `src` into `self`, using a memcpy.
2065 /// The length of `src` must be the same as `self`.
2067 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
2071 /// This function will panic if the two slices have different lengths.
2075 /// Copying two elements from a slice into another:
2078 /// let src = [1, 2, 3, 4];
2079 /// let mut dst = [0, 0];
2081 /// // Because the slices have to be the same length,
2082 /// // we slice the source slice from four elements
2083 /// // to two. It will panic if we don't do this.
2084 /// dst.copy_from_slice(&src[2..]);
2086 /// assert_eq!(src, [1, 2, 3, 4]);
2087 /// assert_eq!(dst, [3, 4]);
2090 /// Rust enforces that there can only be one mutable reference with no
2091 /// immutable references to a particular piece of data in a particular
2092 /// scope. Because of this, attempting to use `copy_from_slice` on a
2093 /// single slice will result in a compile failure:
2096 /// let mut slice = [1, 2, 3, 4, 5];
2098 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
2101 /// To work around this, we can use [`split_at_mut`] to create two distinct
2102 /// sub-slices from a slice:
2105 /// let mut slice = [1, 2, 3, 4, 5];
2108 /// let (left, right) = slice.split_at_mut(2);
2109 /// left.copy_from_slice(&right[1..]);
2112 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2115 /// [`clone_from_slice`]: #method.clone_from_slice
2116 /// [`split_at_mut`]: #method.split_at_mut
2117 #[stable(feature = "copy_from_slice", since = "1.9.0")]
2118 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
2119 assert_eq!(self.len(), src.len(),
2120 "destination and source slices have different lengths");
// Distinct borrows (`&mut self` vs `&src`) guarantee the slices cannot overlap,
// and the assert above makes the lengths equal.
// NOTE(review): the `unsafe` block wrapping this call is missing from this extract.
2122 ptr::copy_nonoverlapping(
2123 src.as_ptr(), self.as_mut_ptr(), self.len());
2127 /// Copies elements from one part of the slice to another part of itself,
2128 /// using a memmove.
2130 /// `src` is the range within `self` to copy from. `dest` is the starting
2131 /// index of the range within `self` to copy to, which will have the same
2132 /// length as `src`. The two ranges may overlap. The ends of the two ranges
2133 /// must be less than or equal to `self.len()`.
2137 /// This function will panic if either range exceeds the end of the slice,
2138 /// or if the end of `src` is before the start.
2142 /// Copying four bytes within a slice:
2145 /// let mut bytes = *b"Hello, World!";
2147 /// bytes.copy_within(1..5, 8);
2149 /// assert_eq!(&bytes, b"Hello, Wello!");
2151 #[stable(feature = "copy_within", since = "1.37.0")]
2152 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
2156 let src_start = match src.start_bound() {
2157 ops::Bound::Included(&n) => n,
// NOTE(review): lines are missing in the two match arms below — presumably the
// overflow-checked `+ 1` adjustment whose failure path is the
// `unwrap_or_else(slice_index_overflow_fail)` that follows; confirm upstream.
2158 ops::Bound::Excluded(&n) => n
2160 .unwrap_or_else(|| slice_index_overflow_fail()),
2161 ops::Bound::Unbounded => 0,
2163 let src_end = match src.end_bound() {
2164 ops::Bound::Included(&n) => n
2166 .unwrap_or_else(|| slice_index_overflow_fail()),
2167 ops::Bound::Excluded(&n) => n,
2168 ops::Bound::Unbounded => self.len(),
2170 assert!(src_start <= src_end, "src end is before src start");
2171 assert!(src_end <= self.len(), "src is out of bounds");
2172 let count = src_end - src_start;
2173 assert!(dest <= self.len() - count, "dest is out of bounds");
// NOTE(review): the head of the overlapping-copy call (and its `unsafe` block)
// is missing from this extract; only its argument lines are visible below.
2176 self.as_ptr().add(src_start),
2177 self.as_mut_ptr().add(dest),
2183 /// Swaps all elements in `self` with those in `other`.
2185 /// The length of `other` must be the same as `self`.
2189 /// This function will panic if the two slices have different lengths.
2193 /// Swapping two elements across slices:
2196 /// let mut slice1 = [0, 0];
2197 /// let mut slice2 = [1, 2, 3, 4];
2199 /// slice1.swap_with_slice(&mut slice2[2..]);
2201 /// assert_eq!(slice1, [3, 4]);
2202 /// assert_eq!(slice2, [1, 2, 0, 0]);
2205 /// Rust enforces that there can only be one mutable reference to a
2206 /// particular piece of data in a particular scope. Because of this,
2207 /// attempting to use `swap_with_slice` on a single slice will result in
2208 /// a compile failure:
2211 /// let mut slice = [1, 2, 3, 4, 5];
2212 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2215 /// To work around this, we can use [`split_at_mut`] to create two distinct
2216 /// mutable sub-slices from a slice:
2219 /// let mut slice = [1, 2, 3, 4, 5];
2222 /// let (left, right) = slice.split_at_mut(2);
2223 /// left.swap_with_slice(&mut right[1..]);
2226 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2229 /// [`split_at_mut`]: #method.split_at_mut
2230 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2231 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2232 assert!(self.len() == other.len(),
2233 "destination and source slices have different lengths");
// Two simultaneous `&mut` borrows cannot alias, and the assert above makes the
// lengths equal.
// NOTE(review): the `unsafe` block wrapping this call is missing from this extract.
2235 ptr::swap_nonoverlapping(
2236 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2240 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2241 fn align_to_offsets<U>(&self) -> (usize, usize) {
2242 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2243 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2245 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2246 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2247 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2249 // Formula to calculate this is:
2251 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2252 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2254 // Expanded and simplified:
2256 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2257 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2259 // Luckily since all this is constant-evaluated... performance here matters not!
2261 fn gcd(a: usize, b: usize) -> usize {
2262 use crate::intrinsics;
2263 // iterative stein’s algorithm
2264 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2265 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2266 let (ctz_a, mut ctz_b) = unsafe {
2267 if a == 0 { return b; }
2268 if b == 0 { return a; }
2269 (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
2271 let k = ctz_a.min(ctz_b);
2272 let mut a = a >> ctz_a;
// NOTE(review): the body of Stein's loop is only partially present in this
// extract (the loop header, the odd/even handling, and the final `<< k` scaling
// appear to be missing) — do not edit this logic from this view.
2275 // remove all factors of 2 from b
2278 mem::swap(&mut a, &mut b);
2285 ctz_b = intrinsics::cttz_nonzero(b);
2290 let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
2291 let ts: usize = mem::size_of::<U>() / gcd;
2292 let us: usize = mem::size_of::<T>() / gcd;
2294 // Armed with this knowledge, we can find how many `U`s we can fit!
2295 let us_len = self.len() / ts * us;
2296 // And how many `T`s will be in the trailing slice!
2297 let ts_len = self.len() % ts;
// NOTE(review): the `(us_len, ts_len)` return line and the closing brace are
// missing from this extract.
2301 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2304 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2305 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2306 /// middle slice the greatest length possible for a given type and input slice, but only
2307 /// your algorithm's performance should depend on that, not its correctness.
2309 /// This method has no purpose when either input element `T` or output element `U` are
2310 /// zero-sized and will return the original slice without splitting anything.
2314 /// This method is essentially a `transmute` with respect to the elements in the returned
2315 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2323 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2324 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2325 /// // less_efficient_algorithm_for_bytes(prefix);
2326 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2327 /// // less_efficient_algorithm_for_bytes(suffix);
2330 #[stable(feature = "slice_align_to", since = "1.30.0")]
2331 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2332 // Note that most of this function will be constant-evaluated,
2333 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2334 // handle ZSTs specially, which is – don't handle them at all.
2335 return (self, &[], &[]);
2338 // First, find at what point do we split between the first and 2nd slice. Easy with
2339 // ptr.align_offset.
2340 let ptr = self.as_ptr();
2341 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
// If the first `U`-aligned position lies beyond the slice, there is no middle
// slice at all. (The branch body is elided here; the `align_to_mut` twin below
// returns `(self, &mut [], &mut [])` in the same situation — presumably this
// returns the immutable analogue. Confirm against the full source.)
2342 if offset > self.len() {
2345 let (left, rest) = self.split_at(offset);
2346 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2347 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// Middle slice reinterprets the aligned prefix of `rest` as `U`s; the suffix
// is the trailing `ts_len` elements of `rest` left as `T`s.
2349 from_raw_parts(rest.as_ptr() as *const U, us_len),
2350 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2354 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2357 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2358 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2359 /// middle slice the greatest length possible for a given type and input slice, but only
2360 /// your algorithm's performance should depend on that, not its correctness.
2362 /// This method has no purpose when either input element `T` or output element `U` are
2363 /// zero-sized and will return the original slice without splitting anything.
2367 /// This method is essentially a `transmute` with respect to the elements in the returned
2368 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2376 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2377 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2378 /// // less_efficient_algorithm_for_bytes(prefix);
2379 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2380 /// // less_efficient_algorithm_for_bytes(suffix);
2383 #[stable(feature = "slice_align_to", since = "1.30.0")]
2384 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2385 // Note that most of this function will be constant-evaluated,
2386 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2387 // handle ZSTs specially, which is – don't handle them at all.
2388 return (self, &mut [], &mut []);
2391 // First, find at what point do we split between the first and 2nd slice. Easy with
2392 // ptr.align_offset.
2393 let ptr = self.as_ptr();
2394 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
// No `U`-aligned position inside the slice: everything becomes the prefix.
2395 if offset > self.len() {
2396 (self, &mut [], &mut [])
2398 let (left, rest) = self.split_at_mut(offset);
2399 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2400 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// The raw pointer is captured once so the two `from_raw_parts_mut` calls below
// don't re-borrow `rest` while `left` is also live.
2401 let mut_ptr = rest.as_mut_ptr();
2403 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2404 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
2408 /// Checks if the elements of this slice are sorted.
2410 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2411 /// slice yields exactly zero or one element, `true` is returned.
2413 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2414 /// implies that this function returns `false` if any two consecutive items are not
2420 /// #![feature(is_sorted)]
2421 /// let empty: [i32; 0] = [];
2423 /// assert!([1, 2, 2, 9].is_sorted());
2424 /// assert!(![1, 3, 2, 4].is_sorted());
2425 /// assert!([0].is_sorted());
2426 /// assert!(empty.is_sorted());
2427 /// assert!(![0.0, 1.0, std::f32::NAN].is_sorted());
2430 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2431 pub fn is_sorted(&self) -> bool
// Delegates to `is_sorted_by` with `partial_cmp`, so incomparable neighbours
// (e.g. NaN) yield `None` and make the whole slice count as unsorted — this is
// what the NAN doc-example above relies on.
2435 self.is_sorted_by(|a, b| a.partial_cmp(b))
2438 /// Checks if the elements of this slice are sorted using the given comparator function.
2440 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2441 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2442 /// [`is_sorted`]; see its documentation for more information.
2444 /// [`is_sorted`]: #method.is_sorted
2445 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2446 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2448 F: FnMut(&T, &T) -> Option<Ordering>
// Forwards to the iterator adapter; `*a`/`*b` peel one `&` layer because
// `self.iter()` yields `&T` while `compare` takes `&T` arguments.
2450 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2453 /// Checks if the elements of this slice are sorted using the given key extraction function.
2455 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2456 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2457 /// documentation for more information.
2459 /// [`is_sorted`]: #method.is_sorted
2464 /// #![feature(is_sorted)]
2466 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2467 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2470 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2471 pub fn is_sorted_by_key<F, K>(&self, f: F) -> bool
2476 self.iter().is_sorted_by_key(f)
// Lang item marking the inherent-method impl for `[u8]`; the `impl [u8] {`
// header line itself is elided in this excerpt — confirm against the full source.
2480 #[lang = "slice_u8"]
2483 /// Checks if all bytes in this slice are within the ASCII range.
2484 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2486 pub fn is_ascii(&self) -> bool {
2487 self.iter().all(|b| b.is_ascii())
2490 /// Checks that two slices are an ASCII case-insensitive match.
2492 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2493 /// but without allocating and copying temporaries.
2494 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2496 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
// Length check first so `zip` can't silently truncate a longer slice.
2497 self.len() == other.len() &&
2498 self.iter().zip(other).all(|(a, b)| {
2499 a.eq_ignore_ascii_case(b)
2503 /// Converts this slice to its ASCII upper case equivalent in-place.
2505 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2506 /// but non-ASCII letters are unchanged.
2508 /// To return a new uppercased value without modifying the existing one, use
2509 /// [`to_ascii_uppercase`].
2511 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2512 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2514 pub fn make_ascii_uppercase(&mut self) {
// Per-byte in-place conversion; the `for byte in self { ... }` loop header is
// elided between these lines in this excerpt.
2516 byte.make_ascii_uppercase();
2520 /// Converts this slice to its ASCII lower case equivalent in-place.
2522 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2523 /// but non-ASCII letters are unchanged.
2525 /// To return a new lowercased value without modifying the existing one, use
2526 /// [`to_ascii_lowercase`].
2528 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2529 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2531 pub fn make_ascii_lowercase(&mut self) {
2533 byte.make_ascii_lowercase();
// `Index`/`IndexMut` for slices delegate entirely to the `SliceIndex` trait
// below, so one pair of impls covers `usize` and every range type.
2539 #[stable(feature = "rust1", since = "1.0.0")]
2540 impl<T, I> ops::Index<I> for [T]
2541 where I: SliceIndex<[T]>
2543 type Output = I::Output;
2546 fn index(&self, index: I) -> &I::Output {
2551 #[stable(feature = "rust1", since = "1.0.0")]
2552 impl<T, I> ops::IndexMut<I> for [T]
2553 where I: SliceIndex<[T]>
2556 fn index_mut(&mut self, index: I) -> &mut I::Output {
2557 index.index_mut(self)
// Out-of-line panic paths shared by the `SliceIndex` impls below, keeping the
// cold formatting/panic code out of the hot indexing functions. (Their
// `#[inline(never)]`/`#[cold]` attribute lines are elided in this excerpt —
// confirm against the full source.)
2563 fn slice_index_len_fail(index: usize, len: usize) -> ! {
2564 panic!("index {} out of range for slice of length {}", index, len);
2569 fn slice_index_order_fail(index: usize, end: usize) -> ! {
2570 panic!("slice index starts at {} but ends at {}", index, end);
2575 fn slice_index_overflow_fail() -> ! {
2576 panic!("attempted to index slice up to maximum usize");
// Sealed-trait pattern: `SliceIndex` publicly requires this private `Sealed`
// supertrait, so only the types listed here can ever implement it outside
// this crate. (The `pub trait Sealed {}` declaration line is elided in this
// excerpt — confirm against the full source.)
2579 mod private_slice_index {
2581 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2584 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2585 impl Sealed for usize {}
2586 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2587 impl Sealed for ops::Range<usize> {}
2588 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2589 impl Sealed for ops::RangeTo<usize> {}
2590 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2591 impl Sealed for ops::RangeFrom<usize> {}
2592 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2593 impl Sealed for ops::RangeFull {}
2594 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2595 impl Sealed for ops::RangeInclusive<usize> {}
2596 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2597 impl Sealed for ops::RangeToInclusive<usize> {}
2600 /// A helper trait used for indexing operations.
// The trait itself is stable (so it can appear in `Index` bounds) while its
// methods remain unstable; implementors are restricted via the sealed
// supertrait above.
2601 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2602 #[rustc_on_unimplemented(
2605 label = "string indices are ranges of `usize`",
2608 all(any(T = "str", T = "&str", T = "std::string::String"), _Self="{integer}"),
2609 note="you can use `.chars().nth()` or `.bytes().nth()`
2610 see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
2612 message = "the type `{T}` cannot be indexed by `{Self}`",
2613 label = "slice indices are of type `usize` or ranges of `usize`",
2615 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2616 /// The output type returned by methods.
2617 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2618 type Output: ?Sized;
2620 /// Returns a shared reference to the output at this location, if in
2622 #[unstable(feature = "slice_index_methods", issue = "0")]
2623 fn get(self, slice: &T) -> Option<&Self::Output>;
2625 /// Returns a mutable reference to the output at this location, if in
2627 #[unstable(feature = "slice_index_methods", issue = "0")]
2628 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2630 /// Returns a shared reference to the output at this location, without
2631 /// performing any bounds checking.
2632 #[unstable(feature = "slice_index_methods", issue = "0")]
2633 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2635 /// Returns a mutable reference to the output at this location, without
2636 /// performing any bounds checking.
2637 #[unstable(feature = "slice_index_methods", issue = "0")]
2638 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2640 /// Returns a shared reference to the output at this location, panicking
2641 /// if out of bounds.
2642 #[unstable(feature = "slice_index_methods", issue = "0")]
2643 fn index(self, slice: &T) -> &Self::Output;
2645 /// Returns a mutable reference to the output at this location, panicking
2646 /// if out of bounds.
2647 #[unstable(feature = "slice_index_methods", issue = "0")]
2648 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
// `usize` indexes a single element (`Output = T`); bounds checks here compile
// down to a single `self < slice.len()` comparison.
2651 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2652 impl<T> SliceIndex<[T]> for usize {
2656 fn get(self, slice: &[T]) -> Option<&T> {
2657 if self < slice.len() {
// In-bounds just verified, so the unchecked access is sound. (The `unsafe`
// block delimiters are elided between these lines in this excerpt.)
2659 Some(self.get_unchecked(slice))
2667 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2668 if self < slice.len() {
2670 Some(self.get_unchecked_mut(slice))
2678 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2679 &*slice.as_ptr().add(self)
2683 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2684 &mut *slice.as_mut_ptr().add(self)
2688 fn index(self, slice: &[T]) -> &T {
2689 // N.B., use intrinsic indexing
2694 fn index_mut(self, slice: &mut [T]) -> &mut T {
2695 // N.B., use intrinsic indexing
// `Range<usize>` is the workhorse impl: every other range type below
// normalizes itself into a `Range` and delegates here.
2700 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2701 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2705 fn get(self, slice: &[T]) -> Option<&[T]> {
2706 if self.start > self.end || self.end > slice.len() {
2710 Some(self.get_unchecked(slice))
2716 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2717 if self.start > self.end || self.end > slice.len() {
2721 Some(self.get_unchecked_mut(slice))
2727 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2728 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2732 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2733 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
2737 fn index(self, slice: &[T]) -> &[T] {
// The order check comes first so an inverted range panics with the "starts at
// ... but ends at ..." message rather than the out-of-range one.
2738 if self.start > self.end {
2739 slice_index_order_fail(self.start, self.end);
2740 } else if self.end > slice.len() {
2741 slice_index_len_fail(self.end, slice.len());
2744 self.get_unchecked(slice)
2749 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2750 if self.start > self.end {
2751 slice_index_order_fail(self.start, self.end);
2752 } else if self.end > slice.len() {
2753 slice_index_len_fail(self.end, slice.len());
2756 self.get_unchecked_mut(slice)
// `..end` is sugar for `0..end`; every method simply delegates to the
// `Range<usize>` impl above.
2761 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2762 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2766 fn get(self, slice: &[T]) -> Option<&[T]> {
2767 (0..self.end).get(slice)
2771 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2772 (0..self.end).get_mut(slice)
2776 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2777 (0..self.end).get_unchecked(slice)
2781 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2782 (0..self.end).get_unchecked_mut(slice)
2786 fn index(self, slice: &[T]) -> &[T] {
2787 (0..self.end).index(slice)
2791 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2792 (0..self.end).index_mut(slice)
// `start..` is sugar for `start..slice.len()`.
2796 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2797 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2801 fn get(self, slice: &[T]) -> Option<&[T]> {
2802 (self.start..slice.len()).get(slice)
2806 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2807 (self.start..slice.len()).get_mut(slice)
2811 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2812 (self.start..slice.len()).get_unchecked(slice)
2816 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2817 (self.start..slice.len()).get_unchecked_mut(slice)
2821 fn index(self, slice: &[T]) -> &[T] {
2822 (self.start..slice.len()).index(slice)
2826 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2827 (self.start..slice.len()).index_mut(slice)
// `..` returns the whole slice; these bodies (elided between the lines here)
// can never fail, so there are no checks at all.
2831 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2832 impl<T> SliceIndex<[T]> for ops::RangeFull {
2836 fn get(self, slice: &[T]) -> Option<&[T]> {
2841 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2846 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2851 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2856 fn index(self, slice: &[T]) -> &[T] {
2861 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
// `start..=end` converts to `start..end + 1`; `end == usize::MAX` must be
// rejected first, because `end + 1` would overflow. `get`/`get_mut` report
// that case as `None`, the panicking `index` variants call
// `slice_index_overflow_fail`.
2867 #[stable(feature = "inclusive_range", since = "1.26.0")]
2868 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2872 fn get(self, slice: &[T]) -> Option<&[T]> {
2873 if *self.end() == usize::max_value() { None }
2874 else { (*self.start()..self.end() + 1).get(slice) }
2878 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2879 if *self.end() == usize::max_value() { None }
2880 else { (*self.start()..self.end() + 1).get_mut(slice) }
2884 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2885 (*self.start()..self.end() + 1).get_unchecked(slice)
2889 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2890 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2894 fn index(self, slice: &[T]) -> &[T] {
2895 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2896 (*self.start()..self.end() + 1).index(slice)
2900 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2901 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2902 (*self.start()..self.end() + 1).index_mut(slice)
// `..=end` is sugar for `0..=end`, delegating to the impl directly above
// (which also supplies the overflow handling).
2906 #[stable(feature = "inclusive_range", since = "1.26.0")]
2907 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
2911 fn get(self, slice: &[T]) -> Option<&[T]> {
2912 (0..=self.end).get(slice)
2916 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2917 (0..=self.end).get_mut(slice)
2921 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2922 (0..=self.end).get_unchecked(slice)
2926 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2927 (0..=self.end).get_unchecked_mut(slice)
2931 fn index(self, slice: &[T]) -> &[T] {
2932 (0..=self.end).index(slice)
2936 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2937 (0..=self.end).index_mut(slice)
2941 ////////////////////////////////////////////////////////////////////////////////
2943 ////////////////////////////////////////////////////////////////////////////////
2945 #[stable(feature = "rust1", since = "1.0.0")]
2946 impl<T> Default for &[T] {
2947 /// Creates an empty slice.
2948 fn default() -> Self { &[] }
2951 #[stable(feature = "mut_slice_default", since = "1.5.0")]
2952 impl<T> Default for &mut [T] {
2953 /// Creates a mutable empty slice.
2954 fn default() -> Self { &mut [] }
// `IntoIterator` for slice references enables `for x in &slice` /
// `for x in &mut slice`; both delegate to the inherent `iter`/`iter_mut`
// methods (the delegating call lines are elided in this excerpt).
2961 #[stable(feature = "rust1", since = "1.0.0")]
2962 impl<'a, T> IntoIterator for &'a [T] {
2964 type IntoIter = Iter<'a, T>;
2966 fn into_iter(self) -> Iter<'a, T> {
2971 #[stable(feature = "rust1", since = "1.0.0")]
2972 impl<'a, T> IntoIterator for &'a mut [T] {
2973 type Item = &'a mut T;
2974 type IntoIter = IterMut<'a, T>;
2976 fn into_iter(self) -> IterMut<'a, T> {
2981 // Macro helper functions
// Helper so the macros below can get `size_of::<T>()` from a pointer value
// without naming `T` (the `mem::size_of::<T>()` body line is elided here).
2983 fn size_from_ptr<T>(_: *const T) -> usize {
2987 // Inlining is_empty and len makes a huge performance difference
2988 macro_rules! is_empty {
2989 // The way we encode the length of a ZST iterator, this works both for ZST
2991 ($self: ident) => {$self.ptr == $self.end}
2993 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
2994 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
2996 ($self: ident) => {{
2997 #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
2999 let start = $self.ptr;
3000 let size = size_from_ptr(start);
// ZST case: `end` encodes `ptr + len` directly, so the length is the raw
// address difference and must use wrapping arithmetic.
3002 // This _cannot_ use `unchecked_sub` because we depend on wrapping
3003 // to represent the length of long ZST slice iterators.
3004 let diff = ($self.end as usize).wrapping_sub(start as usize);
// Non-ZST case below: communicate `start <= end` and exact divisibility to
// LLVM via `unchecked_sub` + `exact_div` so bounds checks can be removed.
3007 // We know that `start <= end`, so can do better than `offset_from`,
3008 // which needs to deal in signed. By setting appropriate flags here
3009 // we can tell LLVM this, which helps it remove bounds checks.
3010 // SAFETY: By the type invariant, `start <= end`
3011 let diff = unsafe { unchecked_sub($self.end as usize, start as usize) };
3012 // By also telling LLVM that the pointers are apart by an exact
3013 // multiple of the type size, it can optimize `len() == 0` down to
3014 // `start == end` instead of `(end - start) < size`.
3015 // SAFETY: By the type invariant, the pointers are aligned so the
3016 // distance between them must be a multiple of pointee size
3017 unsafe { exact_div(diff, size) }
3022 // The shared definition of the `Iter` and `IterMut` iterators
// Invoked twice at the bottom of this file: once for `Iter` (`*const T`,
// `&'a T`) and once for `IterMut` (`*mut T`, `&'a mut T`). The `$mut_` and
// `$raw_mut` fragments splice `mut` into the generated code only for the
// mutable variant.
3023 macro_rules! iterator {
3025 struct $name:ident -> $ptr:ty,
3031 // Returns the first element and moves the start of the iterator forwards by 1.
3032 // Greatly improves performance compared to an inlined function. The iterator
3033 // must not be empty.
3034 macro_rules! next_unchecked {
3035 ($self: ident) => {& $( $mut_ )* *$self.post_inc_start(1)}
3038 // Returns the last element and moves the end of the iterator backwards by 1.
3039 // Greatly improves performance compared to an inlined function. The iterator
3040 // must not be empty.
3041 macro_rules! next_back_unchecked {
3042 ($self: ident) => {& $( $mut_ )* *$self.pre_dec_end(1)}
3045 // Shrinks the iterator when T is a ZST, by moving the end of the iterator
3046 // backwards by `n`. `n` must not exceed `self.len()`.
// For ZSTs `end` encodes the remaining count, so shrinking is plain address
// arithmetic on a `u8` pointer (wrapping, since `end` may not be a real
// address).
3047 macro_rules! zst_shrink {
3048 ($self: ident, $n: ident) => {
3049 $self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T;
3053 impl<'a, T> $name<'a, T> {
3054 // Helper function for creating a slice from the iterator.
3056 fn make_slice(&self) -> &'a [T] {
3057 unsafe { from_raw_parts(self.ptr, len!(self)) }
3060 // Helper function for moving the start of the iterator forwards by `offset` elements,
3061 // returning the old start.
3062 // Unsafe because the offset must not exceed `self.len()`.
3064 unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
3065 if mem::size_of::<T>() == 0 {
// ZST: `ptr` never moves; only the length encoded in `end` shrinks.
3066 zst_shrink!(self, offset);
3070 self.ptr = self.ptr.offset(offset);
3075 // Helper function for moving the end of the iterator backwards by `offset` elements,
3076 // returning the new end.
3077 // Unsafe because the offset must not exceed `self.len()`.
3079 unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
3080 if mem::size_of::<T>() == 0 {
3081 zst_shrink!(self, offset);
3084 self.end = self.end.offset(-offset);
3090 #[stable(feature = "rust1", since = "1.0.0")]
3091 impl<T> ExactSizeIterator for $name<'_, T> {
3093 fn len(&self) -> usize {
3098 fn is_empty(&self) -> bool {
3103 #[stable(feature = "rust1", since = "1.0.0")]
3104 impl<'a, T> Iterator for $name<'a, T> {
3108 fn next(&mut self) -> Option<$elem> {
3109 // could be implemented with slices, but this avoids bounds checks
// The `assume` hints let LLVM know neither pointer is null (only meaningful
// to assert for `end` when T is not a ZST, since for ZSTs `end` is a count).
3111 assume(!self.ptr.is_null());
3112 if mem::size_of::<T>() != 0 {
3113 assume(!self.end.is_null());
3115 if is_empty!(self) {
3118 Some(next_unchecked!(self))
3124 fn size_hint(&self) -> (usize, Option<usize>) {
3125 let exact = len!(self);
3126 (exact, Some(exact))
3130 fn count(self) -> usize {
3135 fn nth(&mut self, n: usize) -> Option<$elem> {
3136 if n >= len!(self) {
3137 // This iterator is now empty.
3138 if mem::size_of::<T>() == 0 {
3139 // We have to do it this way as `ptr` may never be 0, but `end`
3140 // could be (due to wrapping).
3141 self.end = self.ptr;
3143 self.ptr = self.end;
3147 // We are in bounds. `post_inc_start` does the right thing even for ZSTs.
3149 self.post_inc_start(n as isize);
3150 Some(next_unchecked!(self))
3155 fn last(mut self) -> Option<$elem> {
3160 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
3161 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
3163 // manual unrolling is needed when there are conditional exits from the loop
3164 let mut accum = init;
3166 while len!(self) >= 4 {
3167 accum = f(accum, next_unchecked!(self))?;
3168 accum = f(accum, next_unchecked!(self))?;
3169 accum = f(accum, next_unchecked!(self))?;
3170 accum = f(accum, next_unchecked!(self))?;
3172 while !is_empty!(self) {
3173 accum = f(accum, next_unchecked!(self))?;
3180 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3181 where Fold: FnMut(Acc, Self::Item) -> Acc,
3183 // Let LLVM unroll this, rather than using the default
3184 // impl that would force the manual unrolling above
3185 let mut accum = init;
3186 while let Some(x) = self.next() {
3187 accum = f(accum, x);
3193 #[rustc_inherit_overflow_checks]
3194 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
3196 P: FnMut(Self::Item) -> bool,
3198 // The addition might panic on overflow.
// Uses `try_fold` with `Err(i)` as the short-circuit carrying the found index.
3200 self.try_fold(0, move |i, x| {
3201 if predicate(x) { Err(i) }
// The `assume` below tells LLVM the found index is within the original
// length, eliminating a bounds check at the caller (see the codegen test
// referenced by the `len!` macro above).
3205 unsafe { assume(i < n) };
3211 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
3212 P: FnMut(Self::Item) -> bool,
3213 Self: Sized + ExactSizeIterator + DoubleEndedIterator
3215 // No need for an overflow check here, because `ExactSizeIterator`
3217 self.try_rfold(n, move |i, x| {
3219 if predicate(x) { Err(i) }
3223 unsafe { assume(i < n) };
3231 #[stable(feature = "rust1", since = "1.0.0")]
3232 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
3234 fn next_back(&mut self) -> Option<$elem> {
3235 // could be implemented with slices, but this avoids bounds checks
3237 assume(!self.ptr.is_null());
3238 if mem::size_of::<T>() != 0 {
3239 assume(!self.end.is_null());
3241 if is_empty!(self) {
3244 Some(next_back_unchecked!(self))
3250 fn nth_back(&mut self, n: usize) -> Option<$elem> {
3251 if n >= len!(self) {
3252 // This iterator is now empty.
// Unlike `nth`, setting `end = ptr` is correct for ZSTs and non-ZSTs alike.
3253 self.end = self.ptr;
3256 // We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
3258 self.pre_dec_end(n as isize);
3259 Some(next_back_unchecked!(self))
3264 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
3265 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
3267 // manual unrolling is needed when there are conditional exits from the loop
3268 let mut accum = init;
3270 while len!(self) >= 4 {
3271 accum = f(accum, next_back_unchecked!(self))?;
3272 accum = f(accum, next_back_unchecked!(self))?;
3273 accum = f(accum, next_back_unchecked!(self))?;
3274 accum = f(accum, next_back_unchecked!(self))?;
3276 // inlining is_empty everywhere makes a huge performance difference
3277 while !is_empty!(self) {
3278 accum = f(accum, next_back_unchecked!(self))?;
3285 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3286 where Fold: FnMut(Acc, Self::Item) -> Acc,
3288 // Let LLVM unroll this, rather than using the default
3289 // impl that would force the manual unrolling above
3290 let mut accum = init;
3291 while let Some(x) = self.next_back() {
3292 accum = f(accum, x);
3298 #[stable(feature = "fused", since = "1.26.0")]
3299 impl<T> FusedIterator for $name<'_, T> {}
// SAFETY: `size_hint` above is exact (`(len, Some(len))`), which is the
// `TrustedLen` contract.
3301 #[unstable(feature = "trusted_len", issue = "37572")]
3302 unsafe impl<T> TrustedLen for $name<'_, T> {}
3306 /// Immutable slice iterator
3308 /// This struct is created by the [`iter`] method on [slices].
3315 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3316 /// let slice = &[1, 2, 3];
3318 /// // Then, we iterate over it:
3319 /// for element in slice.iter() {
3320 /// println!("{}", element);
3324 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3325 /// [slices]: ../../std/primitive.slice.html
3326 #[stable(feature = "rust1", since = "1.0.0")]
3327 pub struct Iter<'a, T: 'a> {
3329 end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3330 // ptr == end is a quick test for the Iterator being empty, that works
3331 // for both ZST and non-ZST.
// PhantomData ties the raw pointers to the lifetime of the borrowed slice so
// the borrow checker treats `Iter` like `&'a [T]`.
3332 _marker: marker::PhantomData<&'a T>,
3335 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3336 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
3337 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3338 f.debug_tuple("Iter")
3339 .field(&self.as_slice())
// SAFETY of the manual impls: `Iter` only hands out `&T`, so sharing it across
// threads is fine whenever `&T` itself is — hence `T: Sync` for both Send and
// Sync.
3344 #[stable(feature = "rust1", since = "1.0.0")]
3345 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
3346 #[stable(feature = "rust1", since = "1.0.0")]
3347 unsafe impl<T: Sync> Send for Iter<'_, T> {}
3349 impl<'a, T> Iter<'a, T> {
3350 /// Views the underlying data as a subslice of the original data.
3352 /// This has the same lifetime as the original slice, and so the
3353 /// iterator can continue to be used while this exists.
3360 /// // First, we declare a type which has the `iter` method to get the `Iter`
3361 /// // struct (&[usize here]):
3362 /// let slice = &[1, 2, 3];
3364 /// // Then, we get the iterator:
3365 /// let mut iter = slice.iter();
3366 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
3367 /// println!("{:?}", iter.as_slice());
3369 /// // Next, we move to the second element of the slice:
3371 /// // Now `as_slice` returns "[2, 3]":
3372 /// println!("{:?}", iter.as_slice());
3374 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3375 pub fn as_slice(&self) -> &'a [T] {
// Instantiate the shared iterator machinery for the immutable variant; the
// trailing block adds an `Iter`-only specialization of `is_sorted_by` that
// compares adjacent pairs via `windows(2)`, treating `None` (incomparable)
// as unsorted.
3380 iterator!{struct Iter -> *const T, &'a T, const, {/* no mut */}, {
3381 fn is_sorted_by<F>(self, mut compare: F) -> bool
3384 F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
3386 self.as_slice().windows(2).all(|w| {
3387 compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
3392 #[stable(feature = "rust1", since = "1.0.0")]
3393 impl<T> Clone for Iter<'_, T> {
3394 fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
3397 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
3398 impl<T> AsRef<[T]> for Iter<'_, T> {
3399 fn as_ref(&self) -> &[T] {
3404 /// Mutable slice iterator.
3406 /// This struct is created by the [`iter_mut`] method on [slices].
3413 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3414 /// // struct (&[usize here]):
3415 /// let mut slice = &mut [1, 2, 3];
3417 /// // Then, we iterate over it and increment each element value:
3418 /// for element in slice.iter_mut() {
3422 /// // We now have "[2, 3, 4]":
3423 /// println!("{:?}", slice);
3426 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
3427 /// [slices]: ../../std/primitive.slice.html
3428 #[stable(feature = "rust1", since = "1.0.0")]
3429 pub struct IterMut<'a, T: 'a> {
3431 end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3432 // ptr == end is a quick test for the Iterator being empty, that works
3433 // for both ZST and non-ZST.
// PhantomData of `&'a mut T` makes `IterMut` invariant over `T` and carry the
// exclusive-borrow semantics of the slice it was created from.
3434 _marker: marker::PhantomData<&'a mut T>,
3437 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3438 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
3439 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3440 f.debug_tuple("IterMut")
3441 .field(&self.make_slice())
// Note the asymmetry with `Iter`: `Send` here requires `T: Send` because
// `IterMut` hands out `&mut T`.
3446 #[stable(feature = "rust1", since = "1.0.0")]
3447 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
3448 #[stable(feature = "rust1", since = "1.0.0")]
3449 unsafe impl<T: Send> Send for IterMut<'_, T> {}
3451 impl<'a, T> IterMut<'a, T> {
3452 /// Views the underlying data as a subslice of the original data.
3454 /// To avoid creating `&mut` references that alias, this is forced
3455 /// to consume the iterator.
3462 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3463 /// // struct (&[usize here]):
3464 /// let mut slice = &mut [1, 2, 3];
3467 /// // Then, we get the iterator:
3468 /// let mut iter = slice.iter_mut();
3469 /// // We move to next element:
3471 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
3472 /// println!("{:?}", iter.into_slice());
3475 /// // Now let's modify a value of the slice:
3477 /// // First we get back the iterator:
3478 /// let mut iter = slice.iter_mut();
3479 /// // We change the value of the first element of the slice returned by the `next` method:
3480 /// *iter.next().unwrap() += 1;
3482 /// // Now slice is "[2, 2, 3]":
3483 /// println!("{:?}", slice);
3485 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3486 pub fn into_slice(self) -> &'a mut [T] {
3487 unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
3490 /// Views the underlying data as a subslice of the original data.
3492 /// To avoid creating `&mut [T]` references that alias, the returned slice
3493 /// borrows its lifetime from the iterator the method is applied on.
3500 /// # #![feature(slice_iter_mut_as_slice)]
3501 /// let mut slice: &mut [usize] = &mut [1, 2, 3];
3503 /// // First, we get the iterator:
3504 /// let mut iter = slice.iter_mut();
3505 /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
3506 /// assert_eq!(iter.as_slice(), &[1, 2, 3]);
3508 /// // Next, we move to the second element of the slice:
3510 /// // Now `as_slice` returns "[2, 3]":
3511 /// assert_eq!(iter.as_slice(), &[2, 3]);
3513 #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
3514 pub fn as_slice(&self) -> &[T] {
// Instantiate the shared machinery for the mutable variant; no extra
// variant-specific methods (empty trailing block).
3519 iterator!{struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
// --- Split: shared-reference splitting iterator + the internal SplitIter
// --- abstraction (elided excerpt: struct fields and some braces not visible).
3521 /// An internal abstraction over the splitting iterators, so that
3522 /// splitn, splitn_mut etc can be implemented once.
3524 trait SplitIter: DoubleEndedIterator {
3525 /// Marks the underlying iterator as complete, extracting the remaining
3526 /// portion of the slice.
3527 fn finish(&mut self) -> Option<Self::Item>;
3530 /// An iterator over subslices separated by elements that match a predicate
3533 /// This struct is created by the [`split`] method on [slices].
3535 /// [`split`]: ../../std/primitive.slice.html#method.split
3536 /// [slices]: ../../std/primitive.slice.html
3537 #[stable(feature = "rust1", since = "1.0.0")]
3538 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
// Debug shows the unconsumed slice and the `finished` flag; `pred` is a
// closure and is deliberately not printed.
3544 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3545 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool {
3546 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3547 f.debug_struct("Split")
3548 .field("v", &self.v)
3549 .field("finished", &self.finished)
3554 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3555 #[stable(feature = "rust1", since = "1.0.0")]
3556 impl<T, P> Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool {
3557 fn clone(&self) -> Self {
3560 pred: self.pred.clone(),
3561 finished: self.finished,
3566 #[stable(feature = "rust1", since = "1.0.0")]
3567 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3568 type Item = &'a [T];
// Yields everything before the first matching element and skips the match
// itself (`idx + 1..`); when nothing matches, `finish()` emits the tail once.
3571 fn next(&mut self) -> Option<&'a [T]> {
3572 if self.finished { return None; }
3574 match self.v.iter().position(|x| (self.pred)(x)) {
3575 None => self.finish(),
3577 let ret = Some(&self.v[..idx]);
3578 self.v = &self.v[idx + 1..];
// Lower bound 1: at least the final remainder is yielded. Upper bound
// len+1: every element could match, producing len+1 (possibly empty) slices.
3585 fn size_hint(&self) -> (usize, Option<usize>) {
3589 (1, Some(self.v.len() + 1))
3594 #[stable(feature = "rust1", since = "1.0.0")]
3595 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
// Mirror of `next`: searches from the back with `rposition` and keeps the
// part before the match for future iterations.
3597 fn next_back(&mut self) -> Option<&'a [T]> {
3598 if self.finished { return None; }
3600 match self.v.iter().rposition(|x| (self.pred)(x)) {
3601 None => self.finish(),
3603 let ret = Some(&self.v[idx + 1..]);
3604 self.v = &self.v[..idx];
// `finish` yields the remaining slice exactly once, then latches `finished`.
3611 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
3613 fn finish(&mut self) -> Option<&'a [T]> {
3614 if self.finished { None } else { self.finished = true; Some(self.v) }
3618 #[stable(feature = "fused", since = "1.26.0")]
3619 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
// --- SplitMut: mutable counterpart of Split (elided excerpt: struct fields
// --- and some braces/match arms not visible; code kept byte-identical).
3621 /// An iterator over the subslices of the vector which are separated
3622 /// by elements that match `pred`.
3624 /// This struct is created by the [`split_mut`] method on [slices].
3626 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
3627 /// [slices]: ../../std/primitive.slice.html
3628 #[stable(feature = "rust1", since = "1.0.0")]
3629 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3635 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3636 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3637 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3638 f.debug_struct("SplitMut")
3639 .field("v", &self.v)
3640 .field("finished", &self.finished)
// `mem::replace` with an empty slice moves `v` out so the remainder can be
// returned with lifetime 'a without aliasing the iterator's own borrow.
3645 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3647 fn finish(&mut self) -> Option<&'a mut [T]> {
3651 self.finished = true;
3652 Some(mem::replace(&mut self.v, &mut []))
3657 #[stable(feature = "rust1", since = "1.0.0")]
3658 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3659 type Item = &'a mut [T];
3662 fn next(&mut self) -> Option<&'a mut [T]> {
3663 if self.finished { return None; }
// The position search is done in an inner block so the `&mut self.pred`
// borrow ends before `self.v` is taken apart below.
3665 let idx_opt = { // work around borrowck limitations
3666 let pred = &mut self.pred;
3667 self.v.iter().position(|x| (*pred)(x))
3670 None => self.finish(),
// Take `v`, split at the match, yield the head, keep the tail minus the
// matched element (`tail[1..]`) for subsequent iterations.
3672 let tmp = mem::replace(&mut self.v, &mut []);
3673 let (head, tail) = tmp.split_at_mut(idx);
3674 self.v = &mut tail[1..];
3681 fn size_hint(&self) -> (usize, Option<usize>) {
3685 // if the predicate doesn't match anything, we yield one slice
3686 // if it matches every element, we yield len+1 empty slices.
3687 (1, Some(self.v.len() + 1))
3692 #[stable(feature = "rust1", since = "1.0.0")]
3693 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
3694 P: FnMut(&T) -> bool,
// Mirror of `next`, searching from the back with `rposition`.
3697 fn next_back(&mut self) -> Option<&'a mut [T]> {
3698 if self.finished { return None; }
3700 let idx_opt = { // work around borrowck limitations
3701 let pred = &mut self.pred;
3702 self.v.iter().rposition(|x| (*pred)(x))
3705 None => self.finish(),
3707 let tmp = mem::replace(&mut self.v, &mut []);
3708 let (head, tail) = tmp.split_at_mut(idx);
3710 Some(&mut tail[1..])
3716 #[stable(feature = "fused", since = "1.26.0")]
3717 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// --- RSplit: reverse splitting iterator; a thin newtype over `Split` that
// --- swaps the iteration direction (elided excerpt; code byte-identical).
3719 /// An iterator over subslices separated by elements that match a predicate
3720 /// function, starting from the end of the slice.
3722 /// This struct is created by the [`rsplit`] method on [slices].
3724 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
3725 /// [slices]: ../../std/primitive.slice.html
3726 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3727 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
3728 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
3729 inner: Split<'a, T, P>
// Debug reaches through to the inner Split's state but labels it "RSplit".
3732 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3733 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool {
3734 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3735 f.debug_struct("RSplit")
3736 .field("v", &self.inner.v)
3737 .field("finished", &self.inner.finished)
// Forward iteration on RSplit is backward iteration on the inner Split.
3742 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3743 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3744 type Item = &'a [T];
3747 fn next(&mut self) -> Option<&'a [T]> {
3748 self.inner.next_back()
3752 fn size_hint(&self) -> (usize, Option<usize>) {
3753 self.inner.size_hint()
3757 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3758 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
// Delegation body elided in this view; presumably `self.inner.next()`.
3760 fn next_back(&mut self) -> Option<&'a [T]> {
3765 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3766 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
// Body elided; presumably delegates to `self.inner.finish()`.
3768 fn finish(&mut self) -> Option<&'a [T]> {
3773 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3774 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
// --- RSplitMut: mutable reverse splitter; newtype over `SplitMut` with the
// --- iteration direction swapped (elided excerpt; code byte-identical).
3776 /// An iterator over the subslices of the vector which are separated
3777 /// by elements that match `pred`, starting from the end of the slice.
3779 /// This struct is created by the [`rsplit_mut`] method on [slices].
3781 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
3782 /// [slices]: ../../std/primitive.slice.html
3783 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3784 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3785 inner: SplitMut<'a, T, P>
3788 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3789 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3790 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3791 f.debug_struct("RSplitMut")
3792 .field("v", &self.inner.v)
3793 .field("finished", &self.inner.finished)
3798 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3799 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
// Body elided; presumably delegates to `self.inner.finish()`.
3801 fn finish(&mut self) -> Option<&'a mut [T]> {
// Forward iteration on RSplitMut is backward iteration on the inner SplitMut.
3806 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3807 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3808 type Item = &'a mut [T];
3811 fn next(&mut self) -> Option<&'a mut [T]> {
3812 self.inner.next_back()
3816 fn size_hint(&self) -> (usize, Option<usize>) {
3817 self.inner.size_hint()
3821 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3822 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
3823 P: FnMut(&T) -> bool,
// Body elided; presumably `self.inner.next()`.
3826 fn next_back(&mut self) -> Option<&'a mut [T]> {
3831 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3832 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// --- GenericSplitN: caps any SplitIter at `count` pieces; shared engine for
// --- splitn/splitn_mut/rsplitn/rsplitn_mut (elided excerpt; fields and the
// --- `0 =>` match arm are not visible; code kept byte-identical).
3834 /// An private iterator over subslices separated by elements that
3835 /// match a predicate function, splitting at most a fixed number of
3838 struct GenericSplitN<I> {
// When one split remains, `finish()` yields the whole unsplit remainder
// instead of splitting further; otherwise delegate to the inner iterator.
3843 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
3847 fn next(&mut self) -> Option<T> {
3850 1 => { self.count -= 1; self.iter.finish() }
3851 _ => { self.count -= 1; self.iter.next() }
// The upper bound can never exceed the remaining split budget.
3856 fn size_hint(&self) -> (usize, Option<usize>) {
3857 let (lower, upper_opt) = self.iter.size_hint();
3858 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
// --- SplitN / RSplitN / SplitNMut / RSplitNMut: public newtypes over
// --- GenericSplitN for the four splitn flavors (elided excerpt; closing
// --- braces and `.finish()` calls not visible; code kept byte-identical).
3862 /// An iterator over subslices separated by elements that match a predicate
3863 /// function, limited to a given number of splits.
3865 /// This struct is created by the [`splitn`] method on [slices].
3867 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3868 /// [slices]: ../../std/primitive.slice.html
3869 #[stable(feature = "rust1", since = "1.0.0")]
3870 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3871 inner: GenericSplitN<Split<'a, T, P>>
3874 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3875 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool {
3876 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3877 f.debug_struct("SplitN")
3878 .field("inner", &self.inner)
3883 /// An iterator over subslices separated by elements that match a
3884 /// predicate function, limited to a given number of splits, starting
3885 /// from the end of the slice.
3887 /// This struct is created by the [`rsplitn`] method on [slices].
3889 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3890 /// [slices]: ../../std/primitive.slice.html
3891 #[stable(feature = "rust1", since = "1.0.0")]
3892 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3893 inner: GenericSplitN<RSplit<'a, T, P>>
3896 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3897 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool {
3898 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3899 f.debug_struct("RSplitN")
3900 .field("inner", &self.inner)
3905 /// An iterator over subslices separated by elements that match a predicate
3906 /// function, limited to a given number of splits.
3908 /// This struct is created by the [`splitn_mut`] method on [slices].
3910 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3911 /// [slices]: ../../std/primitive.slice.html
3912 #[stable(feature = "rust1", since = "1.0.0")]
3913 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3914 inner: GenericSplitN<SplitMut<'a, T, P>>
3917 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3918 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3919 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3920 f.debug_struct("SplitNMut")
3921 .field("inner", &self.inner)
3926 /// An iterator over subslices separated by elements that match a
3927 /// predicate function, limited to a given number of splits, starting
3928 /// from the end of the slice.
3930 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3932 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3933 /// [slices]: ../../std/primitive.slice.html
3934 #[stable(feature = "rust1", since = "1.0.0")]
3935 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3936 inner: GenericSplitN<RSplitMut<'a, T, P>>
3939 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3940 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3941 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3942 f.debug_struct("RSplitNMut")
3943 .field("inner", &self.inner)
// --- forward_iterator!: stamps out Iterator + FusedIterator impls that
// --- delegate to `self.inner` for the four *SplitN wrappers above
// --- (elided excerpt; the `next` body is not visible; code byte-identical).
3948 macro_rules! forward_iterator {
3949 ($name:ident: $elem:ident, $iter_of:ty) => {
3950 #[stable(feature = "rust1", since = "1.0.0")]
3951 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
3952 P: FnMut(&T) -> bool
3954 type Item = $iter_of;
// `next` body elided in this view; presumably `self.inner.next()`.
3957 fn next(&mut self) -> Option<$iter_of> {
3962 fn size_hint(&self) -> (usize, Option<usize>) {
3963 self.inner.size_hint()
3967 #[stable(feature = "fused", since = "1.26.0")]
3968 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
3969 where P: FnMut(&T) -> bool {}
// One invocation per wrapper; item type matches the wrapped splitter.
3973 forward_iterator! { SplitN: T, &'a [T] }
3974 forward_iterator! { RSplitN: T, &'a [T] }
3975 forward_iterator! { SplitNMut: T, &'a mut [T] }
3976 forward_iterator! { RSplitNMut: T, &'a mut [T] }
// --- Windows: overlapping fixed-size views, advancing by one element
// --- (elided excerpt: struct fields, `return None` branches, and closing
// --- braces are not visible; code kept byte-identical).
3978 /// An iterator over overlapping subslices of length `size`.
3980 /// This struct is created by the [`windows`] method on [slices].
3982 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3983 /// [slices]: ../../std/primitive.slice.html
3985 #[stable(feature = "rust1", since = "1.0.0")]
3986 pub struct Windows<'a, T:'a> {
3991 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3992 #[stable(feature = "rust1", since = "1.0.0")]
3993 impl<T> Clone for Windows<'_, T> {
3994 fn clone(&self) -> Self {
4002 #[stable(feature = "rust1", since = "1.0.0")]
4003 impl<'a, T> Iterator for Windows<'a, T> {
4004 type Item = &'a [T];
// Yield the leading `size` elements, then advance the view by one.
4007 fn next(&mut self) -> Option<&'a [T]> {
4008 if self.size > self.v.len() {
4011 let ret = Some(&self.v[..self.size]);
4012 self.v = &self.v[1..];
// Exactly len - size + 1 windows remain (0 if size exceeds len).
4018 fn size_hint(&self) -> (usize, Option<usize>) {
4019 if self.size > self.v.len() {
4022 let size = self.v.len() - self.size + 1;
4028 fn count(self) -> usize {
// Skip n windows at once; `overflowing_add` guards size + n overflow.
4033 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4034 let (end, overflow) = self.size.overflowing_add(n);
4035 if end > self.v.len() || overflow {
4039 let nth = &self.v[n..end];
4040 self.v = &self.v[n+1..];
4046 fn last(self) -> Option<Self::Item> {
4047 if self.size > self.v.len() {
4050 let start = self.v.len() - self.size;
4051 Some(&self.v[start..])
4056 #[stable(feature = "rust1", since = "1.0.0")]
4057 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
// Yield the trailing window, then shrink the view by one from the back.
4059 fn next_back(&mut self) -> Option<&'a [T]> {
4060 if self.size > self.v.len() {
4063 let ret = Some(&self.v[self.v.len()-self.size..]);
4064 self.v = &self.v[..self.v.len()-1];
4070 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4071 let (end, overflow) = self.v.len().overflowing_sub(n);
4072 if end < self.size || overflow {
4076 let ret = &self.v[end-self.size..end];
4077 self.v = &self.v[..end-1];
4083 #[stable(feature = "rust1", since = "1.0.0")]
4084 impl<T> ExactSizeIterator for Windows<'_, T> {}
4086 #[unstable(feature = "trusted_len", issue = "37572")]
4087 unsafe impl<T> TrustedLen for Windows<'_, T> {}
4089 #[stable(feature = "fused", since = "1.26.0")]
4090 impl<T> FusedIterator for Windows<'_, T> {}
// SAFETY contract: caller must pass an in-bounds window index `i`;
// window i starts at offset i and spans `size` elements.
4093 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
4094 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4095 from_raw_parts(self.v.as_ptr().add(i), self.size)
4097 fn may_have_side_effect() -> bool { false }
// --- Chunks: non-overlapping chunks from the front; the final chunk may be
// --- shorter (elided excerpt: fields, `return` branches, and closing braces
// --- are not visible; code kept byte-identical).
4100 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4101 /// time), starting at the beginning of the slice.
4103 /// When the slice len is not evenly divided by the chunk size, the last slice
4104 /// of the iteration will be the remainder.
4106 /// This struct is created by the [`chunks`] method on [slices].
4108 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
4109 /// [slices]: ../../std/primitive.slice.html
4111 #[stable(feature = "rust1", since = "1.0.0")]
4112 pub struct Chunks<'a, T:'a> {
4117 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4118 #[stable(feature = "rust1", since = "1.0.0")]
4119 impl<T> Clone for Chunks<'_, T> {
4120 fn clone(&self) -> Self {
4123 chunk_size: self.chunk_size,
4128 #[stable(feature = "rust1", since = "1.0.0")]
4129 impl<'a, T> Iterator for Chunks<'a, T> {
4130 type Item = &'a [T];
// `min` caps the last chunk at whatever remains.
4133 fn next(&mut self) -> Option<&'a [T]> {
4134 if self.v.is_empty() {
4137 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4138 let (fst, snd) = self.v.split_at(chunksz);
// ceil(len / chunk_size) chunks remain; exact, so lower == upper.
4145 fn size_hint(&self) -> (usize, Option<usize>) {
4146 if self.v.is_empty() {
4149 let n = self.v.len() / self.chunk_size;
4150 let rem = self.v.len() % self.chunk_size;
4151 let n = if rem > 0 { n+1 } else { n };
4157 fn count(self) -> usize {
// Skip n chunks at once; `overflowing_mul` guards n * chunk_size overflow,
// `checked_add` clamps a partial final chunk to the slice end.
4162 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4163 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4164 if start >= self.v.len() || overflow {
4168 let end = match start.checked_add(self.chunk_size) {
4169 Some(sum) => cmp::min(self.v.len(), sum),
4170 None => self.v.len(),
4172 let nth = &self.v[start..end];
4173 self.v = &self.v[end..];
4179 fn last(self) -> Option<Self::Item> {
4180 if self.v.is_empty() {
// Start of the final (possibly short) chunk, rounded down to a boundary.
4183 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4184 Some(&self.v[start..])
4189 #[stable(feature = "rust1", since = "1.0.0")]
4190 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
// From the back, the first chunk yielded is the short remainder (if any).
4192 fn next_back(&mut self) -> Option<&'a [T]> {
4193 if self.v.is_empty() {
4196 let remainder = self.v.len() % self.chunk_size;
4197 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4198 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4205 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4206 let len = self.len();
4211 let start = (len - 1 - n) * self.chunk_size;
4212 let end = match start.checked_add(self.chunk_size) {
4213 Some(res) => cmp::min(res, self.v.len()),
4214 None => self.v.len(),
4216 let nth_back = &self.v[start..end];
4217 self.v = &self.v[..start];
4223 #[stable(feature = "rust1", since = "1.0.0")]
4224 impl<T> ExactSizeIterator for Chunks<'_, T> {}
4226 #[unstable(feature = "trusted_len", issue = "37572")]
4227 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
4229 #[stable(feature = "fused", since = "1.26.0")]
4230 impl<T> FusedIterator for Chunks<'_, T> {}
// SAFETY contract: caller must pass an in-bounds chunk index `i`; the end
// is clamped to the slice length so the last chunk may be short.
4233 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
4234 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4235 let start = i * self.chunk_size;
4236 let end = match start.checked_add(self.chunk_size) {
4237 None => self.v.len(),
4238 Some(end) => cmp::min(end, self.v.len()),
4240 from_raw_parts(self.v.as_ptr().add(start), end - start)
4242 fn may_have_side_effect() -> bool { false }
// --- ChunksMut: mutable counterpart of Chunks. `mem::replace` with `&mut []`
// --- is used throughout to move `v` out so disjoint `&'a mut` chunks can be
// --- returned without aliasing (elided excerpt; code kept byte-identical).
4245 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4246 /// elements at a time), starting at the beginning of the slice.
4248 /// When the slice len is not evenly divided by the chunk size, the last slice
4249 /// of the iteration will be the remainder.
4251 /// This struct is created by the [`chunks_mut`] method on [slices].
4253 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
4254 /// [slices]: ../../std/primitive.slice.html
4256 #[stable(feature = "rust1", since = "1.0.0")]
4257 pub struct ChunksMut<'a, T:'a> {
4262 #[stable(feature = "rust1", since = "1.0.0")]
4263 impl<'a, T> Iterator for ChunksMut<'a, T> {
4264 type Item = &'a mut [T];
4267 fn next(&mut self) -> Option<&'a mut [T]> {
4268 if self.v.is_empty() {
4271 let sz = cmp::min(self.v.len(), self.chunk_size);
4272 let tmp = mem::replace(&mut self.v, &mut []);
4273 let (head, tail) = tmp.split_at_mut(sz);
// ceil(len / chunk_size) chunks remain; exact bounds.
4280 fn size_hint(&self) -> (usize, Option<usize>) {
4281 if self.v.is_empty() {
4284 let n = self.v.len() / self.chunk_size;
4285 let rem = self.v.len() % self.chunk_size;
4286 let n = if rem > 0 { n + 1 } else { n };
4292 fn count(self) -> usize {
// Skip n chunks; overflow guards mirror the shared `Chunks` logic.
4297 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4298 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4299 if start >= self.v.len() || overflow {
4303 let end = match start.checked_add(self.chunk_size) {
4304 Some(sum) => cmp::min(self.v.len(), sum),
4305 None => self.v.len(),
4307 let tmp = mem::replace(&mut self.v, &mut []);
4308 let (head, tail) = tmp.split_at_mut(end);
4309 let (_, nth) = head.split_at_mut(start);
4316 fn last(self) -> Option<Self::Item> {
4317 if self.v.is_empty() {
4320 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4321 Some(&mut self.v[start..])
4326 #[stable(feature = "rust1", since = "1.0.0")]
4327 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
// From the back, the first chunk yielded is the short remainder (if any).
4329 fn next_back(&mut self) -> Option<&'a mut [T]> {
4330 if self.v.is_empty() {
4333 let remainder = self.v.len() % self.chunk_size;
4334 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4335 let tmp = mem::replace(&mut self.v, &mut []);
4336 let tmp_len = tmp.len();
4337 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4344 #[stable(feature = "rust1", since = "1.0.0")]
4345 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
4347 #[unstable(feature = "trusted_len", issue = "37572")]
4348 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
4350 #[stable(feature = "fused", since = "1.26.0")]
4351 impl<T> FusedIterator for ChunksMut<'_, T> {}
// SAFETY contract: caller must pass an in-bounds chunk index `i`; end is
// clamped to the slice length, so the final chunk may be short.
4354 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4355 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4356 let start = i * self.chunk_size;
4357 let end = match start.checked_add(self.chunk_size) {
4358 None => self.v.len(),
4359 Some(end) => cmp::min(end, self.v.len()),
4361 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4363 fn may_have_side_effect() -> bool { false }
// --- ChunksExact: like Chunks but every yielded chunk is exactly
// --- `chunk_size` long; the short tail is held separately and exposed via
// --- `remainder()` (elided excerpt; code kept byte-identical).
4366 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4367 /// time), starting at the beginning of the slice.
4369 /// When the slice len is not evenly divided by the chunk size, the last
4370 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4371 /// the [`remainder`] function from the iterator.
4373 /// This struct is created by the [`chunks_exact`] method on [slices].
4375 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4376 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4377 /// [slices]: ../../std/primitive.slice.html
4379 #[stable(feature = "chunks_exact", since = "1.31.0")]
4380 pub struct ChunksExact<'a, T:'a> {
4386 impl<'a, T> ChunksExact<'a, T> {
4387 /// Returns the remainder of the original slice that is not going to be
4388 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4390 #[stable(feature = "chunks_exact", since = "1.31.0")]
// Body elided; presumably returns the stored `rem` field.
4391 pub fn remainder(&self) -> &'a [T] {
4396 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4397 #[stable(feature = "chunks_exact", since = "1.31.0")]
4398 impl<T> Clone for ChunksExact<'_, T> {
4399 fn clone(&self) -> Self {
4403 chunk_size: self.chunk_size,
4408 #[stable(feature = "chunks_exact", since = "1.31.0")]
4409 impl<'a, T> Iterator for ChunksExact<'a, T> {
4410 type Item = &'a [T];
// Unlike `Chunks::next`, a partial tail is never yielded here — iteration
// stops as soon as fewer than `chunk_size` elements remain.
4413 fn next(&mut self) -> Option<&'a [T]> {
4414 if self.v.len() < self.chunk_size {
4417 let (fst, snd) = self.v.split_at(self.chunk_size)
4424 fn size_hint(&self) -> (usize, Option<usize>) {
4425 let n = self.v.len() / self.chunk_size;
4430 fn count(self) -> usize {
4435 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4436 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4437 if start >= self.v.len() || overflow {
4441 let (_, snd) = self.v.split_at(start);
4448 fn last(mut self) -> Option<Self::Item> {
4453 #[stable(feature = "chunks_exact", since = "1.31.0")]
4454 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
4456 fn next_back(&mut self) -> Option<&'a [T]> {
4457 if self.v.len() < self.chunk_size {
4460 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
// All chunks are full-sized, so end = start + chunk_size needs no clamping.
4467 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4468 let len = self.len();
4473 let start = (len - 1 - n) * self.chunk_size;
4474 let end = start + self.chunk_size;
4475 let nth_back = &self.v[start..end];
4476 self.v = &self.v[..start];
4482 #[stable(feature = "chunks_exact", since = "1.31.0")]
4483 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
4484 fn is_empty(&self) -> bool {
4489 #[unstable(feature = "trusted_len", issue = "37572")]
4490 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
4492 #[stable(feature = "chunks_exact", since = "1.31.0")]
4493 impl<T> FusedIterator for ChunksExact<'_, T> {}
// SAFETY contract: caller must pass an in-bounds chunk index `i`; every
// chunk is exactly `chunk_size` long, so no end clamping is needed.
4496 #[stable(feature = "chunks_exact", since = "1.31.0")]
4497 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
4498 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4499 let start = i * self.chunk_size;
4500 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4502 fn may_have_side_effect() -> bool { false }
// --- ChunksExactMut: mutable counterpart of ChunksExact; the short tail is
// --- recovered by consuming the iterator via `into_remainder()` (elided
// --- excerpt; code kept byte-identical).
4505 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4506 /// elements at a time), starting at the beginning of the slice.
4508 /// When the slice len is not evenly divided by the chunk size, the last up to
4509 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4510 /// [`into_remainder`] function from the iterator.
4512 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
4514 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
4515 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4516 /// [slices]: ../../std/primitive.slice.html
4518 #[stable(feature = "chunks_exact", since = "1.31.0")]
4519 pub struct ChunksExactMut<'a, T:'a> {
4525 impl<'a, T> ChunksExactMut<'a, T> {
4526 /// Returns the remainder of the original slice that is not going to be
4527 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4529 #[stable(feature = "chunks_exact", since = "1.31.0")]
// Consumes self so the returned `&'a mut [T]` cannot alias yielded chunks.
4530 pub fn into_remainder(self) -> &'a mut [T] {
4535 #[stable(feature = "chunks_exact", since = "1.31.0")]
4536 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
4537 type Item = &'a mut [T];
// Stops once fewer than `chunk_size` elements remain; the tail is never
// yielded here. `mem::replace` moves `v` out to avoid aliasing.
4540 fn next(&mut self) -> Option<&'a mut [T]> {
4541 if self.v.len() < self.chunk_size {
4544 let tmp = mem::replace(&mut self.v, &mut []);
4545 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4552 fn size_hint(&self) -> (usize, Option<usize>) {
4553 let n = self.v.len() / self.chunk_size;
4558 fn count(self) -> usize {
4563 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4564 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4565 if start >= self.v.len() || overflow {
4569 let tmp = mem::replace(&mut self.v, &mut []);
4570 let (_, snd) = tmp.split_at_mut(start);
4577 fn last(mut self) -> Option<Self::Item> {
4582 #[stable(feature = "chunks_exact", since = "1.31.0")]
4583 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4585 fn next_back(&mut self) -> Option<&'a mut [T]> {
4586 if self.v.len() < self.chunk_size {
4589 let tmp = mem::replace(&mut self.v, &mut []);
4590 let tmp_len = tmp.len();
4591 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4598 #[stable(feature = "chunks_exact", since = "1.31.0")]
4599 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
4600 fn is_empty(&self) -> bool {
4605 #[unstable(feature = "trusted_len", issue = "37572")]
4606 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
4608 #[stable(feature = "chunks_exact", since = "1.31.0")]
4609 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
// SAFETY contract: caller must pass an in-bounds chunk index `i`; chunks
// are exactly `chunk_size` long, so no end clamping is needed.
4612 #[stable(feature = "chunks_exact", since = "1.31.0")]
4613 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4614 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4615 let start = i * self.chunk_size;
4616 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4618 fn may_have_side_effect() -> bool { false }
// --- RChunks: non-overlapping chunks taken from the BACK of the slice; the
// --- short remainder, if any, comes out last at the front (elided excerpt;
// --- code kept byte-identical).
4621 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4622 /// time), starting at the end of the slice.
4624 /// When the slice len is not evenly divided by the chunk size, the last slice
4625 /// of the iteration will be the remainder.
4627 /// This struct is created by the [`rchunks`] method on [slices].
4629 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4630 /// [slices]: ../../std/primitive.slice.html
4632 #[stable(feature = "rchunks", since = "1.31.0")]
4633 pub struct RChunks<'a, T:'a> {
4638 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4639 #[stable(feature = "rchunks", since = "1.31.0")]
4640 impl<T> Clone for RChunks<'_, T> {
4641 fn clone(&self) -> Self {
4644 chunk_size: self.chunk_size,
4649 #[stable(feature = "rchunks", since = "1.31.0")]
4650 impl<'a, T> Iterator for RChunks<'a, T> {
4651 type Item = &'a [T];
// Yield the trailing chunk; `min` caps the final (front) chunk size.
4654 fn next(&mut self) -> Option<&'a [T]> {
4655 if self.v.is_empty() {
4658 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4659 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
// ceil(len / chunk_size) chunks remain; exact bounds.
4666 fn size_hint(&self) -> (usize, Option<usize>) {
4667 if self.v.is_empty() {
4670 let n = self.v.len() / self.chunk_size;
4671 let rem = self.v.len() % self.chunk_size;
4672 let n = if rem > 0 { n+1 } else { n };
4678 fn count(self) -> usize {
// Chunk offsets are measured from the back; `checked_sub` handles the
// short front chunk (start clamps to 0 when underflow would occur).
4683 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4684 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4685 if end >= self.v.len() || overflow {
4689 // Can't underflow because of the check above
4690 let end = self.v.len() - end;
4691 let start = match end.checked_sub(self.chunk_size) {
4695 let nth = &self.v[start..end];
4696 self.v = &self.v[0..start];
4702 fn last(self) -> Option<Self::Item> {
4703 if self.v.is_empty() {
// The last chunk yielded is the front remainder (full-size if len divides).
4706 let rem = self.v.len() % self.chunk_size;
4707 let end = if rem == 0 { self.chunk_size } else { rem };
4708 Some(&self.v[0..end])
4713 #[stable(feature = "rchunks", since = "1.31.0")]
4714 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
// Reverse direction yields the (possibly short) front chunk first.
4716 fn next_back(&mut self) -> Option<&'a [T]> {
4717 if self.v.is_empty() {
4720 let remainder = self.v.len() % self.chunk_size;
4721 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4722 let (fst, snd) = self.v.split_at(chunksz);
4729 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4730 let len = self.len();
4735 // can't underflow because `n < len`
4736 let offset_from_end = (len - 1 - n) * self.chunk_size;
4737 let end = self.v.len() - offset_from_end;
4738 let start = end.saturating_sub(self.chunk_size);
4739 let nth_back = &self.v[start..end];
4740 self.v = &self.v[end..];
4746 #[stable(feature = "rchunks", since = "1.31.0")]
4747 impl<T> ExactSizeIterator for RChunks<'_, T> {}
4749 #[unstable(feature = "trusted_len", issue = "37572")]
4750 unsafe impl<T> TrustedLen for RChunks<'_, T> {}
4752 #[stable(feature = "rchunks", since = "1.31.0")]
4753 impl<T> FusedIterator for RChunks<'_, T> {}
// SAFETY contract: caller must pass an in-bounds chunk index `i`; chunk i is
// addressed back-to-front, with the front chunk possibly short.
4756 #[stable(feature = "rchunks", since = "1.31.0")]
4757 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
4758 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4759 let end = self.v.len() - i * self.chunk_size;
4760 let start = match end.checked_sub(self.chunk_size) {
4762 Some(start) => start,
4764 from_raw_parts(self.v.as_ptr().add(start), end - start)
4766 fn may_have_side_effect() -> bool { false }
// --- RChunksMut: mutable counterpart of RChunks; chunks are taken from the
// --- back, `mem::replace` with `&mut []` moves `v` out to hand back disjoint
// --- `&'a mut` chunks (elided excerpt; code kept byte-identical).
4769 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4770 /// elements at a time), starting at the end of the slice.
4772 /// When the slice len is not evenly divided by the chunk size, the last slice
4773 /// of the iteration will be the remainder.
4775 /// This struct is created by the [`rchunks_mut`] method on [slices].
4777 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4778 /// [slices]: ../../std/primitive.slice.html
4780 #[stable(feature = "rchunks", since = "1.31.0")]
4781 pub struct RChunksMut<'a, T:'a> {
4786 #[stable(feature = "rchunks", since = "1.31.0")]
4787 impl<'a, T> Iterator for RChunksMut<'a, T> {
4788 type Item = &'a mut [T];
// Yield the trailing chunk; the final front chunk may be short.
4791 fn next(&mut self) -> Option<&'a mut [T]> {
4792 if self.v.is_empty() {
4795 let sz = cmp::min(self.v.len(), self.chunk_size);
4796 let tmp = mem::replace(&mut self.v, &mut []);
4797 let tmp_len = tmp.len();
4798 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
// ceil(len / chunk_size) chunks remain; exact bounds.
4805 fn size_hint(&self) -> (usize, Option<usize>) {
4806 if self.v.is_empty() {
4809 let n = self.v.len() / self.chunk_size;
4810 let rem = self.v.len() % self.chunk_size;
4811 let n = if rem > 0 { n + 1 } else { n };
4817 fn count(self) -> usize {
// Offsets measured from the back; overflow/underflow guards mirror RChunks.
4822 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4823 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4824 if end >= self.v.len() || overflow {
4828 // Can't underflow because of the check above
4829 let end = self.v.len() - end;
4830 let start = match end.checked_sub(self.chunk_size) {
4834 let tmp = mem::replace(&mut self.v, &mut []);
4835 let (head, tail) = tmp.split_at_mut(start);
4836 let (nth, _) = tail.split_at_mut(end - start);
4843 fn last(self) -> Option<Self::Item> {
4844 if self.v.is_empty() {
// The last chunk yielded is the front remainder (full-size if len divides).
4847 let rem = self.v.len() % self.chunk_size;
4848 let end = if rem == 0 { self.chunk_size } else { rem };
4849 Some(&mut self.v[0..end])
4854 #[stable(feature = "rchunks", since = "1.31.0")]
4855 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
4857 fn next_back(&mut self) -> Option<&'a mut [T]> {
4858 if self.v.is_empty() {
4861 let remainder = self.v.len() % self.chunk_size;
4862 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4863 let tmp = mem::replace(&mut self.v, &mut []);
4864 let (head, tail) = tmp.split_at_mut(sz);
4871 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4872 let len = self.len();
4877 // can't underflow because `n < len`
4878 let offset_from_end = (len - 1 - n) * self.chunk_size;
4879 let end = self.v.len() - offset_from_end;
4880 let start = end.saturating_sub(self.chunk_size);
4881 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
4882 let (_, nth_back) = tmp.split_at_mut(start);
4889 #[stable(feature = "rchunks", since = "1.31.0")]
4890 impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
4892 #[unstable(feature = "trusted_len", issue = "37572")]
4893 unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
4895 #[stable(feature = "rchunks", since = "1.31.0")]
4896 impl<T> FusedIterator for RChunksMut<'_, T> {}
4899 #[stable(feature = "rchunks", since = "1.31.0")]
4900 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
4901 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4902 let end = self.v.len() - i * self.chunk_size;
4903 let start = match end.checked_sub(self.chunk_size) {
4905 Some(start) => start,
4907 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4909 fn may_have_side_effect() -> bool { false }
4912 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4913 /// time), starting at the end of the slice.
4915 /// When the slice len is not evenly divided by the chunk size, the last
4916 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4917 /// the [`remainder`] function from the iterator.
4919 /// This struct is created by the [`rchunks_exact`] method on [slices].
4921 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
4922 /// [`remainder`]: ../../std/slice/struct.RChunksExact.html#method.remainder
4923 /// [slices]: ../../std/primitive.slice.html
4925 #[stable(feature = "rchunks", since = "1.31.0")]
4926 pub struct RChunksExact<'a, T:'a> {
4932 impl<'a, T> RChunksExact<'a, T> {
4933 /// Returns the remainder of the original slice that is not going to be
4934 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4936 #[stable(feature = "rchunks", since = "1.31.0")]
4937 pub fn remainder(&self) -> &'a [T] {
4942 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4943 #[stable(feature = "rchunks", since = "1.31.0")]
4944 impl<'a, T> Clone for RChunksExact<'a, T> {
4945 fn clone(&self) -> RChunksExact<'a, T> {
4949 chunk_size: self.chunk_size,
4954 #[stable(feature = "rchunks", since = "1.31.0")]
4955 impl<'a, T> Iterator for RChunksExact<'a, T> {
4956 type Item = &'a [T];
// Unlike `RChunks`, the "exact" variant only ever yields full-size chunks;
// anything shorter than `chunk_size` is left in the remainder.
4959 fn next(&mut self) -> Option<&'a [T]> {
4960 if self.v.len() < self.chunk_size {
4963 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4970 fn size_hint(&self) -> (usize, Option<usize>) {
4971 let n = self.v.len() / self.chunk_size;
4976 fn count(self) -> usize {
4981 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4982 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4983 if end >= self.v.len() || overflow {
4987 let (fst, _) = self.v.split_at(self.v.len() - end);
4994 fn last(mut self) -> Option<Self::Item> {
4999 #[stable(feature = "rchunks", since = "1.31.0")]
5000 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
5002 fn next_back(&mut self) -> Option<&'a [T]> {
5003 if self.v.len() < self.chunk_size {
5006 let (fst, snd) = self.v.split_at(self.chunk_size);
5013 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5014 let len = self.len();
5019 // now that we know that `n` corresponds to a chunk,
5020 // none of these operations can underflow/overflow
5021 let offset = (len - n) * self.chunk_size;
5022 let start = self.v.len() - offset;
5023 let end = start + self.chunk_size;
5024 let nth_back = &self.v[start..end];
5025 self.v = &self.v[end..];
5031 #[stable(feature = "rchunks", since = "1.31.0")]
5032 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
5033 fn is_empty(&self) -> bool {
5038 #[unstable(feature = "trusted_len", issue = "37572")]
5039 unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
5041 #[stable(feature = "rchunks", since = "1.31.0")]
5042 impl<T> FusedIterator for RChunksExact<'_, T> {}
5045 #[stable(feature = "rchunks", since = "1.31.0")]
5046 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
// Every chunk is exactly `chunk_size` long here, so no `checked_sub` is
// needed; NOTE(review): in-bounds `i` is presumably guaranteed by the
// `TrustedRandomAccess` contract — confirm.
5047 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5048 let end = self.v.len() - i * self.chunk_size;
5049 let start = end - self.chunk_size;
5050 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
5052 fn may_have_side_effect() -> bool { false }
5055 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5056 /// elements at a time), starting at the end of the slice.
5058 /// When the slice len is not evenly divided by the chunk size, the last up to
5059 /// `chunk_size-1` elements will be omitted but can be retrieved from the
5060 /// [`into_remainder`] function from the iterator.
5062 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
5064 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
5065 /// [`into_remainder`]: ../../std/slice/struct.RChunksExactMut.html#method.into_remainder
5066 /// [slices]: ../../std/primitive.slice.html
5068 #[stable(feature = "rchunks", since = "1.31.0")]
5069 pub struct RChunksExactMut<'a, T:'a> {
5075 impl<'a, T> RChunksExactMut<'a, T> {
5076 /// Returns the remainder of the original slice that is not going to be
5077 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5079 #[stable(feature = "rchunks", since = "1.31.0")]
5080 pub fn into_remainder(self) -> &'a mut [T] {
5085 #[stable(feature = "rchunks", since = "1.31.0")]
5086 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
5087 type Item = &'a mut [T];
// Only full-size chunks are yielded; shorter leftovers go to `into_remainder`.
// `mem::replace` moves the slice out of `self.v` so it can be split mutably
// without conflicting borrows of `self`.
5090 fn next(&mut self) -> Option<&'a mut [T]> {
5091 if self.v.len() < self.chunk_size {
5094 let tmp = mem::replace(&mut self.v, &mut []);
5095 let tmp_len = tmp.len();
5096 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
5103 fn size_hint(&self) -> (usize, Option<usize>) {
5104 let n = self.v.len() / self.chunk_size;
5109 fn count(self) -> usize {
5114 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5115 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5116 if end >= self.v.len() || overflow {
5120 let tmp = mem::replace(&mut self.v, &mut []);
5121 let tmp_len = tmp.len();
5122 let (fst, _) = tmp.split_at_mut(tmp_len - end);
5129 fn last(mut self) -> Option<Self::Item> {
5134 #[stable(feature = "rchunks", since = "1.31.0")]
5135 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
5137 fn next_back(&mut self) -> Option<&'a mut [T]> {
5138 if self.v.len() < self.chunk_size {
5141 let tmp = mem::replace(&mut self.v, &mut []);
5142 let (head, tail) = tmp.split_at_mut(self.chunk_size);
5149 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5150 let len = self.len();
5155 // now that we know that `n` corresponds to a chunk,
5156 // none of these operations can underflow/overflow
5157 let offset = (len - n) * self.chunk_size;
5158 let start = self.v.len() - offset;
5159 let end = start + self.chunk_size;
5160 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5161 let (_, nth_back) = tmp.split_at_mut(start);
5168 #[stable(feature = "rchunks", since = "1.31.0")]
5169 impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
5170 fn is_empty(&self) -> bool {
5175 #[unstable(feature = "trusted_len", issue = "37572")]
5176 unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
5178 #[stable(feature = "rchunks", since = "1.31.0")]
5179 impl<T> FusedIterator for RChunksExactMut<'_, T> {}
5182 #[stable(feature = "rchunks", since = "1.31.0")]
5183 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
// Chunks are exactly `chunk_size` long, so plain subtraction suffices;
// NOTE(review): in-bounds `i` presumably guaranteed by the trait — confirm.
5184 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5185 let end = self.v.len() - i * self.chunk_size;
5186 let start = end - self.chunk_size;
5187 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
5189 fn may_have_side_effect() -> bool { false }
5196 /// Forms a slice from a pointer and a length.
5198 /// The `len` argument is the number of **elements**, not the number of bytes.
5202 /// This function is unsafe as there is no guarantee that the given pointer is
5203 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
5204 /// lifetime for the returned slice.
5206 /// `data` must be non-null and aligned, even for zero-length slices. One
5207 /// reason for this is that enum layout optimizations may rely on references
5208 /// (including slices of any length) being aligned and non-null to distinguish
5209 /// them from other data. You can obtain a pointer that is usable as `data`
5210 /// for zero-length slices using [`NonNull::dangling()`].
5212 /// The total size of the slice must be no larger than `isize::MAX` **bytes**
5213 /// in memory. See the safety documentation of [`pointer::offset`].
5217 /// The lifetime for the returned slice is inferred from its usage. To
5218 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
5219 /// source lifetime is safe in the context, such as by providing a helper
5220 /// function taking the lifetime of a host value for the slice, or by explicit
5228 /// // manifest a slice for a single element
5230 /// let ptr = &x as *const _;
5231 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
5232 /// assert_eq!(slice[0], 42);
5235 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
5236 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
5238 #[stable(feature = "rust1", since = "1.0.0")]
5239 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// The `debug_assert!`s below are debug-build sanity checks only; in release
// builds upholding the alignment/non-null/size contract is entirely on the
// caller.
5240 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
5241 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5242 "attempt to create slice covering half the address space");
5243 &*ptr::slice_from_raw_parts(data, len)
5246 /// Performs the same functionality as [`from_raw_parts`], except that a
5247 /// mutable slice is returned.
5249 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
5250 /// as not being able to provide a non-aliasing guarantee of the returned
5251 /// mutable slice. `data` must be non-null and aligned even for zero-length
5252 /// slices as with [`from_raw_parts`]. The total size of the slice must be no
5253 /// larger than `isize::MAX` **bytes** in memory.
5255 /// See the documentation of [`from_raw_parts`] for more details.
5257 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
5259 #[stable(feature = "rust1", since = "1.0.0")]
5260 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
// Same debug-only checks as `from_raw_parts`; the aliasing guarantee for the
// returned `&mut` is additionally the caller's responsibility.
5261 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
5262 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5263 "attempt to create slice covering half the address space");
5264 &mut *ptr::slice_from_raw_parts_mut(data, len)
5267 /// Converts a reference to T into a slice of length 1 (without copying).
5268 #[stable(feature = "from_ref", since = "1.28.0")]
5269 pub fn from_ref<T>(s: &T) -> &[T] {
// Safe wrapper: a `&T` is always valid, aligned, and non-null for one element.
5271 from_raw_parts(s, 1)
5275 /// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
5276 #[stable(feature = "from_ref", since = "1.28.0")]
5277 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// Safe wrapper: the `&mut T` guarantees validity and exclusive access for one
// element.
5279 from_raw_parts_mut(s, 1)
5283 // This function is public only because there is no other way to unit test heapsort.
5284 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
// Thin test-only re-export of the private `sort::heapsort`; `is_less` is the
// comparison predicate ("strictly less than").
5286 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
5287 where F: FnMut(&T, &T) -> bool
5289 sort::heapsort(v, &mut is_less);
5293 // Comparison traits
5297 /// Calls implementation provided memcmp.
5299 /// Interprets the data as u8.
5301 /// Returns 0 for equal, < 0 for less than and > 0 for greater
5303 // FIXME(#32610): Return type should be c_int
5304 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
// The public comparison impls for slices all delegate to internal
// specialization traits (`SlicePartialEq`, `SliceOrd`, `SlicePartialOrd`)
// defined below, which pick optimized implementations per element type.
5307 #[stable(feature = "rust1", since = "1.0.0")]
5308 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
5309 fn eq(&self, other: &[B]) -> bool {
5310 SlicePartialEq::equal(self, other)
5313 fn ne(&self, other: &[B]) -> bool {
5314 SlicePartialEq::not_equal(self, other)
5318 #[stable(feature = "rust1", since = "1.0.0")]
5319 impl<T: Eq> Eq for [T] {}
5321 /// Implements comparison of vectors lexicographically.
5322 #[stable(feature = "rust1", since = "1.0.0")]
5323 impl<T: Ord> Ord for [T] {
5324 fn cmp(&self, other: &[T]) -> Ordering {
5325 SliceOrd::compare(self, other)
5329 /// Implements comparison of vectors lexicographically.
5330 #[stable(feature = "rust1", since = "1.0.0")]
5331 impl<T: PartialOrd> PartialOrd for [T] {
5332 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
5333 SlicePartialOrd::partial_compare(self, other)
5338 // intermediate trait for specialization of slice's PartialEq
// Specialization ladder, most generic first:
//   1. any `A: PartialEq<B>`      -> element-by-element loop
//   2. `A: PartialEq + Eq`        -> adds an equal-pointer fast path
//   3. `A: ... + BytewiseEquality` -> single `memcmp` over the raw bytes
5339 trait SlicePartialEq<B> {
5340 fn equal(&self, other: &[B]) -> bool;
5342 fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
5345 // Generic slice equality
5346 impl<A, B> SlicePartialEq<B> for [A]
5347 where A: PartialEq<B>
5349 default fn equal(&self, other: &[B]) -> bool {
5350 if self.len() != other.len() {
5354 self.iter().zip(other.iter()).all(|(x, y)| x == y)
5358 // Use an equal-pointer optimization when types are `Eq`
// The pointer shortcut is only sound for `Eq` types: `Eq` rules out values
// like NaN that are not equal to themselves.
5359 impl<A> SlicePartialEq<A> for [A]
5360 where A: PartialEq<A> + Eq
5362 default fn equal(&self, other: &[A]) -> bool {
5363 if self.len() != other.len() {
5367 if self.as_ptr() == other.as_ptr() {
5371 self.iter().zip(other.iter()).all(|(x, y)| x == y)
5375 // Use memcmp for bytewise equality when the types allow
5376 impl<A> SlicePartialEq<A> for [A]
5377 where A: PartialEq<A> + BytewiseEquality
5379 fn equal(&self, other: &[A]) -> bool {
5380 if self.len() != other.len() {
5383 if self.as_ptr() == other.as_ptr() {
// `size_of_val` gives the byte length of the whole slice, so one `memcmp`
// compares every element at once.
5387 let size = mem::size_of_val(self);
5388 memcmp(self.as_ptr() as *const u8,
5389 other.as_ptr() as *const u8, size) == 0
5395 // intermediate trait for specialization of slice's PartialOrd
5396 trait SlicePartialOrd<B> {
5397 fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
5400 impl<A> SlicePartialOrd<A> for [A]
// Generic lexicographic comparison: walk the common prefix, then break ties
// by length.
5403 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5404 let l = cmp::min(self.len(), other.len());
5406 // Slice to the loop iteration range to enable bound check
5407 // elimination in the compiler
5408 let lhs = &self[..l];
5409 let rhs = &other[..l];
5412 match lhs[i].partial_cmp(&rhs[i]) {
5413 Some(Ordering::Equal) => (),
5414 non_eq => return non_eq,
5418 self.len().partial_cmp(&other.len())
5422 impl<A> SlicePartialOrd<A> for [A]
// When a total order exists, reuse `SliceOrd` and wrap in `Some`.
5425 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5426 Some(SliceOrd::compare(self, other))
5431 // intermediate trait for specialization of slice's Ord
5433 fn compare(&self, other: &[B]) -> Ordering;
5436 impl<A> SliceOrd<A> for [A]
5439 default fn compare(&self, other: &[A]) -> Ordering {
5440 let l = cmp::min(self.len(), other.len());
5442 // Slice to the loop iteration range to enable bound check
5443 // elimination in the compiler
5444 let lhs = &self[..l];
5445 let rhs = &other[..l];
5448 match lhs[i].cmp(&rhs[i]) {
5449 Ordering::Equal => (),
5450 non_eq => return non_eq,
5454 self.len().cmp(&other.len())
5458 // memcmp compares a sequence of unsigned bytes lexicographically.
5459 // this matches the order we want for [u8], but no others (not even [i8]).
5460 impl SliceOrd<u8> for [u8] {
5462 fn compare(&self, other: &[u8]) -> Ordering {
// Compare only the common prefix; a zero result means one slice is a prefix
// of the other, so fall back to comparing lengths.
5463 let order = unsafe {
5464 memcmp(self.as_ptr(), other.as_ptr(),
5465 cmp::min(self.len(), other.len()))
5468 self.len().cmp(&other.len())
5469 } else if order < 0 {
5478 /// Trait implemented for types that can be compared for equality using
5479 /// their bytewise representation
// Marker trait used by the `memcmp` specialization of `SlicePartialEq` above.
5480 trait BytewiseEquality: Eq + Copy { }
// Helper macro: implements a marker trait for each listed type.
5482 macro_rules! impl_marker_for {
5483 ($traitname:ident, $($ty:ty)*) => {
5485 impl $traitname for $ty { }
// All of these have the property that equality of values coincides with
// equality of their byte representations.
5490 impl_marker_for!(BytewiseEquality,
5491 u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
5494 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5495 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
5498 fn may_have_side_effect() -> bool { false }
5502 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
// NOTE(review): unchecked pointer offset — soundness presumably rests on the
// `TrustedRandomAccess` contract that `i` is in-bounds; confirm.
5503 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
5504 &mut *self.ptr.add(i)
5506 fn may_have_side_effect() -> bool { false }
// Internal specialization trait backing `<[T]>::contains`: generic linear
// scan, with `memchr`-accelerated impls for the byte-sized types.
5509 trait SliceContains: Sized {
5510 fn slice_contains(&self, x: &[Self]) -> bool;
5513 impl<T> SliceContains for T where T: PartialEq {
5514 default fn slice_contains(&self, x: &[Self]) -> bool {
5515 x.iter().any(|y| *y == *self)
5519 impl SliceContains for u8 {
5520 fn slice_contains(&self, x: &[Self]) -> bool {
5521 memchr::memchr(*self, x).is_some()
5525 impl SliceContains for i8 {
5526 fn slice_contains(&self, x: &[Self]) -> bool {
// Reinterpret the `i8` haystack as `u8` bytes so `memchr` can be reused:
// `i8` and `u8` have identical size/alignment, and equality of the values
// coincides with equality of their byte representations.
5527 let byte = *self as u8;
5528 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5529 memchr::memchr(byte, bytes).is_some()