1 //! Slice management and manipulation.
3 //! For more details see [`std::slice`].
5 //! [`std::slice`]: ../../std/slice/index.html
7 #![stable(feature = "rust1", since = "1.0.0")]
9 // How this module is organized.
11 // The library infrastructure for slices is fairly messy. There's
12 // a lot of stuff defined here. Let's keep it clean.
14 // The layout of this file is thus:
16 // * Inherent methods. This is where most of the slice API resides.
17 // * Implementations of a few common traits with important slice ops.
18 // * Definitions of a bunch of iterators.
20 // * The `raw` and `bytes` submodules.
21 // * Boilerplate trait implementations.
23 use cmp::Ordering::{self, Less, Equal, Greater};
26 use intrinsics::assume;
29 use ops::{FnMut, Try, self};
31 use option::Option::{None, Some};
33 use result::Result::{Ok, Err};
36 use marker::{Copy, Send, Sync, Sized, self};
38 #[unstable(feature = "slice_internals", issue = "0",
39 reason = "exposed from core to be reused in std; use the memchr crate")]
40 /// Pure rust memchr implementation, taken from rust-memchr
47 union Repr<'a, T: 'a> {
49 rust_mut: &'a mut [T],
66 /// Returns the number of elements in the slice.
71 /// let a = [1, 2, 3];
72 /// assert_eq!(a.len(), 3);
74 #[stable(feature = "rust1", since = "1.0.0")]
76 #[rustc_const_unstable(feature = "const_slice_len")]
77 pub const fn len(&self) -> usize {
79 Repr { rust: self }.raw.len
83 /// Returns `true` if the slice has a length of 0.
88 /// let a = [1, 2, 3];
89 /// assert!(!a.is_empty());
91 #[stable(feature = "rust1", since = "1.0.0")]
93 #[rustc_const_unstable(feature = "const_slice_len")]
94 pub const fn is_empty(&self) -> bool {
98 /// Returns the first element of the slice, or `None` if it is empty.
103 /// let v = [10, 40, 30];
104 /// assert_eq!(Some(&10), v.first());
106 /// let w: &[i32] = &[];
107 /// assert_eq!(None, w.first());
109 #[stable(feature = "rust1", since = "1.0.0")]
111 pub fn first(&self) -> Option<&T> {
115 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
120 /// let x = &mut [0, 1, 2];
122 /// if let Some(first) = x.first_mut() {
125 /// assert_eq!(x, &[5, 1, 2]);
127 #[stable(feature = "rust1", since = "1.0.0")]
129 pub fn first_mut(&mut self) -> Option<&mut T> {
133 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
138 /// let x = &[0, 1, 2];
140 /// if let Some((first, elements)) = x.split_first() {
141 /// assert_eq!(first, &0);
142 /// assert_eq!(elements, &[1, 2]);
145 #[stable(feature = "slice_splits", since = "1.5.0")]
147 pub fn split_first(&self) -> Option<(&T, &[T])> {
148 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
151 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
156 /// let x = &mut [0, 1, 2];
158 /// if let Some((first, elements)) = x.split_first_mut() {
163 /// assert_eq!(x, &[3, 4, 5]);
165 #[stable(feature = "slice_splits", since = "1.5.0")]
167 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
168 if self.is_empty() { None } else {
169 let split = self.split_at_mut(1);
170 Some((&mut split.0[0], split.1))
174 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
179 /// let x = &[0, 1, 2];
181 /// if let Some((last, elements)) = x.split_last() {
182 /// assert_eq!(last, &2);
183 /// assert_eq!(elements, &[0, 1]);
186 #[stable(feature = "slice_splits", since = "1.5.0")]
188 pub fn split_last(&self) -> Option<(&T, &[T])> {
189 let len = self.len();
190 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
193 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
198 /// let x = &mut [0, 1, 2];
200 /// if let Some((last, elements)) = x.split_last_mut() {
205 /// assert_eq!(x, &[4, 5, 3]);
207 #[stable(feature = "slice_splits", since = "1.5.0")]
209 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
210 let len = self.len();
211 if len == 0 { None } else {
212 let split = self.split_at_mut(len - 1);
213 Some((&mut split.1[0], split.0))
218 /// Returns the last element of the slice, or `None` if it is empty.
223 /// let v = [10, 40, 30];
224 /// assert_eq!(Some(&30), v.last());
226 /// let w: &[i32] = &[];
227 /// assert_eq!(None, w.last());
229 #[stable(feature = "rust1", since = "1.0.0")]
231 pub fn last(&self) -> Option<&T> {
232 let last_idx = self.len().checked_sub(1)?;
236 /// Returns a mutable reference to the last item in the slice, or `None` if it is empty.
241 /// let x = &mut [0, 1, 2];
243 /// if let Some(last) = x.last_mut() {
246 /// assert_eq!(x, &[0, 1, 10]);
248 #[stable(feature = "rust1", since = "1.0.0")]
250 pub fn last_mut(&mut self) -> Option<&mut T> {
251 let last_idx = self.len().checked_sub(1)?;
252 self.get_mut(last_idx)
255 /// Returns a reference to an element or subslice depending on the type of
258 /// - If given a position, returns a reference to the element at that
259 /// position or `None` if out of bounds.
260 /// - If given a range, returns the subslice corresponding to that range,
261 /// or `None` if out of bounds.
266 /// let v = [10, 40, 30];
267 /// assert_eq!(Some(&40), v.get(1));
268 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
269 /// assert_eq!(None, v.get(3));
270 /// assert_eq!(None, v.get(0..4));
272 #[stable(feature = "rust1", since = "1.0.0")]
274 pub fn get<I>(&self, index: I) -> Option<&I::Output>
275 where I: SliceIndex<Self>
280 /// Returns a mutable reference to an element or subslice depending on the
281 /// type of index (see [`get`]) or `None` if the index is out of bounds.
283 /// [`get`]: #method.get
288 /// let x = &mut [0, 1, 2];
290 /// if let Some(elem) = x.get_mut(1) {
293 /// assert_eq!(x, &[0, 42, 2]);
295 #[stable(feature = "rust1", since = "1.0.0")]
297 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
298 where I: SliceIndex<Self>
303 /// Returns a reference to an element or subslice, without doing bounds
306 /// This is generally not recommended, use with caution! For a safe
307 /// alternative see [`get`].
309 /// [`get`]: #method.get
314 /// let x = &[1, 2, 4];
317 /// assert_eq!(x.get_unchecked(1), &2);
320 #[stable(feature = "rust1", since = "1.0.0")]
322 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
323 where I: SliceIndex<Self>
325 index.get_unchecked(self)
328 /// Returns a mutable reference to an element or subslice, without doing
331 /// This is generally not recommended, use with caution! For a safe
332 /// alternative see [`get_mut`].
334 /// [`get_mut`]: #method.get_mut
339 /// let x = &mut [1, 2, 4];
342 /// let elem = x.get_unchecked_mut(1);
345 /// assert_eq!(x, &[1, 13, 4]);
347 #[stable(feature = "rust1", since = "1.0.0")]
349 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
350 where I: SliceIndex<Self>
352 index.get_unchecked_mut(self)
355 /// Returns a raw pointer to the slice's buffer.
357 /// The caller must ensure that the slice outlives the pointer this
358 /// function returns, or else it will end up pointing to garbage.
360 /// Modifying the container referenced by this slice may cause its buffer
361 /// to be reallocated, which would also make any pointers to it invalid.
366 /// let x = &[1, 2, 4];
367 /// let x_ptr = x.as_ptr();
370 /// for i in 0..x.len() {
371 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
375 #[stable(feature = "rust1", since = "1.0.0")]
377 pub const fn as_ptr(&self) -> *const T {
378 self as *const [T] as *const T
381 /// Returns an unsafe mutable pointer to the slice's buffer.
383 /// The caller must ensure that the slice outlives the pointer this
384 /// function returns, or else it will end up pointing to garbage.
386 /// Modifying the container referenced by this slice may cause its buffer
387 /// to be reallocated, which would also make any pointers to it invalid.
392 /// let x = &mut [1, 2, 4];
393 /// let x_ptr = x.as_mut_ptr();
396 /// for i in 0..x.len() {
397 /// *x_ptr.add(i) += 2;
400 /// assert_eq!(x, &[3, 4, 6]);
402 #[stable(feature = "rust1", since = "1.0.0")]
404 pub fn as_mut_ptr(&mut self) -> *mut T {
405 self as *mut [T] as *mut T
408 /// Swaps two elements in the slice.
412 /// * a - The index of the first element
413 /// * b - The index of the second element
417 /// Panics if `a` or `b` are out of bounds.
422 /// let mut v = ["a", "b", "c", "d"];
424 /// assert!(v == ["a", "d", "c", "b"]);
426 #[stable(feature = "rust1", since = "1.0.0")]
428 pub fn swap(&mut self, a: usize, b: usize) {
430 // Can't take two mutable loans from one vector, so instead just cast
431 // them to their raw pointers to do the swap
432 let pa: *mut T = &mut self[a];
433 let pb: *mut T = &mut self[b];
438 /// Reverses the order of elements in the slice, in place.
443 /// let mut v = [1, 2, 3];
445 /// assert!(v == [3, 2, 1]);
447 #[stable(feature = "rust1", since = "1.0.0")]
449 pub fn reverse(&mut self) {
450 let mut i: usize = 0;
453 // For very small types, all the individual reads in the normal
454 // path perform poorly. We can do better, given efficient unaligned
455 // load/store, by loading a larger chunk and reversing a register.
457 // Ideally LLVM would do this for us, as it knows better than we do
458 // whether unaligned reads are efficient (since that changes between
459 // different ARM versions, for example) and what the best chunk size
460 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
461 // the loop, so we need to do this ourselves. (Hypothesis: reverse
462 // is troublesome because the sides can be aligned differently --
463 // will be, when the length is odd -- so there's no way of emitting
464 // pre- and postludes to use fully-aligned SIMD in the middle.)
467 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
469 if fast_unaligned && mem::size_of::<T>() == 1 {
470 // Use the llvm.bswap intrinsic to reverse u8s in a usize
471 let chunk = mem::size_of::<usize>();
472 while i + chunk - 1 < ln / 2 {
474 let pa: *mut T = self.get_unchecked_mut(i);
475 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
476 let va = ptr::read_unaligned(pa as *mut usize);
477 let vb = ptr::read_unaligned(pb as *mut usize);
478 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
479 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
485 if fast_unaligned && mem::size_of::<T>() == 2 {
486 // Use rotate-by-16 to reverse u16s in a u32
487 let chunk = mem::size_of::<u32>() / 2;
488 while i + chunk - 1 < ln / 2 {
490 let pa: *mut T = self.get_unchecked_mut(i);
491 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
492 let va = ptr::read_unaligned(pa as *mut u32);
493 let vb = ptr::read_unaligned(pb as *mut u32);
494 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
495 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
502 // Unsafe swap to avoid the bounds check in safe swap.
504 let pa: *mut T = self.get_unchecked_mut(i);
505 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
512 /// Returns an iterator over the slice.
517 /// let x = &[1, 2, 4];
518 /// let mut iterator = x.iter();
520 /// assert_eq!(iterator.next(), Some(&1));
521 /// assert_eq!(iterator.next(), Some(&2));
522 /// assert_eq!(iterator.next(), Some(&4));
523 /// assert_eq!(iterator.next(), None);
525 #[stable(feature = "rust1", since = "1.0.0")]
527 pub fn iter(&self) -> Iter<T> {
529 let ptr = self.as_ptr();
530 assume(!ptr.is_null());
532 let end = if mem::size_of::<T>() == 0 {
533 (ptr as *const u8).wrapping_add(self.len()) as *const T
541 _marker: marker::PhantomData
546 /// Returns an iterator that allows modifying each value.
551 /// let x = &mut [1, 2, 4];
552 /// for elem in x.iter_mut() {
555 /// assert_eq!(x, &[3, 4, 6]);
557 #[stable(feature = "rust1", since = "1.0.0")]
559 pub fn iter_mut(&mut self) -> IterMut<T> {
561 let ptr = self.as_mut_ptr();
562 assume(!ptr.is_null());
564 let end = if mem::size_of::<T>() == 0 {
565 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
573 _marker: marker::PhantomData
578 /// Returns an iterator over all contiguous windows of length
579 /// `size`. The windows overlap. If the slice is shorter than
580 /// `size`, the iterator returns no values.
584 /// Panics if `size` is 0.
589 /// let slice = ['r', 'u', 's', 't'];
590 /// let mut iter = slice.windows(2);
591 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
592 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
593 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
594 /// assert!(iter.next().is_none());
597 /// If the slice is shorter than `size`:
600 /// let slice = ['f', 'o', 'o'];
601 /// let mut iter = slice.windows(4);
602 /// assert!(iter.next().is_none());
604 #[stable(feature = "rust1", since = "1.0.0")]
606 pub fn windows(&self, size: usize) -> Windows<T> {
608 Windows { v: self, size }
611 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
612 /// beginning of the slice.
614 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
615 /// slice, then the last chunk will not have length `chunk_size`.
617 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
618 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
619 /// slice.
623 /// Panics if `chunk_size` is 0.
628 /// let slice = ['l', 'o', 'r', 'e', 'm'];
629 /// let mut iter = slice.chunks(2);
630 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
631 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
632 /// assert_eq!(iter.next().unwrap(), &['m']);
633 /// assert!(iter.next().is_none());
636 /// [`chunks_exact`]: #method.chunks_exact
637 /// [`rchunks`]: #method.rchunks
638 #[stable(feature = "rust1", since = "1.0.0")]
640 pub fn chunks(&self, chunk_size: usize) -> Chunks<T> {
641 assert!(chunk_size != 0);
642 Chunks { v: self, chunk_size }
645 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
646 /// beginning of the slice.
648 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
649 /// length of the slice, then the last chunk will not have length `chunk_size`.
651 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
652 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
653 /// the end of the slice.
657 /// Panics if `chunk_size` is 0.
662 /// let v = &mut [0, 0, 0, 0, 0];
663 /// let mut count = 1;
665 /// for chunk in v.chunks_mut(2) {
666 /// for elem in chunk.iter_mut() {
671 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
674 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
675 /// [`rchunks_mut`]: #method.rchunks_mut
676 #[stable(feature = "rust1", since = "1.0.0")]
678 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
679 assert!(chunk_size != 0);
680 ChunksMut { v: self, chunk_size }
683 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
684 /// beginning of the slice.
686 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
687 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
688 /// from the `remainder` function of the iterator.
690 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
691 /// resulting code better than in the case of [`chunks`].
693 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
694 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
698 /// Panics if `chunk_size` is 0.
703 /// let slice = ['l', 'o', 'r', 'e', 'm'];
704 /// let mut iter = slice.chunks_exact(2);
705 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
706 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
707 /// assert!(iter.next().is_none());
708 /// assert_eq!(iter.remainder(), &['m']);
711 /// [`chunks`]: #method.chunks
712 /// [`rchunks_exact`]: #method.rchunks_exact
713 #[stable(feature = "chunks_exact", since = "1.31.0")]
715 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<T> {
716 assert!(chunk_size != 0);
717 let rem = self.len() % chunk_size;
718 let len = self.len() - rem;
719 let (fst, snd) = self.split_at(len);
720 ChunksExact { v: fst, rem: snd, chunk_size }
723 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
724 /// beginning of the slice.
726 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
727 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
728 /// retrieved from the `into_remainder` function of the iterator.
730 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
731 /// resulting code better than in the case of [`chunks_mut`].
733 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
734 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
735 /// the slice.
739 /// Panics if `chunk_size` is 0.
744 /// let v = &mut [0, 0, 0, 0, 0];
745 /// let mut count = 1;
747 /// for chunk in v.chunks_exact_mut(2) {
748 /// for elem in chunk.iter_mut() {
753 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
756 /// [`chunks_mut`]: #method.chunks_mut
757 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
758 #[stable(feature = "chunks_exact", since = "1.31.0")]
760 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<T> {
761 assert!(chunk_size != 0);
762 let rem = self.len() % chunk_size;
763 let len = self.len() - rem;
764 let (fst, snd) = self.split_at_mut(len);
765 ChunksExactMut { v: fst, rem: snd, chunk_size }
768 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
771 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
772 /// slice, then the last chunk will not have length `chunk_size`.
774 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
775 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
780 /// Panics if `chunk_size` is 0.
785 /// let slice = ['l', 'o', 'r', 'e', 'm'];
786 /// let mut iter = slice.rchunks(2);
787 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
788 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
789 /// assert_eq!(iter.next().unwrap(), &['l']);
790 /// assert!(iter.next().is_none());
793 /// [`rchunks_exact`]: #method.rchunks_exact
794 /// [`chunks`]: #method.chunks
795 #[stable(feature = "rchunks", since = "1.31.0")]
797 pub fn rchunks(&self, chunk_size: usize) -> RChunks<T> {
798 assert!(chunk_size != 0);
799 RChunks { v: self, chunk_size }
802 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
805 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
806 /// length of the slice, then the last chunk will not have length `chunk_size`.
808 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
809 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
810 /// beginning of the slice.
814 /// Panics if `chunk_size` is 0.
819 /// let v = &mut [0, 0, 0, 0, 0];
820 /// let mut count = 1;
822 /// for chunk in v.rchunks_mut(2) {
823 /// for elem in chunk.iter_mut() {
828 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
831 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
832 /// [`chunks_mut`]: #method.chunks_mut
833 #[stable(feature = "rchunks", since = "1.31.0")]
835 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<T> {
836 assert!(chunk_size != 0);
837 RChunksMut { v: self, chunk_size }
840 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
841 /// end of the slice.
843 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
844 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
845 /// from the `remainder` function of the iterator.
847 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
848 /// resulting code better than in the case of [`rchunks`].
850 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
851 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
852 /// slice.
856 /// Panics if `chunk_size` is 0.
861 /// let slice = ['l', 'o', 'r', 'e', 'm'];
862 /// let mut iter = slice.rchunks_exact(2);
863 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
864 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
865 /// assert!(iter.next().is_none());
866 /// assert_eq!(iter.remainder(), &['l']);
869 /// [`chunks`]: #method.chunks
870 /// [`rchunks`]: #method.rchunks
871 /// [`chunks_exact`]: #method.chunks_exact
872 #[stable(feature = "rchunks", since = "1.31.0")]
874 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<T> {
875 assert!(chunk_size != 0);
876 let rem = self.len() % chunk_size;
877 let (fst, snd) = self.split_at(rem);
878 RChunksExact { v: snd, rem: fst, chunk_size }
881 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
884 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
885 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
886 /// retrieved from the `into_remainder` function of the iterator.
888 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
889 /// resulting code better than in the case of [`rchunks_mut`].
891 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
892 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
893 /// of the slice.
897 /// Panics if `chunk_size` is 0.
902 /// let v = &mut [0, 0, 0, 0, 0];
903 /// let mut count = 1;
905 /// for chunk in v.rchunks_exact_mut(2) {
906 /// for elem in chunk.iter_mut() {
911 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
914 /// [`chunks_mut`]: #method.chunks_mut
915 /// [`rchunks_mut`]: #method.rchunks_mut
916 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
917 #[stable(feature = "rchunks", since = "1.31.0")]
919 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<T> {
920 assert!(chunk_size != 0);
921 let rem = self.len() % chunk_size;
922 let (fst, snd) = self.split_at_mut(rem);
923 RChunksExactMut { v: snd, rem: fst, chunk_size }
926 /// Divides one slice into two at an index.
928 /// The first will contain all indices from `[0, mid)` (excluding
929 /// the index `mid` itself) and the second will contain all
930 /// indices from `[mid, len)` (excluding the index `len` itself).
934 /// Panics if `mid > len`.
939 /// let v = [1, 2, 3, 4, 5, 6];
942 /// let (left, right) = v.split_at(0);
943 /// assert!(left == []);
944 /// assert!(right == [1, 2, 3, 4, 5, 6]);
948 /// let (left, right) = v.split_at(2);
949 /// assert!(left == [1, 2]);
950 /// assert!(right == [3, 4, 5, 6]);
954 /// let (left, right) = v.split_at(6);
955 /// assert!(left == [1, 2, 3, 4, 5, 6]);
956 /// assert!(right == []);
959 #[stable(feature = "rust1", since = "1.0.0")]
961 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
962 (&self[..mid], &self[mid..])
965 /// Divides one mutable slice into two at an index.
967 /// The first will contain all indices from `[0, mid)` (excluding
968 /// the index `mid` itself) and the second will contain all
969 /// indices from `[mid, len)` (excluding the index `len` itself).
973 /// Panics if `mid > len`.
978 /// let mut v = [1, 0, 3, 0, 5, 6];
979 /// // scoped to restrict the lifetime of the borrows
981 /// let (left, right) = v.split_at_mut(2);
982 /// assert!(left == [1, 0]);
983 /// assert!(right == [3, 0, 5, 6]);
987 /// assert!(v == [1, 2, 3, 4, 5, 6]);
989 #[stable(feature = "rust1", since = "1.0.0")]
991 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
992 let len = self.len();
993 let ptr = self.as_mut_ptr();
998 (from_raw_parts_mut(ptr, mid),
999 from_raw_parts_mut(ptr.add(mid), len - mid))
1003 /// Returns an iterator over subslices separated by elements that match
1004 /// `pred`. The matched element is not contained in the subslices.
1009 /// let slice = [10, 40, 33, 20];
1010 /// let mut iter = slice.split(|num| num % 3 == 0);
1012 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1013 /// assert_eq!(iter.next().unwrap(), &[20]);
1014 /// assert!(iter.next().is_none());
1017 /// If the first element is matched, an empty slice will be the first item
1018 /// returned by the iterator. Similarly, if the last element in the slice
1019 /// is matched, an empty slice will be the last item returned by the
1023 /// let slice = [10, 40, 33];
1024 /// let mut iter = slice.split(|num| num % 3 == 0);
1026 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1027 /// assert_eq!(iter.next().unwrap(), &[]);
1028 /// assert!(iter.next().is_none());
1031 /// If two matched elements are directly adjacent, an empty slice will be
1032 /// present between them:
1035 /// let slice = [10, 6, 33, 20];
1036 /// let mut iter = slice.split(|num| num % 3 == 0);
1038 /// assert_eq!(iter.next().unwrap(), &[10]);
1039 /// assert_eq!(iter.next().unwrap(), &[]);
1040 /// assert_eq!(iter.next().unwrap(), &[20]);
1041 /// assert!(iter.next().is_none());
1043 #[stable(feature = "rust1", since = "1.0.0")]
1045 pub fn split<F>(&self, pred: F) -> Split<T, F>
1046 where F: FnMut(&T) -> bool
1055 /// Returns an iterator over mutable subslices separated by elements that
1056 /// match `pred`. The matched element is not contained in the subslices.
1061 /// let mut v = [10, 40, 30, 20, 60, 50];
1063 /// for group in v.split_mut(|num| *num % 3 == 0) {
1066 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1068 #[stable(feature = "rust1", since = "1.0.0")]
1070 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<T, F>
1071 where F: FnMut(&T) -> bool
1073 SplitMut { v: self, pred, finished: false }
1076 /// Returns an iterator over subslices separated by elements that match
1077 /// `pred`, starting at the end of the slice and working backwards.
1078 /// The matched element is not contained in the subslices.
1083 /// let slice = [11, 22, 33, 0, 44, 55];
1084 /// let mut iter = slice.rsplit(|num| *num == 0);
1086 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1087 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1088 /// assert_eq!(iter.next(), None);
1091 /// As with `split()`, if the first or last element is matched, an empty
1092 /// slice will be the first (or last) item returned by the iterator.
1095 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1096 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1097 /// assert_eq!(it.next().unwrap(), &[]);
1098 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1099 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1100 /// assert_eq!(it.next().unwrap(), &[]);
1101 /// assert_eq!(it.next(), None);
1103 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1105 pub fn rsplit<F>(&self, pred: F) -> RSplit<T, F>
1106 where F: FnMut(&T) -> bool
1108 RSplit { inner: self.split(pred) }
1111 /// Returns an iterator over mutable subslices separated by elements that
1112 /// match `pred`, starting at the end of the slice and working
1113 /// backwards. The matched element is not contained in the subslices.
1118 /// let mut v = [100, 400, 300, 200, 600, 500];
1120 /// let mut count = 0;
1121 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1123 /// group[0] = count;
1125 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1128 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1130 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<T, F>
1131 where F: FnMut(&T) -> bool
1133 RSplitMut { inner: self.split_mut(pred) }
1136 /// Returns an iterator over subslices separated by elements that match
1137 /// `pred`, limited to returning at most `n` items. The matched element is
1138 /// not contained in the subslices.
1140 /// The last element returned, if any, will contain the remainder of the
1145 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1146 /// `[20, 60, 50]`):
1149 /// let v = [10, 40, 30, 20, 60, 50];
1151 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1152 /// println!("{:?}", group);
1155 #[stable(feature = "rust1", since = "1.0.0")]
1157 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<T, F>
1158 where F: FnMut(&T) -> bool
1161 inner: GenericSplitN {
1162 iter: self.split(pred),
1168 /// Returns an iterator over subslices separated by elements that match
1169 /// `pred`, limited to returning at most `n` items. The matched element is
1170 /// not contained in the subslices.
1172 /// The last element returned, if any, will contain the remainder of the
1178 /// let mut v = [10, 40, 30, 20, 60, 50];
1180 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1183 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1185 #[stable(feature = "rust1", since = "1.0.0")]
1187 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<T, F>
1188 where F: FnMut(&T) -> bool
1191 inner: GenericSplitN {
1192 iter: self.split_mut(pred),
1198 /// Returns an iterator over subslices separated by elements that match
1199 /// `pred` limited to returning at most `n` items. This starts at the end of
1200 /// the slice and works backwards. The matched element is not contained in
1203 /// The last element returned, if any, will contain the remainder of the
1208 /// Print the slice split once, starting from the end, by numbers divisible
1209 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1212 /// let v = [10, 40, 30, 20, 60, 50];
1214 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1215 /// println!("{:?}", group);
1218 #[stable(feature = "rust1", since = "1.0.0")]
1220 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<T, F>
1221 where F: FnMut(&T) -> bool
1224 inner: GenericSplitN {
1225 iter: self.rsplit(pred),
1231 /// Returns an iterator over subslices separated by elements that match
1232 /// `pred` limited to returning at most `n` items. This starts at the end of
1233 /// the slice and works backwards. The matched element is not contained in
1236 /// The last element returned, if any, will contain the remainder of the
1242 /// let mut s = [10, 40, 30, 20, 60, 50];
1244 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1247 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1249 #[stable(feature = "rust1", since = "1.0.0")]
1251 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<T, F>
1252 where F: FnMut(&T) -> bool
1255 inner: GenericSplitN {
1256 iter: self.rsplit_mut(pred),
1262 /// Returns `true` if the slice contains an element with the given value.
1267 /// let v = [10, 40, 30];
1268 /// assert!(v.contains(&30));
1269 /// assert!(!v.contains(&50));
1271 #[stable(feature = "rust1", since = "1.0.0")]
1272 pub fn contains(&self, x: &T) -> bool
1275 x.slice_contains(self)
1278 /// Returns `true` if `needle` is a prefix of the slice.
1283 /// let v = [10, 40, 30];
1284 /// assert!(v.starts_with(&[10]));
1285 /// assert!(v.starts_with(&[10, 40]));
1286 /// assert!(!v.starts_with(&[50]));
1287 /// assert!(!v.starts_with(&[10, 50]));
1290 /// Always returns `true` if `needle` is an empty slice:
1293 /// let v = &[10, 40, 30];
1294 /// assert!(v.starts_with(&[]));
1295 /// let v: &[u8] = &[];
1296 /// assert!(v.starts_with(&[]));
1298 #[stable(feature = "rust1", since = "1.0.0")]
1299 pub fn starts_with(&self, needle: &[T]) -> bool
1302 let n = needle.len();
1303 self.len() >= n && needle == &self[..n]
1306 /// Returns `true` if `needle` is a suffix of the slice.
1311 /// let v = [10, 40, 30];
1312 /// assert!(v.ends_with(&[30]));
1313 /// assert!(v.ends_with(&[40, 30]));
1314 /// assert!(!v.ends_with(&[50]));
1315 /// assert!(!v.ends_with(&[50, 30]));
1318 /// Always returns `true` if `needle` is an empty slice:
1321 /// let v = &[10, 40, 30];
1322 /// assert!(v.ends_with(&[]));
1323 /// let v: &[u8] = &[];
1324 /// assert!(v.ends_with(&[]));
1326 #[stable(feature = "rust1", since = "1.0.0")]
// `m >= n` guards the `m - n` subtraction (usize would underflow-panic) and
// guarantees the `&self[m-n..]` suffix slice is in bounds.
1327 pub fn ends_with(&self, needle: &[T]) -> bool
1330 let (m, n) = (self.len(), needle.len());
1331 m >= n && needle == &self[m-n..]
1334 /// Binary searches this sorted slice for a given element.
1336 /// If the value is found then [`Result::Ok`] is returned, containing the
1337 /// index of the matching element. If there are multiple matches, then any
1338 /// one of the matches could be returned. If the value is not found then
1339 /// [`Result::Err`] is returned, containing the index where a matching
1340 /// element could be inserted while maintaining sorted order.
1344 /// Looks up a series of four elements. The first is found, with a
1345 /// uniquely determined position; the second and third are not
1346 /// found; the fourth could match any position in `[1, 4]`.
1349 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1351 /// assert_eq!(s.binary_search(&13), Ok(9));
1352 /// assert_eq!(s.binary_search(&4), Err(7));
1353 /// assert_eq!(s.binary_search(&100), Err(13));
1354 /// let r = s.binary_search(&1);
1355 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1357 #[stable(feature = "rust1", since = "1.0.0")]
// Thin wrapper: all of the search logic lives in `binary_search_by`, with
// `Ord::cmp` against `x` as the comparator.
1358 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
1361 self.binary_search_by(|p| p.cmp(x))
1364 /// Binary searches this sorted slice with a comparator function.
1366 /// The comparator function should implement an order consistent
1367 /// with the sort order of the underlying slice, returning an
1368 /// order code that indicates whether its argument is `Less`,
1369 /// `Equal` or `Greater` the desired target.
1371 /// If the value is found then [`Result::Ok`] is returned, containing the
1372 /// index of the matching element. If there are multiple matches, then any
1373 /// one of the matches could be returned. If the value is not found then
1374 /// [`Result::Err`] is returned, containing the index where a matching
1375 /// element could be inserted while maintaining sorted order.
1379 /// Looks up a series of four elements. The first is found, with a
1380 /// uniquely determined position; the second and third are not
1381 /// found; the fourth could match any position in `[1, 4]`.
1384 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1387 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1389 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1391 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1393 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1394 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1396 #[stable(feature = "rust1", since = "1.0.0")]
// Branchless-style binary search: each round halves `size` and conditionally
// advances `base` past the midpoint, then a single final comparison at `base`
// decides between `Ok` and the `Err` insertion point.
// NOTE(review): the binding of `s` (presumably `let mut s = self;`) and the
// loop header around the halving step are elided in this extract — confirm
// against upstream before relying on the exact control flow.
1398 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1399 where F: FnMut(&'a T) -> Ordering
1402 let mut size = s.len();
1406 let mut base = 0usize;
1408 let half = size / 2;
1409 let mid = base + half;
1410 // mid is always in [0, size), that means mid is >= 0 and < size.
1411 // mid >= 0: by definition
1412 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
// SAFETY of get_unchecked relies on the in-bounds argument spelled out above.
1413 let cmp = f(unsafe { s.get_unchecked(mid) });
// Keep the lower half when the probe compares Greater, otherwise jump to mid.
1414 base = if cmp == Greater { base } else { mid };
1417 // base is always in [0, size) because base <= mid.
1418 let cmp = f(unsafe { s.get_unchecked(base) });
// On a miss, insert *after* `base` when the probe is Less than the target.
1419 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1423 /// Binary searches this sorted slice with a key extraction function.
1425 /// Assumes that the slice is sorted by the key, for instance with
1426 /// [`sort_by_key`] using the same key extraction function.
1428 /// If the value is found then [`Result::Ok`] is returned, containing the
1429 /// index of the matching element. If there are multiple matches, then any
1430 /// one of the matches could be returned. If the value is not found then
1431 /// [`Result::Err`] is returned, containing the index where a matching
1432 /// element could be inserted while maintaining sorted order.
1434 /// [`sort_by_key`]: #method.sort_by_key
1438 /// Looks up a series of four elements in a slice of pairs sorted by
1439 /// their second elements. The first is found, with a uniquely
1440 /// determined position; the second and third are not found; the
1441 /// fourth could match any position in `[1, 4]`.
1444 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1445 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1446 /// (1, 21), (2, 34), (4, 55)];
1448 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1449 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1450 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1451 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1452 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1454 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
// Wrapper over `binary_search_by`: compares the extracted key `f(k)` against
// the target key `b`. Note `f` runs once per probed element.
1456 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1457 where F: FnMut(&'a T) -> B,
1460 self.binary_search_by(|k| f(k).cmp(b))
1463 /// Sorts the slice, but may not preserve the order of equal elements.
1465 /// This sort is unstable (i.e., may reorder equal elements), in-place
1466 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1468 /// # Current implementation
1470 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1471 /// which combines the fast average case of randomized quicksort with the fast worst case of
1472 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1473 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1474 /// deterministic behavior.
1476 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1477 /// slice consists of several concatenated sorted sequences.
1482 /// let mut v = [-5, 4, 1, -3, 2];
1484 /// v.sort_unstable();
1485 /// assert!(v == [-5, -3, 1, 2, 4]);
1488 /// [pdqsort]: https://github.com/orlp/pdqsort
1489 #[stable(feature = "sort_unstable", since = "1.20.0")]
// Delegates to the module-level quicksort with a strict `lt` predicate.
1491 pub fn sort_unstable(&mut self)
1494 sort::quicksort(self, |a, b| a.lt(b));
1497 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1500 /// This sort is unstable (i.e., may reorder equal elements), in-place
1501 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1503 /// The comparator function must define a total ordering for the elements in the slice. If
1504 /// the ordering is not total, the order of the elements is unspecified. An order is a
1505 /// total order if it is (for all a, b and c):
1507 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1508 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1510 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1511 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1514 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1515 /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
1516 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1519 /// # Current implementation
1521 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1522 /// which combines the fast average case of randomized quicksort with the fast worst case of
1523 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1524 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1525 /// deterministic behavior.
1527 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1528 /// slice consists of several concatenated sorted sequences.
1533 /// let mut v = [5, 4, 1, 3, 2];
1534 /// v.sort_unstable_by(|a, b| a.cmp(b));
1535 /// assert!(v == [1, 2, 3, 4, 5]);
1537 /// // reverse sorting
1538 /// v.sort_unstable_by(|a, b| b.cmp(a));
1539 /// assert!(v == [5, 4, 3, 2, 1]);
1542 /// [pdqsort]: https://github.com/orlp/pdqsort
1543 #[stable(feature = "sort_unstable", since = "1.20.0")]
// The quicksort core only needs a boolean "is less" test, so the user's
// three-way `Ordering` comparator is collapsed to `== Less` here.
1545 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1546 where F: FnMut(&T, &T) -> Ordering
1548 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1551 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1554 /// This sort is unstable (i.e., may reorder equal elements), in-place
1555 /// (i.e., does not allocate), and `O(m n log(m n))` worst-case, where the key function is
1558 /// # Current implementation
1560 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1561 /// which combines the fast average case of randomized quicksort with the fast worst case of
1562 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1563 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1564 /// deterministic behavior.
1569 /// let mut v = [-5i32, 4, 1, -3, 2];
1571 /// v.sort_unstable_by_key(|k| k.abs());
1572 /// assert!(v == [1, 2, -3, 4, -5]);
1575 /// [pdqsort]: https://github.com/orlp/pdqsort
1576 #[stable(feature = "sort_unstable", since = "1.20.0")]
// `f` is re-evaluated for BOTH operands of every comparison (`f(a).lt(&f(b))`)
// rather than cached — that is why the doc states `O(m n log(m n))`. For an
// expensive key, callers should prefer a caching sort.
1578 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1579 where F: FnMut(&T) -> K, K: Ord
1581 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1584 /// Moves all consecutive repeated elements to the end of the slice according to the
1585 /// [`PartialEq`] trait implementation.
1587 /// Returns two slices. The first contains no consecutive repeated elements.
1588 /// The second contains all the duplicates in no specified order.
1590 /// If the slice is sorted, the first returned slice contains no duplicates.
1595 /// #![feature(slice_partition_dedup)]
1597 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1599 /// let (dedup, duplicates) = slice.partition_dedup();
1601 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1602 /// assert_eq!(duplicates, [2, 3, 1]);
1604 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
// Convenience wrapper: `==` equality, all real work in `partition_dedup_by`.
1606 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
1609 self.partition_dedup_by(|a, b| a == b)
1612 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1613 /// a given equality relation.
1615 /// Returns two slices. The first contains no consecutive repeated elements.
1616 /// The second contains all the duplicates in no specified order.
1618 /// The `same_bucket` function is passed references to two elements from the slice and
1619 /// must determine if the elements compare equal. The elements are passed in opposite order
1620 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1621 /// at the end of the slice.
1623 /// If the slice is sorted, the first returned slice contains no duplicates.
1628 /// #![feature(slice_partition_dedup)]
1630 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1632 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1634 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1635 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1637 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1639 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1640 where F: FnMut(&mut T, &mut T) -> bool
1642 // Although we have a mutable reference to `self`, we cannot make
1643 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1644 // must ensure that the slice is in a valid state at all times.
1646 // The way that we handle this is by using swaps; we iterate
1647 // over all the elements, swapping as we go so that at the end
1648 // the elements we wish to keep are in the front, and those we
1649 // wish to reject are at the back. We can then split the slice.
1650 // This operation is still O(n).
1652 // Example: We start in this state, where `r` represents "next
1653 // read" and `w` represents "next_write`.
1656 // +---+---+---+---+---+---+
1657 // | 0 | 1 | 1 | 2 | 3 | 3 |
1658 // +---+---+---+---+---+---+
1661 // Comparing self[r] against self[w-1], this is not a duplicate, so
1662 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1663 // r and w, leaving us with:
1666 // +---+---+---+---+---+---+
1667 // | 0 | 1 | 1 | 2 | 3 | 3 |
1668 // +---+---+---+---+---+---+
1671 // Comparing self[r] against self[w-1], this value is a duplicate,
1672 // so we increment `r` but leave everything else unchanged:
1675 // +---+---+---+---+---+---+
1676 // | 0 | 1 | 1 | 2 | 3 | 3 |
1677 // +---+---+---+---+---+---+
1680 // Comparing self[r] against self[w-1], this is not a duplicate,
1681 // so swap self[r] and self[w] and advance r and w:
1684 // +---+---+---+---+---+---+
1685 // | 0 | 1 | 2 | 1 | 3 | 3 |
1686 // +---+---+---+---+---+---+
1689 // Not a duplicate, repeat:
1692 // +---+---+---+---+---+---+
1693 // | 0 | 1 | 2 | 3 | 1 | 3 |
1694 // +---+---+---+---+---+---+
1697 // Duplicate, advance r. End of slice. Split at w.
// Slices of length 0 or 1 trivially have no consecutive duplicates.
// NOTE(review): the `if len <= 1` guard around this early return appears
// elided in this extract — verify against upstream.
1699 let len = self.len();
1701 return (self, &mut [])
1704 let ptr = self.as_mut_ptr();
// Both cursors start at 1: element 0 is always kept.
1705 let mut next_read: usize = 1;
1706 let mut next_write: usize = 1;
1709 // Avoid bounds checks by using raw pointers.
1710 while next_read < len {
1711 let ptr_read = ptr.add(next_read);
1712 let prev_ptr_write = ptr.add(next_write - 1);
// Compare the candidate against the LAST KEPT element, not its neighbor.
1713 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
// Only swap once the cursors have diverged; before that it would be a no-op.
1714 if next_read != next_write {
1715 let ptr_write = prev_ptr_write.offset(1);
1716 mem::swap(&mut *ptr_read, &mut *ptr_write);
// `next_write` is the boundary between kept elements and duplicates.
1724 self.split_at_mut(next_write)
1727 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1728 /// to the same key.
1730 /// Returns two slices. The first contains no consecutive repeated elements.
1731 /// The second contains all the duplicates in no specified order.
1733 /// If the slice is sorted, the first returned slice contains no duplicates.
1738 /// #![feature(slice_partition_dedup)]
1740 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1742 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1744 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1745 /// assert_eq!(duplicates, [21, 30, 13]);
1747 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
// Key-based wrapper: equality of `key(a)` and `key(b)` decides duplicates;
// `key` is evaluated for both sides of every comparison.
1749 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1750 where F: FnMut(&mut T) -> K,
1753 self.partition_dedup_by(|a, b| key(a) == key(b))
1756 /// Rotates the slice in-place such that the first `mid` elements of the
1757 /// slice move to the end while the last `self.len() - mid` elements move to
1758 /// the front. After calling `rotate_left`, the element previously at index
1759 /// `mid` will become the first element in the slice.
1763 /// This function will panic if `mid` is greater than the length of the
1764 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1769 /// Takes linear (in `self.len()`) time.
1774 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1775 /// a.rotate_left(2);
1776 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1779 /// Rotating a subslice:
1782 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1783 /// a[1..5].rotate_left(1);
1784 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1786 #[stable(feature = "slice_rotate", since = "1.26.0")]
1787 pub fn rotate_left(&mut self, mid: usize) {
// The assert both enforces the documented panic contract and makes the
// `len - mid` subtraction below safe from underflow.
1788 assert!(mid <= self.len());
1789 let k = self.len() - mid;
// Hand the pivot pointer `p + mid` to the shared rotate core; `mid` elements
// sit on its left and `k` on its right.
1792 let p = self.as_mut_ptr();
1793 rotate::ptr_rotate(mid, p.add(mid), k);
1797 /// Rotates the slice in-place such that the first `self.len() - k`
1798 /// elements of the slice move to the end while the last `k` elements move
1799 /// to the front. After calling `rotate_right`, the element previously at
1800 /// index `self.len() - k` will become the first element in the slice.
1804 /// This function will panic if `k` is greater than the length of the
1805 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1810 /// Takes linear (in `self.len()`) time.
1815 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1816 /// a.rotate_right(2);
1817 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1820 /// Rotate a subslice:
1823 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1824 /// a[1..5].rotate_right(1);
1825 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1827 #[stable(feature = "slice_rotate", since = "1.26.0")]
1828 pub fn rotate_right(&mut self, k: usize) {
// Same contract as rotate_left; a right-rotation by `k` is a left-rotation
// by `len - k`, so the same `ptr_rotate` core is reused with `mid = len - k`.
1829 assert!(k <= self.len());
1830 let mid = self.len() - k;
1833 let p = self.as_mut_ptr();
1834 rotate::ptr_rotate(mid, p.add(mid), k);
1838 /// Copies the elements from `src` into `self`.
1840 /// The length of `src` must be the same as `self`.
1842 /// If `src` implements `Copy`, it can be more performant to use
1843 /// [`copy_from_slice`].
1847 /// This function will panic if the two slices have different lengths.
1851 /// Cloning two elements from a slice into another:
1854 /// let src = [1, 2, 3, 4];
1855 /// let mut dst = [0, 0];
1857 /// // Because the slices have to be the same length,
1858 /// // we slice the source slice from four elements
1859 /// // to two. It will panic if we don't do this.
1860 /// dst.clone_from_slice(&src[2..]);
1862 /// assert_eq!(src, [1, 2, 3, 4]);
1863 /// assert_eq!(dst, [3, 4]);
1866 /// Rust enforces that there can only be one mutable reference with no
1867 /// immutable references to a particular piece of data in a particular
1868 /// scope. Because of this, attempting to use `clone_from_slice` on a
1869 /// single slice will result in a compile failure:
1872 /// let mut slice = [1, 2, 3, 4, 5];
1874 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
1877 /// To work around this, we can use [`split_at_mut`] to create two distinct
1878 /// sub-slices from a slice:
1881 /// let mut slice = [1, 2, 3, 4, 5];
1884 /// let (left, right) = slice.split_at_mut(2);
1885 /// left.clone_from_slice(&right[1..]);
1888 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1891 /// [`copy_from_slice`]: #method.copy_from_slice
1892 /// [`split_at_mut`]: #method.split_at_mut
1893 #[stable(feature = "clone_from_slice", since = "1.7.0")]
1894 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
1895 assert!(self.len() == src.len(),
1896 "destination and source slices have different lengths");
1897 // NOTE: We need to explicitly slice them to the same length
1898 // for bounds checking to be elided, and the optimizer will
1899 // generate memcpy for simple cases (for example T = u8).
1900 let len = self.len();
1901 let src = &src[..len];
// `clone_from` (rather than `= clone()`) lets `T` reuse existing
// allocations in the destination element where its impl supports that.
1903 self[i].clone_from(&src[i]);
1908 /// Copies all elements from `src` into `self`, using a memcpy.
1910 /// The length of `src` must be the same as `self`.
1912 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
1916 /// This function will panic if the two slices have different lengths.
1920 /// Copying two elements from a slice into another:
1923 /// let src = [1, 2, 3, 4];
1924 /// let mut dst = [0, 0];
1926 /// // Because the slices have to be the same length,
1927 /// // we slice the source slice from four elements
1928 /// // to two. It will panic if we don't do this.
1929 /// dst.copy_from_slice(&src[2..]);
1931 /// assert_eq!(src, [1, 2, 3, 4]);
1932 /// assert_eq!(dst, [3, 4]);
1935 /// Rust enforces that there can only be one mutable reference with no
1936 /// immutable references to a particular piece of data in a particular
1937 /// scope. Because of this, attempting to use `copy_from_slice` on a
1938 /// single slice will result in a compile failure:
1941 /// let mut slice = [1, 2, 3, 4, 5];
1943 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
1946 /// To work around this, we can use [`split_at_mut`] to create two distinct
1947 /// sub-slices from a slice:
1950 /// let mut slice = [1, 2, 3, 4, 5];
1953 /// let (left, right) = slice.split_at_mut(2);
1954 /// left.copy_from_slice(&right[1..]);
1957 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1960 /// [`clone_from_slice`]: #method.clone_from_slice
1961 /// [`split_at_mut`]: #method.split_at_mut
1962 #[stable(feature = "copy_from_slice", since = "1.9.0")]
1963 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
1964 assert_eq!(self.len(), src.len(),
1965 "destination and source slices have different lengths");
// SAFETY: the length assert above guarantees both buffers hold exactly
// `self.len()` elements; `&self`/`&src` being distinct borrows means the
// regions cannot overlap, as `copy_nonoverlapping` requires.
1967 ptr::copy_nonoverlapping(
1968 src.as_ptr(), self.as_mut_ptr(), self.len());
1972 /// Copies elements from one part of the slice to another part of itself,
1973 /// using a memmove.
1975 /// `src` is the range within `self` to copy from. `dest` is the starting
1976 /// index of the range within `self` to copy to, which will have the same
1977 /// length as `src`. The two ranges may overlap. The ends of the two ranges
1978 /// must be less than or equal to `self.len()`.
1982 /// This function will panic if either range exceeds the end of the slice,
1983 /// or if the end of `src` is before the start.
1987 /// Copying four bytes within a slice:
1990 /// # #![feature(copy_within)]
1991 /// let mut bytes = *b"Hello, World!";
1993 /// bytes.copy_within(1..5, 8);
1995 /// assert_eq!(&bytes, b"Hello, Wello!");
1997 #[unstable(feature = "copy_within", issue = "54236")]
1998 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
// Resolve the generic range bounds to concrete [src_start, src_end) indices.
// The `.unwrap_or_else(|| slice_index_overflow_fail())` arms catch the +1
// overflow when converting Included/Excluded bounds.
// NOTE(review): the `checked_add(1)` calls those unwraps attach to appear
// elided in this extract — verify against upstream.
2002 let src_start = match src.start_bound() {
2003 ops::Bound::Included(&n) => n,
2004 ops::Bound::Excluded(&n) => n
2006 .unwrap_or_else(|| slice_index_overflow_fail()),
2007 ops::Bound::Unbounded => 0,
2009 let src_end = match src.end_bound() {
2010 ops::Bound::Included(&n) => n
2012 .unwrap_or_else(|| slice_index_overflow_fail()),
2013 ops::Bound::Excluded(&n) => n,
2014 ops::Bound::Unbounded => self.len(),
2016 assert!(src_start <= src_end, "src end is before src start");
2017 assert!(src_end <= self.len(), "src is out of bounds");
2018 let count = src_end - src_start;
// `dest <= len - count` (not `dest <= len`) so the whole destination range fits.
2019 assert!(dest <= self.len() - count, "dest is out of bounds");
// memmove semantics: overlapping src/dest ranges are explicitly allowed.
2022 self.get_unchecked(src_start),
2023 self.get_unchecked_mut(dest),
2029 /// Swaps all elements in `self` with those in `other`.
2031 /// The length of `other` must be the same as `self`.
2035 /// This function will panic if the two slices have different lengths.
2039 /// Swapping two elements across slices:
2042 /// let mut slice1 = [0, 0];
2043 /// let mut slice2 = [1, 2, 3, 4];
2045 /// slice1.swap_with_slice(&mut slice2[2..]);
2047 /// assert_eq!(slice1, [3, 4]);
2048 /// assert_eq!(slice2, [1, 2, 0, 0]);
2051 /// Rust enforces that there can only be one mutable reference to a
2052 /// particular piece of data in a particular scope. Because of this,
2053 /// attempting to use `swap_with_slice` on a single slice will result in
2054 /// a compile failure:
2057 /// let mut slice = [1, 2, 3, 4, 5];
2058 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2061 /// To work around this, we can use [`split_at_mut`] to create two distinct
2062 /// mutable sub-slices from a slice:
2065 /// let mut slice = [1, 2, 3, 4, 5];
2068 /// let (left, right) = slice.split_at_mut(2);
2069 /// left.swap_with_slice(&mut right[1..]);
2072 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2075 /// [`split_at_mut`]: #method.split_at_mut
2076 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2077 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2078 assert!(self.len() == other.len(),
2079 "destination and source slices have different lengths");
// SAFETY: equal lengths are asserted above, and the two `&mut` borrows
// guarantee the regions are disjoint, as `swap_nonoverlapping` requires.
2081 ptr::swap_nonoverlapping(
2082 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2086 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2087 fn align_to_offsets<U>(&self) -> (usize, usize) {
2088 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2089 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2091 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2092 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2093 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2095 // Formula to calculate this is:
2097 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2098 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2100 // Expanded and simplified:
2102 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2103 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2105 // Luckily since all this is constant-evaluated... performance here matters not!
// Binary (Stein) GCD on the two type sizes; cttz_nonzero demands nonzero
// input, which the two early returns above it guarantee.
2107 fn gcd(a: usize, b: usize) -> usize {
2108 // iterative stein’s algorithm
2109 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2110 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2111 let (ctz_a, mut ctz_b) = unsafe {
2112 if a == 0 { return b; }
2113 if b == 0 { return a; }
2114 (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b))
2116 let k = ctz_a.min(ctz_b);
2117 let mut a = a >> ctz_a;
2120 // remove all factors of 2 from b
// Keep `a <= b` so the subtraction in the elided step below stays in range.
// NOTE(review): the loop header, `a > b` comparison, `b -= a`, and the final
// `a << k` return appear elided in this extract — verify against upstream.
2123 ::mem::swap(&mut a, &mut b);
2130 ctz_b = ::intrinsics::cttz_nonzero(b);
2135 let gcd: usize = gcd(::mem::size_of::<T>(), ::mem::size_of::<U>());
2136 let ts: usize = ::mem::size_of::<U>() / gcd;
2137 let us: usize = ::mem::size_of::<T>() / gcd;
2139 // Armed with this knowledge, we can find how many `U`s we can fit!
2140 let us_len = self.len() / ts * us;
2141 // And how many `T`s will be in the trailing slice!
2142 let ts_len = self.len() % ts;
2146 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2149 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2150 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2151 /// middle slice the greatest length possible for a given type and input slice, but only
2152 /// your algorithm's performance should depend on that, not its correctness.
2154 /// This method has no purpose when either input element `T` or output element `U` are
2155 /// zero-sized and will return the original slice without splitting anything.
2159 /// This method is essentially a `transmute` with respect to the elements in the returned
2160 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2168 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2169 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2170 /// // less_efficient_algorithm_for_bytes(prefix);
2171 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2172 /// // less_efficient_algorithm_for_bytes(suffix);
2175 #[stable(feature = "slice_align_to", since = "1.30.0")]
2176 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2177 // Note that most of this function will be constant-evaluated,
2178 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2179 // handle ZSTs specially, which is – don't handle them at all.
2180 return (self, &[], &[]);
2183 // First, find at what point do we split between the first and 2nd slice. Easy with
2184 // ptr.align_offset.
2185 let ptr = self.as_ptr();
2186 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
// `align_offset` may report an offset past the slice end (or usize::MAX when
// alignment is impossible); in that case nothing can be realigned and the
// whole input is returned as the prefix.
2187 if offset > self.len() {
2190 let (left, rest) = self.split_at(offset);
2191 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2192 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// Middle starts at the (now aligned) head of `rest`; the `ts_len` leftover
// `T`s that don't fill a whole group of `U`s form the suffix.
2194 from_raw_parts(rest.as_ptr() as *const U, us_len),
2195 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2199 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2202 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2203 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2204 /// middle slice the greatest length possible for a given type and input slice, but only
2205 /// your algorithm's performance should depend on that, not its correctness.
2207 /// This method has no purpose when either input element `T` or output element `U` are
2208 /// zero-sized and will return the original slice without splitting anything.
2212 /// This method is essentially a `transmute` with respect to the elements in the returned
2213 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2221 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2222 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2223 /// // less_efficient_algorithm_for_bytes(prefix);
2224 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2225 /// // less_efficient_algorithm_for_bytes(suffix);
2228 #[stable(feature = "slice_align_to", since = "1.30.0")]
2229 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2230 // Note that most of this function will be constant-evaluated,
2231 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2232 // handle ZSTs specially, which is – don't handle them at all.
2233 return (self, &mut [], &mut []);
2236 // First, find at what point do we split between the first and 2nd slice. Easy with
2237 // ptr.align_offset.
2238 let ptr = self.as_ptr();
2239 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
// Mirror of `align_to`: if alignment can't be reached inside the slice,
// everything is the prefix.
2240 if offset > self.len() {
2241 (self, &mut [], &mut [])
2243 let (left, rest) = self.split_at_mut(offset);
2244 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2245 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// The middle and suffix are carved out of disjoint halves of `rest`, so the
// two `from_raw_parts_mut` slices never alias.
2246 let mut_ptr = rest.as_mut_ptr();
2248 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2249 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
2253 /// Checks if the elements of this slice are sorted.
2255 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2256 /// slice yields exactly zero or one element, `true` is returned.
2258 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2259 /// implies that this function returns `false` if any two consecutive items are not
2265 /// #![feature(is_sorted)]
2266 /// let empty: [i32; 0] = [];
2268 /// assert!([1, 2, 2, 9].is_sorted());
2269 /// assert!(![1, 3, 2, 4].is_sorted());
2270 /// assert!([0].is_sorted());
2271 /// assert!(empty.is_sorted());
2272 /// assert!(![0.0, 1.0, std::f32::NAN].is_sorted());
2275 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
// Delegates to `is_sorted_by` with `partial_cmp`; an incomparable adjacent
// pair (e.g. NaN) yields `None` and therefore counts as "not sorted".
2276 pub fn is_sorted(&self) -> bool
2280 self.is_sorted_by(|a, b| a.partial_cmp(b))
2283 /// Checks if the elements of this slice are sorted using the given comparator function.
2285 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2286 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2287 /// [`is_sorted`]; see its documentation for more information.
2289 /// [`is_sorted`]: #method.is_sorted
2290 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
// Forwards to the iterator-level `is_sorted_by`; the closure dereferences the
// iterator's `&&T` items down to the `&T` the caller's comparator expects.
2291 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2293 F: FnMut(&T, &T) -> Option<Ordering>
2295 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2298 /// Checks if the elements of this slice are sorted using the given key extraction function.
2300 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2301 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2302 /// documentation for more information.
2304 /// [`is_sorted`]: #method.is_sorted
2309 /// #![feature(is_sorted)]
2311 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2312 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2315 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
// Key-projection wrapper over `is_sorted_by`; `f` runs for both sides of
// every adjacent comparison (keys are not cached).
2316 pub fn is_sorted_by_key<F, K>(&self, mut f: F) -> bool
2321 self.is_sorted_by(|a, b| f(a).partial_cmp(&f(b)))
#[lang = "slice_u8"]
#[cfg(not(test))]
impl [u8] {
    /// Checks if all bytes in this slice are within the ASCII range.
    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
    #[inline]
    pub fn is_ascii(&self) -> bool {
        self.iter().all(|b| b.is_ascii())
    }

    /// Checks that two slices are an ASCII case-insensitive match.
    ///
    /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
    /// but without allocating and copying temporaries.
    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
    #[inline]
    pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
        // Length check first: `zip` alone would silently ignore a longer tail.
        self.len() == other.len() &&
            self.iter().zip(other).all(|(a, b)| {
                a.eq_ignore_ascii_case(b)
            })
    }

    /// Converts this slice to its ASCII upper case equivalent in-place.
    ///
    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
    /// but non-ASCII letters are unchanged.
    ///
    /// To return a new uppercased value without modifying the existing one, use
    /// [`to_ascii_uppercase`].
    ///
    /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
    #[inline]
    pub fn make_ascii_uppercase(&mut self) {
        for byte in self {
            byte.make_ascii_uppercase();
        }
    }

    /// Converts this slice to its ASCII lower case equivalent in-place.
    ///
    /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
    /// but non-ASCII letters are unchanged.
    ///
    /// To return a new lowercased value without modifying the existing one, use
    /// [`to_ascii_lowercase`].
    ///
    /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
    #[inline]
    pub fn make_ascii_lowercase(&mut self) {
        for byte in self {
            byte.make_ascii_lowercase();
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, I> ops::Index<I> for [T]
    where I: SliceIndex<[T]>
{
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &I::Output {
        // All real indexing logic lives on the `SliceIndex` impls below;
        // this just dispatches on the index type.
        index.index(self)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, I> ops::IndexMut<I> for [T]
    where I: SliceIndex<[T]>
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut I::Output {
        index.index_mut(self)
    }
}
// Out-of-line, cold panic helpers: keeping these never-inlined keeps the
// happy path of slice indexing small and branch-predictable.
#[inline(never)]
#[cold]
fn slice_index_len_fail(index: usize, len: usize) -> ! {
    panic!("index {} out of range for slice of length {}", index, len);
}

#[inline(never)]
#[cold]
fn slice_index_order_fail(index: usize, end: usize) -> ! {
    panic!("slice index starts at {} but ends at {}", index, end);
}

#[inline(never)]
#[cold]
fn slice_index_overflow_fail() -> ! {
    panic!("attempted to index slice up to maximum usize");
}
mod private_slice_index {
    use super::ops;

    // Sealing trait: `SliceIndex` requires this supertrait, so downstream
    // crates cannot implement `SliceIndex` — the set of valid slice index
    // types stays closed to the ones listed here.
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    pub trait Sealed {}

    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for usize {}
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for ops::Range<usize> {}
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for ops::RangeTo<usize> {}
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for ops::RangeFrom<usize> {}
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for ops::RangeFull {}
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for ops::RangeInclusive<usize> {}
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    impl Sealed for ops::RangeToInclusive<usize> {}
}
/// A helper trait used for indexing operations.
#[stable(feature = "slice_get_slice", since = "1.28.0")]
#[rustc_on_unimplemented(
    on(
        T = "str",
        label = "string indices are ranges of `usize`",
    ),
    on(
        all(any(T = "str", T = "&str", T = "std::string::String"), _Self="{integer}"),
        note="you can use `.chars().nth()` or `.bytes().nth()`
see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
    ),
    message = "the type `{T}` cannot be indexed by `{Self}`",
    label = "slice indices are of type `usize` or ranges of `usize`",
)]
pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
    /// The output type returned by methods.
    #[stable(feature = "slice_get_slice", since = "1.28.0")]
    type Output: ?Sized;

    /// Returns a shared reference to the output at this location, if in
    /// bounds.
    #[unstable(feature = "slice_index_methods", issue = "0")]
    fn get(self, slice: &T) -> Option<&Self::Output>;

    /// Returns a mutable reference to the output at this location, if in
    /// bounds.
    #[unstable(feature = "slice_index_methods", issue = "0")]
    fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;

    /// Returns a shared reference to the output at this location, without
    /// performing any bounds checking.
    #[unstable(feature = "slice_index_methods", issue = "0")]
    unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;

    /// Returns a mutable reference to the output at this location, without
    /// performing any bounds checking.
    #[unstable(feature = "slice_index_methods", issue = "0")]
    unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;

    /// Returns a shared reference to the output at this location, panicking
    /// if out of bounds.
    #[unstable(feature = "slice_index_methods", issue = "0")]
    fn index(self, slice: &T) -> &Self::Output;

    /// Returns a mutable reference to the output at this location, panicking
    /// if out of bounds.
    #[unstable(feature = "slice_index_methods", issue = "0")]
    fn index_mut(self, slice: &mut T) -> &mut Self::Output;
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for usize {
    type Output = T;

    #[inline]
    fn get(self, slice: &[T]) -> Option<&T> {
        if self < slice.len() {
            // SAFETY: `self < len` was just checked.
            unsafe {
                Some(self.get_unchecked(slice))
            }
        } else {
            None
        }
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
        if self < slice.len() {
            // SAFETY: `self < len` was just checked.
            unsafe {
                Some(self.get_unchecked_mut(slice))
            }
        } else {
            None
        }
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &T {
        &*slice.as_ptr().add(self)
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
        &mut *slice.as_mut_ptr().add(self)
    }

    #[inline]
    fn index(self, slice: &[T]) -> &T {
        // N.B., use intrinsic indexing
        &(*slice)[self]
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut T {
        // N.B., use intrinsic indexing
        &mut (*slice)[self]
    }
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::Range<usize> {
    type Output = [T];

    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        if self.start > self.end || self.end > slice.len() {
            None
        } else {
            // SAFETY: `start <= end <= len` was just checked.
            unsafe {
                Some(self.get_unchecked(slice))
            }
        }
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        if self.start > self.end || self.end > slice.len() {
            None
        } else {
            // SAFETY: `start <= end <= len` was just checked.
            unsafe {
                Some(self.get_unchecked_mut(slice))
            }
        }
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
    }

    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        // Report the more specific failure (inverted range vs. out of
        // bounds) through the dedicated cold panic helpers.
        if self.start > self.end {
            slice_index_order_fail(self.start, self.end);
        } else if self.end > slice.len() {
            slice_index_len_fail(self.end, slice.len());
        }
        unsafe {
            self.get_unchecked(slice)
        }
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        if self.start > self.end {
            slice_index_order_fail(self.start, self.end);
        } else if self.end > slice.len() {
            slice_index_len_fail(self.end, slice.len());
        }
        unsafe {
            self.get_unchecked_mut(slice)
        }
    }
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
    type Output = [T];

    // `..end` is sugar for `0..end`; every method forwards accordingly.
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        (0..self.end).get(slice)
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        (0..self.end).get_mut(slice)
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        (0..self.end).get_unchecked(slice)
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        (0..self.end).get_unchecked_mut(slice)
    }

    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        (0..self.end).index(slice)
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        (0..self.end).index_mut(slice)
    }
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
    type Output = [T];

    // `start..` is sugar for `start..len`; every method forwards accordingly.
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        (self.start..slice.len()).get(slice)
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        (self.start..slice.len()).get_mut(slice)
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        (self.start..slice.len()).get_unchecked(slice)
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        (self.start..slice.len()).get_unchecked_mut(slice)
    }

    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        (self.start..slice.len()).index(slice)
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        (self.start..slice.len()).index_mut(slice)
    }
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::RangeFull {
    type Output = [T];

    // `..` selects the whole slice: every method is the identity and can
    // never fail, so no bounds checks are needed anywhere.
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        Some(slice)
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        Some(slice)
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        slice
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        slice
    }

    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        slice
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        slice
    }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
    type Output = [T];

    // `start..=end` is rewritten as the half-open `start..end + 1`.
    // `end == usize::MAX` would overflow that `+ 1`, so the checked
    // methods reject it up front and the panicking ones abort with the
    // dedicated overflow message.
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        if *self.end() == usize::max_value() { None }
        else { (*self.start()..self.end() + 1).get(slice) }
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        if *self.end() == usize::max_value() { None }
        else { (*self.start()..self.end() + 1).get_mut(slice) }
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        // Caller promises the range is in bounds, hence `end < usize::MAX`.
        (*self.start()..self.end() + 1).get_unchecked(slice)
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        (*self.start()..self.end() + 1).get_unchecked_mut(slice)
    }

    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
        (*self.start()..self.end() + 1).index(slice)
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
        (*self.start()..self.end() + 1).index_mut(slice)
    }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
    type Output = [T];

    // `..=end` is sugar for `0..=end`; every method forwards accordingly.
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        (0..=self.end).get(slice)
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        (0..=self.end).get_mut(slice)
    }

    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        (0..=self.end).get_unchecked(slice)
    }

    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        (0..=self.end).get_unchecked_mut(slice)
    }

    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        (0..=self.end).index(slice)
    }

    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        (0..=self.end).index_mut(slice)
    }
}
////////////////////////////////////////////////////////////////////////////////
// Common traits
////////////////////////////////////////////////////////////////////////////////

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for &[T] {
    /// Creates an empty slice.
    fn default() -> Self { &[] }
}

#[stable(feature = "mut_slice_default", since = "1.5.0")]
impl<T> Default for &mut [T] {
    /// Creates a mutable empty slice.
    fn default() -> Self { &mut [] }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a [T] {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    // `for x in &slice` yields shared references via the same iterator
    // as `slice.iter()`.
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut [T] {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    // `for x in &mut slice` yields mutable references via `iter_mut()`.
    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
// Macro helper functions
#[inline(always)]
fn size_from_ptr<T>(_: *const T) -> usize {
    // The pointer argument is only used to infer `T`.
    mem::size_of::<T>()
}

// Inlining is_empty and len makes a huge performance difference
macro_rules! is_empty {
    // The way we encode the length of a ZST iterator, this works both for ZST
    // and non-ZST.
    ($self: ident) => {$self.ptr == $self.end}
}
// To get rid of some bounds checks (see `position`), we compute the length in a somewhat
// unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
macro_rules! len {
    ($self: ident) => {{
        let start = $self.ptr;
        let diff = ($self.end as usize).wrapping_sub(start as usize);
        let size = size_from_ptr(start);
        if size == 0 {
            // For ZSTs the remaining length is encoded directly in
            // `end - ptr` (which may wrap), so use the raw difference.
            diff
        } else {
            // Using division instead of `offset_from` helps LLVM remove bounds checks
            diff / size
        }
    }}
}
// The shared definition of the `Iter` and `IterMut` iterators
//
// Parameters:
//   $name     - the iterator struct (`Iter` or `IterMut`)
//   $ptr      - the stored pointer type (`*const T` / `*mut T`)
//   $elem     - the yielded item type (`&'a T` / `&'a mut T`)
//   $raw_mut  - `const` or `mut`, for building raw pointer types
//   $mut_     - empty or `mut`, spliced into `& $mut_ *ptr` reborrows
//   $extra    - extra items injected into the `Iterator` impl
macro_rules! iterator {
    (
        struct $name:ident -> $ptr:ty,
        $elem:ty,
        $raw_mut:tt,
        {$( $mut_:tt )*},
        {$($extra:tt)*}
    ) => {
        impl<'a, T> $name<'a, T> {
            // Helper function for creating a slice from the iterator.
            #[inline(always)]
            fn make_slice(&self) -> &'a [T] {
                unsafe { from_raw_parts(self.ptr, len!(self)) }
            }

            // Helper function for moving the start of the iterator forwards by `offset` elements,
            // returning the old start.
            // Unsafe because the offset must be in-bounds or one-past-the-end.
            #[inline(always)]
            unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
                if mem::size_of::<T>() == 0 {
                    // This is *reducing* the length. `ptr` never changes with ZST.
                    self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
                    self.ptr
                } else {
                    let old = self.ptr;
                    self.ptr = self.ptr.offset(offset);
                    old
                }
            }

            // Helper function for moving the end of the iterator backwards by `offset` elements,
            // returning the new end.
            // Unsafe because the offset must be in-bounds or one-past-the-end.
            #[inline(always)]
            unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
                if mem::size_of::<T>() == 0 {
                    // As above: for ZSTs only the encoded length changes.
                    self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
                    self.ptr
                } else {
                    self.end = self.end.offset(-offset);
                    self.end
                }
            }
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, T> ExactSizeIterator for $name<'a, T> {
            #[inline(always)]
            fn len(&self) -> usize {
                len!(self)
            }

            #[inline(always)]
            fn is_empty(&self) -> bool {
                is_empty!(self)
            }
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, T> Iterator for $name<'a, T> {
            type Item = $elem;

            #[inline]
            fn next(&mut self) -> Option<$elem> {
                // could be implemented with slices, but this avoids bounds checks
                unsafe {
                    // `assume` feeds the non-null invariants to LLVM so it can
                    // drop redundant checks in downstream code.
                    assume(!self.ptr.is_null());
                    if mem::size_of::<T>() != 0 {
                        assume(!self.end.is_null());
                    }
                    if is_empty!(self) {
                        None
                    } else {
                        Some(& $( $mut_ )* *self.post_inc_start(1))
                    }
                }
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let exact = len!(self);
                (exact, Some(exact))
            }

            #[inline]
            fn count(self) -> usize {
                len!(self)
            }

            #[inline]
            fn nth(&mut self, n: usize) -> Option<$elem> {
                if n >= len!(self) {
                    // This iterator is now empty.
                    if mem::size_of::<T>() == 0 {
                        // We have to do it this way as `ptr` may never be 0, but `end`
                        // could be (due to wrapping).
                        self.end = self.ptr;
                    } else {
                        self.ptr = self.end;
                    }
                    return None;
                }
                // We are in bounds. `offset` does the right thing even for ZSTs.
                unsafe {
                    let elem = Some(& $( $mut_ )* *self.ptr.add(n));
                    self.post_inc_start((n as isize).wrapping_add(1));
                    elem
                }
            }

            #[inline]
            fn last(mut self) -> Option<$elem> {
                self.next_back()
            }

            #[inline]
            fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
                Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
            {
                // manual unrolling is needed when there are conditional exits from the loop
                let mut accum = init;
                unsafe {
                    while len!(self) >= 4 {
                        accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
                        accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
                        accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
                        accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
                    }
                    while !is_empty!(self) {
                        accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
                    }
                }
                Try::from_ok(accum)
            }

            #[inline]
            fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
                where Fold: FnMut(Acc, Self::Item) -> Acc,
            {
                // Let LLVM unroll this, rather than using the default
                // impl that would force the manual unrolling above
                let mut accum = init;
                while let Some(x) = self.next() {
                    accum = f(accum, x);
                }
                accum
            }

            #[inline]
            #[rustc_inherit_overflow_checks]
            fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
                Self: Sized,
                P: FnMut(Self::Item) -> bool,
            {
                // The addition might panic on overflow.
                let n = len!(self);
                self.try_fold(0, move |i, x| {
                    if predicate(x) { Err(i) }
                    else { Ok(i + 1) }
                }).err()
                    .map(|i| {
                        // `i` was produced while walking at most `n` items.
                        unsafe { assume(i < n) };
                        i
                    })
            }

            #[inline]
            fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
                P: FnMut(Self::Item) -> bool,
                Self: Sized + ExactSizeIterator + DoubleEndedIterator
            {
                // No need for an overflow check here, because `ExactSizeIterator`
                // implies that the number of elements fits into a `usize`.
                let n = len!(self);
                self.try_rfold(n, move |i, x| {
                    let i = i - 1;
                    if predicate(x) { Err(i) }
                    else { Ok(i) }
                }).err()
                    .map(|i| {
                        unsafe { assume(i < n) };
                        i
                    })
            }

            $($extra)*
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, T> DoubleEndedIterator for $name<'a, T> {
            #[inline]
            fn next_back(&mut self) -> Option<$elem> {
                // could be implemented with slices, but this avoids bounds checks
                unsafe {
                    assume(!self.ptr.is_null());
                    if mem::size_of::<T>() != 0 {
                        assume(!self.end.is_null());
                    }
                    if is_empty!(self) {
                        None
                    } else {
                        Some(& $( $mut_ )* *self.pre_dec_end(1))
                    }
                }
            }

            #[inline]
            fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
                Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
            {
                // manual unrolling is needed when there are conditional exits from the loop
                let mut accum = init;
                unsafe {
                    while len!(self) >= 4 {
                        accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
                        accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
                        accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
                        accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
                    }
                    // inlining is_empty everywhere makes a huge performance difference
                    while !is_empty!(self) {
                        accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
                    }
                }
                Try::from_ok(accum)
            }

            #[inline]
            fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
                where Fold: FnMut(Acc, Self::Item) -> Acc,
            {
                // Let LLVM unroll this, rather than using the default
                // impl that would force the manual unrolling above
                let mut accum = init;
                while let Some(x) = self.next_back() {
                    accum = f(accum, x);
                }
                accum
            }
        }

        #[stable(feature = "fused", since = "1.26.0")]
        impl<'a, T> FusedIterator for $name<'a, T> {}

        #[unstable(feature = "trusted_len", issue = "37572")]
        unsafe impl<'a, T> TrustedLen for $name<'a, T> {}
    }
}
/// Immutable slice iterator
///
/// This struct is created by the [`iter`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
/// let slice = &[1, 2, 3];
///
/// // Then, we iterate over it:
/// for element in slice.iter() {
///     println!("{}", element);
/// }
/// ```
///
/// [`iter`]: ../../std/primitive.slice.html#method.iter
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    ptr: *const T,
    end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
                   // ptr == end is a quick test for the Iterator being empty, that works
                   // for both ZST and non-ZST.
    _marker: marker::PhantomData<&'a T>,
}

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("Iter")
            .field(&self.as_slice())
            .finish()
    }
}

// `Iter` behaves like `&'a [T]` for thread-safety purposes: both sending
// and sharing it only hands out `&T`s, hence both bounds are `T: Sync`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}

impl<'a, T> Iter<'a, T> {
    /// View the underlying data as a subslice of the original data.
    ///
    /// This has the same lifetime as the original slice, and so the
    /// iterator can continue to be used while this exists.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // First, we declare a type which has the `iter` method to get the `Iter`
    /// // struct (&[usize here]):
    /// let slice = &[1, 2, 3];
    ///
    /// // Then, we get the iterator:
    /// let mut iter = slice.iter();
    /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
    /// println!("{:?}", iter.as_slice());
    ///
    /// // Next, we move to the second element of the slice:
    /// iter.next();
    /// // Now `as_slice` returns "[2, 3]":
    /// println!("{:?}", iter.as_slice());
    /// ```
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn as_slice(&self) -> &'a [T] {
        self.make_slice()
    }
}

iterator!{struct Iter -> *const T, &'a T, const, {/* no mut */}, {
    fn is_sorted_by<F>(self, mut compare: F) -> bool
    where
        Self: Sized,
        F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
    {
        // Specialization: walk the remaining data as a slice and test
        // adjacent pairs with `windows(2)`. An incomparable pair (`None`)
        // makes the whole answer `false`.
        self.as_slice().windows(2).all(|w| {
            compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
        })
    }
}}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
}

#[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
impl<T> AsRef<[T]> for Iter<'_, T> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
/// Mutable slice iterator.
///
/// This struct is created by the [`iter_mut`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has `iter_mut` method to get the `IterMut`
/// // struct (&[usize here]):
/// let mut slice = &mut [1, 2, 3];
///
/// // Then, we iterate over it and increment each element value:
/// for element in slice.iter_mut() {
///     *element += 1;
/// }
///
/// // We now have "[2, 3, 4]":
/// println!("{:?}", slice);
/// ```
///
/// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    ptr: *mut T,
    end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
                 // ptr == end is a quick test for the Iterator being empty, that works
                 // for both ZST and non-ZST.
    _marker: marker::PhantomData<&'a mut T>,
}

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("IterMut")
            .field(&self.make_slice())
            .finish()
    }
}

// `IterMut` behaves like `&'a mut [T]`: sharing it exposes `&T` (needs
// `T: Sync`), sending it moves unique access to the data (needs `T: Send`).
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}

impl<'a, T> IterMut<'a, T> {
    /// View the underlying data as a subslice of the original data.
    ///
    /// To avoid creating `&mut` references that alias, this is forced
    /// to consume the iterator.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
    /// // struct (&[usize here]):
    /// let mut slice = &mut [1, 2, 3];
    ///
    /// {
    ///     // Then, we get the iterator:
    ///     let mut iter = slice.iter_mut();
    ///     // We move to next element:
    ///     iter.next();
    ///     // So if we print what `into_slice` method returns here, we have "[2, 3]":
    ///     println!("{:?}", iter.into_slice());
    /// }
    ///
    /// // Now let's modify a value of the slice:
    /// {
    ///     // First we get back the iterator:
    ///     let mut iter = slice.iter_mut();
    ///     // We change the value of the first element of the slice returned by the `next` method:
    ///     *iter.next().unwrap() += 1;
    /// }
    ///
    /// // Now slice is "[2, 2, 3]":
    /// println!("{:?}", slice);
    /// ```
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn into_slice(self) -> &'a mut [T] {
        unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
    }
}

iterator!{struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
/// An internal abstraction over the splitting iterators, so that
/// splitn, splitn_mut etc can be implemented once.
#[doc(hidden)]
trait SplitIter: DoubleEndedIterator {
    /// Marks the underlying iterator as complete, extracting the remaining
    /// portion of the slice.
    fn finish(&mut self) -> Option<Self::Item>;
}
/// An iterator over subslices separated by elements that match a predicate
/// function.
///
/// This struct is created by the [`split`] method on [slices].
///
/// [`split`]: ../../std/primitive.slice.html#method.split
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
    v: &'a [T],      // unconsumed portion of the original slice
    pred: P,         // separator predicate
    finished: bool   // set once the trailing subslice has been yielded
}

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Split")
            .field("v", &self.v)
            .field("finished", &self.finished)
            .finish()
    }
}

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, P> Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool {
    fn clone(&self) -> Self {
        Split {
            v: self.v,
            pred: self.pred.clone(),
            finished: self.finished,
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
    type Item = &'a [T];

    #[inline]
    fn next(&mut self) -> Option<&'a [T]> {
        if self.finished { return None; }

        match self.v.iter().position(|x| (self.pred)(x)) {
            // No more separators: yield everything that is left, exactly once.
            None => self.finish(),
            Some(idx) => {
                let ret = Some(&self.v[..idx]);
                // Skip past the separator element itself.
                self.v = &self.v[idx + 1..];
                ret
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.finished {
            (0, Some(0))
        } else {
            // If the predicate doesn't match anything, we yield one slice;
            // if it matches every element, we yield len+1 empty slices.
            (1, Some(self.v.len() + 1))
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.finished { return None; }

        match self.v.iter().rposition(|x| (self.pred)(x)) {
            None => self.finish(),
            Some(idx) => {
                let ret = Some(&self.v[idx + 1..]);
                self.v = &self.v[..idx];
                ret
            }
        }
    }
}

impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn finish(&mut self) -> Option<&'a [T]> {
        if self.finished { None } else { self.finished = true; Some(self.v) }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`.
///
/// This struct is created by the [`split_mut`] method on [slices].
///
/// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
    v: &'a mut [T],  // unconsumed portion of the original slice
    pred: P,         // separator predicate
    finished: bool   // set once the trailing subslice has been yielded
}

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SplitMut")
            .field("v", &self.v)
            .field("finished", &self.finished)
            .finish()
    }
}

impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn finish(&mut self) -> Option<&'a mut [T]> {
        if self.finished {
            None
        } else {
            self.finished = true;
            // `mem::replace` moves the remaining data out so the yielded
            // `&'a mut [T]` does not alias `self.v`.
            Some(mem::replace(&mut self.v, &mut []))
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    type Item = &'a mut [T];

    #[inline]
    fn next(&mut self) -> Option<&'a mut [T]> {
        if self.finished { return None; }

        let idx_opt = { // work around borrowck limitations
            let pred = &mut self.pred;
            self.v.iter().position(|x| (*pred)(x))
        };
        match idx_opt {
            None => self.finish(),
            Some(idx) => {
                let tmp = mem::replace(&mut self.v, &mut []);
                let (head, tail) = tmp.split_at_mut(idx);
                // Drop the separator at `tail[0]`; keep the rest for later.
                self.v = &mut tail[1..];
                Some(head)
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.finished {
            (0, Some(0))
        } else {
            // if the predicate doesn't match anything, we yield one slice
            // if it matches every element, we yield len+1 empty slices.
            (1, Some(self.v.len() + 1))
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
    P: FnMut(&T) -> bool,
{
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut [T]> {
        if self.finished { return None; }

        let idx_opt = { // work around borrowck limitations
            let pred = &mut self.pred;
            self.v.iter().rposition(|x| (*pred)(x))
        };
        match idx_opt {
            None => self.finish(),
            Some(idx) => {
                let tmp = mem::replace(&mut self.v, &mut []);
                let (head, tail) = tmp.split_at_mut(idx);
                self.v = head;
                Some(&mut tail[1..])
            }
        }
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
/// function, starting from the end of the slice.
///
/// This struct is created by the [`rsplit`] method on [slices].
///
/// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "slice_rsplit", since = "1.27.0")]
#[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
    // Reverse splitting is forward splitting with front/back swapped.
    inner: Split<'a, T, P>
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RSplit")
            .field("v", &self.inner.v)
            .field("finished", &self.inner.finished)
            .finish()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
    type Item = &'a [T];

    #[inline]
    fn next(&mut self) -> Option<&'a [T]> {
        // Forward iteration of RSplit = backward iteration of Split.
        self.inner.next_back()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn next_back(&mut self) -> Option<&'a [T]> {
        self.inner.next()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn finish(&mut self) -> Option<&'a [T]> {
        self.inner.finish()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`, starting from the end of the slice.
///
/// This struct is created by the [`rsplit_mut`] method on [slices].
///
/// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "slice_rsplit", since = "1.27.0")]
pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
    // Reverse splitting is forward splitting with front/back swapped.
    inner: SplitMut<'a, T, P>
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RSplitMut")
            .field("v", &self.inner.v)
            .field("finished", &self.inner.finished)
            .finish()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn finish(&mut self) -> Option<&'a mut [T]> {
        self.inner.finish()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    type Item = &'a mut [T];

    #[inline]
    fn next(&mut self) -> Option<&'a mut [T]> {
        // Forward iteration of RSplitMut = backward iteration of SplitMut.
        self.inner.next_back()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
    P: FnMut(&T) -> bool,
{
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut [T]> {
        self.inner.next()
    }
}

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
3604 /// A private iterator over subslices separated by elements that
3605 /// match a predicate function, splitting at most a fixed number of
// Shared driver for SplitN/RSplitN/SplitNMut/RSplitNMut: wraps any
// `SplitIter` and decrements `count` on every item produced.
3608 struct GenericSplitN<I> {
3613 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
3617 fn next(&mut self) -> Option<T> {
// When exactly one split remains, `finish()` yields the whole unsearched
// remainder as the last item instead of splitting it further.
3620 1 => { self.count -= 1; self.iter.finish() }
3621 _ => { self.count -= 1; self.iter.next() }
3626 fn size_hint(&self) -> (usize, Option<usize>) {
3627 let (lower, upper_opt) = self.iter.size_hint();
// The upper bound is capped by the remaining split budget.
// NOTE(review): the lower bound is NOT capped by `self.count`, so it can
// exceed the number of items this adapter will actually yield — confirm
// whether this over-promise is intentional.
3628 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
3632 /// An iterator over subslices separated by elements that match a predicate
3633 /// function, limited to a given number of splits.
3635 /// This struct is created by the [`splitn`] method on [slices].
3637 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3638 /// [slices]: ../../std/primitive.slice.html
3639 #[stable(feature = "rust1", since = "1.0.0")]
// Thin newtype over the generic count-limited driver, specialized to the
// forward shared-borrow splitter.
3640 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3641 inner: GenericSplitN<Split<'a, T, P>>
3644 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3645 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool {
3646 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3647 f.debug_struct("SplitN")
3648 .field("inner", &self.inner)
3653 /// An iterator over subslices separated by elements that match a
3654 /// predicate function, limited to a given number of splits, starting
3655 /// from the end of the slice.
3657 /// This struct is created by the [`rsplitn`] method on [slices].
3659 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3660 /// [slices]: ../../std/primitive.slice.html
3661 #[stable(feature = "rust1", since = "1.0.0")]
// Same count-limited driver as `SplitN`, but over the reverse splitter.
3662 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3663 inner: GenericSplitN<RSplit<'a, T, P>>
3666 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3667 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool {
3668 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3669 f.debug_struct("RSplitN")
3670 .field("inner", &self.inner)
3675 /// An iterator over subslices separated by elements that match a predicate
3676 /// function, limited to a given number of splits.
3678 /// This struct is created by the [`splitn_mut`] method on [slices].
3680 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3681 /// [slices]: ../../std/primitive.slice.html
3682 #[stable(feature = "rust1", since = "1.0.0")]
// Mutable counterpart of `SplitN`: the driver wraps `SplitMut`.
3683 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3684 inner: GenericSplitN<SplitMut<'a, T, P>>
3687 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3688 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3689 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3690 f.debug_struct("SplitNMut")
3691 .field("inner", &self.inner)
3696 /// An iterator over subslices separated by elements that match a
3697 /// predicate function, limited to a given number of splits, starting
3698 /// from the end of the slice.
3700 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3702 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3703 /// [slices]: ../../std/primitive.slice.html
3704 #[stable(feature = "rust1", since = "1.0.0")]
// Mutable, reverse, count-limited splitter: driver over `RSplitMut`.
3705 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3706 inner: GenericSplitN<RSplitMut<'a, T, P>>
3709 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3710 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3711 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3712 f.debug_struct("RSplitNMut")
3713 .field("inner", &self.inner)
// Generates `Iterator` + `FusedIterator` impls for the four *N wrappers by
// forwarding every call to `self.inner` (the `GenericSplitN` driver).
// Note the where-clause hardcodes `&T`, so every invocation must bind the
// element parameter as `T` — which all four invocations below do.
3718 macro_rules! forward_iterator {
3719 ($name:ident: $elem:ident, $iter_of:ty) => {
3720 #[stable(feature = "rust1", since = "1.0.0")]
3721 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
3722 P: FnMut(&T) -> bool
3724 type Item = $iter_of;
3727 fn next(&mut self) -> Option<$iter_of> {
3732 fn size_hint(&self) -> (usize, Option<usize>) {
3733 self.inner.size_hint()
3737 #[stable(feature = "fused", since = "1.26.0")]
3738 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
3739 where P: FnMut(&T) -> bool {}
3743 forward_iterator! { SplitN: T, &'a [T] }
3744 forward_iterator! { RSplitN: T, &'a [T] }
3745 forward_iterator! { SplitNMut: T, &'a mut [T] }
3746 forward_iterator! { RSplitNMut: T, &'a mut [T] }
3748 /// An iterator over overlapping subslices of length `size`.
3750 /// This struct is created by the [`windows`] method on [slices].
3752 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3753 /// [slices]: ../../std/primitive.slice.html
3755 #[stable(feature = "rust1", since = "1.0.0")]
3756 pub struct Windows<'a, T:'a> {
3761 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3762 #[stable(feature = "rust1", since = "1.0.0")]
3763 impl<T> Clone for Windows<'_, T> {
3764 fn clone(&self) -> Self {
3772 #[stable(feature = "rust1", since = "1.0.0")]
3773 impl<'a, T> Iterator for Windows<'a, T> {
3774 type Item = &'a [T];
3777 fn next(&mut self) -> Option<&'a [T]> {
// Exhausted once fewer than `size` elements remain.
3778 if self.size > self.v.len() {
// Yield the leading window, then advance the view by one element so
// consecutive windows overlap in `size - 1` elements.
3781 let ret = Some(&self.v[..self.size]);
3782 self.v = &self.v[1..];
3788 fn size_hint(&self) -> (usize, Option<usize>) {
3789 if self.size > self.v.len() {
// A slice of length L has L - size + 1 windows; the hint is exact.
3792 let size = self.v.len() - self.size + 1;
3798 fn count(self) -> usize {
3803 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `size + n` may overflow; treat overflow the same as out-of-range.
3804 let (end, overflow) = self.size.overflowing_add(n);
3805 if end > self.v.len() || overflow {
3809 let nth = &self.v[n..end];
3810 self.v = &self.v[n+1..];
3816 fn last(self) -> Option<Self::Item> {
3817 if self.size > self.v.len() {
// The last window is the trailing `size` elements.
3820 let start = self.v.len() - self.size;
3821 Some(&self.v[start..])
3826 #[stable(feature = "rust1", since = "1.0.0")]
3827 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
3829 fn next_back(&mut self) -> Option<&'a [T]> {
3830 if self.size > self.v.len() {
// Yield the trailing window, then shrink the view from the back by one.
3833 let ret = Some(&self.v[self.v.len()-self.size..]);
3834 self.v = &self.v[..self.v.len()-1];
3840 #[stable(feature = "rust1", since = "1.0.0")]
3841 impl<T> ExactSizeIterator for Windows<'_, T> {}
3843 #[unstable(feature = "trusted_len", issue = "37572")]
3844 unsafe impl<T> TrustedLen for Windows<'_, T> {}
3846 #[stable(feature = "fused", since = "1.26.0")]
3847 impl<T> FusedIterator for Windows<'_, T> {}
// SAFETY relies on the caller passing an in-bounds index `i`, per the
// TrustedRandomAccess contract; window `i` starts at offset `i`.
3850 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
3851 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3852 from_raw_parts(self.v.as_ptr().add(i), self.size)
3854 fn may_have_side_effect() -> bool { false }
3857 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
3858 /// time), starting at the beginning of the slice.
3860 /// When the slice len is not evenly divided by the chunk size, the last slice
3861 /// of the iteration will be the remainder.
3863 /// This struct is created by the [`chunks`] method on [slices].
3865 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
3866 /// [slices]: ../../std/primitive.slice.html
3868 #[stable(feature = "rust1", since = "1.0.0")]
3869 pub struct Chunks<'a, T:'a> {
3874 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3875 #[stable(feature = "rust1", since = "1.0.0")]
3876 impl<T> Clone for Chunks<'_, T> {
3877 fn clone(&self) -> Self {
3880 chunk_size: self.chunk_size,
3885 #[stable(feature = "rust1", since = "1.0.0")]
3886 impl<'a, T> Iterator for Chunks<'a, T> {
3887 type Item = &'a [T];
3890 fn next(&mut self) -> Option<&'a [T]> {
3891 if self.v.is_empty() {
// `min` lets the final chunk be shorter than `chunk_size`.
3894 let chunksz = cmp::min(self.v.len(), self.chunk_size);
3895 let (fst, snd) = self.v.split_at(chunksz);
3902 fn size_hint(&self) -> (usize, Option<usize>) {
3903 if self.v.is_empty() {
// Ceiling division: a partial trailing chunk still counts as one item.
3906 let n = self.v.len() / self.chunk_size;
3907 let rem = self.v.len() % self.chunk_size;
3908 let n = if rem > 0 { n+1 } else { n };
3914 fn count(self) -> usize {
3919 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `n * chunk_size` may overflow; overflow implies out-of-range.
3920 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3921 if start >= self.v.len() || overflow {
3925 let end = match start.checked_add(self.chunk_size) {
3926 Some(sum) => cmp::min(self.v.len(), sum),
3927 None => self.v.len(),
3929 let nth = &self.v[start..end];
3930 self.v = &self.v[end..];
3936 fn last(self) -> Option<Self::Item> {
3937 if self.v.is_empty() {
// Round the final index down to a chunk boundary to find the last chunk's start.
3940 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3941 Some(&self.v[start..])
3946 #[stable(feature = "rust1", since = "1.0.0")]
3947 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
3949 fn next_back(&mut self) -> Option<&'a [T]> {
3950 if self.v.is_empty() {
// The back chunk is the remainder when the length is not a multiple
// of `chunk_size`; otherwise it is a full chunk.
3953 let remainder = self.v.len() % self.chunk_size;
3954 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
3955 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
3962 #[stable(feature = "rust1", since = "1.0.0")]
3963 impl<T> ExactSizeIterator for Chunks<'_, T> {}
3965 #[unstable(feature = "trusted_len", issue = "37572")]
3966 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
3968 #[stable(feature = "fused", since = "1.26.0")]
3969 impl<T> FusedIterator for Chunks<'_, T> {}
// SAFETY relies on `i` being in-bounds per the TrustedRandomAccess contract;
// the end is clamped so the last chunk may be short.
3972 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
3973 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3974 let start = i * self.chunk_size;
3975 let end = match start.checked_add(self.chunk_size) {
3976 None => self.v.len(),
3977 Some(end) => cmp::min(end, self.v.len()),
3979 from_raw_parts(self.v.as_ptr().add(start), end - start)
3981 fn may_have_side_effect() -> bool { false }
3984 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
3985 /// elements at a time), starting at the beginning of the slice.
3987 /// When the slice len is not evenly divided by the chunk size, the last slice
3988 /// of the iteration will be the remainder.
3990 /// This struct is created by the [`chunks_mut`] method on [slices].
3992 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
3993 /// [slices]: ../../std/primitive.slice.html
3995 #[stable(feature = "rust1", since = "1.0.0")]
3996 pub struct ChunksMut<'a, T:'a> {
4001 #[stable(feature = "rust1", since = "1.0.0")]
4002 impl<'a, T> Iterator for ChunksMut<'a, T> {
4003 type Item = &'a mut [T];
4006 fn next(&mut self) -> Option<&'a mut [T]> {
4007 if self.v.is_empty() {
4010 let sz = cmp::min(self.v.len(), self.chunk_size);
// `mem::replace` moves the slice out of `self` so `split_at_mut` can hand
// back a chunk with the full lifetime `'a` (a reborrow through `&mut self`
// would be tied to the shorter borrow of `self`).
4011 let tmp = mem::replace(&mut self.v, &mut []);
4012 let (head, tail) = tmp.split_at_mut(sz);
4019 fn size_hint(&self) -> (usize, Option<usize>) {
4020 if self.v.is_empty() {
// Ceiling division, same as the shared `Chunks` iterator.
4023 let n = self.v.len() / self.chunk_size;
4024 let rem = self.v.len() % self.chunk_size;
4025 let n = if rem > 0 { n + 1 } else { n };
4031 fn count(self) -> usize {
4036 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4037 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4038 if start >= self.v.len() || overflow {
4042 let end = match start.checked_add(self.chunk_size) {
4043 Some(sum) => cmp::min(self.v.len(), sum),
4044 None => self.v.len(),
// Two splits: [0, end) / [end, len), then [0, start) / [start, end) = nth.
4046 let tmp = mem::replace(&mut self.v, &mut []);
4047 let (head, tail) = tmp.split_at_mut(end);
4048 let (_, nth) = head.split_at_mut(start);
4055 fn last(self) -> Option<Self::Item> {
4056 if self.v.is_empty() {
4059 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4060 Some(&mut self.v[start..])
4065 #[stable(feature = "rust1", since = "1.0.0")]
4066 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
4068 fn next_back(&mut self) -> Option<&'a mut [T]> {
4069 if self.v.is_empty() {
// Back chunk is the remainder (if any), else a full chunk.
4072 let remainder = self.v.len() % self.chunk_size;
4073 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4074 let tmp = mem::replace(&mut self.v, &mut []);
4075 let tmp_len = tmp.len();
4076 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4083 #[stable(feature = "rust1", since = "1.0.0")]
4084 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
4086 #[unstable(feature = "trusted_len", issue = "37572")]
4087 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
4089 #[stable(feature = "fused", since = "1.26.0")]
4090 impl<T> FusedIterator for ChunksMut<'_, T> {}
// SAFETY relies on callers upholding TrustedRandomAccess: `i` in-bounds and
// each index accessed at most once (the returned `&mut` chunks must not alias).
4093 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4094 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4095 let start = i * self.chunk_size;
4096 let end = match start.checked_add(self.chunk_size) {
4097 None => self.v.len(),
4098 Some(end) => cmp::min(end, self.v.len()),
4100 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4102 fn may_have_side_effect() -> bool { false }
4105 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4106 /// time), starting at the beginning of the slice.
4108 /// When the slice len is not evenly divided by the chunk size, the last
4109 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4110 /// the [`remainder`] function from the iterator.
4112 /// This struct is created by the [`chunks_exact`] method on [slices].
4114 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4115 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4116 /// [slices]: ../../std/primitive.slice.html
4118 #[stable(feature = "chunks_exact", since = "1.31.0")]
4119 pub struct ChunksExact<'a, T:'a> {
4125 impl<'a, T> ChunksExact<'a, T> {
4126 /// Return the remainder of the original slice that is not going to be
4127 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4128 /// elements.
4129 #[stable(feature = "chunks_exact", since = "1.31.0")]
4130 pub fn remainder(&self) -> &'a [T] {
4135 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4136 #[stable(feature = "chunks_exact", since = "1.31.0")]
4137 impl<T> Clone for ChunksExact<'_, T> {
4138 fn clone(&self) -> Self {
4142 chunk_size: self.chunk_size,
4147 #[stable(feature = "chunks_exact", since = "1.31.0")]
4148 impl<'a, T> Iterator for ChunksExact<'a, T> {
4149 type Item = &'a [T];
4152 fn next(&mut self) -> Option<&'a [T]> {
// Unlike `Chunks`, a partial trailing chunk is never yielded; iteration
// stops as soon as fewer than `chunk_size` elements remain.
4153 if self.v.len() < self.chunk_size {
4156 let (fst, snd) = self.v.split_at(self.chunk_size);
4163 fn size_hint(&self) -> (usize, Option<usize>) {
// Floor division: only full chunks are counted; the hint is exact.
4164 let n = self.v.len() / self.chunk_size;
4169 fn count(self) -> usize {
4174 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4175 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4176 if start >= self.v.len() || overflow {
4180 let (_, snd) = self.v.split_at(start);
4187 fn last(mut self) -> Option<Self::Item> {
4192 #[stable(feature = "chunks_exact", since = "1.31.0")]
4193 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
4195 fn next_back(&mut self) -> Option<&'a [T]> {
4196 if self.v.len() < self.chunk_size {
// The view `self.v` is pre-trimmed to a multiple of `chunk_size`
// (the remainder is stored separately), so the back chunk is always full.
4199 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4206 #[stable(feature = "chunks_exact", since = "1.31.0")]
4207 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
4208 fn is_empty(&self) -> bool {
4213 #[unstable(feature = "trusted_len", issue = "37572")]
4214 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
4216 #[stable(feature = "chunks_exact", since = "1.31.0")]
4217 impl<T> FusedIterator for ChunksExact<'_, T> {}
// SAFETY relies on `i` being in-bounds; every chunk has exactly `chunk_size`
// elements, so no end-clamping is needed here.
4220 #[stable(feature = "chunks_exact", since = "1.31.0")]
4221 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
4222 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4223 let start = i * self.chunk_size;
4224 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4226 fn may_have_side_effect() -> bool { false }
4229 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4230 /// elements at a time), starting at the beginning of the slice.
4232 /// When the slice len is not evenly divided by the chunk size, the last up to
4233 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4234 /// [`into_remainder`] function from the iterator.
4236 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
4238 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
4239 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4240 /// [slices]: ../../std/primitive.slice.html
4242 #[stable(feature = "chunks_exact", since = "1.31.0")]
4243 pub struct ChunksExactMut<'a, T:'a> {
4249 impl<'a, T> ChunksExactMut<'a, T> {
4250 /// Return the remainder of the original slice that is not going to be
4251 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4252 /// elements. Consumes the iterator (mutable borrows cannot be duplicated).
4253 #[stable(feature = "chunks_exact", since = "1.31.0")]
4254 pub fn into_remainder(self) -> &'a mut [T] {
4259 #[stable(feature = "chunks_exact", since = "1.31.0")]
4260 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
4261 type Item = &'a mut [T];
4264 fn next(&mut self) -> Option<&'a mut [T]> {
// Stops before a partial chunk; only full chunks are yielded.
4265 if self.v.len() < self.chunk_size {
// Move the slice out via `mem::replace` so the chunk keeps lifetime `'a`.
4268 let tmp = mem::replace(&mut self.v, &mut []);
4269 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4276 fn size_hint(&self) -> (usize, Option<usize>) {
// Floor division: exact count of full chunks.
4277 let n = self.v.len() / self.chunk_size;
4282 fn count(self) -> usize {
4287 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4288 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4289 if start >= self.v.len() || overflow {
4293 let tmp = mem::replace(&mut self.v, &mut []);
4294 let (_, snd) = tmp.split_at_mut(start);
4301 fn last(mut self) -> Option<Self::Item> {
4306 #[stable(feature = "chunks_exact", since = "1.31.0")]
4307 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4309 fn next_back(&mut self) -> Option<&'a mut [T]> {
4310 if self.v.len() < self.chunk_size {
// `self.v` holds a whole number of chunks (remainder kept separately),
// so the back chunk is always exactly `chunk_size` long.
4313 let tmp = mem::replace(&mut self.v, &mut []);
4314 let tmp_len = tmp.len();
4315 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4322 #[stable(feature = "chunks_exact", since = "1.31.0")]
4323 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
4324 fn is_empty(&self) -> bool {
4329 #[unstable(feature = "trusted_len", issue = "37572")]
4330 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
4332 #[stable(feature = "chunks_exact", since = "1.31.0")]
4333 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
// SAFETY relies on the TrustedRandomAccess contract: `i` in-bounds and each
// index taken at most once, so the returned `&mut` chunks never alias.
4336 #[stable(feature = "chunks_exact", since = "1.31.0")]
4337 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4338 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4339 let start = i * self.chunk_size;
4340 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4342 fn may_have_side_effect() -> bool { false }
4345 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4346 /// time), starting at the end of the slice.
4348 /// When the slice len is not evenly divided by the chunk size, the last slice
4349 /// of the iteration will be the remainder.
4351 /// This struct is created by the [`rchunks`] method on [slices].
4353 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4354 /// [slices]: ../../std/primitive.slice.html
4356 #[stable(feature = "rchunks", since = "1.31.0")]
4357 pub struct RChunks<'a, T:'a> {
4362 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4363 #[stable(feature = "rchunks", since = "1.31.0")]
4364 impl<'a, T> Clone for RChunks<'a, T> {
4365 fn clone(&self) -> RChunks<'a, T> {
4368 chunk_size: self.chunk_size,
4373 #[stable(feature = "rchunks", since = "1.31.0")]
4374 impl<'a, T> Iterator for RChunks<'a, T> {
4375 type Item = &'a [T];
4378 fn next(&mut self) -> Option<&'a [T]> {
4379 if self.v.is_empty() {
// Chunks are taken from the BACK of the slice; only the first-yielded
// (i.e. front-most in memory) chunk can be short.
4382 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4383 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4390 fn size_hint(&self) -> (usize, Option<usize>) {
4391 if self.v.is_empty() {
// Ceiling division: a partial leading chunk still counts as one item.
4394 let n = self.v.len() / self.chunk_size;
4395 let rem = self.v.len() % self.chunk_size;
4396 let n = if rem > 0 { n+1 } else { n };
4402 fn count(self) -> usize {
4407 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `end` counts elements skipped from the back; overflow means out-of-range.
4408 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4409 if end >= self.v.len() || overflow {
4413 // Can't underflow because of the check above
4414 let end = self.v.len() - end;
// `checked_sub` handles the leading (possibly short) chunk, whose start is 0.
4415 let start = match end.checked_sub(self.chunk_size) {
4419 let nth = &self.v[start..end];
4420 self.v = &self.v[0..start];
4426 fn last(self) -> Option<Self::Item> {
4427 if self.v.is_empty() {
// The last item yielded is the front chunk: the remainder if the length
// is not a multiple of `chunk_size`, otherwise a full chunk.
4430 let rem = self.v.len() % self.chunk_size;
4431 let end = if rem == 0 { self.chunk_size } else { rem };
4432 Some(&self.v[0..end])
4437 #[stable(feature = "rchunks", since = "1.31.0")]
4438 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
4440 fn next_back(&mut self) -> Option<&'a [T]> {
4441 if self.v.is_empty() {
4444 let remainder = self.v.len() % self.chunk_size;
4445 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4446 let (fst, snd) = self.v.split_at(chunksz);
4453 #[stable(feature = "rchunks", since = "1.31.0")]
4454 impl<'a, T> ExactSizeIterator for RChunks<'a, T> {}
4456 #[unstable(feature = "trusted_len", issue = "37572")]
4457 unsafe impl<'a, T> TrustedLen for RChunks<'a, T> {}
4459 #[stable(feature = "rchunks", since = "1.31.0")]
4460 impl<'a, T> FusedIterator for RChunks<'a, T> {}
// SAFETY relies on `i` being in-bounds; chunk `i` counted from the back,
// with `checked_sub` clamping the start of the short leading chunk to 0.
4463 #[stable(feature = "rchunks", since = "1.31.0")]
4464 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
4465 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4466 let end = self.v.len() - i * self.chunk_size;
4467 let start = match end.checked_sub(self.chunk_size) {
4469 Some(start) => start,
4471 from_raw_parts(self.v.as_ptr().add(start), end - start)
4473 fn may_have_side_effect() -> bool { false }
4476 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4477 /// elements at a time), starting at the end of the slice.
4479 /// When the slice len is not evenly divided by the chunk size, the last slice
4480 /// of the iteration will be the remainder.
4482 /// This struct is created by the [`rchunks_mut`] method on [slices].
4484 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4485 /// [slices]: ../../std/primitive.slice.html
4487 #[stable(feature = "rchunks", since = "1.31.0")]
4488 pub struct RChunksMut<'a, T:'a> {
4493 #[stable(feature = "rchunks", since = "1.31.0")]
4494 impl<'a, T> Iterator for RChunksMut<'a, T> {
4495 type Item = &'a mut [T];
4498 fn next(&mut self) -> Option<&'a mut [T]> {
4499 if self.v.is_empty() {
4502 let sz = cmp::min(self.v.len(), self.chunk_size);
// `mem::replace` moves the slice out so the yielded chunk carries the
// full lifetime `'a` rather than the borrow of `self`.
4503 let tmp = mem::replace(&mut self.v, &mut []);
4504 let tmp_len = tmp.len();
4505 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4512 fn size_hint(&self) -> (usize, Option<usize>) {
4513 if self.v.is_empty() {
// Ceiling division, matching the shared `RChunks` count.
4516 let n = self.v.len() / self.chunk_size;
4517 let rem = self.v.len() % self.chunk_size;
4518 let n = if rem > 0 { n + 1 } else { n };
4524 fn count(self) -> usize {
4529 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4530 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4531 if end >= self.v.len() || overflow {
4535 // Can't underflow because of the check above
4536 let end = self.v.len() - end;
// `checked_sub` clamps the start of the (possibly short) leading chunk.
4537 let start = match end.checked_sub(self.chunk_size) {
4541 let tmp = mem::replace(&mut self.v, &mut []);
4542 let (head, tail) = tmp.split_at_mut(start);
4543 let (nth, _) = tail.split_at_mut(end - start)
4550 fn last(self) -> Option<Self::Item> {
4551 if self.v.is_empty() {
4554 let rem = self.v.len() % self.chunk_size;
4555 let end = if rem == 0 { self.chunk_size } else { rem };
4556 Some(&mut self.v[0..end])
4561 #[stable(feature = "rchunks", since = "1.31.0")]
4562 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
4564 fn next_back(&mut self) -> Option<&'a mut [T]> {
4565 if self.v.is_empty() {
// The back item of a reverse iterator is the FRONT chunk of the slice:
// the remainder if any, otherwise a full chunk.
4568 let remainder = self.v.len() % self.chunk_size;
4569 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4570 let tmp = mem::replace(&mut self.v, &mut []);
4571 let (head, tail) = tmp.split_at_mut(sz);
4578 #[stable(feature = "rchunks", since = "1.31.0")]
4579 impl<'a, T> ExactSizeIterator for RChunksMut<'a, T> {}
4581 #[unstable(feature = "trusted_len", issue = "37572")]
4582 unsafe impl<'a, T> TrustedLen for RChunksMut<'a, T> {}
4584 #[stable(feature = "rchunks", since = "1.31.0")]
4585 impl<'a, T> FusedIterator for RChunksMut<'a, T> {}
// SAFETY relies on the TrustedRandomAccess contract: `i` in-bounds, each
// index taken at most once, so `&mut` chunks never alias.
4588 #[stable(feature = "rchunks", since = "1.31.0")]
4589 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
4590 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4591 let end = self.v.len() - i * self.chunk_size;
4592 let start = match end.checked_sub(self.chunk_size) {
4594 Some(start) => start,
4596 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4598 fn may_have_side_effect() -> bool { false }
4601 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4602 /// time), starting at the end of the slice.
4604 /// When the slice len is not evenly divided by the chunk size, the last
4605 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4606 /// the [`remainder`] function from the iterator.
4608 /// This struct is created by the [`rchunks_exact`] method on [slices].
4610 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
4611 /// [`remainder`]: ../../std/slice/struct.RChunksExact.html#method.remainder
4612 /// [slices]: ../../std/primitive.slice.html
4614 #[stable(feature = "rchunks", since = "1.31.0")]
4615 pub struct RChunksExact<'a, T:'a> {
4621 impl<'a, T> RChunksExact<'a, T> {
4622 /// Return the remainder of the original slice that is not going to be
4623 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4624 /// elements (the unchunked FRONT of the slice, since chunking starts at the end).
4625 #[stable(feature = "rchunks", since = "1.31.0")]
4626 pub fn remainder(&self) -> &'a [T] {
4631 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4632 #[stable(feature = "rchunks", since = "1.31.0")]
4633 impl<'a, T> Clone for RChunksExact<'a, T> {
4634 fn clone(&self) -> RChunksExact<'a, T> {
4638 chunk_size: self.chunk_size,
4643 #[stable(feature = "rchunks", since = "1.31.0")]
4644 impl<'a, T> Iterator for RChunksExact<'a, T> {
4645 type Item = &'a [T];
4648 fn next(&mut self) -> Option<&'a [T]> {
// Only full chunks are yielded, taken from the back of the slice.
4649 if self.v.len() < self.chunk_size {
4652 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4659 fn size_hint(&self) -> (usize, Option<usize>) {
// Floor division: exact count of full chunks.
4660 let n = self.v.len() / self.chunk_size;
4665 fn count(self) -> usize {
4670 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4671 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4672 if end >= self.v.len() || overflow {
4676 let (fst, _) = self.v.split_at(self.v.len() - end);
4683 fn last(mut self) -> Option<Self::Item> {
4688 #[stable(feature = "rchunks", since = "1.31.0")]
4689 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
4691 fn next_back(&mut self) -> Option<&'a [T]> {
4692 if self.v.len() < self.chunk_size {
// `self.v` is pre-trimmed to a multiple of `chunk_size` (remainder held
// separately), so the front chunk taken here is always full.
4695 let (fst, snd) = self.v.split_at(self.chunk_size)
4702 #[stable(feature = "rchunks", since = "1.31.0")]
4703 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
4704 fn is_empty(&self) -> bool {
4709 #[unstable(feature = "trusted_len", issue = "37572")]
4710 unsafe impl<'a, T> TrustedLen for RChunksExact<'a, T> {}
4712 #[stable(feature = "rchunks", since = "1.31.0")]
4713 impl<'a, T> FusedIterator for RChunksExact<'a, T> {}
// SAFETY relies on `i` being in-bounds; every chunk is exactly `chunk_size`
// long, so `end - chunk_size` cannot underflow for valid `i`.
4716 #[stable(feature = "rchunks", since = "1.31.0")]
4717 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
4718 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4719 let end = self.v.len() - i * self.chunk_size;
4720 let start = end - self.chunk_size;
4721 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4723 fn may_have_side_effect() -> bool { false }
4726 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4727 /// elements at a time), starting at the end of the slice.
4729 /// When the slice len is not evenly divided by the chunk size, the last up to
4730 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4731 /// [`into_remainder`] function from the iterator.
4733 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
4735 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
4736 /// [`into_remainder`]: ../../std/slice/struct.RChunksExactMut.html#method.into_remainder
4737 /// [slices]: ../../std/primitive.slice.html
4739 #[stable(feature = "rchunks", since = "1.31.0")]
4740 pub struct RChunksExactMut<'a, T:'a> {
4746 impl<'a, T> RChunksExactMut<'a, T> {
4747 /// Return the remainder of the original slice that is not going to be
4748 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4749 /// elements. Consumes the iterator (mutable borrows cannot be duplicated).
4750 #[stable(feature = "rchunks", since = "1.31.0")]
4751 pub fn into_remainder(self) -> &'a mut [T] {
4756 #[stable(feature = "rchunks", since = "1.31.0")]
4757 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
4758 type Item = &'a mut [T];
4761 fn next(&mut self) -> Option<&'a mut [T]> {
// Only full chunks, taken from the back of the slice.
4762 if self.v.len() < self.chunk_size {
// Move the slice out via `mem::replace` so the chunk keeps lifetime `'a`.
4765 let tmp = mem::replace(&mut self.v, &mut []);
4766 let tmp_len = tmp.len();
4767 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4774 fn size_hint(&self) -> (usize, Option<usize>) {
4775 let n = self.v.len() / self.chunk_size;
4780 fn count(self) -> usize {
4785 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4786 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4787 if end >= self.v.len() || overflow {
4791 let tmp = mem::replace(&mut self.v, &mut []);
4792 let tmp_len = tmp.len();
4793 let (fst, _) = tmp.split_at_mut(tmp_len - end);
4800 fn last(mut self) -> Option<Self::Item> {
4805 #[stable(feature = "rchunks", since = "1.31.0")]
4806 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
4808 fn next_back(&mut self) -> Option<&'a mut [T]> {
4809 if self.v.len() < self.chunk_size {
// `self.v` holds a whole number of chunks (remainder kept separately),
// so the front chunk split off here is always full.
4812 let tmp = mem::replace(&mut self.v, &mut []);
4813 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4820 #[stable(feature = "rchunks", since = "1.31.0")]
4821 impl<'a, T> ExactSizeIterator for RChunksExactMut<'a, T> {
4822 fn is_empty(&self) -> bool {
4827 #[unstable(feature = "trusted_len", issue = "37572")]
4828 unsafe impl<'a, T> TrustedLen for RChunksExactMut<'a, T> {}
4830 #[stable(feature = "rchunks", since = "1.31.0")]
4831 impl<'a, T> FusedIterator for RChunksExactMut<'a, T> {}
// SAFETY relies on the TrustedRandomAccess contract (in-bounds `i`, each index
// taken at most once); chunks are exactly `chunk_size`, so no underflow.
4834 #[stable(feature = "rchunks", since = "1.31.0")]
4835 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
4836 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4837 let end = self.v.len() - i * self.chunk_size;
4838 let start = end - self.chunk_size;
4839 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4841 fn may_have_side_effect() -> bool { false }
4848 /// Forms a slice from a pointer and a length.
4850 /// The `len` argument is the number of **elements**, not the number of bytes.
4854 /// This function is unsafe as there is no guarantee that the given pointer is
4855 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
4856 /// lifetime for the returned slice.
4858 /// `data` must be non-null and aligned, even for zero-length slices. One
4859 /// reason for this is that enum layout optimizations may rely on references
4860 /// (including slices of any length) being aligned and non-null to distinguish
4861 /// them from other data. You can obtain a pointer that is usable as `data`
4862 /// for zero-length slices using [`NonNull::dangling()`].
4864 /// The total size of the slice must be no larger than `isize::MAX` **bytes**
4865 /// in memory. See the safety documentation of [`pointer::offset`].
4869 /// The lifetime for the returned slice is inferred from its usage. To
4870 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
4871 /// source lifetime is safe in the context, such as by providing a helper
4872 /// function taking the lifetime of a host value for the slice, or by explicit
4880 /// // manifest a slice for a single element
4882 /// let ptr = &x as *const _;
4883 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
4884 /// assert_eq!(slice[0], 42);
4887 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
4888 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
4890 #[stable(feature = "rust1", since = "1.0.0")]
4891 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// These checks only run in debug builds (`debug_assert!`); release builds
// rely entirely on the caller upholding the documented safety contract.
4892 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
// `saturating_mul` avoids overflow in the size computation itself; the real
// requirement is `size_of::<T>() * len <= isize::MAX` bytes.
4893 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4894 "attempt to create slice covering half the address space");
// Build the fat pointer by writing (data, len) through the `Repr` union
// declared at the top of this module.
4895 Repr { raw: FatPtr { data, len } }.rust
4898 /// Performs the same functionality as [`from_raw_parts`], except that a
4899 /// mutable slice is returned.
4901 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
4902 /// as not being able to provide a non-aliasing guarantee of the returned
4903 /// mutable slice. `data` must be non-null and aligned even for zero-length
4904 /// slices as with [`from_raw_parts`]. The total size of the slice must be no
4905 /// larger than `isize::MAX` **bytes** in memory.
4907 /// See the documentation of [`from_raw_parts`] for more details.
4909 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
4911 #[stable(feature = "rust1", since = "1.0.0")]
4912 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
4913 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4914 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4915 "attempt to create slice covering half the address space");
4916 Repr { raw: FatPtr { data, len } }.rust_mut
4919 /// Converts a reference to T into a slice of length 1 (without copying).
4920 #[stable(feature = "from_ref", since = "1.28.0")]
4921 pub fn from_ref<T>(s: &T) -> &[T] {
4923 from_raw_parts(s, 1)
4927 /// Converts a reference to T into a slice of length 1 (without copying).
4928 #[stable(feature = "from_ref", since = "1.28.0")]
4929 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
4931 from_raw_parts_mut(s, 1)
4935 // This function is public only because there is no other way to unit test heapsort.
4936 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
4938 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
4939 where F: FnMut(&T, &T) -> bool
4941 sort::heapsort(v, &mut is_less);
4945 // Comparison traits
extern {
    /// Calls implementation provided memcmp.
    ///
    /// Interprets the data as u8.
    ///
    /// Returns 0 for equal, < 0 for less than and > 0 for greater
    /// than.
    // FIXME(#32610): Return type should be c_int
    fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
4959 #[stable(feature = "rust1", since = "1.0.0")]
4960 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
4961 fn eq(&self, other: &[B]) -> bool {
4962 SlicePartialEq::equal(self, other)
4965 fn ne(&self, other: &[B]) -> bool {
4966 SlicePartialEq::not_equal(self, other)
#[stable(feature = "rust1", since = "1.0.0")]
// A slice is `Eq` whenever its element type is: the `PartialEq` impl above
// is then a total equivalence relation.
impl<T: Eq> Eq for [T] {}
4973 /// Implements comparison of vectors lexicographically.
4974 #[stable(feature = "rust1", since = "1.0.0")]
4975 impl<T: Ord> Ord for [T] {
4976 fn cmp(&self, other: &[T]) -> Ordering {
4977 SliceOrd::compare(self, other)
4981 /// Implements comparison of vectors lexicographically.
4982 #[stable(feature = "rust1", since = "1.0.0")]
4983 impl<T: PartialOrd> PartialOrd for [T] {
4984 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
4985 SlicePartialOrd::partial_compare(self, other)
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    // Provided in terms of `equal`; the specialized impls below only
    // override `equal`.
    fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
}
4997 // Generic slice equality
4998 impl<A, B> SlicePartialEq<B> for [A]
4999 where A: PartialEq<B>
5001 default fn equal(&self, other: &[B]) -> bool {
5002 if self.len() != other.len() {
5006 for i in 0..self.len() {
5007 if !self[i].eq(&other[i]) {
5016 // Use memcmp for bytewise equality when the types allow
5017 impl<A> SlicePartialEq<A> for [A]
5018 where A: PartialEq<A> + BytewiseEquality
5020 fn equal(&self, other: &[A]) -> bool {
5021 if self.len() != other.len() {
5024 if self.as_ptr() == other.as_ptr() {
5028 let size = mem::size_of_val(self);
5029 memcmp(self.as_ptr() as *const u8,
5030 other.as_ptr() as *const u8, size) == 0
5036 // intermediate trait for specialization of slice's PartialOrd
5037 trait SlicePartialOrd<B> {
5038 fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
5041 impl<A> SlicePartialOrd<A> for [A]
5044 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5045 let l = cmp::min(self.len(), other.len());
5047 // Slice to the loop iteration range to enable bound check
5048 // elimination in the compiler
5049 let lhs = &self[..l];
5050 let rhs = &other[..l];
5053 match lhs[i].partial_cmp(&rhs[i]) {
5054 Some(Ordering::Equal) => (),
5055 non_eq => return non_eq,
5059 self.len().partial_cmp(&other.len())
5063 impl<A> SlicePartialOrd<A> for [A]
5066 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5067 Some(SliceOrd::compare(self, other))
5072 // intermediate trait for specialization of slice's Ord
5074 fn compare(&self, other: &[B]) -> Ordering;
5077 impl<A> SliceOrd<A> for [A]
5080 default fn compare(&self, other: &[A]) -> Ordering {
5081 let l = cmp::min(self.len(), other.len());
5083 // Slice to the loop iteration range to enable bound check
5084 // elimination in the compiler
5085 let lhs = &self[..l];
5086 let rhs = &other[..l];
5089 match lhs[i].cmp(&rhs[i]) {
5090 Ordering::Equal => (),
5091 non_eq => return non_eq,
5095 self.len().cmp(&other.len())
5099 // memcmp compares a sequence of unsigned bytes lexicographically.
5100 // this matches the order we want for [u8], but no others (not even [i8]).
5101 impl SliceOrd<u8> for [u8] {
5103 fn compare(&self, other: &[u8]) -> Ordering {
5104 let order = unsafe {
5105 memcmp(self.as_ptr(), other.as_ptr(),
5106 cmp::min(self.len(), other.len()))
5109 self.len().cmp(&other.len())
5110 } else if order < 0 {
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
// Marker trait with no methods; implementations for the primitive types
// are generated by `impl_marker_for!` below.
trait BytewiseEquality { }
// Generates an empty marker-trait impl for each type in a space-separated
// list, e.g. `impl_marker_for!(Trait, u8 i8)`.
macro_rules! impl_marker_for {
    ($traitname:ident, $($ty:ty)*) => {
        $(
            impl $traitname for $ty { }
        )*
    }
}
// For these primitives, equality of values coincides with equality of their
// byte representation, so slice equality can use `memcmp`.
impl_marker_for!(BytewiseEquality,
                 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
5135 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5136 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
5139 fn may_have_side_effect() -> bool { false }
5143 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
5144 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
5145 &mut *self.ptr.add(i)
5147 fn may_have_side_effect() -> bool { false }
// Helper trait behind `<[T]>::contains`, allowing a `memchr` fast path for
// byte-sized element types.
trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
5154 impl<T> SliceContains for T where T: PartialEq {
5155 default fn slice_contains(&self, x: &[Self]) -> bool {
5156 x.iter().any(|y| *y == *self)
5160 impl SliceContains for u8 {
5161 fn slice_contains(&self, x: &[Self]) -> bool {
5162 memchr::memchr(*self, x).is_some()
5166 impl SliceContains for i8 {
5167 fn slice_contains(&self, x: &[Self]) -> bool {
5168 let byte = *self as u8;
5169 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5170 memchr::memchr(byte, bytes).is_some()