1 // Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Slice management and manipulation
13 //! For more details see [`std::slice`].
15 //! [`std::slice`]: ../../std/slice/index.html
17 #![stable(feature = "rust1", since = "1.0.0")]
19 // How this module is organized.
21 // The library infrastructure for slices is fairly messy. There's
22 // a lot of stuff defined here. Let's keep it clean.
24 // The layout of this file is thus:
26 // * Inherent methods. This is where most of the slice API resides.
27 // * Implementations of a few common traits with important slice ops.
28 // * Definitions of a bunch of iterators.
30 // * The `raw` and `bytes` submodules.
31 // * Boilerplate trait implementations.
33 use cmp::Ordering::{self, Less, Equal, Greater};
36 use intrinsics::assume;
39 use ops::{FnMut, Try, self};
41 use option::Option::{None, Some};
43 use result::Result::{Ok, Err};
46 use marker::{Copy, Send, Sync, Sized, self};
47 use iter_private::TrustedRandomAccess;
49 #[unstable(feature = "slice_internals", issue = "0",
50 reason = "exposed from core to be reused in std; use the memchr crate")]
51 /// Pure rust memchr implementation, taken from rust-memchr
58 union Repr<'a, T: 'a> {
60 rust_mut: &'a mut [T],
77 /// Returns the number of elements in the slice.
82 /// let a = [1, 2, 3];
83 /// assert_eq!(a.len(), 3);
85 #[stable(feature = "rust1", since = "1.0.0")]
87 #[rustc_const_unstable(feature = "const_slice_len")]
88 pub const fn len(&self) -> usize {
90 Repr { rust: self }.raw.len
94 /// Returns `true` if the slice has a length of 0.
99 /// let a = [1, 2, 3];
100 /// assert!(!a.is_empty());
102 #[stable(feature = "rust1", since = "1.0.0")]
104 #[rustc_const_unstable(feature = "const_slice_len")]
105 pub const fn is_empty(&self) -> bool {
109 /// Returns the first element of the slice, or `None` if it is empty.
114 /// let v = [10, 40, 30];
115 /// assert_eq!(Some(&10), v.first());
117 /// let w: &[i32] = &[];
118 /// assert_eq!(None, w.first());
120 #[stable(feature = "rust1", since = "1.0.0")]
122 pub fn first(&self) -> Option<&T> {
126 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
131 /// let x = &mut [0, 1, 2];
133 /// if let Some(first) = x.first_mut() {
136 /// assert_eq!(x, &[5, 1, 2]);
138 #[stable(feature = "rust1", since = "1.0.0")]
140 pub fn first_mut(&mut self) -> Option<&mut T> {
144 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
149 /// let x = &[0, 1, 2];
151 /// if let Some((first, elements)) = x.split_first() {
152 /// assert_eq!(first, &0);
153 /// assert_eq!(elements, &[1, 2]);
156 #[stable(feature = "slice_splits", since = "1.5.0")]
158 pub fn split_first(&self) -> Option<(&T, &[T])> {
// Head is `self[0]`, tail is everything after it; an empty slice has no head.
159 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
162 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
167 /// let x = &mut [0, 1, 2];
169 /// if let Some((first, elements)) = x.split_first_mut() {
174 /// assert_eq!(x, &[3, 4, 5]);
176 #[stable(feature = "slice_splits", since = "1.5.0")]
178 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
179 if self.is_empty() { None } else {
// `split_at_mut(1)` yields two disjoint mutable borrows: `[0..1]` and `[1..]`,
// so both halves can be handed out mutably at once.
180 let split = self.split_at_mut(1);
181 Some((&mut split.0[0], split.1))
185 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
190 /// let x = &[0, 1, 2];
192 /// if let Some((last, elements)) = x.split_last() {
193 /// assert_eq!(last, &2);
194 /// assert_eq!(elements, &[0, 1]);
197 #[stable(feature = "slice_splits", since = "1.5.0")]
199 pub fn split_last(&self) -> Option<(&T, &[T])> {
200 let len = self.len();
// Last element plus the prefix before it; `None` for the empty slice.
201 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
204 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
209 /// let x = &mut [0, 1, 2];
211 /// if let Some((last, elements)) = x.split_last_mut() {
216 /// assert_eq!(x, &[4, 5, 3]);
218 #[stable(feature = "slice_splits", since = "1.5.0")]
220 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
221 let len = self.len();
222 if len == 0 { None } else {
// Split just before the last element: `split.1` is the one-element tail,
// `split.0` is the (disjoint, mutable) rest of the slice.
223 let split = self.split_at_mut(len - 1);
224 Some((&mut split.1[0], split.0))
229 /// Returns the last element of the slice, or `None` if it is empty.
234 /// let v = [10, 40, 30];
235 /// assert_eq!(Some(&30), v.last());
237 /// let w: &[i32] = &[];
238 /// assert_eq!(None, w.last());
240 #[stable(feature = "rust1", since = "1.0.0")]
242 pub fn last(&self) -> Option<&T> {
// `checked_sub` is `None` for an empty slice, short-circuiting via `?`.
243 let last_idx = self.len().checked_sub(1)?;
247 /// Returns a mutable reference to the last item in the slice.
252 /// let x = &mut [0, 1, 2];
254 /// if let Some(last) = x.last_mut() {
257 /// assert_eq!(x, &[0, 1, 10]);
259 #[stable(feature = "rust1", since = "1.0.0")]
261 pub fn last_mut(&mut self) -> Option<&mut T> {
// `checked_sub` is `None` for an empty slice, short-circuiting via `?`;
// otherwise `last_idx` is in bounds and `get_mut` returns `Some`.
262 let last_idx = self.len().checked_sub(1)?;
263 self.get_mut(last_idx)
266 /// Returns a reference to an element or subslice depending on the type of
269 /// - If given a position, returns a reference to the element at that
270 /// position or `None` if out of bounds.
271 /// - If given a range, returns the subslice corresponding to that range,
272 /// or `None` if out of bounds.
277 /// let v = [10, 40, 30];
278 /// assert_eq!(Some(&40), v.get(1));
279 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
280 /// assert_eq!(None, v.get(3));
281 /// assert_eq!(None, v.get(0..4));
283 #[stable(feature = "rust1", since = "1.0.0")]
285 pub fn get<I>(&self, index: I) -> Option<&I::Output>
286 where I: SliceIndex<Self>
291 /// Returns a mutable reference to an element or subslice depending on the
292 /// type of index (see [`get`]) or `None` if the index is out of bounds.
294 /// [`get`]: #method.get
299 /// let x = &mut [0, 1, 2];
301 /// if let Some(elem) = x.get_mut(1) {
304 /// assert_eq!(x, &[0, 42, 2]);
306 #[stable(feature = "rust1", since = "1.0.0")]
308 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
309 where I: SliceIndex<Self>
314 /// Returns a reference to an element or subslice, without doing bounds
317 /// This is generally not recommended, use with caution! For a safe
318 /// alternative see [`get`].
320 /// [`get`]: #method.get
325 /// let x = &[1, 2, 4];
328 /// assert_eq!(x.get_unchecked(1), &2);
331 #[stable(feature = "rust1", since = "1.0.0")]
333 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
334 where I: SliceIndex<Self>
336 index.get_unchecked(self)
339 /// Returns a mutable reference to an element or subslice, without doing
342 /// This is generally not recommended, use with caution! For a safe
343 /// alternative see [`get_mut`].
345 /// [`get_mut`]: #method.get_mut
350 /// let x = &mut [1, 2, 4];
353 /// let elem = x.get_unchecked_mut(1);
356 /// assert_eq!(x, &[1, 13, 4]);
358 #[stable(feature = "rust1", since = "1.0.0")]
360 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
361 where I: SliceIndex<Self>
363 index.get_unchecked_mut(self)
366 /// Returns a raw pointer to the slice's buffer.
368 /// The caller must ensure that the slice outlives the pointer this
369 /// function returns, or else it will end up pointing to garbage.
371 /// Modifying the container referenced by this slice may cause its buffer
372 /// to be reallocated, which would also make any pointers to it invalid.
377 /// let x = &[1, 2, 4];
378 /// let x_ptr = x.as_ptr();
381 /// for i in 0..x.len() {
382 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
386 #[stable(feature = "rust1", since = "1.0.0")]
388 #[rustc_const_unstable(feature = "const_slice_as_ptr")]
389 pub const fn as_ptr(&self) -> *const T {
390 self as *const [T] as *const T
393 /// Returns an unsafe mutable pointer to the slice's buffer.
395 /// The caller must ensure that the slice outlives the pointer this
396 /// function returns, or else it will end up pointing to garbage.
398 /// Modifying the container referenced by this slice may cause its buffer
399 /// to be reallocated, which would also make any pointers to it invalid.
404 /// let x = &mut [1, 2, 4];
405 /// let x_ptr = x.as_mut_ptr();
408 /// for i in 0..x.len() {
409 /// *x_ptr.add(i) += 2;
412 /// assert_eq!(x, &[3, 4, 6]);
414 #[stable(feature = "rust1", since = "1.0.0")]
416 pub fn as_mut_ptr(&mut self) -> *mut T {
417 self as *mut [T] as *mut T
420 /// Swaps two elements in the slice.
424 /// * a - The index of the first element
425 /// * b - The index of the second element
429 /// Panics if `a` or `b` are out of bounds.
434 /// let mut v = ["a", "b", "c", "d"];
436 /// assert!(v == ["a", "d", "c", "b"]);
438 #[stable(feature = "rust1", since = "1.0.0")]
440 pub fn swap(&mut self, a: usize, b: usize) {
442 // Can't take two mutable loans from one vector, so instead just cast
443 // them to their raw pointers to do the swap
444 let pa: *mut T = &mut self[a];
445 let pb: *mut T = &mut self[b];
450 /// Reverses the order of elements in the slice, in place.
455 /// let mut v = [1, 2, 3];
457 /// assert!(v == [3, 2, 1]);
459 #[stable(feature = "rust1", since = "1.0.0")]
461 pub fn reverse(&mut self) {
462 let mut i: usize = 0;
465 // For very small types, all the individual reads in the normal
466 // path perform poorly. We can do better, given efficient unaligned
467 // load/store, by loading a larger chunk and reversing a register.
469 // Ideally LLVM would do this for us, as it knows better than we do
470 // whether unaligned reads are efficient (since that changes between
471 // different ARM versions, for example) and what the best chunk size
472 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
473 // the loop, so we need to do this ourselves. (Hypothesis: reverse
474 // is troublesome because the sides can be aligned differently --
475 // will be, when the length is odd -- so there's no way of emitting
476 // pre- and postludes to use fully-aligned SIMD in the middle.)
479 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
481 if fast_unaligned && mem::size_of::<T>() == 1 {
482 // Use the llvm.bswap intrinsic to reverse u8s in a usize
483 let chunk = mem::size_of::<usize>();
484 while i + chunk - 1 < ln / 2 {
486 let pa: *mut T = self.get_unchecked_mut(i);
487 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
488 let va = ptr::read_unaligned(pa as *mut usize);
489 let vb = ptr::read_unaligned(pb as *mut usize);
490 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
491 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
497 if fast_unaligned && mem::size_of::<T>() == 2 {
498 // Use rotate-by-16 to reverse u16s in a u32
499 let chunk = mem::size_of::<u32>() / 2;
500 while i + chunk - 1 < ln / 2 {
502 let pa: *mut T = self.get_unchecked_mut(i);
503 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
504 let va = ptr::read_unaligned(pa as *mut u32);
505 let vb = ptr::read_unaligned(pb as *mut u32);
506 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
507 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
514 // Unsafe swap to avoid the bounds check in safe swap.
516 let pa: *mut T = self.get_unchecked_mut(i);
517 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
524 /// Returns an iterator over the slice.
529 /// let x = &[1, 2, 4];
530 /// let mut iterator = x.iter();
532 /// assert_eq!(iterator.next(), Some(&1));
533 /// assert_eq!(iterator.next(), Some(&2));
534 /// assert_eq!(iterator.next(), Some(&4));
535 /// assert_eq!(iterator.next(), None);
537 #[stable(feature = "rust1", since = "1.0.0")]
539 pub fn iter(&self) -> Iter<T> {
541 let ptr = self.as_ptr();
542 assume(!ptr.is_null());
544 let end = if mem::size_of::<T>() == 0 {
545 (ptr as *const u8).wrapping_add(self.len()) as *const T
553 _marker: marker::PhantomData
558 /// Returns an iterator that allows modifying each value.
563 /// let x = &mut [1, 2, 4];
564 /// for elem in x.iter_mut() {
567 /// assert_eq!(x, &[3, 4, 6]);
569 #[stable(feature = "rust1", since = "1.0.0")]
571 pub fn iter_mut(&mut self) -> IterMut<T> {
573 let ptr = self.as_mut_ptr();
574 assume(!ptr.is_null());
576 let end = if mem::size_of::<T>() == 0 {
577 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
585 _marker: marker::PhantomData
590 /// Returns an iterator over all contiguous windows of length
591 /// `size`. The windows overlap. If the slice is shorter than
592 /// `size`, the iterator returns no values.
596 /// Panics if `size` is 0.
601 /// let slice = ['r', 'u', 's', 't'];
602 /// let mut iter = slice.windows(2);
603 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
604 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
605 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
606 /// assert!(iter.next().is_none());
609 /// If the slice is shorter than `size`:
612 /// let slice = ['f', 'o', 'o'];
613 /// let mut iter = slice.windows(4);
614 /// assert!(iter.next().is_none());
616 #[stable(feature = "rust1", since = "1.0.0")]
618 pub fn windows(&self, size: usize) -> Windows<T> {
620 Windows { v: self, size }
623 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
624 /// beginning of the slice.
626 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
627 /// slice, then the last chunk will not have length `chunk_size`.
629 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
630 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
631 /// slice.
635 /// Panics if `chunk_size` is 0.
640 /// let slice = ['l', 'o', 'r', 'e', 'm'];
641 /// let mut iter = slice.chunks(2);
642 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
643 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
644 /// assert_eq!(iter.next().unwrap(), &['m']);
645 /// assert!(iter.next().is_none());
648 /// [`chunks_exact`]: #method.chunks_exact
649 /// [`rchunks`]: #method.rchunks
650 #[stable(feature = "rust1", since = "1.0.0")]
652 pub fn chunks(&self, chunk_size: usize) -> Chunks<T> {
653 assert!(chunk_size != 0);
654 Chunks { v: self, chunk_size }
657 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
658 /// beginning of the slice.
660 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
661 /// length of the slice, then the last chunk will not have length `chunk_size`.
663 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
664 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
665 /// the end of the slice.
669 /// Panics if `chunk_size` is 0.
674 /// let v = &mut [0, 0, 0, 0, 0];
675 /// let mut count = 1;
677 /// for chunk in v.chunks_mut(2) {
678 /// for elem in chunk.iter_mut() {
683 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
686 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
687 /// [`rchunks_mut`]: #method.rchunks_mut
688 #[stable(feature = "rust1", since = "1.0.0")]
690 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
691 assert!(chunk_size != 0);
692 ChunksMut { v: self, chunk_size }
695 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
696 /// beginning of the slice.
698 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
699 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
700 /// from the `remainder` function of the iterator.
702 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
703 /// resulting code better than in the case of [`chunks`].
705 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
706 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
711 /// Panics if `chunk_size` is 0.
716 /// #![feature(chunks_exact)]
718 /// let slice = ['l', 'o', 'r', 'e', 'm'];
719 /// let mut iter = slice.chunks_exact(2);
720 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
721 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
722 /// assert!(iter.next().is_none());
723 /// assert_eq!(iter.remainder(), &['m']);
726 /// [`chunks`]: #method.chunks
727 /// [`rchunks_exact`]: #method.rchunks_exact
728 #[unstable(feature = "chunks_exact", issue = "47115")]
730 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<T> {
731 assert!(chunk_size != 0);
// Split off the trailing remainder (`len % chunk_size` elements) up front
// so the iterator proper only ever sees whole chunks.
732 let rem = self.len() % chunk_size;
733 let len = self.len() - rem;
734 let (fst, snd) = self.split_at(len);
735 ChunksExact { v: fst, rem: snd, chunk_size }
738 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
739 /// beginning of the slice.
741 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
742 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
743 /// retrieved from the `into_remainder` function of the iterator.
745 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
746 /// resulting code better than in the case of [`chunks_mut`].
748 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
749 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
750 /// the slice.
754 /// Panics if `chunk_size` is 0.
759 /// #![feature(chunks_exact)]
761 /// let v = &mut [0, 0, 0, 0, 0];
762 /// let mut count = 1;
764 /// for chunk in v.chunks_exact_mut(2) {
765 /// for elem in chunk.iter_mut() {
770 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
773 /// [`chunks_mut`]: #method.chunks_mut
774 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
775 #[unstable(feature = "chunks_exact", issue = "47115")]
777 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<T> {
778 assert!(chunk_size != 0);
// Split off the trailing remainder (`len % chunk_size` elements) up front
// so the iterator proper only ever sees whole chunks; the remainder is
// recoverable via `into_remainder`.
779 let rem = self.len() % chunk_size;
780 let len = self.len() - rem;
781 let (fst, snd) = self.split_at_mut(len);
782 ChunksExactMut { v: fst, rem: snd, chunk_size }
785 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
788 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
789 /// slice, then the last chunk will not have length `chunk_size`.
791 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
792 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
797 /// Panics if `chunk_size` is 0.
802 /// #![feature(rchunks)]
804 /// let slice = ['l', 'o', 'r', 'e', 'm'];
805 /// let mut iter = slice.rchunks(2);
806 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
807 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
808 /// assert_eq!(iter.next().unwrap(), &['l']);
809 /// assert!(iter.next().is_none());
812 /// [`rchunks_exact`]: #method.rchunks_exact
813 /// [`chunks`]: #method.chunks
814 #[unstable(feature = "rchunks", issue = "55177")]
816 pub fn rchunks(&self, chunk_size: usize) -> RChunks<T> {
817 assert!(chunk_size != 0);
818 RChunks { v: self, chunk_size }
821 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
824 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
825 /// length of the slice, then the last chunk will not have length `chunk_size`.
827 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
828 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
829 /// beginning of the slice.
833 /// Panics if `chunk_size` is 0.
838 /// #![feature(rchunks)]
840 /// let v = &mut [0, 0, 0, 0, 0];
841 /// let mut count = 1;
843 /// for chunk in v.rchunks_mut(2) {
844 /// for elem in chunk.iter_mut() {
849 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
852 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
853 /// [`chunks_mut`]: #method.chunks_mut
854 #[unstable(feature = "rchunks", issue = "55177")]
856 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<T> {
857 assert!(chunk_size != 0);
858 RChunksMut { v: self, chunk_size }
861 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
862 /// end of the slice.
864 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
865 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
866 /// from the `remainder` function of the iterator.
868 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
869 /// resulting code better than in the case of [`rchunks`].
871 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
872 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
873 /// slice.
877 /// Panics if `chunk_size` is 0.
882 /// #![feature(rchunks)]
884 /// let slice = ['l', 'o', 'r', 'e', 'm'];
885 /// let mut iter = slice.rchunks_exact(2);
886 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
887 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
888 /// assert!(iter.next().is_none());
889 /// assert_eq!(iter.remainder(), &['l']);
892 /// [`rchunks`]: #method.rchunks
893 /// [`chunks_exact`]: #method.chunks_exact
894 #[unstable(feature = "rchunks", issue = "55177")]
896 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<T> {
897 assert!(chunk_size != 0);
// Iteration runs back-to-front, so the remainder sits at the *front* of
// the slice: `fst` (first `rem` elements) is the leftover, `snd` the chunks.
898 let rem = self.len() % chunk_size;
899 let (fst, snd) = self.split_at(rem);
900 RChunksExact { v: snd, rem: fst, chunk_size }
903 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
906 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
907 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
908 /// retrieved from the `into_remainder` function of the iterator.
910 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
911 /// resulting code better than in the case of [`rchunks_mut`].
913 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
914 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
915 /// of the slice.
919 /// Panics if `chunk_size` is 0.
924 /// #![feature(rchunks)]
926 /// let v = &mut [0, 0, 0, 0, 0];
927 /// let mut count = 1;
929 /// for chunk in v.rchunks_exact_mut(2) {
930 /// for elem in chunk.iter_mut() {
935 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
938 /// [`rchunks_mut`]: #method.rchunks_mut
939 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
940 #[unstable(feature = "rchunks", issue = "55177")]
942 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<T> {
943 assert!(chunk_size != 0);
// Iteration runs back-to-front, so the remainder sits at the *front* of
// the slice: `fst` (first `rem` elements) is the leftover, `snd` the chunks.
944 let rem = self.len() % chunk_size;
945 let (fst, snd) = self.split_at_mut(rem);
946 RChunksExactMut { v: snd, rem: fst, chunk_size }
949 /// Divides one slice into two at an index.
951 /// The first will contain all indices from `[0, mid)` (excluding
952 /// the index `mid` itself) and the second will contain all
953 /// indices from `[mid, len)` (excluding the index `len` itself).
957 /// Panics if `mid > len`.
962 /// let v = [1, 2, 3, 4, 5, 6];
965 /// let (left, right) = v.split_at(0);
966 /// assert!(left == []);
967 /// assert!(right == [1, 2, 3, 4, 5, 6]);
971 /// let (left, right) = v.split_at(2);
972 /// assert!(left == [1, 2]);
973 /// assert!(right == [3, 4, 5, 6]);
977 /// let (left, right) = v.split_at(6);
978 /// assert!(left == [1, 2, 3, 4, 5, 6]);
979 /// assert!(right == []);
982 #[stable(feature = "rust1", since = "1.0.0")]
984 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
985 (&self[..mid], &self[mid..])
988 /// Divides one mutable slice into two at an index.
990 /// The first will contain all indices from `[0, mid)` (excluding
991 /// the index `mid` itself) and the second will contain all
992 /// indices from `[mid, len)` (excluding the index `len` itself).
996 /// Panics if `mid > len`.
1001 /// let mut v = [1, 0, 3, 0, 5, 6];
1002 /// // scoped to restrict the lifetime of the borrows
1004 /// let (left, right) = v.split_at_mut(2);
1005 /// assert!(left == [1, 0]);
1006 /// assert!(right == [3, 0, 5, 6]);
1010 /// assert!(v == [1, 2, 3, 4, 5, 6]);
1012 #[stable(feature = "rust1", since = "1.0.0")]
1014 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
1015 let len = self.len();
1016 let ptr = self.as_mut_ptr();
// Bounds check up front: the raw-pointer reconstruction below is sound only
// because `mid <= len` guarantees the two halves are disjoint and in-bounds.
1019 assert!(mid <= len);
1021 (from_raw_parts_mut(ptr, mid),
1022 from_raw_parts_mut(ptr.add(mid), len - mid))
1026 /// Returns an iterator over subslices separated by elements that match
1027 /// `pred`. The matched element is not contained in the subslices.
1032 /// let slice = [10, 40, 33, 20];
1033 /// let mut iter = slice.split(|num| num % 3 == 0);
1035 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1036 /// assert_eq!(iter.next().unwrap(), &[20]);
1037 /// assert!(iter.next().is_none());
1040 /// If the first element is matched, an empty slice will be the first item
1041 /// returned by the iterator. Similarly, if the last element in the slice
1042 /// is matched, an empty slice will be the last item returned by the
1046 /// let slice = [10, 40, 33];
1047 /// let mut iter = slice.split(|num| num % 3 == 0);
1049 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1050 /// assert_eq!(iter.next().unwrap(), &[]);
1051 /// assert!(iter.next().is_none());
1054 /// If two matched elements are directly adjacent, an empty slice will be
1055 /// present between them:
1058 /// let slice = [10, 6, 33, 20];
1059 /// let mut iter = slice.split(|num| num % 3 == 0);
1061 /// assert_eq!(iter.next().unwrap(), &[10]);
1062 /// assert_eq!(iter.next().unwrap(), &[]);
1063 /// assert_eq!(iter.next().unwrap(), &[20]);
1064 /// assert!(iter.next().is_none());
1066 #[stable(feature = "rust1", since = "1.0.0")]
1068 pub fn split<F>(&self, pred: F) -> Split<T, F>
1069 where F: FnMut(&T) -> bool
1078 /// Returns an iterator over mutable subslices separated by elements that
1079 /// match `pred`. The matched element is not contained in the subslices.
1084 /// let mut v = [10, 40, 30, 20, 60, 50];
1086 /// for group in v.split_mut(|num| *num % 3 == 0) {
1089 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1091 #[stable(feature = "rust1", since = "1.0.0")]
1093 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<T, F>
1094 where F: FnMut(&T) -> bool
1096 SplitMut { v: self, pred, finished: false }
1099 /// Returns an iterator over subslices separated by elements that match
1100 /// `pred`, starting at the end of the slice and working backwards.
1101 /// The matched element is not contained in the subslices.
1106 /// let slice = [11, 22, 33, 0, 44, 55];
1107 /// let mut iter = slice.rsplit(|num| *num == 0);
1109 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1110 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1111 /// assert_eq!(iter.next(), None);
1114 /// As with `split()`, if the first or last element is matched, an empty
1115 /// slice will be the first (or last) item returned by the iterator.
1118 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1119 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1120 /// assert_eq!(it.next().unwrap(), &[]);
1121 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1122 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1123 /// assert_eq!(it.next().unwrap(), &[]);
1124 /// assert_eq!(it.next(), None);
1126 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1128 pub fn rsplit<F>(&self, pred: F) -> RSplit<T, F>
1129 where F: FnMut(&T) -> bool
1131 RSplit { inner: self.split(pred) }
1134 /// Returns an iterator over mutable subslices separated by elements that
1135 /// match `pred`, starting at the end of the slice and working
1136 /// backwards. The matched element is not contained in the subslices.
1141 /// let mut v = [100, 400, 300, 200, 600, 500];
1143 /// let mut count = 0;
1144 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1146 /// group[0] = count;
1148 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1151 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1153 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<T, F>
1154 where F: FnMut(&T) -> bool
1156 RSplitMut { inner: self.split_mut(pred) }
1159 /// Returns an iterator over subslices separated by elements that match
1160 /// `pred`, limited to returning at most `n` items. The matched element is
1161 /// not contained in the subslices.
1163 /// The last element returned, if any, will contain the remainder of the
1168 /// Print the slice split once by numbers divisible by 3 (i.e. `[10, 40]`,
1169 /// `[20, 60, 50]`):
1172 /// let v = [10, 40, 30, 20, 60, 50];
1174 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1175 /// println!("{:?}", group);
1178 #[stable(feature = "rust1", since = "1.0.0")]
1180 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<T, F>
1181 where F: FnMut(&T) -> bool
1184 inner: GenericSplitN {
1185 iter: self.split(pred),
1191 /// Returns an iterator over subslices separated by elements that match
1192 /// `pred`, limited to returning at most `n` items. The matched element is
1193 /// not contained in the subslices.
1195 /// The last element returned, if any, will contain the remainder of the
1201 /// let mut v = [10, 40, 30, 20, 60, 50];
1203 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1206 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1208 #[stable(feature = "rust1", since = "1.0.0")]
1210 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<T, F>
1211 where F: FnMut(&T) -> bool
1214 inner: GenericSplitN {
1215 iter: self.split_mut(pred),
1221 /// Returns an iterator over subslices separated by elements that match
1222 /// `pred` limited to returning at most `n` items. This starts at the end of
1223 /// the slice and works backwards. The matched element is not contained in
1226 /// The last element returned, if any, will contain the remainder of the
1231 /// Print the slice split once, starting from the end, by numbers divisible
1232 /// by 3 (i.e. `[50]`, `[10, 40, 30, 20]`):
1235 /// let v = [10, 40, 30, 20, 60, 50];
1237 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1238 /// println!("{:?}", group);
1241 #[stable(feature = "rust1", since = "1.0.0")]
1243 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<T, F>
1244 where F: FnMut(&T) -> bool
1247 inner: GenericSplitN {
1248 iter: self.rsplit(pred),
1254 /// Returns an iterator over subslices separated by elements that match
1255 /// `pred` limited to returning at most `n` items. This starts at the end of
1256 /// the slice and works backwards. The matched element is not contained in
1259 /// The last element returned, if any, will contain the remainder of the
1265 /// let mut s = [10, 40, 30, 20, 60, 50];
1267 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1270 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1272 #[stable(feature = "rust1", since = "1.0.0")]
1274 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<T, F>
1275 where F: FnMut(&T) -> bool
1278 inner: GenericSplitN {
1279 iter: self.rsplit_mut(pred),
1285 /// Returns `true` if the slice contains an element with the given value.
1290 /// let v = [10, 40, 30];
1291 /// assert!(v.contains(&30));
1292 /// assert!(!v.contains(&50));
1294 #[stable(feature = "rust1", since = "1.0.0")]
1295 pub fn contains(&self, x: &T) -> bool
1298 x.slice_contains(self)
1301 /// Returns `true` if `needle` is a prefix of the slice.
1306 /// let v = [10, 40, 30];
1307 /// assert!(v.starts_with(&[10]));
1308 /// assert!(v.starts_with(&[10, 40]));
1309 /// assert!(!v.starts_with(&[50]));
1310 /// assert!(!v.starts_with(&[10, 50]));
1313 /// Always returns `true` if `needle` is an empty slice:
1316 /// let v = &[10, 40, 30];
1317 /// assert!(v.starts_with(&[]));
1318 /// let v: &[u8] = &[];
1319 /// assert!(v.starts_with(&[]));
1321 #[stable(feature = "rust1", since = "1.0.0")]
1322 pub fn starts_with(&self, needle: &[T]) -> bool
1325 let n = needle.len();
1326 self.len() >= n && needle == &self[..n]
1329 /// Returns `true` if `needle` is a suffix of the slice.
1334 /// let v = [10, 40, 30];
1335 /// assert!(v.ends_with(&[30]));
1336 /// assert!(v.ends_with(&[40, 30]));
1337 /// assert!(!v.ends_with(&[50]));
1338 /// assert!(!v.ends_with(&[50, 30]));
1341 /// Always returns `true` if `needle` is an empty slice:
1344 /// let v = &[10, 40, 30];
1345 /// assert!(v.ends_with(&[]));
1346 /// let v: &[u8] = &[];
1347 /// assert!(v.ends_with(&[]));
1349 #[stable(feature = "rust1", since = "1.0.0")]
1350 pub fn ends_with(&self, needle: &[T]) -> bool
1353 let (m, n) = (self.len(), needle.len());
1354 m >= n && needle == &self[m-n..]
1357 /// Binary searches this sorted slice for a given element.
1359 /// If the value is found then [`Result::Ok`] is returned, containing the
1360 /// index of the matching element. If there are multiple matches, then any
1361 /// one of the matches could be returned. If the value is not found then
1362 /// [`Result::Err`] is returned, containing the index where a matching
1363 /// element could be inserted while maintaining sorted order.
1367 /// Looks up a series of four elements. The first is found, with a
1368 /// uniquely determined position; the second and third are not
1369 /// found; the fourth could match any position in `[1, 4]`.
1372 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1374 /// assert_eq!(s.binary_search(&13), Ok(9));
1375 /// assert_eq!(s.binary_search(&4), Err(7));
1376 /// assert_eq!(s.binary_search(&100), Err(13));
1377 /// let r = s.binary_search(&1);
1378 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1380 #[stable(feature = "rust1", since = "1.0.0")]
1381 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
1384 self.binary_search_by(|p| p.cmp(x))
1387 /// Binary searches this sorted slice with a comparator function.
1389 /// The comparator function should implement an order consistent
1390 /// with the sort order of the underlying slice, returning an
1391 /// order code that indicates whether its argument is `Less`,
1392 /// `Equal` or `Greater` the desired target.
1394 /// If the value is found then [`Result::Ok`] is returned, containing the
1395 /// index of the matching element. If there are multiple matches, then any
1396 /// one of the matches could be returned. If the value is not found then
1397 /// [`Result::Err`] is returned, containing the index where a matching
1398 /// element could be inserted while maintaining sorted order.
1402 /// Looks up a series of four elements. The first is found, with a
1403 /// uniquely determined position; the second and third are not
1404 /// found; the fourth could match any position in `[1, 4]`.
1407 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1410 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1412 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1414 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1416 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1417 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1419 #[stable(feature = "rust1", since = "1.0.0")]
1421 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1422 where F: FnMut(&'a T) -> Ordering
1425 let mut size = s.len();
1429 let mut base = 0usize;
1431 let half = size / 2;
1432 let mid = base + half;
1433 // mid is always in [0, size), that means mid is >= 0 and < size.
1434 // mid >= 0: by definition
1435 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
1436 let cmp = f(unsafe { s.get_unchecked(mid) });
1437 base = if cmp == Greater { base } else { mid };
1440 // base is always in [0, size) because base <= mid.
1441 let cmp = f(unsafe { s.get_unchecked(base) });
1442 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1446 /// Binary searches this sorted slice with a key extraction function.
1448 /// Assumes that the slice is sorted by the key, for instance with
1449 /// [`sort_by_key`] using the same key extraction function.
1451 /// If the value is found then [`Result::Ok`] is returned, containing the
1452 /// index of the matching element. If there are multiple matches, then any
1453 /// one of the matches could be returned. If the value is not found then
1454 /// [`Result::Err`] is returned, containing the index where a matching
1455 /// element could be inserted while maintaining sorted order.
1457 /// [`sort_by_key`]: #method.sort_by_key
1461 /// Looks up a series of four elements in a slice of pairs sorted by
1462 /// their second elements. The first is found, with a uniquely
1463 /// determined position; the second and third are not found; the
1464 /// fourth could match any position in `[1, 4]`.
1467 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1468 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1469 /// (1, 21), (2, 34), (4, 55)];
1471 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1472 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1473 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1474 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1475 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1477 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1479 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1480 where F: FnMut(&'a T) -> B,
1483 self.binary_search_by(|k| f(k).cmp(b))
1486 /// Sorts the slice, but may not preserve the order of equal elements.
1488 /// This sort is unstable (i.e. may reorder equal elements), in-place (i.e. does not allocate),
1489 /// and `O(n log n)` worst-case.
1491 /// # Current implementation
1493 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1494 /// which combines the fast average case of randomized quicksort with the fast worst case of
1495 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1496 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1497 /// deterministic behavior.
1499 /// It is typically faster than stable sorting, except in a few special cases, e.g. when the
1500 /// slice consists of several concatenated sorted sequences.
1505 /// let mut v = [-5, 4, 1, -3, 2];
1507 /// v.sort_unstable();
1508 /// assert!(v == [-5, -3, 1, 2, 4]);
1511 /// [pdqsort]: https://github.com/orlp/pdqsort
1512 #[stable(feature = "sort_unstable", since = "1.20.0")]
1514 pub fn sort_unstable(&mut self)
1517 sort::quicksort(self, |a, b| a.lt(b));
1520 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1523 /// This sort is unstable (i.e. may reorder equal elements), in-place (i.e. does not allocate),
1524 /// and `O(n log n)` worst-case.
1526 /// # Current implementation
1528 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1529 /// which combines the fast average case of randomized quicksort with the fast worst case of
1530 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1531 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1532 /// deterministic behavior.
1534 /// It is typically faster than stable sorting, except in a few special cases, e.g. when the
1535 /// slice consists of several concatenated sorted sequences.
1540 /// let mut v = [5, 4, 1, 3, 2];
1541 /// v.sort_unstable_by(|a, b| a.cmp(b));
1542 /// assert!(v == [1, 2, 3, 4, 5]);
1544 /// // reverse sorting
1545 /// v.sort_unstable_by(|a, b| b.cmp(a));
1546 /// assert!(v == [5, 4, 3, 2, 1]);
1549 /// [pdqsort]: https://github.com/orlp/pdqsort
1550 #[stable(feature = "sort_unstable", since = "1.20.0")]
1552 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1553 where F: FnMut(&T, &T) -> Ordering
1555 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1558 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1561 /// This sort is unstable (i.e. may reorder equal elements), in-place (i.e. does not allocate),
1562 /// and `O(m n log(m n))` worst-case, where the key function is `O(m)`.
1564 /// # Current implementation
1566 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1567 /// which combines the fast average case of randomized quicksort with the fast worst case of
1568 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1569 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1570 /// deterministic behavior.
1575 /// let mut v = [-5i32, 4, 1, -3, 2];
1577 /// v.sort_unstable_by_key(|k| k.abs());
1578 /// assert!(v == [1, 2, -3, 4, -5]);
1581 /// [pdqsort]: https://github.com/orlp/pdqsort
1582 #[stable(feature = "sort_unstable", since = "1.20.0")]
1584 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1585 where F: FnMut(&T) -> K, K: Ord
1587 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1590 /// Moves all consecutive repeated elements to the end of the slice according to the
1591 /// [`PartialEq`] trait implementation.
1593 /// Returns two slices. The first contains no consecutive repeated elements.
1594 /// The second contains all the duplicates in no specified order.
1596 /// If the slice is sorted, the first returned slice contains no duplicates.
1601 /// #![feature(slice_partition_dedup)]
1603 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1605 /// let (dedup, duplicates) = slice.partition_dedup();
1607 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1608 /// assert_eq!(duplicates, [2, 3, 1]);
1610 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1612 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
1615 self.partition_dedup_by(|a, b| a == b)
1618 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1619 /// a given equality relation.
1621 /// Returns two slices. The first contains no consecutive repeated elements.
1622 /// The second contains all the duplicates in no specified order.
1624 /// The `same_bucket` function is passed references to two elements from the slice and
1625 /// must determine if the elements compare equal. The elements are passed in opposite order
1626 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1627 /// at the end of the slice.
1629 /// If the slice is sorted, the first returned slice contains no duplicates.
1634 /// #![feature(slice_partition_dedup)]
1636 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1638 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1640 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1641 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1643 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1645 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1646 where F: FnMut(&mut T, &mut T) -> bool
1648 // Although we have a mutable reference to `self`, we cannot make
1649 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1650 // must ensure that the slice is in a valid state at all times.
1652 // The way that we handle this is by using swaps; we iterate
1653 // over all the elements, swapping as we go so that at the end
1654 // the elements we wish to keep are in the front, and those we
1655 // wish to reject are at the back. We can then split the slice.
1656 // This operation is still O(n).
1658 // Example: We start in this state, where `r` represents "next
1659 // read" and `w` represents "next_write`.
1662 // +---+---+---+---+---+---+
1663 // | 0 | 1 | 1 | 2 | 3 | 3 |
1664 // +---+---+---+---+---+---+
1667 // Comparing self[r] against self[w-1], this is not a duplicate, so
1668 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1669 // r and w, leaving us with:
1672 // +---+---+---+---+---+---+
1673 // | 0 | 1 | 1 | 2 | 3 | 3 |
1674 // +---+---+---+---+---+---+
1677 // Comparing self[r] against self[w-1], this value is a duplicate,
1678 // so we increment `r` but leave everything else unchanged:
1681 // +---+---+---+---+---+---+
1682 // | 0 | 1 | 1 | 2 | 3 | 3 |
1683 // +---+---+---+---+---+---+
1686 // Comparing self[r] against self[w-1], this is not a duplicate,
1687 // so swap self[r] and self[w] and advance r and w:
1690 // +---+---+---+---+---+---+
1691 // | 0 | 1 | 2 | 1 | 3 | 3 |
1692 // +---+---+---+---+---+---+
1695 // Not a duplicate, repeat:
1698 // +---+---+---+---+---+---+
1699 // | 0 | 1 | 2 | 3 | 1 | 3 |
1700 // +---+---+---+---+---+---+
1703 // Duplicate, advance r. End of slice. Split at w.
1705 let len = self.len();
1707 return (self, &mut [])
1710 let ptr = self.as_mut_ptr();
1711 let mut next_read: usize = 1;
1712 let mut next_write: usize = 1;
1715 // Avoid bounds checks by using raw pointers.
1716 while next_read < len {
1717 let ptr_read = ptr.add(next_read);
1718 let prev_ptr_write = ptr.add(next_write - 1);
1719 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1720 if next_read != next_write {
1721 let ptr_write = prev_ptr_write.offset(1);
1722 mem::swap(&mut *ptr_read, &mut *ptr_write);
1730 self.split_at_mut(next_write)
1733 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1734 /// to the same key.
1736 /// Returns two slices. The first contains no consecutive repeated elements.
1737 /// The second contains all the duplicates in no specified order.
1739 /// If the slice is sorted, the first returned slice contains no duplicates.
1744 /// #![feature(slice_partition_dedup)]
1746 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1748 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1750 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1751 /// assert_eq!(duplicates, [21, 30, 13]);
1753 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1755 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1756 where F: FnMut(&mut T) -> K,
1759 self.partition_dedup_by(|a, b| key(a) == key(b))
1762 /// Rotates the slice in-place such that the first `mid` elements of the
1763 /// slice move to the end while the last `self.len() - mid` elements move to
1764 /// the front. After calling `rotate_left`, the element previously at index
1765 /// `mid` will become the first element in the slice.
1769 /// This function will panic if `mid` is greater than the length of the
1770 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1775 /// Takes linear (in `self.len()`) time.
1780 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1781 /// a.rotate_left(2);
1782 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1785 /// Rotating a subslice:
1788 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1789 /// a[1..5].rotate_left(1);
1790 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1792 #[stable(feature = "slice_rotate", since = "1.26.0")]
1793 pub fn rotate_left(&mut self, mid: usize) {
1794 assert!(mid <= self.len());
1795 let k = self.len() - mid;
1798 let p = self.as_mut_ptr();
1799 rotate::ptr_rotate(mid, p.add(mid), k);
1803 /// Rotates the slice in-place such that the first `self.len() - k`
1804 /// elements of the slice move to the end while the last `k` elements move
1805 /// to the front. After calling `rotate_right`, the element previously at
1806 /// index `self.len() - k` will become the first element in the slice.
1810 /// This function will panic if `k` is greater than the length of the
1811 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1816 /// Takes linear (in `self.len()`) time.
1821 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1822 /// a.rotate_right(2);
1823 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1826 /// Rotate a subslice:
1829 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1830 /// a[1..5].rotate_right(1);
1831 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1833 #[stable(feature = "slice_rotate", since = "1.26.0")]
1834 pub fn rotate_right(&mut self, k: usize) {
1835 assert!(k <= self.len());
1836 let mid = self.len() - k;
1839 let p = self.as_mut_ptr();
1840 rotate::ptr_rotate(mid, p.add(mid), k);
1844 /// Copies the elements from `src` into `self`.
1846 /// The length of `src` must be the same as `self`.
1848 /// If `src` implements `Copy`, it can be more performant to use
1849 /// [`copy_from_slice`].
1853 /// This function will panic if the two slices have different lengths.
1857 /// Cloning two elements from a slice into another:
1860 /// let src = [1, 2, 3, 4];
1861 /// let mut dst = [0, 0];
1863 /// // Because the slices have to be the same length,
1864 /// // we slice the source slice from four elements
1865 /// // to two. It will panic if we don't do this.
1866 /// dst.clone_from_slice(&src[2..]);
1868 /// assert_eq!(src, [1, 2, 3, 4]);
1869 /// assert_eq!(dst, [3, 4]);
1872 /// Rust enforces that there can only be one mutable reference with no
1873 /// immutable references to a particular piece of data in a particular
1874 /// scope. Because of this, attempting to use `clone_from_slice` on a
1875 /// single slice will result in a compile failure:
1878 /// let mut slice = [1, 2, 3, 4, 5];
1880 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
1883 /// To work around this, we can use [`split_at_mut`] to create two distinct
1884 /// sub-slices from a slice:
1887 /// let mut slice = [1, 2, 3, 4, 5];
1890 /// let (left, right) = slice.split_at_mut(2);
1891 /// left.clone_from_slice(&right[1..]);
1894 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1897 /// [`copy_from_slice`]: #method.copy_from_slice
1898 /// [`split_at_mut`]: #method.split_at_mut
1899 #[stable(feature = "clone_from_slice", since = "1.7.0")]
1900 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
1901 assert!(self.len() == src.len(),
1902 "destination and source slices have different lengths");
1903 // NOTE: We need to explicitly slice them to the same length
1904 // for bounds checking to be elided, and the optimizer will
1905 // generate memcpy for simple cases (for example T = u8).
1906 let len = self.len();
1907 let src = &src[..len];
1909 self[i].clone_from(&src[i]);
1914 /// Copies all elements from `src` into `self`, using a memcpy.
1916 /// The length of `src` must be the same as `self`.
1918 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
1922 /// This function will panic if the two slices have different lengths.
1926 /// Copying two elements from a slice into another:
1929 /// let src = [1, 2, 3, 4];
1930 /// let mut dst = [0, 0];
1932 /// // Because the slices have to be the same length,
1933 /// // we slice the source slice from four elements
1934 /// // to two. It will panic if we don't do this.
1935 /// dst.copy_from_slice(&src[2..]);
1937 /// assert_eq!(src, [1, 2, 3, 4]);
1938 /// assert_eq!(dst, [3, 4]);
1941 /// Rust enforces that there can only be one mutable reference with no
1942 /// immutable references to a particular piece of data in a particular
1943 /// scope. Because of this, attempting to use `copy_from_slice` on a
1944 /// single slice will result in a compile failure:
1947 /// let mut slice = [1, 2, 3, 4, 5];
1949 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
1952 /// To work around this, we can use [`split_at_mut`] to create two distinct
1953 /// sub-slices from a slice:
1956 /// let mut slice = [1, 2, 3, 4, 5];
1959 /// let (left, right) = slice.split_at_mut(2);
1960 /// left.copy_from_slice(&right[1..]);
1963 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1966 /// [`clone_from_slice`]: #method.clone_from_slice
1967 /// [`split_at_mut`]: #method.split_at_mut
1968 #[stable(feature = "copy_from_slice", since = "1.9.0")]
1969 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
1970 assert_eq!(self.len(), src.len(),
1971 "destination and source slices have different lengths");
1973 ptr::copy_nonoverlapping(
1974 src.as_ptr(), self.as_mut_ptr(), self.len());
1978 /// Copies elements from one part of the slice to another part of itself,
1979 /// using a memmove.
1981 /// `src` is the range within `self` to copy from. `dest` is the starting
1982 /// index of the range within `self` to copy to, which will have the same
1983 /// length as `src`. The two ranges may overlap. The ends of the two ranges
1984 /// must be less than or equal to `self.len()`.
1988 /// This function will panic if either range exceeds the end of the slice,
1989 /// or if the end of `src` is before the start.
1993 /// Copying four bytes within a slice:
1996 /// # #![feature(copy_within)]
1997 /// let mut bytes = *b"Hello, World!";
1999 /// bytes.copy_within(1..5, 8);
2001 /// assert_eq!(&bytes, b"Hello, Wello!");
2003 #[unstable(feature = "copy_within", issue = "54236")]
2004 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
2008 let src_start = match src.start_bound() {
2009 ops::Bound::Included(&n) => n,
2010 ops::Bound::Excluded(&n) => n
2012 .unwrap_or_else(|| slice_index_overflow_fail()),
2013 ops::Bound::Unbounded => 0,
2015 let src_end = match src.end_bound() {
2016 ops::Bound::Included(&n) => n
2018 .unwrap_or_else(|| slice_index_overflow_fail()),
2019 ops::Bound::Excluded(&n) => n,
2020 ops::Bound::Unbounded => self.len(),
2022 assert!(src_start <= src_end, "src end is before src start");
2023 assert!(src_end <= self.len(), "src is out of bounds");
2024 let count = src_end - src_start;
2025 assert!(dest <= self.len() - count, "dest is out of bounds");
2028 self.get_unchecked(src_start),
2029 self.get_unchecked_mut(dest),
2035 /// Swaps all elements in `self` with those in `other`.
2037 /// The length of `other` must be the same as `self`.
2041 /// This function will panic if the two slices have different lengths.
2045 /// Swapping two elements across slices:
2048 /// let mut slice1 = [0, 0];
2049 /// let mut slice2 = [1, 2, 3, 4];
2051 /// slice1.swap_with_slice(&mut slice2[2..]);
2053 /// assert_eq!(slice1, [3, 4]);
2054 /// assert_eq!(slice2, [1, 2, 0, 0]);
2057 /// Rust enforces that there can only be one mutable reference to a
2058 /// particular piece of data in a particular scope. Because of this,
2059 /// attempting to use `swap_with_slice` on a single slice will result in
2060 /// a compile failure:
2063 /// let mut slice = [1, 2, 3, 4, 5];
2064 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2067 /// To work around this, we can use [`split_at_mut`] to create two distinct
2068 /// mutable sub-slices from a slice:
2071 /// let mut slice = [1, 2, 3, 4, 5];
2074 /// let (left, right) = slice.split_at_mut(2);
2075 /// left.swap_with_slice(&mut right[1..]);
2078 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2081 /// [`split_at_mut`]: #method.split_at_mut
2082 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2083 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2084 assert!(self.len() == other.len(),
2085 "destination and source slices have different lengths");
2087 ptr::swap_nonoverlapping(
2088 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2092 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2093 fn align_to_offsets<U>(&self) -> (usize, usize) {
2094 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2095 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2097 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2098 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2099 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2101 // Formula to calculate this is:
2103 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2104 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2106 // Expanded and simplified:
2108 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2109 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2111 // Luckily since all this is constant-evaluated... performance here matters not!
2113 fn gcd(a: usize, b: usize) -> usize {
2114 // iterative stein’s algorithm
2115 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2116 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2117 let (ctz_a, mut ctz_b) = unsafe {
2118 if a == 0 { return b; }
2119 if b == 0 { return a; }
2120 (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b))
2122 let k = ctz_a.min(ctz_b);
2123 let mut a = a >> ctz_a;
2126 // remove all factors of 2 from b
2129 ::mem::swap(&mut a, &mut b);
2136 ctz_b = ::intrinsics::cttz_nonzero(b);
2141 let gcd: usize = gcd(::mem::size_of::<T>(), ::mem::size_of::<U>());
2142 let ts: usize = ::mem::size_of::<U>() / gcd;
2143 let us: usize = ::mem::size_of::<T>() / gcd;
2145 // Armed with this knowledge, we can find how many `U`s we can fit!
2146 let us_len = self.len() / ts * us;
2147 // And how many `T`s will be in the trailing slice!
2148 let ts_len = self.len() % ts;
2152 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2155 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2156 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2157 /// middle slice the greatest length possible for a given type and input slice, but only
2158 /// your algorithm's performance should depend on that, not its correctness.
2160 /// This method has no purpose when either input element `T` or output element `U` are
2161 /// zero-sized and will return the original slice without splitting anything.
2165 /// This method is essentially a `transmute` with respect to the elements in the returned
2166 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2174 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2175 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2176 /// // less_efficient_algorithm_for_bytes(prefix);
2177 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2178 /// // less_efficient_algorithm_for_bytes(suffix);
2181 #[stable(feature = "slice_align_to", since = "1.30.0")]
2182 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2183 // Note that most of this function will be constant-evaluated,
2184 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2185 // handle ZSTs specially, which is – don't handle them at all.
2186 return (self, &[], &[]);
2189 // First, find at what point do we split between the first and 2nd slice. Easy with
2190 // ptr.align_offset.
2191 let ptr = self.as_ptr();
2192 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
2193 if offset > self.len() {
2196 let (left, rest) = self.split_at(offset);
2197 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2198 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2200 from_raw_parts(rest.as_ptr() as *const U, us_len),
2201 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2205 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2208 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2209 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2210 /// middle slice the greatest length possible for a given type and input slice, but only
2211 /// your algorithm's performance should depend on that, not its correctness.
2213 /// This method has no purpose when either input element `T` or output element `U` are
2214 /// zero-sized and will return the original slice without splitting anything.
2218 /// This method is essentially a `transmute` with respect to the elements in the returned
2219 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2227 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2228 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2229 /// // less_efficient_algorithm_for_bytes(prefix);
2230 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2231 /// // less_efficient_algorithm_for_bytes(suffix);
2234 #[stable(feature = "slice_align_to", since = "1.30.0")]
2235 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2236 // Note that most of this function will be constant-evaluated,
2237 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2238 // handle ZSTs specially, which is – don't handle them at all.
2239 return (self, &mut [], &mut []);
2242 // First, find at what point do we split between the first and 2nd slice. Easy with
2243 // ptr.align_offset.
2244 let ptr = self.as_ptr();
2245 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
2246 if offset > self.len() {
2247 (self, &mut [], &mut [])
2249 let (left, rest) = self.split_at_mut(offset);
2250 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2251 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2252 let mut_ptr = rest.as_mut_ptr();
2254 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2255 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
2260 #[lang = "slice_u8"]
2263 /// Checks if all bytes in this slice are within the ASCII range.
2264 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2266 pub fn is_ascii(&self) -> bool {
2267 self.iter().all(|b| b.is_ascii())
2270 /// Checks that two slices are an ASCII case-insensitive match.
2272 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2273 /// but without allocating and copying temporaries.
2274 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2276 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
2277 self.len() == other.len() &&
2278 self.iter().zip(other).all(|(a, b)| {
2279 a.eq_ignore_ascii_case(b)
2283 /// Converts this slice to its ASCII upper case equivalent in-place.
2285 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2286 /// but non-ASCII letters are unchanged.
2288 /// To return a new uppercased value without modifying the existing one, use
2289 /// [`to_ascii_uppercase`].
2291 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2292 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2294 pub fn make_ascii_uppercase(&mut self) {
2296 byte.make_ascii_uppercase();
2300 /// Converts this slice to its ASCII lower case equivalent in-place.
2302 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2303 /// but non-ASCII letters are unchanged.
2305 /// To return a new lowercased value without modifying the existing one, use
2306 /// [`to_ascii_lowercase`].
2308 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2309 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2311 pub fn make_ascii_lowercase(&mut self) {
2313 byte.make_ascii_lowercase();
// `slice[index]` for any index/range type: both operators delegate to the
// `SliceIndex` trait so a single impl pair covers `usize` and every range.
// NOTE(review): interior lines are elided in this extract (the `index` body
// and several braces are missing) — verify against upstream before editing.
2319 #[stable(feature = "rust1", since = "1.0.0")]
2320 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2321 impl<T, I> ops::Index<I> for [T]
2322 where I: SliceIndex<[T]>
2324 type Output = I::Output;
2327 fn index(&self, index: I) -> &I::Output {
// Mutable counterpart: `&mut slice[index]` delegates to `SliceIndex::index_mut`.
2332 #[stable(feature = "rust1", since = "1.0.0")]
2333 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2334 impl<T, I> ops::IndexMut<I> for [T]
2335 where I: SliceIndex<[T]>
2338 fn index_mut(&mut self, index: I) -> &mut I::Output {
2339 index.index_mut(self)
/// Shared panic path for checked slice indexing: called when the requested
/// end index exceeds the slice's length. Diverges (`-> !`), so it is kept
/// out of line from the fast indexing paths.
fn slice_index_len_fail(index: usize, len: usize) -> ! {
    panic!("index {} out of range for slice of length {}", index, len);
}
/// Shared panic path for checked slice indexing: called when a range's start
/// is greater than its end. Diverges (`-> !`).
fn slice_index_order_fail(index: usize, end: usize) -> ! {
    panic!("slice index starts at {} but ends at {}", index, end);
}
/// Shared panic path for `RangeInclusive` indexing whose inclusive end is
/// `usize::MAX`, where computing `end + 1` would overflow. Diverges (`-> !`).
fn slice_index_overflow_fail() -> ! {
    panic!("attempted to index slice up to maximum usize");
}
// Seal for `SliceIndex`: only the types listed here may implement the trait.
// NOTE(review): the `pub trait Sealed {}` declaration itself falls in the
// elided lines between 2361 and 2366 — confirm against upstream.
2361 mod private_slice_index {
2363 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2366 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2367 impl Sealed for usize {}
2368 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2369 impl Sealed for ops::Range<usize> {}
2370 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2371 impl Sealed for ops::RangeTo<usize> {}
2372 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2373 impl Sealed for ops::RangeFrom<usize> {}
2374 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2375 impl Sealed for ops::RangeFull {}
2376 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2377 impl Sealed for ops::RangeInclusive<usize> {}
2378 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2379 impl Sealed for ops::RangeToInclusive<usize> {}
// The core indexing abstraction: six methods covering checked (`get*`),
// unchecked (`get_unchecked*`), and panicking (`index*`) access, each in a
// shared and a mutable flavor. Sealed via `private_slice_index::Sealed`.
2382 /// A helper trait used for indexing operations.
2383 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2384 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2385 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2386 /// The output type returned by methods.
2387 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2388 type Output: ?Sized;
2390 /// Returns a shared reference to the output at this location, if in
2392 #[unstable(feature = "slice_index_methods", issue = "0")]
2393 fn get(self, slice: &T) -> Option<&Self::Output>;
2395 /// Returns a mutable reference to the output at this location, if in
2397 #[unstable(feature = "slice_index_methods", issue = "0")]
2398 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2400 /// Returns a shared reference to the output at this location, without
2401 /// performing any bounds checking.
2402 #[unstable(feature = "slice_index_methods", issue = "0")]
2403 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2405 /// Returns a mutable reference to the output at this location, without
2406 /// performing any bounds checking.
2407 #[unstable(feature = "slice_index_methods", issue = "0")]
2408 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2410 /// Returns a shared reference to the output at this location, panicking
2411 /// if out of bounds.
2412 #[unstable(feature = "slice_index_methods", issue = "0")]
2413 fn index(self, slice: &T) -> &Self::Output;
2415 /// Returns a mutable reference to the output at this location, panicking
2416 /// if out of bounds.
2417 #[unstable(feature = "slice_index_methods", issue = "0")]
2418 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
// `usize` indexing yields a single element (`Output = T`). `get`/`get_mut`
// bounds-check then defer to the unchecked accessors, which use raw pointer
// arithmetic; `index`/`index_mut` use the compiler's intrinsic indexing.
// NOTE(review): the `else { None }` arms, `type Output`, and the intrinsic
// `&slice[self]` bodies fall in elided lines — verify against upstream.
2421 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2422 impl<T> SliceIndex<[T]> for usize {
2426 fn get(self, slice: &[T]) -> Option<&T> {
2427 if self < slice.len() {
// SAFETY (per visible guard): `self < slice.len()` makes the unchecked read in-bounds.
2429 Some(self.get_unchecked(slice))
2437 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2438 if self < slice.len() {
2440 Some(self.get_unchecked_mut(slice))
2448 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2449 &*slice.as_ptr().add(self)
2453 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2454 &mut *slice.as_mut_ptr().add(self)
2458 fn index(self, slice: &[T]) -> &T {
2459 // NB: use intrinsic indexing
2464 fn index_mut(self, slice: &mut [T]) -> &mut T {
2465 // NB: use intrinsic indexing
// `Range<usize>` is the workhorse impl: all other range types forward here.
// Checked accessors validate `start <= end <= len`; `index`/`index_mut`
// report which check failed via the dedicated panic helpers.
// NOTE(review): `type Output`, the `None`/`unsafe` framing lines, and
// closing braces are elided in this extract.
2470 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2471 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2475 fn get(self, slice: &[T]) -> Option<&[T]> {
2476 if self.start > self.end || self.end > slice.len() {
2480 Some(self.get_unchecked(slice))
2486 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2487 if self.start > self.end || self.end > slice.len() {
2491 Some(self.get_unchecked_mut(slice))
2497 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2498 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2502 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2503 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
2507 fn index(self, slice: &[T]) -> &[T] {
// Order is checked before length so the panic message names the right error.
2508 if self.start > self.end {
2509 slice_index_order_fail(self.start, self.end);
2510 } else if self.end > slice.len() {
2511 slice_index_len_fail(self.end, slice.len());
2514 self.get_unchecked(slice)
2519 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2520 if self.start > self.end {
2521 slice_index_order_fail(self.start, self.end);
2522 } else if self.end > slice.len() {
2523 slice_index_len_fail(self.end, slice.len());
2526 self.get_unchecked_mut(slice)
2531 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2532 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2536 fn get(self, slice: &[T]) -> Option<&[T]> {
2537 (0..self.end).get(slice)
2541 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2542 (0..self.end).get_mut(slice)
2546 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2547 (0..self.end).get_unchecked(slice)
2551 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2552 (0..self.end).get_unchecked_mut(slice)
2556 fn index(self, slice: &[T]) -> &[T] {
2557 (0..self.end).index(slice)
2561 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2562 (0..self.end).index_mut(slice)
2566 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2567 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2571 fn get(self, slice: &[T]) -> Option<&[T]> {
2572 (self.start..slice.len()).get(slice)
2576 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2577 (self.start..slice.len()).get_mut(slice)
2581 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2582 (self.start..slice.len()).get_unchecked(slice)
2586 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2587 (self.start..slice.len()).get_unchecked_mut(slice)
2591 fn index(self, slice: &[T]) -> &[T] {
2592 (self.start..slice.len()).index(slice)
2596 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2597 (self.start..slice.len()).index_mut(slice)
2601 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2602 impl<T> SliceIndex<[T]> for ops::RangeFull {
2606 fn get(self, slice: &[T]) -> Option<&[T]> {
2611 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2616 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2621 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2626 fn index(self, slice: &[T]) -> &[T] {
2631 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2637 #[stable(feature = "inclusive_range", since = "1.26.0")]
2638 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2642 fn get(self, slice: &[T]) -> Option<&[T]> {
2643 if *self.end() == usize::max_value() { None }
2644 else { (*self.start()..self.end() + 1).get(slice) }
2648 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2649 if *self.end() == usize::max_value() { None }
2650 else { (*self.start()..self.end() + 1).get_mut(slice) }
2654 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2655 (*self.start()..self.end() + 1).get_unchecked(slice)
2659 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2660 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2664 fn index(self, slice: &[T]) -> &[T] {
2665 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2666 (*self.start()..self.end() + 1).index(slice)
2670 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2671 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2672 (*self.start()..self.end() + 1).index_mut(slice)
2676 #[stable(feature = "inclusive_range", since = "1.26.0")]
2677 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
2681 fn get(self, slice: &[T]) -> Option<&[T]> {
2682 (0..=self.end).get(slice)
2686 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2687 (0..=self.end).get_mut(slice)
2691 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2692 (0..=self.end).get_unchecked(slice)
2696 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2697 (0..=self.end).get_unchecked_mut(slice)
2701 fn index(self, slice: &[T]) -> &[T] {
2702 (0..=self.end).index(slice)
2706 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2707 (0..=self.end).index_mut(slice)
2711 ////////////////////////////////////////////////////////////////////////////////
2713 ////////////////////////////////////////////////////////////////////////////////
// `Default` for slice references: the default value is the empty slice,
// which borrows no data and is valid for any lifetime.
2715 #[stable(feature = "rust1", since = "1.0.0")]
2716 impl<T> Default for &[T] {
2717 /// Creates an empty slice.
2718 fn default() -> Self { &[] }
2721 #[stable(feature = "mut_slice_default", since = "1.5.0")]
2722 impl<T> Default for &mut [T] {
2723 /// Creates a mutable empty slice.
2724 fn default() -> Self { &mut [] }
// `for x in &slice` / `for x in &mut slice` support: borrows convert to the
// `Iter`/`IterMut` iterators defined below.
// NOTE(review): `type Item` for the shared impl and both `into_iter` bodies
// are elided in this extract.
2731 #[stable(feature = "rust1", since = "1.0.0")]
2732 impl<'a, T> IntoIterator for &'a [T] {
2734 type IntoIter = Iter<'a, T>;
2736 fn into_iter(self) -> Iter<'a, T> {
2741 #[stable(feature = "rust1", since = "1.0.0")]
2742 impl<'a, T> IntoIterator for &'a mut [T] {
2743 type Item = &'a mut T;
2744 type IntoIter = IterMut<'a, T>;
2746 fn into_iter(self) -> IterMut<'a, T> {
// Helpers used by the iterator macro below. `is_empty!` exploits the
// `ptr == end` encoding (valid for ZSTs too); `len!` divides the raw byte
// distance by the element size so LLVM can elide bounds checks.
// NOTE(review): the `size_from_ptr` body, the non-ZST `len!` arm, and the
// macro closers are elided in this extract.
2751 // Macro helper functions
2753 fn size_from_ptr<T>(_: *const T) -> usize {
2757 // Inlining is_empty and len makes a huge performance difference
2758 macro_rules! is_empty {
2759 // The way we encode the length of a ZST iterator, this works both for ZST
2761 ($self: ident) => {$self.ptr == $self.end}
2763 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
2764 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
2766 ($self: ident) => {{
2767 let start = $self.ptr;
2768 let diff = ($self.end as usize).wrapping_sub(start as usize);
2769 let size = size_from_ptr(start);
2773 // Using division instead of `offset_from` helps LLVM remove bounds checks
// One macro generates both `Iter` (shared) and `IterMut` (mutable): the
// `$raw_mut`/`$mut_` fragments splice in `const`/`mut` as needed. The
// iterators walk raw pointers; for ZSTs, `end` encodes `ptr + len` instead
// of a real address, so advancing moves `end`, never `ptr`.
// NOTE(review): this extract is heavily elided (missing else-branches,
// return values, and closing braces throughout) — treat it as a skeleton and
// consult upstream before modifying any of the unsafe pointer arithmetic.
2779 // The shared definition of the `Iter` and `IterMut` iterators
2780 macro_rules! iterator {
2781 (struct $name:ident -> $ptr:ty, $elem:ty, $raw_mut:tt, $( $mut_:tt )*) => {
2782 impl<'a, T> $name<'a, T> {
2783 // Helper function for creating a slice from the iterator.
2785 fn make_slice(&self) -> &'a [T] {
2786 unsafe { from_raw_parts(self.ptr, len!(self)) }
2789 // Helper function for moving the start of the iterator forwards by `offset` elements,
2790 // returning the old start.
2791 // Unsafe because the offset must be in-bounds or one-past-the-end.
2793 unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
2794 if mem::size_of::<T>() == 0 {
2795 // This is *reducing* the length. `ptr` never changes with ZST.
2796 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2800 self.ptr = self.ptr.offset(offset);
2805 // Helper function for moving the end of the iterator backwards by `offset` elements,
2806 // returning the new end.
2807 // Unsafe because the offset must be in-bounds or one-past-the-end.
2809 unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
2810 if mem::size_of::<T>() == 0 {
2811 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2814 self.end = self.end.offset(-offset);
2820 #[stable(feature = "rust1", since = "1.0.0")]
2821 impl<'a, T> ExactSizeIterator for $name<'a, T> {
2823 fn len(&self) -> usize {
2828 fn is_empty(&self) -> bool {
2833 #[stable(feature = "rust1", since = "1.0.0")]
2834 impl<'a, T> Iterator for $name<'a, T> {
2838 fn next(&mut self) -> Option<$elem> {
2839 // could be implemented with slices, but this avoids bounds checks
// `assume` feeds non-null facts to LLVM so `Option<&T>` niche checks vanish.
2841 assume(!self.ptr.is_null());
2842 if mem::size_of::<T>() != 0 {
2843 assume(!self.end.is_null());
2845 if is_empty!(self) {
2848 Some(& $( $mut_ )* *self.post_inc_start(1))
2854 fn size_hint(&self) -> (usize, Option<usize>) {
2855 let exact = len!(self);
2856 (exact, Some(exact))
2860 fn count(self) -> usize {
2865 fn nth(&mut self, n: usize) -> Option<$elem> {
2866 if n >= len!(self) {
2867 // This iterator is now empty.
2868 if mem::size_of::<T>() == 0 {
2869 // We have to do it this way as `ptr` may never be 0, but `end`
2870 // could be (due to wrapping).
2871 self.end = self.ptr;
2873 self.ptr = self.end;
2877 // We are in bounds. `offset` does the right thing even for ZSTs.
2879 let elem = Some(& $( $mut_ )* *self.ptr.add(n));
2880 self.post_inc_start((n as isize).wrapping_add(1));
2886 fn last(mut self) -> Option<$elem> {
2891 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
2892 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
2894 // manual unrolling is needed when there are conditional exits from the loop
2895 let mut accum = init;
2897 while len!(self) >= 4 {
2898 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2899 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2900 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2901 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2903 while !is_empty!(self) {
2904 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2911 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
2912 where Fold: FnMut(Acc, Self::Item) -> Acc,
2914 // Let LLVM unroll this, rather than using the default
2915 // impl that would force the manual unrolling above
2916 let mut accum = init;
2917 while let Some(x) = self.next() {
2918 accum = f(accum, x);
2924 #[rustc_inherit_overflow_checks]
2925 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
2927 P: FnMut(Self::Item) -> bool,
2929 // The addition might panic on overflow.
// `try_fold` with `Err` as the early-exit channel: the found index short-circuits.
2931 self.try_fold(0, move |i, x| {
2932 if predicate(x) { Err(i) }
2936 unsafe { assume(i < n) };
2942 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
2943 P: FnMut(Self::Item) -> bool,
2944 Self: Sized + ExactSizeIterator + DoubleEndedIterator
2946 // No need for an overflow check here, because `ExactSizeIterator`
2948 self.try_rfold(n, move |i, x| {
2950 if predicate(x) { Err(i) }
2954 unsafe { assume(i < n) };
2960 #[stable(feature = "rust1", since = "1.0.0")]
2961 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
2963 fn next_back(&mut self) -> Option<$elem> {
2964 // could be implemented with slices, but this avoids bounds checks
2966 assume(!self.ptr.is_null());
2967 if mem::size_of::<T>() != 0 {
2968 assume(!self.end.is_null());
2970 if is_empty!(self) {
2973 Some(& $( $mut_ )* *self.pre_dec_end(1))
2979 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
2980 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
2982 // manual unrolling is needed when there are conditional exits from the loop
2983 let mut accum = init;
2985 while len!(self) >= 4 {
2986 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2987 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2988 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2989 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2991 // inlining is_empty everywhere makes a huge performance difference
2992 while !is_empty!(self) {
2993 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3000 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3001 where Fold: FnMut(Acc, Self::Item) -> Acc,
3003 // Let LLVM unroll this, rather than using the default
3004 // impl that would force the manual unrolling above
3005 let mut accum = init;
3006 while let Some(x) = self.next_back() {
3007 accum = f(accum, x);
3013 #[stable(feature = "fused", since = "1.26.0")]
3014 impl<'a, T> FusedIterator for $name<'a, T> {}
3016 #[unstable(feature = "trusted_len", issue = "37572")]
3017 unsafe impl<'a, T> TrustedLen for $name<'a, T> {}
// `Iter`: the shared-reference slice iterator. Raw-pointer pair plus a
// `PhantomData<&'a T>` marker tying the iterator to the source lifetime.
// NOTE(review): the `ptr: *const T,` field line and several doc/brace lines
// are elided in this extract.
3021 /// Immutable slice iterator
3023 /// This struct is created by the [`iter`] method on [slices].
3030 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3031 /// let slice = &[1, 2, 3];
3033 /// // Then, we iterate over it:
3034 /// for element in slice.iter() {
3035 /// println!("{}", element);
3039 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3040 /// [slices]: ../../std/primitive.slice.html
3041 #[stable(feature = "rust1", since = "1.0.0")]
3042 pub struct Iter<'a, T: 'a> {
3044 end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3045 // ptr == end is a quick test for the Iterator being empty, that works
3046 // for both ZST and non-ZST.
3047 _marker: marker::PhantomData<&'a T>,
3050 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3051 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
3052 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3053 f.debug_tuple("Iter")
3054 .field(&self.as_slice())
// Both bounds are `T: Sync` — sending the iterator only sends `&T` access.
3059 #[stable(feature = "rust1", since = "1.0.0")]
3060 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
3061 #[stable(feature = "rust1", since = "1.0.0")]
3062 unsafe impl<T: Sync> Send for Iter<'_, T> {}
3064 impl<'a, T> Iter<'a, T> {
3065 /// View the underlying data as a subslice of the original data.
3067 /// This has the same lifetime as the original slice, and so the
3068 /// iterator can continue to be used while this exists.
3075 /// // First, we declare a type which has the `iter` method to get the `Iter`
3076 /// // struct (&[usize here]):
3077 /// let slice = &[1, 2, 3];
3079 /// // Then, we get the iterator:
3080 /// let mut iter = slice.iter();
3081 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
3082 /// println!("{:?}", iter.as_slice());
3084 /// // Next, we move to the second element of the slice:
3086 /// // Now `as_slice` returns "[2, 3]":
3087 /// println!("{:?}", iter.as_slice());
3089 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3090 pub fn as_slice(&self) -> &'a [T] {
// The macro invocation below generates Iterator/DoubleEndedIterator/etc. for `Iter`.
3095 iterator!{struct Iter -> *const T, &'a T, const, /* no mut */}
3097 #[stable(feature = "rust1", since = "1.0.0")]
3098 impl<T> Clone for Iter<'_, T> {
3099 fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
// Borrow the iterator's remaining elements as a plain slice.
// NOTE(review): the method body and closing braces are elided here.
3102 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
3103 impl<T> AsRef<[T]> for Iter<'_, T> {
3104 fn as_ref(&self) -> &[T] {
// `IterMut`: the mutable counterpart of `Iter`. Same pointer-pair layout;
// the `PhantomData<&'a mut T>` marker makes it invariant and unique-borrowing.
// NOTE(review): the `ptr: *mut T,` field line and several doc/brace lines
// are elided in this extract.
3109 /// Mutable slice iterator.
3111 /// This struct is created by the [`iter_mut`] method on [slices].
3118 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3119 /// // struct (&[usize here]):
3120 /// let mut slice = &mut [1, 2, 3];
3122 /// // Then, we iterate over it and increment each element value:
3123 /// for element in slice.iter_mut() {
3127 /// // We now have "[2, 3, 4]":
3128 /// println!("{:?}", slice);
3131 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
3132 /// [slices]: ../../std/primitive.slice.html
3133 #[stable(feature = "rust1", since = "1.0.0")]
3134 pub struct IterMut<'a, T: 'a> {
3136 end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3137 // ptr == end is a quick test for the Iterator being empty, that works
3138 // for both ZST and non-ZST.
3139 _marker: marker::PhantomData<&'a mut T>,
3142 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3143 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
3144 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3145 f.debug_tuple("IterMut")
3146 .field(&self.make_slice())
// Unlike `Iter`, `Send` here requires `T: Send`: the iterator hands out `&mut T`.
3151 #[stable(feature = "rust1", since = "1.0.0")]
3152 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
3153 #[stable(feature = "rust1", since = "1.0.0")]
3154 unsafe impl<T: Send> Send for IterMut<'_, T> {}
3156 impl<'a, T> IterMut<'a, T> {
3157 /// View the underlying data as a subslice of the original data.
3159 /// To avoid creating `&mut` references that alias, this is forced
3160 /// to consume the iterator.
3167 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3168 /// // struct (&[usize here]):
3169 /// let mut slice = &mut [1, 2, 3];
3172 /// // Then, we get the iterator:
3173 /// let mut iter = slice.iter_mut();
3174 /// // We move to next element:
3176 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
3177 /// println!("{:?}", iter.into_slice());
3180 /// // Now let's modify a value of the slice:
3182 /// // First we get back the iterator:
3183 /// let mut iter = slice.iter_mut();
3184 /// // We change the value of the first element of the slice returned by the `next` method:
3185 /// *iter.next().unwrap() += 1;
3187 /// // Now slice is "[2, 2, 3]":
3188 /// println!("{:?}", slice);
3190 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3191 pub fn into_slice(self) -> &'a mut [T] {
3192 unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
// The macro invocation below generates Iterator/DoubleEndedIterator/etc. for `IterMut`.
3196 iterator!{struct IterMut -> *mut T, &'a mut T, mut, mut}
// Private trait shared by all the splitting iterators so the `splitn`
// family can be implemented once over any of them (see `GenericSplitN`).
3198 /// An internal abstraction over the splitting iterators, so that
3199 /// splitn, splitn_mut etc can be implemented once.
3201 trait SplitIter: DoubleEndedIterator {
3202 /// Marks the underlying iterator as complete, extracting the remaining
3203 /// portion of the slice.
3204 fn finish(&mut self) -> Option<Self::Item>;
// `Split`: yields `&[T]` chunks between elements matching the predicate.
// NOTE(review): the struct fields (`v`, `pred`, `finished`) and parts of the
// Clone body are elided in this extract; the Debug impl deliberately omits
// `pred`, which has no `Debug` bound.
3207 /// An iterator over subslices separated by elements that match a predicate
3210 /// This struct is created by the [`split`] method on [slices].
3212 /// [`split`]: ../../std/primitive.slice.html#method.split
3213 /// [slices]: ../../std/primitive.slice.html
3214 #[stable(feature = "rust1", since = "1.0.0")]
3215 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
3221 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3222 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool {
3223 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3224 f.debug_struct("Split")
3225 .field("v", &self.v)
3226 .field("finished", &self.finished)
// Manual Clone: deriving would (incorrectly for this use) require `T: Clone`.
3231 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3232 #[stable(feature = "rust1", since = "1.0.0")]
3233 impl<T, P> Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool {
3234 fn clone(&self) -> Self {
3237 pred: self.pred.clone(),
3238 finished: self.finished,
// Forward iteration scans for the next matching element, yields everything
// before it, and drops the matching element itself (`idx + 1..`). Backward
// iteration mirrors this with `rposition`.
// NOTE(review): the `Some(idx) =>` match-arm headers, `finished` size_hint
// branch, and closing braces are elided in this extract.
3243 #[stable(feature = "rust1", since = "1.0.0")]
3244 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3245 type Item = &'a [T];
3248 fn next(&mut self) -> Option<&'a [T]> {
3249 if self.finished { return None; }
3251 match self.v.iter().position(|x| (self.pred)(x)) {
3252 None => self.finish(),
3254 let ret = Some(&self.v[..idx]);
3255 self.v = &self.v[idx + 1..];
3262 fn size_hint(&self) -> (usize, Option<usize>) {
// Upper bound is len+1: a predicate matching every element yields len+1 empty slices.
3266 (1, Some(self.v.len() + 1))
3271 #[stable(feature = "rust1", since = "1.0.0")]
3272 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3274 fn next_back(&mut self) -> Option<&'a [T]> {
3275 if self.finished { return None; }
3277 match self.v.iter().rposition(|x| (self.pred)(x)) {
3278 None => self.finish(),
3280 let ret = Some(&self.v[idx + 1..]);
3281 self.v = &self.v[..idx];
3288 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
3290 fn finish(&mut self) -> Option<&'a [T]> {
3291 if self.finished { None } else { self.finished = true; Some(self.v) }
// `Split` keeps returning `None` once `finished` is set, so it is fused.
3295 #[stable(feature = "fused", since = "1.26.0")]
3296 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
// `SplitMut`: mutable version of `Split`. Because it hands out `&'a mut [T]`
// chunks, each step must move the remaining slice out of `self.v` with
// `mem::replace` before splitting, so no two yielded chunks alias.
// NOTE(review): struct fields, `Some(idx) =>` arm headers, the `finished`
// branches, and closing braces are elided in this extract.
3298 /// An iterator over the subslices of the vector which are separated
3299 /// by elements that match `pred`.
3301 /// This struct is created by the [`split_mut`] method on [slices].
3303 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
3304 /// [slices]: ../../std/primitive.slice.html
3305 #[stable(feature = "rust1", since = "1.0.0")]
3306 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3312 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3313 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3314 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3315 f.debug_struct("SplitMut")
3316 .field("v", &self.v)
3317 .field("finished", &self.finished)
3322 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3324 fn finish(&mut self) -> Option<&'a mut [T]> {
3328 self.finished = true;
// Swap in an empty slice so the remainder can be returned with lifetime 'a.
3329 Some(mem::replace(&mut self.v, &mut []))
3334 #[stable(feature = "rust1", since = "1.0.0")]
3335 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3336 type Item = &'a mut [T];
3339 fn next(&mut self) -> Option<&'a mut [T]> {
3340 if self.finished { return None; }
3342 let idx_opt = { // work around borrowck limitations
3343 let pred = &mut self.pred;
3344 self.v.iter().position(|x| (*pred)(x))
3347 None => self.finish(),
3349 let tmp = mem::replace(&mut self.v, &mut []);
3350 let (head, tail) = tmp.split_at_mut(idx);
3351 self.v = &mut tail[1..];
3358 fn size_hint(&self) -> (usize, Option<usize>) {
3362 // if the predicate doesn't match anything, we yield one slice
3363 // if it matches every element, we yield len+1 empty slices.
3364 (1, Some(self.v.len() + 1))
3369 #[stable(feature = "rust1", since = "1.0.0")]
3370 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
3371 P: FnMut(&T) -> bool,
3374 fn next_back(&mut self) -> Option<&'a mut [T]> {
3375 if self.finished { return None; }
3377 let idx_opt = { // work around borrowck limitations
3378 let pred = &mut self.pred;
3379 self.v.iter().rposition(|x| (*pred)(x))
3382 None => self.finish(),
3384 let tmp = mem::replace(&mut self.v, &mut []);
3385 let (head, tail) = tmp.split_at_mut(idx);
3387 Some(&mut tail[1..])
3393 #[stable(feature = "fused", since = "1.26.0")]
3394 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// `RSplit`/`RSplitMut`: reversed splitting, implemented by wrapping the
// forward splitters and swapping the iteration direction (`next` calls the
// inner `next_back` and vice versa).
// NOTE(review): several method bodies (`next_back`, `finish`) and closing
// braces are elided in this extract.
3396 /// An iterator over subslices separated by elements that match a predicate
3397 /// function, starting from the end of the slice.
3399 /// This struct is created by the [`rsplit`] method on [slices].
3401 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
3402 /// [slices]: ../../std/primitive.slice.html
3403 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3404 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
3405 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
3406 inner: Split<'a, T, P>
3409 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3410 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool {
3411 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3412 f.debug_struct("RSplit")
3413 .field("v", &self.inner.v)
3414 .field("finished", &self.inner.finished)
3419 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3420 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3421 type Item = &'a [T];
3424 fn next(&mut self) -> Option<&'a [T]> {
// Direction swap: forward iteration of RSplit is backward iteration of Split.
3425 self.inner.next_back()
3429 fn size_hint(&self) -> (usize, Option<usize>) {
3430 self.inner.size_hint()
3434 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3435 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3437 fn next_back(&mut self) -> Option<&'a [T]> {
3442 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3443 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3445 fn finish(&mut self) -> Option<&'a [T]> {
3450 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3451 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
3453 /// An iterator over the subslices of the vector which are separated
3454 /// by elements that match `pred`, starting from the end of the slice.
3456 /// This struct is created by the [`rsplit_mut`] method on [slices].
3458 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
3459 /// [slices]: ../../std/primitive.slice.html
3460 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3461 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3462 inner: SplitMut<'a, T, P>
3465 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3466 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3467 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3468 f.debug_struct("RSplitMut")
3469 .field("v", &self.inner.v)
3470 .field("finished", &self.inner.finished)
3475 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3476 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3478 fn finish(&mut self) -> Option<&'a mut [T]> {
3483 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3484 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3485 type Item = &'a mut [T];
3488 fn next(&mut self) -> Option<&'a mut [T]> {
3489 self.inner.next_back()
3493 fn size_hint(&self) -> (usize, Option<usize>) {
3494 self.inner.size_hint()
3498 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3499 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
3500 P: FnMut(&T) -> bool,
3503 fn next_back(&mut self) -> Option<&'a mut [T]> {
3508 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3509 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// `GenericSplitN`: decrements a split budget on every step; on the last
// allowed split it calls `finish()` to yield the whole remainder unsplit.
// NOTE(review): the struct fields (`iter`, `count`), `type Item`, the
// `0 => None` match arm, and closing braces are elided in this extract.
3511 /// An private iterator over subslices separated by elements that
3512 /// match a predicate function, splitting at most a fixed number of
3515 struct GenericSplitN<I> {
3520 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
3524 fn next(&mut self) -> Option<T> {
3527 1 => { self.count -= 1; self.iter.finish() }
3528 _ => { self.count -= 1; self.iter.next() }
3533 fn size_hint(&self) -> (usize, Option<usize>) {
// The remaining split budget caps the inner iterator's upper bound.
3534 let (lower, upper_opt) = self.iter.size_hint();
3535 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
// The four `splitn` wrapper types: each is `GenericSplitN` over the matching
// splitting iterator, picking direction (forward/reverse) and mutability.
// Their Debug impls just show the inner state.
// NOTE(review): `.finish()` calls and closing braces are elided throughout
// this extract.
3539 /// An iterator over subslices separated by elements that match a predicate
3540 /// function, limited to a given number of splits.
3542 /// This struct is created by the [`splitn`] method on [slices].
3544 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3545 /// [slices]: ../../std/primitive.slice.html
3546 #[stable(feature = "rust1", since = "1.0.0")]
3547 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3548 inner: GenericSplitN<Split<'a, T, P>>
3551 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3552 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool {
3553 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3554 f.debug_struct("SplitN")
3555 .field("inner", &self.inner)
3560 /// An iterator over subslices separated by elements that match a
3561 /// predicate function, limited to a given number of splits, starting
3562 /// from the end of the slice.
3564 /// This struct is created by the [`rsplitn`] method on [slices].
3566 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3567 /// [slices]: ../../std/primitive.slice.html
3568 #[stable(feature = "rust1", since = "1.0.0")]
3569 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3570 inner: GenericSplitN<RSplit<'a, T, P>>
3573 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3574 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool {
3575 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3576 f.debug_struct("RSplitN")
3577 .field("inner", &self.inner)
3582 /// An iterator over subslices separated by elements that match a predicate
3583 /// function, limited to a given number of splits.
3585 /// This struct is created by the [`splitn_mut`] method on [slices].
3587 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3588 /// [slices]: ../../std/primitive.slice.html
3589 #[stable(feature = "rust1", since = "1.0.0")]
3590 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3591 inner: GenericSplitN<SplitMut<'a, T, P>>
3594 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3595 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3596 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3597 f.debug_struct("SplitNMut")
3598 .field("inner", &self.inner)
3603 /// An iterator over subslices separated by elements that match a
3604 /// predicate function, limited to a given number of splits, starting
3605 /// from the end of the slice.
3607 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3609 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3610 /// [slices]: ../../std/primitive.slice.html
3611 #[stable(feature = "rust1", since = "1.0.0")]
3612 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3613 inner: GenericSplitN<RSplitMut<'a, T, P>>
3616 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3617 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3618 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3619 f.debug_struct("RSplitNMut")
3620 .field("inner", &self.inner)
// Generates `Iterator` and `FusedIterator` impls for the four `*SplitN*`
// wrappers above, forwarding `next`/`size_hint` to the inner `GenericSplitN`.
// A macro is used because the four types differ only in name and item type.
3625 macro_rules! forward_iterator {
3626 ($name:ident: $elem:ident, $iter_of:ty) => {
3627 #[stable(feature = "rust1", since = "1.0.0")]
3628 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
3629 P: FnMut(&T) -> bool
3631 type Item = $iter_of;
3634 fn next(&mut self) -> Option<$iter_of> {
3639 fn size_hint(&self) -> (usize, Option<usize>) {
3640 self.inner.size_hint()
// FusedIterator is sound here because the inner split iterators keep
// returning None once exhausted.
3644 #[stable(feature = "fused", since = "1.26.0")]
3645 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
3646 where P: FnMut(&T) -> bool {}
// Instantiate the forwarding impls: immutable variants yield `&[T]`,
// mutable variants yield `&mut [T]`.
3650 forward_iterator! { SplitN: T, &'a [T] }
3651 forward_iterator! { RSplitN: T, &'a [T] }
3652 forward_iterator! { SplitNMut: T, &'a mut [T] }
3653 forward_iterator! { RSplitNMut: T, &'a mut [T] }
3655 /// An iterator over overlapping subslices of length `size`.
3657 /// This struct is created by the [`windows`] method on [slices].
3659 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3660 /// [slices]: ../../std/primitive.slice.html
3662 #[stable(feature = "rust1", since = "1.0.0")]
3663 pub struct Windows<'a, T:'a> {
// NOTE(review): presumably `size >= 1` is enforced by the `windows()`
// constructor — confirm; `next`'s `&self.v[1..]` and `next_back`'s
// `len()-1` would panic/underflow on an empty slice with size == 0.
3668 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3669 #[stable(feature = "rust1", since = "1.0.0")]
3670 impl<T> Clone for Windows<'_, T> {
3671 fn clone(&self) -> Self {
3679 #[stable(feature = "rust1", since = "1.0.0")]
3680 impl<'a, T> Iterator for Windows<'a, T> {
3681 type Item = &'a [T];
// Yields the leading `size`-element window, then advances by one element,
// so consecutive windows overlap in `size - 1` elements.
3684 fn next(&mut self) -> Option<&'a [T]> {
3685 if self.size > self.v.len() {
3688 let ret = Some(&self.v[..self.size]);
3689 self.v = &self.v[1..];
// Exact count of remaining windows: len - size + 1 (0 if size > len).
3695 fn size_hint(&self) -> (usize, Option<usize>) {
3696 if self.size > self.v.len() {
3699 let size = self.v.len() - self.size + 1;
3705 fn count(self) -> usize {
// `overflowing_add` guards against `size + n` wrapping on huge `n`.
3710 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3711 let (end, overflow) = self.size.overflowing_add(n);
3712 if end > self.v.len() || overflow {
3716 let nth = &self.v[n..end];
3717 self.v = &self.v[n+1..];
3723 fn last(self) -> Option<Self::Item> {
3724 if self.size > self.v.len() {
3727 let start = self.v.len() - self.size;
3728 Some(&self.v[start..])
// Back iteration takes the trailing window and shrinks the slice by one
// element from the end.
3733 #[stable(feature = "rust1", since = "1.0.0")]
3734 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
3736 fn next_back(&mut self) -> Option<&'a [T]> {
3737 if self.size > self.v.len() {
3740 let ret = Some(&self.v[self.v.len()-self.size..]);
3741 self.v = &self.v[..self.v.len()-1];
3747 #[stable(feature = "rust1", since = "1.0.0")]
3748 impl<T> ExactSizeIterator for Windows<'_, T> {}
3750 #[unstable(feature = "trusted_len", issue = "37572")]
3751 unsafe impl<T> TrustedLen for Windows<'_, T> {}
3753 #[stable(feature = "fused", since = "1.26.0")]
3754 impl<T> FusedIterator for Windows<'_, T> {}
// Unchecked random access: window `i` starts at offset `i` and is exactly
// `size` long. Caller must guarantee `i` is in-bounds per TrustedRandomAccess.
3757 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
3758 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3759 from_raw_parts(self.v.as_ptr().add(i), self.size)
3761 fn may_have_side_effect() -> bool { false }
3764 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
3765 /// time), starting at the beginning of the slice.
3767 /// When the slice len is not evenly divided by the chunk size, the last slice
3768 /// of the iteration will be the remainder.
3770 /// This struct is created by the [`chunks`] method on [slices].
3772 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
3773 /// [slices]: ../../std/primitive.slice.html
3775 #[stable(feature = "rust1", since = "1.0.0")]
3776 pub struct Chunks<'a, T:'a> {
3781 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3782 #[stable(feature = "rust1", since = "1.0.0")]
3783 impl<T> Clone for Chunks<'_, T> {
3784 fn clone(&self) -> Self {
3787 chunk_size: self.chunk_size,
3792 #[stable(feature = "rust1", since = "1.0.0")]
3793 impl<'a, T> Iterator for Chunks<'a, T> {
3794 type Item = &'a [T];
// The final chunk may be shorter than `chunk_size`; `min` handles it.
3797 fn next(&mut self) -> Option<&'a [T]> {
3798 if self.v.is_empty() {
3801 let chunksz = cmp::min(self.v.len(), self.chunk_size);
3802 let (fst, snd) = self.v.split_at(chunksz);
// Remaining chunk count rounds up when there is a partial trailing chunk.
3809 fn size_hint(&self) -> (usize, Option<usize>) {
3810 if self.v.is_empty() {
3813 let n = self.v.len() / self.chunk_size;
3814 let rem = self.v.len() % self.chunk_size;
3815 let n = if rem > 0 { n+1 } else { n };
3821 fn count(self) -> usize {
// `overflowing_mul` guards `n * chunk_size` wrapping; `checked_add` clamps
// the chunk end to the slice length for the partial final chunk.
3826 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3827 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3828 if start >= self.v.len() || overflow {
3832 let end = match start.checked_add(self.chunk_size) {
3833 Some(sum) => cmp::min(self.v.len(), sum),
3834 None => self.v.len(),
3836 let nth = &self.v[start..end];
3837 self.v = &self.v[end..];
// Last chunk starts at the largest multiple of chunk_size below len.
3843 fn last(self) -> Option<Self::Item> {
3844 if self.v.is_empty() {
3847 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3848 Some(&self.v[start..])
// Back iteration yields the (possibly short) trailing chunk first, so the
// remainder chunk comes out first when iterating in reverse.
3853 #[stable(feature = "rust1", since = "1.0.0")]
3854 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
3856 fn next_back(&mut self) -> Option<&'a [T]> {
3857 if self.v.is_empty() {
3860 let remainder = self.v.len() % self.chunk_size;
3861 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
3862 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
3869 #[stable(feature = "rust1", since = "1.0.0")]
3870 impl<T> ExactSizeIterator for Chunks<'_, T> {}
3872 #[unstable(feature = "trusted_len", issue = "37572")]
3873 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
3875 #[stable(feature = "fused", since = "1.26.0")]
3876 impl<T> FusedIterator for Chunks<'_, T> {}
// Unchecked access to chunk `i`; end is clamped so the last chunk may be
// shorter. Caller guarantees `i` is in-bounds.
3879 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
3880 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3881 let start = i * self.chunk_size;
3882 let end = match start.checked_add(self.chunk_size) {
3883 None => self.v.len(),
3884 Some(end) => cmp::min(end, self.v.len()),
3886 from_raw_parts(self.v.as_ptr().add(start), end - start)
3888 fn may_have_side_effect() -> bool { false }
3891 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
3892 /// elements at a time), starting at the beginning of the slice.
3894 /// When the slice len is not evenly divided by the chunk size, the last slice
3895 /// of the iteration will be the remainder.
3897 /// This struct is created by the [`chunks_mut`] method on [slices].
3899 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
3900 /// [slices]: ../../std/primitive.slice.html
3902 #[stable(feature = "rust1", since = "1.0.0")]
3903 pub struct ChunksMut<'a, T:'a> {
3908 #[stable(feature = "rust1", since = "1.0.0")]
3909 impl<'a, T> Iterator for ChunksMut<'a, T> {
3910 type Item = &'a mut [T];
// `mem::replace(&mut self.v, &mut [])` moves the slice out of `self` so it
// can be split with lifetime 'a without aliasing a borrow of `self`.
3913 fn next(&mut self) -> Option<&'a mut [T]> {
3914 if self.v.is_empty() {
3917 let sz = cmp::min(self.v.len(), self.chunk_size);
3918 let tmp = mem::replace(&mut self.v, &mut []);
3919 let (head, tail) = tmp.split_at_mut(sz);
3926 fn size_hint(&self) -> (usize, Option<usize>) {
3927 if self.v.is_empty() {
3930 let n = self.v.len() / self.chunk_size;
3931 let rem = self.v.len() % self.chunk_size;
3932 let n = if rem > 0 { n + 1 } else { n };
3938 fn count(self) -> usize {
// Same overflow-guarded arithmetic as the immutable `Chunks::nth`.
3943 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
3944 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3945 if start >= self.v.len() || overflow {
3949 let end = match start.checked_add(self.chunk_size) {
3950 Some(sum) => cmp::min(self.v.len(), sum),
3951 None => self.v.len(),
3953 let tmp = mem::replace(&mut self.v, &mut []);
3954 let (head, tail) = tmp.split_at_mut(end);
3955 let (_, nth) = head.split_at_mut(start);
3962 fn last(self) -> Option<Self::Item> {
3963 if self.v.is_empty() {
3966 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3967 Some(&mut self.v[start..])
// Reverse iteration yields the (possibly short) remainder chunk first.
3972 #[stable(feature = "rust1", since = "1.0.0")]
3973 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
3975 fn next_back(&mut self) -> Option<&'a mut [T]> {
3976 if self.v.is_empty() {
3979 let remainder = self.v.len() % self.chunk_size;
3980 let sz = if remainder != 0 { remainder } else { self.chunk_size };
3981 let tmp = mem::replace(&mut self.v, &mut []);
3982 let tmp_len = tmp.len();
3983 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
3990 #[stable(feature = "rust1", since = "1.0.0")]
3991 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
3993 #[unstable(feature = "trusted_len", issue = "37572")]
3994 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
3996 #[stable(feature = "fused", since = "1.26.0")]
3997 impl<T> FusedIterator for ChunksMut<'_, T> {}
// Unchecked mutable access; TrustedRandomAccess contract guarantees each
// index is fetched at most once, so no aliasing of &mut chunks occurs.
4000 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4001 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4002 let start = i * self.chunk_size;
4003 let end = match start.checked_add(self.chunk_size) {
4004 None => self.v.len(),
4005 Some(end) => cmp::min(end, self.v.len()),
4007 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4009 fn may_have_side_effect() -> bool { false }
4012 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4013 /// time), starting at the beginning of the slice.
4015 /// When the slice len is not evenly divided by the chunk size, the last
4016 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4017 /// the [`remainder`] function from the iterator.
4019 /// This struct is created by the [`chunks_exact`] method on [slices].
4021 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4022 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4023 /// [slices]: ../../std/primitive.slice.html
4025 #[unstable(feature = "chunks_exact", issue = "47115")]
4026 pub struct ChunksExact<'a, T:'a> {
4032 #[unstable(feature = "chunks_exact", issue = "47115")]
4033 impl<'a, T> ChunksExact<'a, T> {
4034 /// Return the remainder of the original slice that is not going to be
4035 /// returned by the iterator. The returned slice has at most `chunk_size-1`
// Accessor only — the leftover tail is stored separately at construction,
// so `remainder` can return it with the original lifetime 'a.
4037 pub fn remainder(&self) -> &'a [T] {
4042 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4043 #[unstable(feature = "chunks_exact", issue = "47115")]
4044 impl<T> Clone for ChunksExact<'_, T> {
4045 fn clone(&self) -> Self {
4049 chunk_size: self.chunk_size,
4054 #[unstable(feature = "chunks_exact", issue = "47115")]
4055 impl<'a, T> Iterator for ChunksExact<'a, T> {
4056 type Item = &'a [T];
// Unlike `Chunks`, every yielded chunk is exactly `chunk_size` long; the
// `len < chunk_size` check simply ends iteration, no short final chunk.
4059 fn next(&mut self) -> Option<&'a [T]> {
4060 if self.v.len() < self.chunk_size {
4063 let (fst, snd) = self.v.split_at(self.chunk_size);
// Exact count: no round-up needed since partial chunks are never yielded.
4070 fn size_hint(&self) -> (usize, Option<usize>) {
4071 let n = self.v.len() / self.chunk_size;
4076 fn count(self) -> usize {
4081 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4082 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4083 if start >= self.v.len() || overflow {
4087 let (_, snd) = self.v.split_at(start);
4094 fn last(mut self) -> Option<Self::Item> {
4099 #[unstable(feature = "chunks_exact", issue = "47115")]
4100 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
4102 fn next_back(&mut self) -> Option<&'a [T]> {
4103 if self.v.len() < self.chunk_size {
4106 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4113 #[unstable(feature = "chunks_exact", issue = "47115")]
4114 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
4115 fn is_empty(&self) -> bool {
4120 #[unstable(feature = "trusted_len", issue = "37572")]
4121 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
4123 #[unstable(feature = "chunks_exact", issue = "47115")]
4124 impl<T> FusedIterator for ChunksExact<'_, T> {}
// Every chunk has the exact size, so no end clamping is needed here,
// unlike the `Chunks` version of get_unchecked.
4127 #[unstable(feature = "chunks_exact", issue = "47115")]
4128 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
4129 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4130 let start = i * self.chunk_size;
4131 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4133 fn may_have_side_effect() -> bool { false }
4136 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4137 /// elements at a time), starting at the beginning of the slice.
4139 /// When the slice len is not evenly divided by the chunk size, the last up to
4140 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4141 /// [`into_remainder`] function from the iterator.
4143 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
4145 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
4146 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4147 /// [slices]: ../../std/primitive.slice.html
4149 #[unstable(feature = "chunks_exact", issue = "47115")]
4150 pub struct ChunksExactMut<'a, T:'a> {
4156 #[unstable(feature = "chunks_exact", issue = "47115")]
4157 impl<'a, T> ChunksExactMut<'a, T> {
4158 /// Return the remainder of the original slice that is not going to be
4159 /// returned by the iterator. The returned slice has at most `chunk_size-1`
// Consumes `self` (`into_` prefix) because handing out a second `&mut`
// to the tail while the iterator lives would alias.
4161 pub fn into_remainder(self) -> &'a mut [T] {
4166 #[unstable(feature = "chunks_exact", issue = "47115")]
4167 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
4168 type Item = &'a mut [T];
// `mem::replace` with an empty slice detaches `self.v` so it can be split
// with lifetime 'a (same trick as ChunksMut::next).
4171 fn next(&mut self) -> Option<&'a mut [T]> {
4172 if self.v.len() < self.chunk_size {
4175 let tmp = mem::replace(&mut self.v, &mut []);
4176 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4183 fn size_hint(&self) -> (usize, Option<usize>) {
4184 let n = self.v.len() / self.chunk_size;
4189 fn count(self) -> usize {
4194 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4195 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4196 if start >= self.v.len() || overflow {
4200 let tmp = mem::replace(&mut self.v, &mut []);
4201 let (_, snd) = tmp.split_at_mut(start);
4208 fn last(mut self) -> Option<Self::Item> {
4213 #[unstable(feature = "chunks_exact", issue = "47115")]
4214 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4216 fn next_back(&mut self) -> Option<&'a mut [T]> {
4217 if self.v.len() < self.chunk_size {
4220 let tmp = mem::replace(&mut self.v, &mut []);
4221 let tmp_len = tmp.len();
4222 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4229 #[unstable(feature = "chunks_exact", issue = "47115")]
4230 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
4231 fn is_empty(&self) -> bool {
4236 #[unstable(feature = "trusted_len", issue = "37572")]
4237 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
4239 #[unstable(feature = "chunks_exact", issue = "47115")]
4240 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
// Fixed-size chunks, mutable unchecked access; TrustedRandomAccess's
// at-most-once contract prevents aliased &mut slices.
4243 #[unstable(feature = "chunks_exact", issue = "47115")]
4244 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4245 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4246 let start = i * self.chunk_size;
4247 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4249 fn may_have_side_effect() -> bool { false }
4252 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4253 /// time), starting at the end of the slice.
4255 /// When the slice len is not evenly divided by the chunk size, the last slice
4256 /// of the iteration will be the remainder.
4258 /// This struct is created by the [`rchunks`] method on [slices].
4260 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4261 /// [slices]: ../../std/primitive.slice.html
4263 #[unstable(feature = "rchunks", issue = "55177")]
4264 pub struct RChunks<'a, T:'a> {
4269 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4270 #[unstable(feature = "rchunks", issue = "55177")]
4271 impl<'a, T> Clone for RChunks<'a, T> {
4272 fn clone(&self) -> RChunks<'a, T> {
4275 chunk_size: self.chunk_size,
4280 #[unstable(feature = "rchunks", issue = "55177")]
4281 impl<'a, T> Iterator for RChunks<'a, T> {
4282 type Item = &'a [T];
// Mirror image of `Chunks::next`: take the trailing chunk and keep the
// front of the slice for subsequent iterations.
4285 fn next(&mut self) -> Option<&'a [T]> {
4286 if self.v.is_empty() {
4289 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4290 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4297 fn size_hint(&self) -> (usize, Option<usize>) {
4298 if self.v.is_empty() {
4301 let n = self.v.len() / self.chunk_size;
4302 let rem = self.v.len() % self.chunk_size;
4303 let n = if rem > 0 { n+1 } else { n };
4309 fn count(self) -> usize {
// Indices are measured from the back; `checked_sub` clamps the start of
// the (possibly short) leading chunk to 0.
4314 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4315 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4316 if end >= self.v.len() || overflow {
4320 // Can't underflow because of the check above
4321 let end = self.v.len() - end;
4322 let start = match end.checked_sub(self.chunk_size) {
4326 let nth = &self.v[start..end];
4327 self.v = &self.v[0..start];
// The last item is the leading chunk, whose length is len % chunk_size
// (or a full chunk when len divides evenly).
4333 fn last(self) -> Option<Self::Item> {
4334 if self.v.is_empty() {
4337 let rem = self.v.len() % self.chunk_size;
4338 let end = if rem == 0 { self.chunk_size } else { rem };
4339 Some(&self.v[0..end])
// Reverse iteration yields the (possibly short) leading chunk first.
4344 #[unstable(feature = "rchunks", issue = "55177")]
4345 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
4347 fn next_back(&mut self) -> Option<&'a [T]> {
4348 if self.v.is_empty() {
4351 let remainder = self.v.len() % self.chunk_size;
4352 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4353 let (fst, snd) = self.v.split_at(chunksz);
4360 #[unstable(feature = "rchunks", issue = "55177")]
4361 impl<'a, T> ExactSizeIterator for RChunks<'a, T> {}
4363 #[unstable(feature = "trusted_len", issue = "37572")]
4364 unsafe impl<'a, T> TrustedLen for RChunks<'a, T> {}
4366 #[unstable(feature = "rchunks", issue = "55177")]
4367 impl<'a, T> FusedIterator for RChunks<'a, T> {}
// Unchecked access: chunk `i` ends `i * chunk_size` from the back; the
// `checked_sub` handles the short leading chunk.
4370 #[unstable(feature = "rchunks", issue = "55177")]
4371 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
4372 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4373 let end = self.v.len() - i * self.chunk_size;
4374 let start = match end.checked_sub(self.chunk_size) {
4376 Some(start) => start,
4378 from_raw_parts(self.v.as_ptr().add(start), end - start)
4380 fn may_have_side_effect() -> bool { false }
4383 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4384 /// elements at a time), starting at the end of the slice.
4386 /// When the slice len is not evenly divided by the chunk size, the last slice
4387 /// of the iteration will be the remainder.
4389 /// This struct is created by the [`rchunks_mut`] method on [slices].
4391 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4392 /// [slices]: ../../std/primitive.slice.html
4394 #[unstable(feature = "rchunks", issue = "55177")]
4395 pub struct RChunksMut<'a, T:'a> {
4400 #[unstable(feature = "rchunks", issue = "55177")]
4401 impl<'a, T> Iterator for RChunksMut<'a, T> {
4402 type Item = &'a mut [T];
// Trailing chunk first; `mem::replace` detaches `self.v` so the split
// borrows with lifetime 'a (same trick as ChunksMut).
4405 fn next(&mut self) -> Option<&'a mut [T]> {
4406 if self.v.is_empty() {
4409 let sz = cmp::min(self.v.len(), self.chunk_size);
4410 let tmp = mem::replace(&mut self.v, &mut []);
4411 let tmp_len = tmp.len();
4412 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4419 fn size_hint(&self) -> (usize, Option<usize>) {
4420 if self.v.is_empty() {
4423 let n = self.v.len() / self.chunk_size;
4424 let rem = self.v.len() % self.chunk_size;
4425 let n = if rem > 0 { n + 1 } else { n };
4431 fn count(self) -> usize {
// Back-relative index arithmetic, mirroring RChunks::nth.
4436 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4437 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4438 if end >= self.v.len() || overflow {
4442 // Can't underflow because of the check above
4443 let end = self.v.len() - end;
4444 let start = match end.checked_sub(self.chunk_size) {
4448 let tmp = mem::replace(&mut self.v, &mut []);
4449 let (head, tail) = tmp.split_at_mut(start);
4450 let (nth, _) = tail.split_at_mut(end - start);
4457 fn last(self) -> Option<Self::Item> {
4458 if self.v.is_empty() {
4461 let rem = self.v.len() % self.chunk_size;
4462 let end = if rem == 0 { self.chunk_size } else { rem };
4463 Some(&mut self.v[0..end])
// Reverse iteration yields the (possibly short) leading chunk first.
4468 #[unstable(feature = "rchunks", issue = "55177")]
4469 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
4471 fn next_back(&mut self) -> Option<&'a mut [T]> {
4472 if self.v.is_empty() {
4475 let remainder = self.v.len() % self.chunk_size;
4476 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4477 let tmp = mem::replace(&mut self.v, &mut []);
4478 let (head, tail) = tmp.split_at_mut(sz);
4485 #[unstable(feature = "rchunks", issue = "55177")]
4486 impl<'a, T> ExactSizeIterator for RChunksMut<'a, T> {}
4488 #[unstable(feature = "trusted_len", issue = "37572")]
4489 unsafe impl<'a, T> TrustedLen for RChunksMut<'a, T> {}
4491 #[unstable(feature = "rchunks", issue = "55177")]
4492 impl<'a, T> FusedIterator for RChunksMut<'a, T> {}
// Mutable unchecked access from the back; at-most-once TrustedRandomAccess
// contract keeps the &mut chunks disjoint.
4495 #[unstable(feature = "rchunks", issue = "55177")]
4496 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
4497 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4498 let end = self.v.len() - i * self.chunk_size;
4499 let start = match end.checked_sub(self.chunk_size) {
4501 Some(start) => start,
4503 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4505 fn may_have_side_effect() -> bool { false }
4508 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4509 /// time), starting at the end of the slice.
4511 /// When the slice len is not evenly divided by the chunk size, the last
4512 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4513 /// the [`remainder`] function from the iterator.
4515 /// This struct is created by the [`rchunks_exact`] method on [slices].
4517 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
4518 /// [`remainder`]: ../../std/slice/struct.RChunksExact.html#method.remainder
4519 /// [slices]: ../../std/primitive.slice.html
4521 #[unstable(feature = "rchunks", issue = "55177")]
4522 pub struct RChunksExact<'a, T:'a> {
4528 #[unstable(feature = "rchunks", issue = "55177")]
4529 impl<'a, T> RChunksExact<'a, T> {
4530 /// Return the remainder of the original slice that is not going to be
4531 /// returned by the iterator. The returned slice has at most `chunk_size-1`
// For the reverse iterator the remainder is the slice's *front* elements.
4533 pub fn remainder(&self) -> &'a [T] {
4538 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4539 #[unstable(feature = "rchunks", issue = "55177")]
4540 impl<'a, T> Clone for RChunksExact<'a, T> {
4541 fn clone(&self) -> RChunksExact<'a, T> {
4545 chunk_size: self.chunk_size,
4550 #[unstable(feature = "rchunks", issue = "55177")]
4551 impl<'a, T> Iterator for RChunksExact<'a, T> {
4552 type Item = &'a [T];
// Every chunk is exactly `chunk_size`; iteration ends once fewer than
// `chunk_size` elements remain (they live in the remainder).
4555 fn next(&mut self) -> Option<&'a [T]> {
4556 if self.v.len() < self.chunk_size {
4559 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4566 fn size_hint(&self) -> (usize, Option<usize>) {
4567 let n = self.v.len() / self.chunk_size;
4572 fn count(self) -> usize {
4577 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4578 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4579 if end >= self.v.len() || overflow {
4583 let (fst, _) = self.v.split_at(self.v.len() - end);
4590 fn last(mut self) -> Option<Self::Item> {
4595 #[unstable(feature = "rchunks", issue = "55177")]
4596 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
4598 fn next_back(&mut self) -> Option<&'a [T]> {
4599 if self.v.len() < self.chunk_size {
4602 let (fst, snd) = self.v.split_at(self.chunk_size)
4609 #[unstable(feature = "rchunks", issue = "55177")]
4610 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
4611 fn is_empty(&self) -> bool {
4616 #[unstable(feature = "trusted_len", issue = "37572")]
4617 unsafe impl<'a, T> TrustedLen for RChunksExact<'a, T> {}
4619 #[unstable(feature = "rchunks", issue = "55177")]
4620 impl<'a, T> FusedIterator for RChunksExact<'a, T> {}
// Fixed-size chunks measured from the back; no clamping needed.
4623 #[unstable(feature = "rchunks", issue = "55177")]
4624 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
4625 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4626 let end = self.v.len() - i * self.chunk_size;
4627 let start = end - self.chunk_size;
4628 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4630 fn may_have_side_effect() -> bool { false }
4633 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4634 /// elements at a time), starting at the end of the slice.
4636 /// When the slice len is not evenly divided by the chunk size, the last up to
4637 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4638 /// [`into_remainder`] function from the iterator.
4640 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
4642 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
4643 /// [`into_remainder`]: ../../std/slice/struct.RChunksExactMut.html#method.into_remainder
4644 /// [slices]: ../../std/primitive.slice.html
4646 #[unstable(feature = "rchunks", issue = "55177")]
4647 pub struct RChunksExactMut<'a, T:'a> {
4653 #[unstable(feature = "rchunks", issue = "55177")]
4654 impl<'a, T> RChunksExactMut<'a, T> {
4655 /// Return the remainder of the original slice that is not going to be
4656 /// returned by the iterator. The returned slice has at most `chunk_size-1`
// Consumes `self` so no aliasing &mut can exist (cf. ChunksExactMut).
4658 pub fn into_remainder(self) -> &'a mut [T] {
4663 #[unstable(feature = "rchunks", issue = "55177")]
4664 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
4665 type Item = &'a mut [T];
// Exact trailing chunk; `mem::replace` detaches the slice for an 'a split.
4668 fn next(&mut self) -> Option<&'a mut [T]> {
4669 if self.v.len() < self.chunk_size {
4672 let tmp = mem::replace(&mut self.v, &mut []);
4673 let tmp_len = tmp.len();
4674 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4681 fn size_hint(&self) -> (usize, Option<usize>) {
4682 let n = self.v.len() / self.chunk_size;
4687 fn count(self) -> usize {
4692 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4693 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4694 if end >= self.v.len() || overflow {
4698 let tmp = mem::replace(&mut self.v, &mut []);
4699 let tmp_len = tmp.len();
4700 let (fst, _) = tmp.split_at_mut(tmp_len - end);
4707 fn last(mut self) -> Option<Self::Item> {
4712 #[unstable(feature = "rchunks", issue = "55177")]
4713 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
4715 fn next_back(&mut self) -> Option<&'a mut [T]> {
4716 if self.v.len() < self.chunk_size {
4719 let tmp = mem::replace(&mut self.v, &mut []);
4720 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4727 #[unstable(feature = "rchunks", issue = "55177")]
4728 impl<'a, T> ExactSizeIterator for RChunksExactMut<'a, T> {
4729 fn is_empty(&self) -> bool {
4734 #[unstable(feature = "trusted_len", issue = "37572")]
4735 unsafe impl<'a, T> TrustedLen for RChunksExactMut<'a, T> {}
4737 #[unstable(feature = "rchunks", issue = "55177")]
4738 impl<'a, T> FusedIterator for RChunksExactMut<'a, T> {}
// Fixed-size mutable chunks from the back; TrustedRandomAccess's
// at-most-once contract keeps the &mut chunks disjoint.
4741 #[unstable(feature = "rchunks", issue = "55177")]
4742 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
4743 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4744 let end = self.v.len() - i * self.chunk_size;
4745 let start = end - self.chunk_size;
4746 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4748 fn may_have_side_effect() -> bool { false }
4755 /// Forms a slice from a pointer and a length.
4757 /// The `len` argument is the number of **elements**, not the number of bytes.
4761 /// This function is unsafe as there is no guarantee that the given pointer is
4762 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
4763 /// lifetime for the returned slice.
4765 /// `data` must be non-null and aligned, even for zero-length slices. One
4766 /// reason for this is that enum layout optimizations may rely on references
4767 /// (including slices of any length) being aligned and non-null to distinguish
4768 /// them from other data. You can obtain a pointer that is usable as `data`
4769 /// for zero-length slices using [`NonNull::dangling()`].
4771 /// The total size of the slice must be no larger than `isize::MAX` **bytes**
4772 /// in memory. See the safety documentation of [`pointer::offset`].
4776 /// The lifetime for the returned slice is inferred from its usage. To
4777 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
4778 /// source lifetime is safe in the context, such as by providing a helper
4779 /// function taking the lifetime of a host value for the slice, or by explicit
4787 /// // manifest a slice for a single element
4789 /// let ptr = &x as *const _;
4790 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
4791 /// assert_eq!(slice[0], 42);
4794 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
4795 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
4797 #[stable(feature = "rust1", since = "1.0.0")]
4798 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// These checks are `debug_assert!`, so misuse is only caught in debug
// builds; in release the caller's safety contract is trusted unchecked.
4799 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4800 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4801 "attempt to create slice covering half the address space");
// The `Repr`/`FatPtr` union (declared near the top of this file) builds the
// fat pointer (data, len) and reinterprets it as `&'a [T]`.
4802 Repr { raw: FatPtr { data, len } }.rust
4805 /// Performs the same functionality as [`from_raw_parts`], except that a
4806 /// mutable slice is returned.
4808 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
4809 /// as not being able to provide a non-aliasing guarantee of the returned
4810 /// mutable slice. `data` must be non-null and aligned even for zero-length
4811 /// slices as with [`from_raw_parts`]. The total size of the slice must be no
4812 /// larger than `isize::MAX` **bytes** in memory.
4814 /// See the documentation of [`from_raw_parts`] for more details.
4816 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
4818 #[stable(feature = "rust1", since = "1.0.0")]
4819 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
// Debug-only sanity checks, mirroring `from_raw_parts`.
4820 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4821 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4822 "attempt to create slice covering half the address space");
// `rust_mut` is the `&'a mut [T]` view of the same `Repr` union.
4823 Repr { raw: FatPtr { data, len } }.rust_mut
4826 /// Converts a reference to T into a slice of length 1 (without copying).
4827 #[stable(feature = "from_ref", since = "1.28.0")]
4828 pub fn from_ref<T>(s: &T) -> &[T] {
// Safe wrapper: `s` is a valid, aligned, non-null pointer to exactly one T,
// satisfying `from_raw_parts`' contract for len == 1.
4830 from_raw_parts(s, 1)
4834 /// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
4835 #[stable(feature = "from_ref", since = "1.28.0")]
4836 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// Safe wrapper: `s` is a unique, valid, aligned pointer to exactly one T,
// satisfying `from_raw_parts_mut`'s contract for len == 1.
4838 from_raw_parts_mut(s, 1)
4842 // This function is public only because there is no other way to unit test heapsort.
// Thin forwarder to the private `sort` submodule; `is_less` is the strict
// "less than" comparator used throughout the sort internals.
4843 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
4845 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
4846 where F: FnMut(&T, &T) -> bool
4848 sort::heapsort(v, &mut is_less);
4852 // Comparison traits
4856 /// Calls implementation provided memcmp.
4858 /// Interprets the data as u8.
4860 /// Returns 0 for equal, < 0 for less than and > 0 for greater
4862 // FIXME(#32610): Return type should be c_int
// Extern (libc) declaration; used by the `BytewiseEquality` fast path below.
4863 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
// PartialEq for slices delegates to the `SlicePartialEq` specialization
// trait below, so byte-like element types can use memcmp.
4866 #[stable(feature = "rust1", since = "1.0.0")]
4867 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
4868 fn eq(&self, other: &[B]) -> bool {
4869 SlicePartialEq::equal(self, other)
4872 fn ne(&self, other: &[B]) -> bool {
4873 SlicePartialEq::not_equal(self, other)
4877 #[stable(feature = "rust1", since = "1.0.0")]
4878 impl<T: Eq> Eq for [T] {}
4880 /// Implements comparison of vectors lexicographically.
4881 #[stable(feature = "rust1", since = "1.0.0")]
4882 impl<T: Ord> Ord for [T] {
4883 fn cmp(&self, other: &[T]) -> Ordering {
4884 SliceOrd::compare(self, other)
4888 /// Implements comparison of vectors lexicographically.
4889 #[stable(feature = "rust1", since = "1.0.0")]
4890 impl<T: PartialOrd> PartialOrd for [T] {
4891 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
4892 SlicePartialOrd::partial_compare(self, other)
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    // Default: negation of `equal`; specializations only need to supply `equal`.
    fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
}
4904 // Generic slice equality
4905 impl<A, B> SlicePartialEq<B> for [A]
4906 where A: PartialEq<B>
4908 default fn equal(&self, other: &[B]) -> bool {
4909 if self.len() != other.len() {
4913 for i in 0..self.len() {
4914 if !self[i].eq(&other[i]) {
4923 // Use memcmp for bytewise equality when the types allow
4924 impl<A> SlicePartialEq<A> for [A]
4925 where A: PartialEq<A> + BytewiseEquality
4927 fn equal(&self, other: &[A]) -> bool {
4928 if self.len() != other.len() {
4931 if self.as_ptr() == other.as_ptr() {
4935 let size = mem::size_of_val(self);
4936 memcmp(self.as_ptr() as *const u8,
4937 other.as_ptr() as *const u8, size) == 0
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd<B> {
    fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
}
4948 impl<A> SlicePartialOrd<A> for [A]
4951 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
4952 let l = cmp::min(self.len(), other.len());
4954 // Slice to the loop iteration range to enable bound check
4955 // elimination in the compiler
4956 let lhs = &self[..l];
4957 let rhs = &other[..l];
4960 match lhs[i].partial_cmp(&rhs[i]) {
4961 Some(Ordering::Equal) => (),
4962 non_eq => return non_eq,
4966 self.len().partial_cmp(&other.len())
4970 impl<A> SlicePartialOrd<A> for [A]
4973 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
4974 Some(SliceOrd::compare(self, other))
// intermediate trait for specialization of slice's Ord
trait SliceOrd<B> {
    fn compare(&self, other: &[B]) -> Ordering;
}
4984 impl<A> SliceOrd<A> for [A]
4987 default fn compare(&self, other: &[A]) -> Ordering {
4988 let l = cmp::min(self.len(), other.len());
4990 // Slice to the loop iteration range to enable bound check
4991 // elimination in the compiler
4992 let lhs = &self[..l];
4993 let rhs = &other[..l];
4996 match lhs[i].cmp(&rhs[i]) {
4997 Ordering::Equal => (),
4998 non_eq => return non_eq,
5002 self.len().cmp(&other.len())
5006 // memcmp compares a sequence of unsigned bytes lexicographically.
5007 // this matches the order we want for [u8], but no others (not even [i8]).
5008 impl SliceOrd<u8> for [u8] {
5010 fn compare(&self, other: &[u8]) -> Ordering {
5011 let order = unsafe {
5012 memcmp(self.as_ptr(), other.as_ptr(),
5013 cmp::min(self.len(), other.len()))
5016 self.len().cmp(&other.len())
5017 } else if order < 0 {
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
trait BytewiseEquality { }
// Expands to an empty impl of `$traitname` for every listed type.
macro_rules! impl_marker_for {
    ($traitname:ident, $($ty:ty)*) => {
        $(
            impl $traitname for $ty { }
        )*
    }
}
5038 impl_marker_for!(BytewiseEquality,
5039 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
5042 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5043 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
5046 fn may_have_side_effect() -> bool { false }
5050 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
5051 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
5052 &mut *self.ptr.add(i)
5054 fn may_have_side_effect() -> bool { false }
// Helper trait so `[T]::contains` can specialize byte search to memchr.
trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
5061 impl<T> SliceContains for T where T: PartialEq {
5062 default fn slice_contains(&self, x: &[Self]) -> bool {
5063 x.iter().any(|y| *y == *self)
5067 impl SliceContains for u8 {
5068 fn slice_contains(&self, x: &[Self]) -> bool {
5069 memchr::memchr(*self, x).is_some()
5073 impl SliceContains for i8 {
5074 fn slice_contains(&self, x: &[Self]) -> bool {
5075 let byte = *self as u8;
5076 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5077 memchr::memchr(byte, bytes).is_some()