1 // ignore-tidy-filelength
2 // ignore-tidy-undocumented-unsafe
4 //! Slice management and manipulation.
6 //! For more details see [`std::slice`].
8 //! [`std::slice`]: ../../std/slice/index.html
10 #![stable(feature = "rust1", since = "1.0.0")]
12 // How this module is organized.
14 // The library infrastructure for slices is fairly messy. There's
15 // a lot of stuff defined here. Let's keep it clean.
17 // The layout of this file is thus:
19 // * Inherent methods. This is where most of the slice API resides.
20 // * Implementations of a few common traits with important slice ops.
21 // * Definitions of a bunch of iterators.
23 // * The `raw` and `bytes` submodules.
24 // * Boilerplate trait implementations.
27 use crate::cmp::Ordering::{self, Equal, Greater, Less};
29 use crate::intrinsics::{assume, exact_div, is_aligned_and_not_null, unchecked_sub};
31 use crate::marker::{self, Copy, Send, Sized, Sync};
33 use crate::ops::{self, FnMut, Range};
34 use crate::option::Option;
35 use crate::option::Option::{None, Some};
36 use crate::ptr::{self, NonNull};
37 use crate::result::Result;
38 use crate::result::Result::{Err, Ok};
41 feature = "slice_internals",
43 reason = "exposed from core to be reused in std; use the memchr crate"
45 /// Pure rust memchr implementation, taken from rust-memchr
58 /// Returns the number of elements in the slice.
63 /// let a = [1, 2, 3];
64 /// assert_eq!(a.len(), 3);
66 #[stable(feature = "rust1", since = "1.0.0")]
67 #[rustc_const_stable(feature = "const_slice_len", since = "1.32.0")]
69 // SAFETY: const sound because we transmute out the length field as a usize (which it must be)
70 #[allow(unused_attributes)]
71 #[allow_internal_unstable(const_fn_union)]
72 pub const fn len(&self) -> usize {
73 unsafe { crate::ptr::Repr { rust: self }.raw.len }
76 /// Returns `true` if the slice has a length of 0.
81 /// let a = [1, 2, 3];
82 /// assert!(!a.is_empty());
84 #[stable(feature = "rust1", since = "1.0.0")]
85 #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.32.0")]
87 pub const fn is_empty(&self) -> bool {
91 /// Returns the first element of the slice, or `None` if it is empty.
96 /// let v = [10, 40, 30];
97 /// assert_eq!(Some(&10), v.first());
99 /// let w: &[i32] = &[];
100 /// assert_eq!(None, w.first());
102 #[stable(feature = "rust1", since = "1.0.0")]
104 pub fn first(&self) -> Option<&T> {
105 if let [first, ..] = self { Some(first) } else { None }
108 /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty.
113 /// let x = &mut [0, 1, 2];
115 /// if let Some(first) = x.first_mut() {
118 /// assert_eq!(x, &[5, 1, 2]);
120 #[stable(feature = "rust1", since = "1.0.0")]
122 pub fn first_mut(&mut self) -> Option<&mut T> {
123 if let [first, ..] = self { Some(first) } else { None }
126 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
131 /// let x = &[0, 1, 2];
133 /// if let Some((first, elements)) = x.split_first() {
134 /// assert_eq!(first, &0);
135 /// assert_eq!(elements, &[1, 2]);
138 #[stable(feature = "slice_splits", since = "1.5.0")]
140 pub fn split_first(&self) -> Option<(&T, &[T])> {
141 if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
144 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
149 /// let x = &mut [0, 1, 2];
151 /// if let Some((first, elements)) = x.split_first_mut() {
156 /// assert_eq!(x, &[3, 4, 5]);
158 #[stable(feature = "slice_splits", since = "1.5.0")]
160 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
161 if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
164 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
169 /// let x = &[0, 1, 2];
171 /// if let Some((last, elements)) = x.split_last() {
172 /// assert_eq!(last, &2);
173 /// assert_eq!(elements, &[0, 1]);
176 #[stable(feature = "slice_splits", since = "1.5.0")]
178 pub fn split_last(&self) -> Option<(&T, &[T])> {
179 if let [init @ .., last] = self { Some((last, init)) } else { None }
182 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
187 /// let x = &mut [0, 1, 2];
189 /// if let Some((last, elements)) = x.split_last_mut() {
194 /// assert_eq!(x, &[4, 5, 3]);
196 #[stable(feature = "slice_splits", since = "1.5.0")]
198 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
199 if let [init @ .., last] = self { Some((last, init)) } else { None }
202 /// Returns the last element of the slice, or `None` if it is empty.
207 /// let v = [10, 40, 30];
208 /// assert_eq!(Some(&30), v.last());
210 /// let w: &[i32] = &[];
211 /// assert_eq!(None, w.last());
213 #[stable(feature = "rust1", since = "1.0.0")]
215 pub fn last(&self) -> Option<&T> {
216 if let [.., last] = self { Some(last) } else { None }
219 /// Returns a mutable pointer to the last item in the slice.
224 /// let x = &mut [0, 1, 2];
226 /// if let Some(last) = x.last_mut() {
229 /// assert_eq!(x, &[0, 1, 10]);
231 #[stable(feature = "rust1", since = "1.0.0")]
233 pub fn last_mut(&mut self) -> Option<&mut T> {
234 if let [.., last] = self { Some(last) } else { None }
237 /// Returns a reference to an element or subslice depending on the type of
240 /// - If given a position, returns a reference to the element at that
241 /// position or `None` if out of bounds.
242 /// - If given a range, returns the subslice corresponding to that range,
243 /// or `None` if out of bounds.
248 /// let v = [10, 40, 30];
249 /// assert_eq!(Some(&40), v.get(1));
250 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
251 /// assert_eq!(None, v.get(3));
252 /// assert_eq!(None, v.get(0..4));
254 #[stable(feature = "rust1", since = "1.0.0")]
256 pub fn get<I>(&self, index: I) -> Option<&I::Output>
263 /// Returns a mutable reference to an element or subslice depending on the
264 /// type of index (see [`get`]) or `None` if the index is out of bounds.
266 /// [`get`]: #method.get
271 /// let x = &mut [0, 1, 2];
273 /// if let Some(elem) = x.get_mut(1) {
276 /// assert_eq!(x, &[0, 42, 2]);
278 #[stable(feature = "rust1", since = "1.0.0")]
280 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
287 /// Returns a reference to an element or subslice, without doing bounds
290 /// This is generally not recommended, use with caution!
291 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
292 /// even if the resulting reference is not used.
293 /// For a safe alternative see [`get`].
295 /// [`get`]: #method.get
296 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
301 /// let x = &[1, 2, 4];
304 /// assert_eq!(x.get_unchecked(1), &2);
307 #[stable(feature = "rust1", since = "1.0.0")]
309 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
313 index.get_unchecked(self)
316 /// Returns a mutable reference to an element or subslice, without doing
319 /// This is generally not recommended, use with caution!
320 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
321 /// even if the resulting reference is not used.
322 /// For a safe alternative see [`get_mut`].
324 /// [`get_mut`]: #method.get_mut
325 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
330 /// let x = &mut [1, 2, 4];
333 /// let elem = x.get_unchecked_mut(1);
336 /// assert_eq!(x, &[1, 13, 4]);
338 #[stable(feature = "rust1", since = "1.0.0")]
340 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
344 index.get_unchecked_mut(self)
347 /// Returns a raw pointer to the slice's buffer.
349 /// The caller must ensure that the slice outlives the pointer this
350 /// function returns, or else it will end up pointing to garbage.
352 /// The caller must also ensure that the memory the pointer (non-transitively) points to
353 /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
354 /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
356 /// Modifying the container referenced by this slice may cause its buffer
357 /// to be reallocated, which would also make any pointers to it invalid.
362 /// let x = &[1, 2, 4];
363 /// let x_ptr = x.as_ptr();
366 /// for i in 0..x.len() {
367 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
372 /// [`as_mut_ptr`]: #method.as_mut_ptr
373 #[stable(feature = "rust1", since = "1.0.0")]
374 #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")]
376 pub const fn as_ptr(&self) -> *const T {
377 self as *const [T] as *const T
380 /// Returns an unsafe mutable pointer to the slice's buffer.
382 /// The caller must ensure that the slice outlives the pointer this
383 /// function returns, or else it will end up pointing to garbage.
385 /// Modifying the container referenced by this slice may cause its buffer
386 /// to be reallocated, which would also make any pointers to it invalid.
391 /// let x = &mut [1, 2, 4];
392 /// let x_ptr = x.as_mut_ptr();
395 /// for i in 0..x.len() {
396 /// *x_ptr.add(i) += 2;
399 /// assert_eq!(x, &[3, 4, 6]);
401 #[stable(feature = "rust1", since = "1.0.0")]
403 pub fn as_mut_ptr(&mut self) -> *mut T {
404 self as *mut [T] as *mut T
407 /// Returns the two raw pointers spanning the slice.
409 /// The returned range is half-open, which means that the end pointer
410 /// points *one past* the last element of the slice. This way, an empty
411 /// slice is represented by two equal pointers, and the difference between
412 /// the two pointers represents the size of the slice.
414 /// See [`as_ptr`] for warnings on using these pointers. The end pointer
415 /// requires extra caution, as it does not point to a valid element in the
418 /// This function is useful for interacting with foreign interfaces which
419 /// use two pointers to refer to a range of elements in memory, as is
422 /// It can also be useful to check if a pointer to an element refers to an
423 /// element of this slice:
426 /// #![feature(slice_ptr_range)]
428 /// let a = [1, 2, 3];
429 /// let x = &a[1] as *const _;
430 /// let y = &5 as *const _;
432 /// assert!(a.as_ptr_range().contains(&x));
433 /// assert!(!a.as_ptr_range().contains(&y));
436 /// [`as_ptr`]: #method.as_ptr
437 #[unstable(feature = "slice_ptr_range", issue = "65807")]
439 pub fn as_ptr_range(&self) -> Range<*const T> {
440 // The `add` here is safe, because:
442 // - Both pointers are part of the same object, as pointing directly
443 // past the object also counts.
445 // - The size of the slice is never larger than isize::MAX bytes, as
447 // - https://github.com/rust-lang/unsafe-code-guidelines/issues/102#issuecomment-473340447
448 // - https://doc.rust-lang.org/reference/behavior-considered-undefined.html
449 // - https://doc.rust-lang.org/core/slice/fn.from_raw_parts.html#safety
450 // (This doesn't seem normative yet, but the very same assumption is
451 // made in many places, including the Index implementation of slices.)
453 // - There is no wrapping around involved, as slices do not wrap past
454 // the end of the address space.
456 // See the documentation of pointer::add.
457 let start = self.as_ptr();
458 let end = unsafe { start.add(self.len()) };
462 /// Returns the two unsafe mutable pointers spanning the slice.
464 /// The returned range is half-open, which means that the end pointer
465 /// points *one past* the last element of the slice. This way, an empty
466 /// slice is represented by two equal pointers, and the difference between
467 /// the two pointers represents the size of the slice.
469 /// See [`as_mut_ptr`] for warnings on using these pointers. The end
470 /// pointer requires extra caution, as it does not point to a valid element
473 /// This function is useful for interacting with foreign interfaces which
474 /// use two pointers to refer to a range of elements in memory, as is
477 /// [`as_mut_ptr`]: #method.as_mut_ptr
478 #[unstable(feature = "slice_ptr_range", issue = "65807")]
480 pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> {
481 // See as_ptr_range() above for why `add` here is safe.
482 let start = self.as_mut_ptr();
483 let end = unsafe { start.add(self.len()) };
487 /// Swaps two elements in the slice.
491 /// * a - The index of the first element
492 /// * b - The index of the second element
496 /// Panics if `a` or `b` are out of bounds.
501 /// let mut v = ["a", "b", "c", "d"];
503 /// assert!(v == ["a", "d", "c", "b"]);
505 #[stable(feature = "rust1", since = "1.0.0")]
507 pub fn swap(&mut self, a: usize, b: usize) {
509 // Can't take two mutable loans from one vector, so instead just cast
510 // them to their raw pointers to do the swap
511 let pa: *mut T = &mut self[a];
512 let pb: *mut T = &mut self[b];
517 /// Reverses the order of elements in the slice, in place.
522 /// let mut v = [1, 2, 3];
524 /// assert!(v == [3, 2, 1]);
526 #[stable(feature = "rust1", since = "1.0.0")]
528 pub fn reverse(&mut self) {
529 let mut i: usize = 0;
532 // For very small types, all the individual reads in the normal
533 // path perform poorly. We can do better, given efficient unaligned
534 // load/store, by loading a larger chunk and reversing a register.
536 // Ideally LLVM would do this for us, as it knows better than we do
537 // whether unaligned reads are efficient (since that changes between
538 // different ARM versions, for example) and what the best chunk size
539 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
540 // the loop, so we need to do this ourselves. (Hypothesis: reverse
541 // is troublesome because the sides can be aligned differently --
542 // will be, when the length is odd -- so there's no way of emitting
543 // pre- and postludes to use fully-aligned SIMD in the middle.)
545 let fast_unaligned = cfg!(any(target_arch = "x86", target_arch = "x86_64"));
547 if fast_unaligned && mem::size_of::<T>() == 1 {
548 // Use the llvm.bswap intrinsic to reverse u8s in a usize
549 let chunk = mem::size_of::<usize>();
550 while i + chunk - 1 < ln / 2 {
552 let pa: *mut T = self.get_unchecked_mut(i);
553 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
554 let va = ptr::read_unaligned(pa as *mut usize);
555 let vb = ptr::read_unaligned(pb as *mut usize);
556 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
557 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
563 if fast_unaligned && mem::size_of::<T>() == 2 {
564 // Use rotate-by-16 to reverse u16s in a u32
565 let chunk = mem::size_of::<u32>() / 2;
566 while i + chunk - 1 < ln / 2 {
568 let pa: *mut T = self.get_unchecked_mut(i);
569 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
570 let va = ptr::read_unaligned(pa as *mut u32);
571 let vb = ptr::read_unaligned(pb as *mut u32);
572 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
573 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
580 // Unsafe swap to avoid the bounds check in safe swap.
582 let pa: *mut T = self.get_unchecked_mut(i);
583 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
590 /// Returns an iterator over the slice.
595 /// let x = &[1, 2, 4];
596 /// let mut iterator = x.iter();
598 /// assert_eq!(iterator.next(), Some(&1));
599 /// assert_eq!(iterator.next(), Some(&2));
600 /// assert_eq!(iterator.next(), Some(&4));
601 /// assert_eq!(iterator.next(), None);
603 #[stable(feature = "rust1", since = "1.0.0")]
605 pub fn iter(&self) -> Iter<'_, T> {
607 let ptr = self.as_ptr();
608 assume(!ptr.is_null());
610 let end = if mem::size_of::<T>() == 0 {
611 (ptr as *const u8).wrapping_add(self.len()) as *const T
616 Iter { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: marker::PhantomData }
620 /// Returns an iterator that allows modifying each value.
625 /// let x = &mut [1, 2, 4];
626 /// for elem in x.iter_mut() {
629 /// assert_eq!(x, &[3, 4, 6]);
631 #[stable(feature = "rust1", since = "1.0.0")]
633 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
635 let ptr = self.as_mut_ptr();
636 assume(!ptr.is_null());
638 let end = if mem::size_of::<T>() == 0 {
639 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
644 IterMut { ptr: NonNull::new_unchecked(ptr), end, _marker: marker::PhantomData }
648 /// Returns an iterator over all contiguous windows of length
649 /// `size`. The windows overlap. If the slice is shorter than
650 /// `size`, the iterator returns no values.
654 /// Panics if `size` is 0.
659 /// let slice = ['r', 'u', 's', 't'];
660 /// let mut iter = slice.windows(2);
661 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
662 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
663 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
664 /// assert!(iter.next().is_none());
667 /// If the slice is shorter than `size`:
670 /// let slice = ['f', 'o', 'o'];
671 /// let mut iter = slice.windows(4);
672 /// assert!(iter.next().is_none());
674 #[stable(feature = "rust1", since = "1.0.0")]
676 pub fn windows(&self, size: usize) -> Windows<'_, T> {
678 Windows { v: self, size }
681 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
682 /// beginning of the slice.
684 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
685 /// slice, then the last chunk will not have length `chunk_size`.
687 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
688 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
693 /// Panics if `chunk_size` is 0.
698 /// let slice = ['l', 'o', 'r', 'e', 'm'];
699 /// let mut iter = slice.chunks(2);
700 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
701 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
702 /// assert_eq!(iter.next().unwrap(), &['m']);
703 /// assert!(iter.next().is_none());
706 /// [`chunks_exact`]: #method.chunks_exact
707 /// [`rchunks`]: #method.rchunks
708 #[stable(feature = "rust1", since = "1.0.0")]
710 pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
711 assert!(chunk_size != 0);
712 Chunks { v: self, chunk_size }
715 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
716 /// beginning of the slice.
718 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
719 /// length of the slice, then the last chunk will not have length `chunk_size`.
721 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
722 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
723 /// the end of the slice.
727 /// Panics if `chunk_size` is 0.
732 /// let v = &mut [0, 0, 0, 0, 0];
733 /// let mut count = 1;
735 /// for chunk in v.chunks_mut(2) {
736 /// for elem in chunk.iter_mut() {
741 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
744 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
745 /// [`rchunks_mut`]: #method.rchunks_mut
746 #[stable(feature = "rust1", since = "1.0.0")]
748 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> {
749 assert!(chunk_size != 0);
750 ChunksMut { v: self, chunk_size }
753 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
754 /// beginning of the slice.
756 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
757 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
758 /// from the `remainder` function of the iterator.
760 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
761 /// resulting code better than in the case of [`chunks`].
763 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
764 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
768 /// Panics if `chunk_size` is 0.
773 /// let slice = ['l', 'o', 'r', 'e', 'm'];
774 /// let mut iter = slice.chunks_exact(2);
775 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
776 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
777 /// assert!(iter.next().is_none());
778 /// assert_eq!(iter.remainder(), &['m']);
781 /// [`chunks`]: #method.chunks
782 /// [`rchunks_exact`]: #method.rchunks_exact
783 #[stable(feature = "chunks_exact", since = "1.31.0")]
785 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
786 assert!(chunk_size != 0);
787 let rem = self.len() % chunk_size;
788 let len = self.len() - rem;
789 let (fst, snd) = self.split_at(len);
790 ChunksExact { v: fst, rem: snd, chunk_size }
793 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
794 /// beginning of the slice.
796 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
797 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
798 /// retrieved from the `into_remainder` function of the iterator.
800 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
801 /// resulting code better than in the case of [`chunks_mut`].
803 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
804 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
809 /// Panics if `chunk_size` is 0.
814 /// let v = &mut [0, 0, 0, 0, 0];
815 /// let mut count = 1;
817 /// for chunk in v.chunks_exact_mut(2) {
818 /// for elem in chunk.iter_mut() {
823 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
826 /// [`chunks_mut`]: #method.chunks_mut
827 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
828 #[stable(feature = "chunks_exact", since = "1.31.0")]
830 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> {
831 assert!(chunk_size != 0);
832 let rem = self.len() % chunk_size;
833 let len = self.len() - rem;
834 let (fst, snd) = self.split_at_mut(len);
835 ChunksExactMut { v: fst, rem: snd, chunk_size }
838 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
841 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
842 /// slice, then the last chunk will not have length `chunk_size`.
844 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
845 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
850 /// Panics if `chunk_size` is 0.
855 /// let slice = ['l', 'o', 'r', 'e', 'm'];
856 /// let mut iter = slice.rchunks(2);
857 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
858 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
859 /// assert_eq!(iter.next().unwrap(), &['l']);
860 /// assert!(iter.next().is_none());
863 /// [`rchunks_exact`]: #method.rchunks_exact
864 /// [`chunks`]: #method.chunks
865 #[stable(feature = "rchunks", since = "1.31.0")]
867 pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
868 assert!(chunk_size != 0);
869 RChunks { v: self, chunk_size }
872 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
875 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
876 /// length of the slice, then the last chunk will not have length `chunk_size`.
878 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
879 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
880 /// beginning of the slice.
884 /// Panics if `chunk_size` is 0.
889 /// let v = &mut [0, 0, 0, 0, 0];
890 /// let mut count = 1;
892 /// for chunk in v.rchunks_mut(2) {
893 /// for elem in chunk.iter_mut() {
898 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
901 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
902 /// [`chunks_mut`]: #method.chunks_mut
903 #[stable(feature = "rchunks", since = "1.31.0")]
905 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> {
906 assert!(chunk_size != 0);
907 RChunksMut { v: self, chunk_size }
910 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
911 /// end of the slice.
913 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
914 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
915 /// from the `remainder` function of the iterator.
917 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
918 /// resulting code better than in the case of [`chunks`].
920 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
921 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
926 /// Panics if `chunk_size` is 0.
931 /// let slice = ['l', 'o', 'r', 'e', 'm'];
932 /// let mut iter = slice.rchunks_exact(2);
933 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
934 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
935 /// assert!(iter.next().is_none());
936 /// assert_eq!(iter.remainder(), &['l']);
939 /// [`chunks`]: #method.chunks
940 /// [`rchunks`]: #method.rchunks
941 /// [`chunks_exact`]: #method.chunks_exact
942 #[stable(feature = "rchunks", since = "1.31.0")]
944 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> {
945 assert!(chunk_size != 0);
946 let rem = self.len() % chunk_size;
947 let (fst, snd) = self.split_at(rem);
948 RChunksExact { v: snd, rem: fst, chunk_size }
951 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
954 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
955 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
956 /// retrieved from the `into_remainder` function of the iterator.
958 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
959 /// resulting code better than in the case of [`chunks_mut`].
961 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
962 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
967 /// Panics if `chunk_size` is 0.
972 /// let v = &mut [0, 0, 0, 0, 0];
973 /// let mut count = 1;
975 /// for chunk in v.rchunks_exact_mut(2) {
976 /// for elem in chunk.iter_mut() {
981 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
984 /// [`chunks_mut`]: #method.chunks_mut
985 /// [`rchunks_mut`]: #method.rchunks_mut
986 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
987 #[stable(feature = "rchunks", since = "1.31.0")]
989 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> {
990 assert!(chunk_size != 0);
991 let rem = self.len() % chunk_size;
992 let (fst, snd) = self.split_at_mut(rem);
993 RChunksExactMut { v: snd, rem: fst, chunk_size }
996 /// Divides one slice into two at an index.
998 /// The first will contain all indices from `[0, mid)` (excluding
999 /// the index `mid` itself) and the second will contain all
1000 /// indices from `[mid, len)` (excluding the index `len` itself).
1004 /// Panics if `mid > len`.
1009 /// let v = [1, 2, 3, 4, 5, 6];
1012 /// let (left, right) = v.split_at(0);
1013 /// assert!(left == []);
1014 /// assert!(right == [1, 2, 3, 4, 5, 6]);
1018 /// let (left, right) = v.split_at(2);
1019 /// assert!(left == [1, 2]);
1020 /// assert!(right == [3, 4, 5, 6]);
1024 /// let (left, right) = v.split_at(6);
1025 /// assert!(left == [1, 2, 3, 4, 5, 6]);
1026 /// assert!(right == []);
1029 #[stable(feature = "rust1", since = "1.0.0")]
1031 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
1032 (&self[..mid], &self[mid..])
1035 /// Divides one mutable slice into two at an index.
1037 /// The first will contain all indices from `[0, mid)` (excluding
1038 /// the index `mid` itself) and the second will contain all
1039 /// indices from `[mid, len)` (excluding the index `len` itself).
1043 /// Panics if `mid > len`.
1048 /// let mut v = [1, 0, 3, 0, 5, 6];
1049 /// // scoped to restrict the lifetime of the borrows
1051 /// let (left, right) = v.split_at_mut(2);
1052 /// assert!(left == [1, 0]);
1053 /// assert!(right == [3, 0, 5, 6]);
1057 /// assert!(v == [1, 2, 3, 4, 5, 6]);
1059 #[stable(feature = "rust1", since = "1.0.0")]
1061 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
1062 let len = self.len();
1063 let ptr = self.as_mut_ptr();
1066 assert!(mid <= len);
1068 (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
1072 /// Returns an iterator over subslices separated by elements that match
1073 /// `pred`. The matched element is not contained in the subslices.
1078 /// let slice = [10, 40, 33, 20];
1079 /// let mut iter = slice.split(|num| num % 3 == 0);
1081 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1082 /// assert_eq!(iter.next().unwrap(), &[20]);
1083 /// assert!(iter.next().is_none());
1086 /// If the first element is matched, an empty slice will be the first item
1087 /// returned by the iterator. Similarly, if the last element in the slice
1088 /// is matched, an empty slice will be the last item returned by the
1092 /// let slice = [10, 40, 33];
1093 /// let mut iter = slice.split(|num| num % 3 == 0);
1095 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1096 /// assert_eq!(iter.next().unwrap(), &[]);
1097 /// assert!(iter.next().is_none());
1100 /// If two matched elements are directly adjacent, an empty slice will be
1101 /// present between them:
1104 /// let slice = [10, 6, 33, 20];
1105 /// let mut iter = slice.split(|num| num % 3 == 0);
1107 /// assert_eq!(iter.next().unwrap(), &[10]);
1108 /// assert_eq!(iter.next().unwrap(), &[]);
1109 /// assert_eq!(iter.next().unwrap(), &[20]);
1110 /// assert!(iter.next().is_none());
1112 #[stable(feature = "rust1", since = "1.0.0")]
1114 pub fn split<F>(&self, pred: F) -> Split<'_, T, F>
1116 F: FnMut(&T) -> bool,
1118 Split { v: self, pred, finished: false }
1121 /// Returns an iterator over mutable subslices separated by elements that
1122 /// match `pred`. The matched element is not contained in the subslices.
1127 /// let mut v = [10, 40, 30, 20, 60, 50];
1129 /// for group in v.split_mut(|num| *num % 3 == 0) {
1132 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1134 #[stable(feature = "rust1", since = "1.0.0")]
1136 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<'_, T, F>
1138 F: FnMut(&T) -> bool,
1140 SplitMut { v: self, pred, finished: false }
1143 /// Returns an iterator over subslices separated by elements that match
1144 /// `pred`. The matched element is contained in the end of the previous
1145 /// subslice as a terminator.
1150 /// #![feature(split_inclusive)]
1151 /// let slice = [10, 40, 33, 20];
1152 /// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
1154 /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
1155 /// assert_eq!(iter.next().unwrap(), &[20]);
1156 /// assert!(iter.next().is_none());
1159 /// If the last element of the slice is matched,
1160 /// that element will be considered the terminator of the preceding slice.
1161 /// That slice will be the last item returned by the iterator.
1164 /// #![feature(split_inclusive)]
1165 /// let slice = [3, 10, 40, 33];
1166 /// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
1168 /// assert_eq!(iter.next().unwrap(), &[3]);
1169 /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
1170 /// assert!(iter.next().is_none());
1172 #[unstable(feature = "split_inclusive", issue = "72360")]
1174 pub fn split_inclusive<F>(&self, pred: F) -> SplitInclusive<'_, T, F>
1176 F: FnMut(&T) -> bool,
1178 SplitInclusive { v: self, pred, finished: false }
1181 /// Returns an iterator over mutable subslices separated by elements that
1182 /// match `pred`. The matched element is contained in the previous
1183 /// subslice as a terminator.
1188 /// #![feature(split_inclusive)]
1189 /// let mut v = [10, 40, 30, 20, 60, 50];
1191 /// for group in v.split_inclusive_mut(|num| *num % 3 == 0) {
1192 /// let terminator_idx = group.len()-1;
1193 /// group[terminator_idx] = 1;
1195 /// assert_eq!(v, [10, 40, 1, 20, 1, 1]);
1197 #[unstable(feature = "split_inclusive", issue = "72360")]
1199 pub fn split_inclusive_mut<F>(&mut self, pred: F) -> SplitInclusiveMut<'_, T, F>
1201 F: FnMut(&T) -> bool,
1203 SplitInclusiveMut { v: self, pred, finished: false }
1206 /// Returns an iterator over subslices separated by elements that match
1207 /// `pred`, starting at the end of the slice and working backwards.
1208 /// The matched element is not contained in the subslices.
1213 /// let slice = [11, 22, 33, 0, 44, 55];
1214 /// let mut iter = slice.rsplit(|num| *num == 0);
1216 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1217 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1218 /// assert_eq!(iter.next(), None);
1221 /// As with `split()`, if the first or last element is matched, an empty
1222 /// slice will be the first (or last) item returned by the iterator.
1225 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1226 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1227 /// assert_eq!(it.next().unwrap(), &[]);
1228 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1229 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1230 /// assert_eq!(it.next().unwrap(), &[]);
1231 /// assert_eq!(it.next(), None);
1233 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1235 pub fn rsplit<F>(&self, pred: F) -> RSplit<'_, T, F>
1237 F: FnMut(&T) -> bool,
1239 RSplit { inner: self.split(pred) }
1242 /// Returns an iterator over mutable subslices separated by elements that
1243 /// match `pred`, starting at the end of the slice and working
1244 /// backwards. The matched element is not contained in the subslices.
1249 /// let mut v = [100, 400, 300, 200, 600, 500];
1251 /// let mut count = 0;
1252 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1254 /// group[0] = count;
1256 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1259 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1261 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<'_, T, F>
1263 F: FnMut(&T) -> bool,
1265 RSplitMut { inner: self.split_mut(pred) }
1268 /// Returns an iterator over subslices separated by elements that match
1269 /// `pred`, limited to returning at most `n` items. The matched element is
1270 /// not contained in the subslices.
1272 /// The last element returned, if any, will contain the remainder of the
1277 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1278 /// `[20, 60, 50]`):
1281 /// let v = [10, 40, 30, 20, 60, 50];
1283 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1284 /// println!("{:?}", group);
1287 #[stable(feature = "rust1", since = "1.0.0")]
1289 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<'_, T, F>
1291 F: FnMut(&T) -> bool,
1293 SplitN { inner: GenericSplitN { iter: self.split(pred), count: n } }
1296 /// Returns an iterator over subslices separated by elements that match
1297 /// `pred`, limited to returning at most `n` items. The matched element is
1298 /// not contained in the subslices.
1300 /// The last element returned, if any, will contain the remainder of the
1306 /// let mut v = [10, 40, 30, 20, 60, 50];
1308 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1311 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1313 #[stable(feature = "rust1", since = "1.0.0")]
1315 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F>
1317 F: FnMut(&T) -> bool,
1319 SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n } }
1322 /// Returns an iterator over subslices separated by elements that match
1323 /// `pred` limited to returning at most `n` items. This starts at the end of
1324 /// the slice and works backwards. The matched element is not contained in
1327 /// The last element returned, if any, will contain the remainder of the
1332 /// Print the slice split once, starting from the end, by numbers divisible
1333 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1336 /// let v = [10, 40, 30, 20, 60, 50];
1338 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1339 /// println!("{:?}", group);
1342 #[stable(feature = "rust1", since = "1.0.0")]
1344 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<'_, T, F>
1346 F: FnMut(&T) -> bool,
1348 RSplitN { inner: GenericSplitN { iter: self.rsplit(pred), count: n } }
1351 /// Returns an iterator over subslices separated by elements that match
1352 /// `pred` limited to returning at most `n` items. This starts at the end of
1353 /// the slice and works backwards. The matched element is not contained in
1356 /// The last element returned, if any, will contain the remainder of the
1362 /// let mut s = [10, 40, 30, 20, 60, 50];
1364 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1367 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1369 #[stable(feature = "rust1", since = "1.0.0")]
1371 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F>
1373 F: FnMut(&T) -> bool,
1375 RSplitNMut { inner: GenericSplitN { iter: self.rsplit_mut(pred), count: n } }
1378 /// Returns `true` if the slice contains an element with the given value.
1383 /// let v = [10, 40, 30];
1384 /// assert!(v.contains(&30));
1385 /// assert!(!v.contains(&50));
1388 /// If you do not have an `&T`, but just an `&U` such that `T: Borrow<U>`
1389 /// (e.g. `String: Borrow<str>`), you can use `iter().any`:
1392 /// let v = [String::from("hello"), String::from("world")]; // slice of `String`
1393 /// assert!(v.iter().any(|e| e == "hello")); // search with `&str`
1394 /// assert!(!v.iter().any(|e| e == "hi"));
1396 #[stable(feature = "rust1", since = "1.0.0")]
1397 pub fn contains(&self, x: &T) -> bool
1401 x.slice_contains(self)
1404 /// Returns `true` if `needle` is a prefix of the slice.
1409 /// let v = [10, 40, 30];
1410 /// assert!(v.starts_with(&[10]));
1411 /// assert!(v.starts_with(&[10, 40]));
1412 /// assert!(!v.starts_with(&[50]));
1413 /// assert!(!v.starts_with(&[10, 50]));
1416 /// Always returns `true` if `needle` is an empty slice:
1419 /// let v = &[10, 40, 30];
1420 /// assert!(v.starts_with(&[]));
1421 /// let v: &[u8] = &[];
1422 /// assert!(v.starts_with(&[]));
1424 #[stable(feature = "rust1", since = "1.0.0")]
1425 pub fn starts_with(&self, needle: &[T]) -> bool
1429 let n = needle.len();
1430 self.len() >= n && needle == &self[..n]
1433 /// Returns `true` if `needle` is a suffix of the slice.
1438 /// let v = [10, 40, 30];
1439 /// assert!(v.ends_with(&[30]));
1440 /// assert!(v.ends_with(&[40, 30]));
1441 /// assert!(!v.ends_with(&[50]));
1442 /// assert!(!v.ends_with(&[50, 30]));
1445 /// Always returns `true` if `needle` is an empty slice:
1448 /// let v = &[10, 40, 30];
1449 /// assert!(v.ends_with(&[]));
1450 /// let v: &[u8] = &[];
1451 /// assert!(v.ends_with(&[]));
1453 #[stable(feature = "rust1", since = "1.0.0")]
1454 pub fn ends_with(&self, needle: &[T]) -> bool
1458 let (m, n) = (self.len(), needle.len());
1459 m >= n && needle == &self[m - n..]
1462 /// Binary searches this sorted slice for a given element.
1464 /// If the value is found then [`Result::Ok`] is returned, containing the
1465 /// index of the matching element. If there are multiple matches, then any
1466 /// one of the matches could be returned. If the value is not found then
1467 /// [`Result::Err`] is returned, containing the index where a matching
1468 /// element could be inserted while maintaining sorted order.
1472 /// Looks up a series of four elements. The first is found, with a
1473 /// uniquely determined position; the second and third are not
1474 /// found; the fourth could match any position in `[1, 4]`.
1477 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1479 /// assert_eq!(s.binary_search(&13), Ok(9));
1480 /// assert_eq!(s.binary_search(&4), Err(7));
1481 /// assert_eq!(s.binary_search(&100), Err(13));
1482 /// let r = s.binary_search(&1);
1483 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1486 /// If you want to insert an item to a sorted vector, while maintaining
1490 /// let mut s = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1492 /// let idx = s.binary_search(&num).unwrap_or_else(|x| x);
1493 /// s.insert(idx, num);
1494 /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
1496 #[stable(feature = "rust1", since = "1.0.0")]
1497 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
1501 self.binary_search_by(|p| p.cmp(x))
1504 /// Binary searches this sorted slice with a comparator function.
1506 /// The comparator function should implement an order consistent
1507 /// with the sort order of the underlying slice, returning an
1508 /// order code that indicates whether its argument is `Less`,
1509 /// `Equal` or `Greater` the desired target.
1511 /// If the value is found then [`Result::Ok`] is returned, containing the
1512 /// index of the matching element. If there are multiple matches, then any
1513 /// one of the matches could be returned. If the value is not found then
1514 /// [`Result::Err`] is returned, containing the index where a matching
1515 /// element could be inserted while maintaining sorted order.
1519 /// Looks up a series of four elements. The first is found, with a
1520 /// uniquely determined position; the second and third are not
1521 /// found; the fourth could match any position in `[1, 4]`.
1524 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1527 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1529 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1531 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1533 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1534 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1536 #[stable(feature = "rust1", since = "1.0.0")]
1538 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1540 F: FnMut(&'a T) -> Ordering,
1543 let mut size = s.len();
1547 let mut base = 0usize;
1549 let half = size / 2;
1550 let mid = base + half;
1551 // mid is always in [0, size), that means mid is >= 0 and < size.
1552 // mid >= 0: by definition
1553 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
1554 let cmp = f(unsafe { s.get_unchecked(mid) });
1555 base = if cmp == Greater { base } else { mid };
1558 // base is always in [0, size) because base <= mid.
1559 let cmp = f(unsafe { s.get_unchecked(base) });
1560 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1563 /// Binary searches this sorted slice with a key extraction function.
1565 /// Assumes that the slice is sorted by the key, for instance with
1566 /// [`sort_by_key`] using the same key extraction function.
1568 /// If the value is found then [`Result::Ok`] is returned, containing the
1569 /// index of the matching element. If there are multiple matches, then any
1570 /// one of the matches could be returned. If the value is not found then
1571 /// [`Result::Err`] is returned, containing the index where a matching
1572 /// element could be inserted while maintaining sorted order.
1574 /// [`sort_by_key`]: #method.sort_by_key
1578 /// Looks up a series of four elements in a slice of pairs sorted by
1579 /// their second elements. The first is found, with a uniquely
1580 /// determined position; the second and third are not found; the
1581 /// fourth could match any position in `[1, 4]`.
1584 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1585 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1586 /// (1, 21), (2, 34), (4, 55)];
1588 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1589 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1590 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1591 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1592 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1594 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1596 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1598 F: FnMut(&'a T) -> B,
1601 self.binary_search_by(|k| f(k).cmp(b))
1604 /// Sorts the slice, but may not preserve the order of equal elements.
1606 /// This sort is unstable (i.e., may reorder equal elements), in-place
1607 /// (i.e., does not allocate), and `O(n * log(n))` worst-case.
1609 /// # Current implementation
1611 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1612 /// which combines the fast average case of randomized quicksort with the fast worst case of
1613 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1614 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1615 /// deterministic behavior.
1617 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1618 /// slice consists of several concatenated sorted sequences.
1623 /// let mut v = [-5, 4, 1, -3, 2];
1625 /// v.sort_unstable();
1626 /// assert!(v == [-5, -3, 1, 2, 4]);
1629 /// [pdqsort]: https://github.com/orlp/pdqsort
1630 #[stable(feature = "sort_unstable", since = "1.20.0")]
1632 pub fn sort_unstable(&mut self)
1636 sort::quicksort(self, |a, b| a.lt(b));
1639 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1642 /// This sort is unstable (i.e., may reorder equal elements), in-place
1643 /// (i.e., does not allocate), and `O(n * log(n))` worst-case.
1645 /// The comparator function must define a total ordering for the elements in the slice. If
1646 /// the ordering is not total, the order of the elements is unspecified. An order is a
1647 /// total order if it is (for all a, b and c):
1649 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1650 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1652 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1653 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1656 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1657 /// floats.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap());
1658 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1661 /// # Current implementation
1663 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1664 /// which combines the fast average case of randomized quicksort with the fast worst case of
1665 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1666 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1667 /// deterministic behavior.
1669 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1670 /// slice consists of several concatenated sorted sequences.
1675 /// let mut v = [5, 4, 1, 3, 2];
1676 /// v.sort_unstable_by(|a, b| a.cmp(b));
1677 /// assert!(v == [1, 2, 3, 4, 5]);
1679 /// // reverse sorting
1680 /// v.sort_unstable_by(|a, b| b.cmp(a));
1681 /// assert!(v == [5, 4, 3, 2, 1]);
1684 /// [pdqsort]: https://github.com/orlp/pdqsort
1685 #[stable(feature = "sort_unstable", since = "1.20.0")]
1687 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1689 F: FnMut(&T, &T) -> Ordering,
1691 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1694 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1697 /// This sort is unstable (i.e., may reorder equal elements), in-place
1698 /// (i.e., does not allocate), and `O(m * n * log(n))` worst-case, where the key function is
1701 /// # Current implementation
1703 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1704 /// which combines the fast average case of randomized quicksort with the fast worst case of
1705 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1706 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1707 /// deterministic behavior.
1709 /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
1710 /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
1711 /// cases where the key function is expensive.
1716 /// let mut v = [-5i32, 4, 1, -3, 2];
1718 /// v.sort_unstable_by_key(|k| k.abs());
1719 /// assert!(v == [1, 2, -3, 4, -5]);
1722 /// [pdqsort]: https://github.com/orlp/pdqsort
1723 #[stable(feature = "sort_unstable", since = "1.20.0")]
1725 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1730 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1733 /// Reorder the slice such that the element at `index` is at its final sorted position.
1735 /// This reordering has the additional property that any value at position `i < index` will be
1736 /// less than or equal to any value at a position `j > index`. Additionally, this reordering is
1737 /// unstable (i.e. any number of equal elements may end up at position `index`), in-place
1738 /// (i.e. does not allocate), and `O(n)` worst-case. This function is also/ known as "kth
1739 /// element" in other libraries. It returns a triplet of the following values: all elements less
1740 /// than the one at the given index, the value at the given index, and all elements greater than
1741 /// the one at the given index.
1743 /// # Current implementation
1745 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1746 /// used for [`sort_unstable`].
1748 /// [`sort_unstable`]: #method.sort_unstable
1752 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1757 /// #![feature(slice_partition_at_index)]
1759 /// let mut v = [-5i32, 4, 1, -3, 2];
1761 /// // Find the median
1762 /// v.partition_at_index(2);
1764 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1765 /// // about the specified index.
1766 /// assert!(v == [-3, -5, 1, 2, 4] ||
1767 /// v == [-5, -3, 1, 2, 4] ||
1768 /// v == [-3, -5, 1, 4, 2] ||
1769 /// v == [-5, -3, 1, 4, 2]);
1771 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1773 pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T])
1777 let mut f = |a: &T, b: &T| a.lt(b);
1778 sort::partition_at_index(self, index, &mut f)
1781 /// Reorder the slice with a comparator function such that the element at `index` is at its
1782 /// final sorted position.
1784 /// This reordering has the additional property that any value at position `i < index` will be
1785 /// less than or equal to any value at a position `j > index` using the comparator function.
1786 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1787 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1788 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1789 /// values: all elements less than the one at the given index, the value at the given index,
1790 /// and all elements greater than the one at the given index, using the provided comparator
1793 /// # Current implementation
1795 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1796 /// used for [`sort_unstable`].
1798 /// [`sort_unstable`]: #method.sort_unstable
1802 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1807 /// #![feature(slice_partition_at_index)]
1809 /// let mut v = [-5i32, 4, 1, -3, 2];
1811 /// // Find the median as if the slice were sorted in descending order.
1812 /// v.partition_at_index_by(2, |a, b| b.cmp(a));
1814 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1815 /// // about the specified index.
1816 /// assert!(v == [2, 4, 1, -5, -3] ||
1817 /// v == [2, 4, 1, -3, -5] ||
1818 /// v == [4, 2, 1, -5, -3] ||
1819 /// v == [4, 2, 1, -3, -5]);
1821 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1823 pub fn partition_at_index_by<F>(
1827 ) -> (&mut [T], &mut T, &mut [T])
1829 F: FnMut(&T, &T) -> Ordering,
1831 let mut f = |a: &T, b: &T| compare(a, b) == Less;
1832 sort::partition_at_index(self, index, &mut f)
1835 /// Reorder the slice with a key extraction function such that the element at `index` is at its
1836 /// final sorted position.
1838 /// This reordering has the additional property that any value at position `i < index` will be
1839 /// less than or equal to any value at a position `j > index` using the key extraction function.
1840 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1841 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1842 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1843 /// values: all elements less than the one at the given index, the value at the given index, and
1844 /// all elements greater than the one at the given index, using the provided key extraction
1847 /// # Current implementation
1849 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1850 /// used for [`sort_unstable`].
1852 /// [`sort_unstable`]: #method.sort_unstable
1856 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1861 /// #![feature(slice_partition_at_index)]
1863 /// let mut v = [-5i32, 4, 1, -3, 2];
1865 /// // Return the median as if the array were sorted according to absolute value.
1866 /// v.partition_at_index_by_key(2, |a| a.abs());
1868 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1869 /// // about the specified index.
1870 /// assert!(v == [1, 2, -3, 4, -5] ||
1871 /// v == [1, 2, -3, -5, 4] ||
1872 /// v == [2, 1, -3, 4, -5] ||
1873 /// v == [2, 1, -3, -5, 4]);
1875 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1877 pub fn partition_at_index_by_key<K, F>(
1881 ) -> (&mut [T], &mut T, &mut [T])
1886 let mut g = |a: &T, b: &T| f(a).lt(&f(b));
1887 sort::partition_at_index(self, index, &mut g)
1890 /// Moves all consecutive repeated elements to the end of the slice according to the
1891 /// [`PartialEq`] trait implementation.
1893 /// Returns two slices. The first contains no consecutive repeated elements.
1894 /// The second contains all the duplicates in no specified order.
1896 /// If the slice is sorted, the first returned slice contains no duplicates.
1901 /// #![feature(slice_partition_dedup)]
1903 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1905 /// let (dedup, duplicates) = slice.partition_dedup();
1907 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1908 /// assert_eq!(duplicates, [2, 3, 1]);
1910 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1912 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
1916 self.partition_dedup_by(|a, b| a == b)
1919 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1920 /// a given equality relation.
1922 /// Returns two slices. The first contains no consecutive repeated elements.
1923 /// The second contains all the duplicates in no specified order.
1925 /// The `same_bucket` function is passed references to two elements from the slice and
1926 /// must determine if the elements compare equal. The elements are passed in opposite order
1927 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1928 /// at the end of the slice.
1930 /// If the slice is sorted, the first returned slice contains no duplicates.
1935 /// #![feature(slice_partition_dedup)]
1937 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1939 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1941 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1942 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1944 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1946 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1948 F: FnMut(&mut T, &mut T) -> bool,
1950 // Although we have a mutable reference to `self`, we cannot make
1951 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1952 // must ensure that the slice is in a valid state at all times.
1954 // The way that we handle this is by using swaps; we iterate
1955 // over all the elements, swapping as we go so that at the end
1956 // the elements we wish to keep are in the front, and those we
1957 // wish to reject are at the back. We can then split the slice.
1958 // This operation is still `O(n)`.
1960 // Example: We start in this state, where `r` represents "next
1961 // read" and `w` represents "next_write`.
1964 // +---+---+---+---+---+---+
1965 // | 0 | 1 | 1 | 2 | 3 | 3 |
1966 // +---+---+---+---+---+---+
1969 // Comparing self[r] against self[w-1], this is not a duplicate, so
1970 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1971 // r and w, leaving us with:
1974 // +---+---+---+---+---+---+
1975 // | 0 | 1 | 1 | 2 | 3 | 3 |
1976 // +---+---+---+---+---+---+
1979 // Comparing self[r] against self[w-1], this value is a duplicate,
1980 // so we increment `r` but leave everything else unchanged:
1983 // +---+---+---+---+---+---+
1984 // | 0 | 1 | 1 | 2 | 3 | 3 |
1985 // +---+---+---+---+---+---+
1988 // Comparing self[r] against self[w-1], this is not a duplicate,
1989 // so swap self[r] and self[w] and advance r and w:
1992 // +---+---+---+---+---+---+
1993 // | 0 | 1 | 2 | 1 | 3 | 3 |
1994 // +---+---+---+---+---+---+
1997 // Not a duplicate, repeat:
2000 // +---+---+---+---+---+---+
2001 // | 0 | 1 | 2 | 3 | 1 | 3 |
2002 // +---+---+---+---+---+---+
2005 // Duplicate, advance r. End of slice. Split at w.
2007 let len = self.len();
2009 return (self, &mut []);
2012 let ptr = self.as_mut_ptr();
2013 let mut next_read: usize = 1;
2014 let mut next_write: usize = 1;
2017 // Avoid bounds checks by using raw pointers.
2018 while next_read < len {
2019 let ptr_read = ptr.add(next_read);
2020 let prev_ptr_write = ptr.add(next_write - 1);
2021 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
2022 if next_read != next_write {
2023 let ptr_write = prev_ptr_write.offset(1);
2024 mem::swap(&mut *ptr_read, &mut *ptr_write);
2032 self.split_at_mut(next_write)
2035 /// Moves all but the first of consecutive elements to the end of the slice that resolve
2036 /// to the same key.
2038 /// Returns two slices. The first contains no consecutive repeated elements.
2039 /// The second contains all the duplicates in no specified order.
2041 /// If the slice is sorted, the first returned slice contains no duplicates.
2046 /// #![feature(slice_partition_dedup)]
2048 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
2050 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
2052 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
2053 /// assert_eq!(duplicates, [21, 30, 13]);
2055 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
2057 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
2059 F: FnMut(&mut T) -> K,
2062 self.partition_dedup_by(|a, b| key(a) == key(b))
2065 /// Rotates the slice in-place such that the first `mid` elements of the
2066 /// slice move to the end while the last `self.len() - mid` elements move to
2067 /// the front. After calling `rotate_left`, the element previously at index
2068 /// `mid` will become the first element in the slice.
2072 /// This function will panic if `mid` is greater than the length of the
2073 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
2078 /// Takes linear (in `self.len()`) time.
2083 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2084 /// a.rotate_left(2);
2085 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
2088 /// Rotating a subslice:
2091 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2092 /// a[1..5].rotate_left(1);
2093 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
2095 #[stable(feature = "slice_rotate", since = "1.26.0")]
2096 pub fn rotate_left(&mut self, mid: usize) {
2097 assert!(mid <= self.len());
2098 let k = self.len() - mid;
2101 let p = self.as_mut_ptr();
2102 rotate::ptr_rotate(mid, p.add(mid), k);
2106 /// Rotates the slice in-place such that the first `self.len() - k`
2107 /// elements of the slice move to the end while the last `k` elements move
2108 /// to the front. After calling `rotate_right`, the element previously at
2109 /// index `self.len() - k` will become the first element in the slice.
2113 /// This function will panic if `k` is greater than the length of the
2114 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
2119 /// Takes linear (in `self.len()`) time.
2124 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2125 /// a.rotate_right(2);
2126 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
2129 /// Rotate a subslice:
2132 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2133 /// a[1..5].rotate_right(1);
2134 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
2136 #[stable(feature = "slice_rotate", since = "1.26.0")]
2137 pub fn rotate_right(&mut self, k: usize) {
2138 assert!(k <= self.len());
2139 let mid = self.len() - k;
2142 let p = self.as_mut_ptr();
2143 rotate::ptr_rotate(mid, p.add(mid), k);
2147 /// Fills `self` with elements by cloning `value`.
2152 /// #![feature(slice_fill)]
2154 /// let mut buf = vec![0; 10];
2156 /// assert_eq!(buf, vec![1; 10]);
2158 #[unstable(feature = "slice_fill", issue = "70758")]
2159 pub fn fill(&mut self, value: T)
2163 if let Some((last, elems)) = self.split_last_mut() {
2165 el.clone_from(&value);
2172 /// Copies the elements from `src` into `self`.
2174 /// The length of `src` must be the same as `self`.
2176 /// If `T` implements `Copy`, it can be more performant to use
2177 /// [`copy_from_slice`].
2181 /// This function will panic if the two slices have different lengths.
2185 /// Cloning two elements from a slice into another:
2188 /// let src = [1, 2, 3, 4];
2189 /// let mut dst = [0, 0];
2191 /// // Because the slices have to be the same length,
2192 /// // we slice the source slice from four elements
2193 /// // to two. It will panic if we don't do this.
2194 /// dst.clone_from_slice(&src[2..]);
2196 /// assert_eq!(src, [1, 2, 3, 4]);
2197 /// assert_eq!(dst, [3, 4]);
2200 /// Rust enforces that there can only be one mutable reference with no
2201 /// immutable references to a particular piece of data in a particular
2202 /// scope. Because of this, attempting to use `clone_from_slice` on a
2203 /// single slice will result in a compile failure:
2206 /// let mut slice = [1, 2, 3, 4, 5];
2208 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
2211 /// To work around this, we can use [`split_at_mut`] to create two distinct
2212 /// sub-slices from a slice:
2215 /// let mut slice = [1, 2, 3, 4, 5];
2218 /// let (left, right) = slice.split_at_mut(2);
2219 /// left.clone_from_slice(&right[1..]);
2222 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2225 /// [`copy_from_slice`]: #method.copy_from_slice
2226 /// [`split_at_mut`]: #method.split_at_mut
2227 #[stable(feature = "clone_from_slice", since = "1.7.0")]
2228 pub fn clone_from_slice(&mut self, src: &[T])
2232 assert!(self.len() == src.len(), "destination and source slices have different lengths");
2233 // NOTE: We need to explicitly slice them to the same length
2234 // for bounds checking to be elided, and the optimizer will
2235 // generate memcpy for simple cases (for example T = u8).
2236 let len = self.len();
2237 let src = &src[..len];
2239 self[i].clone_from(&src[i]);
2243 /// Copies all elements from `src` into `self`, using a memcpy.
2245 /// The length of `src` must be the same as `self`.
2247 /// If `T` does not implement `Copy`, use [`clone_from_slice`].
2251 /// This function will panic if the two slices have different lengths.
2255 /// Copying two elements from a slice into another:
2258 /// let src = [1, 2, 3, 4];
2259 /// let mut dst = [0, 0];
2261 /// // Because the slices have to be the same length,
2262 /// // we slice the source slice from four elements
2263 /// // to two. It will panic if we don't do this.
2264 /// dst.copy_from_slice(&src[2..]);
2266 /// assert_eq!(src, [1, 2, 3, 4]);
2267 /// assert_eq!(dst, [3, 4]);
2270 /// Rust enforces that there can only be one mutable reference with no
2271 /// immutable references to a particular piece of data in a particular
2272 /// scope. Because of this, attempting to use `copy_from_slice` on a
2273 /// single slice will result in a compile failure:
2276 /// let mut slice = [1, 2, 3, 4, 5];
2278 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
2281 /// To work around this, we can use [`split_at_mut`] to create two distinct
2282 /// sub-slices from a slice:
2285 /// let mut slice = [1, 2, 3, 4, 5];
2288 /// let (left, right) = slice.split_at_mut(2);
2289 /// left.copy_from_slice(&right[1..]);
2292 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2295 /// [`clone_from_slice`]: #method.clone_from_slice
2296 /// [`split_at_mut`]: #method.split_at_mut
2297 #[stable(feature = "copy_from_slice", since = "1.9.0")]
2298 pub fn copy_from_slice(&mut self, src: &[T])
2302 assert_eq!(self.len(), src.len(), "destination and source slices have different lengths");
// SAFETY: the lengths are equal (asserted just above), and `self` is held by
// `&mut` while `src` is a shared `&[T]`, so the two regions cannot overlap.
2304 ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len());
2308 /// Copies elements from one part of the slice to another part of itself,
2309 /// using a memmove.
2311 /// `src` is the range within `self` to copy from. `dest` is the starting
2312 /// index of the range within `self` to copy to, which will have the same
2313 /// length as `src`. The two ranges may overlap. The ends of the two ranges
2314 /// must be less than or equal to `self.len()`.
2318 /// This function will panic if either range exceeds the end of the slice,
2319 /// or if the end of `src` is before the start.
2323 /// Copying four bytes within a slice:
2326 /// let mut bytes = *b"Hello, World!";
2328 /// bytes.copy_within(1..5, 8);
2330 /// assert_eq!(&bytes, b"Hello, Wello!");
2332 #[stable(feature = "copy_within", since = "1.37.0")]
2334 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
2338 let src_start = match src.start_bound() {
2339 ops::Bound::Included(&n) => n,
2340 ops::Bound::Excluded(&n) => {
// Excluded start bound: first index copied is `n + 1`; overflow means the
// range cannot be represented, so fail the same way slice indexing does.
2341 n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
2343 ops::Bound::Unbounded => 0,
2345 let src_end = match src.end_bound() {
2346 ops::Bound::Included(&n) => {
2347 n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
2349 ops::Bound::Excluded(&n) => n,
2350 ops::Bound::Unbounded => self.len(),
2352 assert!(src_start <= src_end, "src end is before src start");
2353 assert!(src_end <= self.len(), "src is out of bounds");
2354 let count = src_end - src_start;
2355 assert!(dest <= self.len() - count, "dest is out of bounds");
// SAFETY: both `src_start + count` and `dest + count` are within bounds
// (asserted just above); `ptr::copy` (memmove) permits overlapping ranges.
2357 ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
2361 /// Swaps all elements in `self` with those in `other`.
2363 /// The length of `other` must be the same as `self`.
2367 /// This function will panic if the two slices have different lengths.
2371 /// Swapping two elements across slices:
2374 /// let mut slice1 = [0, 0];
2375 /// let mut slice2 = [1, 2, 3, 4];
2377 /// slice1.swap_with_slice(&mut slice2[2..]);
2379 /// assert_eq!(slice1, [3, 4]);
2380 /// assert_eq!(slice2, [1, 2, 0, 0]);
2383 /// Rust enforces that there can only be one mutable reference to a
2384 /// particular piece of data in a particular scope. Because of this,
2385 /// attempting to use `swap_with_slice` on a single slice will result in
2386 /// a compile failure:
2389 /// let mut slice = [1, 2, 3, 4, 5];
2390 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2393 /// To work around this, we can use [`split_at_mut`] to create two distinct
2394 /// mutable sub-slices from a slice:
2397 /// let mut slice = [1, 2, 3, 4, 5];
2400 /// let (left, right) = slice.split_at_mut(2);
2401 /// left.swap_with_slice(&mut right[1..]);
2404 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2407 /// [`split_at_mut`]: #method.split_at_mut
2408 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2409 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2410 assert!(self.len() == other.len(), "destination and source slices have different lengths");
// SAFETY: lengths are equal (asserted just above), and `self` and `other`
// are two distinct `&mut` slices, so the regions cannot overlap.
2412 ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2416 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2417 fn align_to_offsets<U>(&self) -> (usize, usize) {
2418 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2419 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2421 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2422 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2423 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2425 // Formula to calculate this is:
2427 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2428 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2430 // Expanded and simplified:
2432 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2433 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2435 // Luckily since all this is constant-evaluated... performance here matters not!
2437 fn gcd(a: usize, b: usize) -> usize {
2438 use crate::intrinsics;
2439 // iterative stein’s algorithm
2440 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2441 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2442 let (ctz_a, mut ctz_b) = unsafe {
2449 (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
2451 let k = ctz_a.min(ctz_b);
2452 let mut a = a >> ctz_a;
2455 // remove all factors of 2 from b
2458 mem::swap(&mut a, &mut b);
2465 ctz_b = intrinsics::cttz_nonzero(b);
2470 let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
2471 let ts: usize = mem::size_of::<U>() / gcd;
2472 let us: usize = mem::size_of::<T>() / gcd;
2474 // Armed with this knowledge, we can find how many `U`s we can fit!
2475 let us_len = self.len() / ts * us;
2476 // And how many `T`s will be in the trailing slice!
2477 let ts_len = self.len() % ts;
2481 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2484 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2485 /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
2486 /// length possible for a given type and input slice, but only your algorithm's performance
2487 /// should depend on that, not its correctness. It is permissible for all of the input data to
2488 /// be returned as the prefix or suffix slice.
2490 /// This method has no purpose when either input element `T` or output element `U` are
2491 /// zero-sized and will return the original slice without splitting anything.
2495 /// This method is essentially a `transmute` with respect to the elements in the returned
2496 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2504 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2505 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2506 /// // less_efficient_algorithm_for_bytes(prefix);
2507 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2508 /// // less_efficient_algorithm_for_bytes(suffix);
2511 #[stable(feature = "slice_align_to", since = "1.30.0")]
2512 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2513 // Note that most of this function will be constant-evaluated,
2514 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2515 // handle ZSTs specially, which is – don't handle them at all.
2516 return (self, &[], &[]);
2519 // First, find at what point do we split between the first and 2nd slice. Easy with
2520 // ptr.align_offset.
2521 let ptr = self.as_ptr();
2522 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
2523 if offset > self.len() {
2526 let (left, rest) = self.split_at(offset);
2527 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2528 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2531 from_raw_parts(rest.as_ptr() as *const U, us_len),
2532 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len),
2537 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2540 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2541 /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
2542 /// length possible for a given type and input slice, but only your algorithm's performance
2543 /// should depend on that, not its correctness. It is permissible for all of the input data to
2544 /// be returned as the prefix or suffix slice.
2546 /// This method has no purpose when either input element `T` or output element `U` are
2547 /// zero-sized and will return the original slice without splitting anything.
2551 /// This method is essentially a `transmute` with respect to the elements in the returned
2552 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2560 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2561 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2562 /// // less_efficient_algorithm_for_bytes(prefix);
2563 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2564 /// // less_efficient_algorithm_for_bytes(suffix);
2567 #[stable(feature = "slice_align_to", since = "1.30.0")]
2568 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2569 // Note that most of this function will be constant-evaluated,
2570 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2571 // handle ZSTs specially, which is – don't handle them at all.
2572 return (self, &mut [], &mut []);
2575 // First, find at what point do we split between the first and 2nd slice. Easy with
2576 // ptr.align_offset.
2577 let ptr = self.as_ptr();
2578 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
2579 if offset > self.len() {
2580 (self, &mut [], &mut [])
2582 let (left, rest) = self.split_at_mut(offset);
2583 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2584 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2585 let rest_len = rest.len();
2586 let mut_ptr = rest.as_mut_ptr();
2587 // We can't use `rest` again after this, that would invalidate its alias `mut_ptr`!
2590 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2591 from_raw_parts_mut(mut_ptr.add(rest_len - ts_len), ts_len),
2596 /// Checks if the elements of this slice are sorted.
2598 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2599 /// slice yields exactly zero or one element, `true` is returned.
2601 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2602 /// implies that this function returns `false` if any two consecutive items are not
2608 /// #![feature(is_sorted)]
2609 /// let empty: [i32; 0] = [];
2611 /// assert!([1, 2, 2, 9].is_sorted());
2612 /// assert!(![1, 3, 2, 4].is_sorted());
2613 /// assert!([0].is_sorted());
2614 /// assert!(empty.is_sorted());
2615 /// assert!(![0.0, 1.0, f32::NAN].is_sorted());
2618 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2619 pub fn is_sorted(&self) -> bool
// `partial_cmp` returns `None` for incomparable pairs (e.g. anything vs. NaN),
// which `is_sorted_by` treats as "not sorted" — matching the NAN example above.
2623 self.is_sorted_by(|a, b| a.partial_cmp(b))
2626 /// Checks if the elements of this slice are sorted using the given comparator function.
2628 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2629 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2630 /// [`is_sorted`]; see its documentation for more information.
2632 /// [`is_sorted`]: #method.is_sorted
2633 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2634 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2636 F: FnMut(&T, &T) -> Option<Ordering>,
2638 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2641 /// Checks if the elements of this slice are sorted using the given key extraction function.
2643 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2644 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2645 /// documentation for more information.
2647 /// [`is_sorted`]: #method.is_sorted
2652 /// #![feature(is_sorted)]
2654 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2655 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2658 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2659 pub fn is_sorted_by_key<F, K>(&self, f: F) -> bool
2664 self.iter().is_sorted_by_key(f)
2667 /// Returns the index of the partition point according to the given predicate
2668 // (the index of the first element of the second partition).
2670 /// The slice is assumed to be partitioned according to the given predicate.
2671 /// This means that all elements for which the predicate returns true are at the start of the slice
2672 /// and all elements for which the predicate returns false are at the end.
2673 /// For example, [7, 15, 3, 5, 4, 12, 6] is a partitioned under the predicate x % 2 != 0
2674 /// (all odd numbers are at the start, all even at the end).
2676 /// If this slice is not partitioned, the returned result is unspecified and meaningless,
2677 /// as this method performs a kind of binary search.
2682 /// #![feature(partition_point)]
2684 /// let v = [1, 2, 3, 3, 5, 6, 7];
2685 /// let i = v.partition_point(|&x| x < 5);
2687 /// assert_eq!(i, 4);
2688 /// assert!(v[..i].iter().all(|&x| x < 5));
2689 /// assert!(v[i..].iter().all(|&x| !(x < 5)));
2691 #[unstable(feature = "partition_point", reason = "new API", issue = "99999")]
2692 pub fn partition_point<P>(&self, mut pred: P) -> usize
2694 P: FnMut(&T) -> bool,
2697 let mut right = self.len();
2699 while left != right {
2700 let mid = left + (right - left) / 2;
2702 // When left < right, left <= mid < right.
2703 // Therefore left always increases and right always decreases,
2704 // and eigher of them is selected.
2705 // In both cases left <= right is satisfied.
2706 // Therefore if left < right in a step,
2707 // left <= right is satisfied in the next step.
2708 // Therefore as long as left != right, 0 <= left < right <= len is satisfied
2709 // and if this case 0 <= mid < len is satisfied too.
2710 let value = unsafe { self.get_unchecked(mid) };
2722 #[lang = "slice_u8"]
2725 /// Checks if all bytes in this slice are within the ASCII range.
2726 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2728 pub fn is_ascii(&self) -> bool {
2729 self.iter().all(|b| b.is_ascii())
2732 /// Checks that two slices are an ASCII case-insensitive match.
2734 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2735 /// but without allocating and copying temporaries.
2736 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2738 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
2739 self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a.eq_ignore_ascii_case(b))
2742 /// Converts this slice to its ASCII upper case equivalent in-place.
2744 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2745 /// but non-ASCII letters are unchanged.
2747 /// To return a new uppercased value without modifying the existing one, use
2748 /// [`to_ascii_uppercase`].
2750 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2751 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2753 pub fn make_ascii_uppercase(&mut self) {
2755 byte.make_ascii_uppercase();
2759 /// Converts this slice to its ASCII lower case equivalent in-place.
2761 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2762 /// but non-ASCII letters are unchanged.
2764 /// To return a new lowercased value without modifying the existing one, use
2765 /// [`to_ascii_lowercase`].
2767 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2768 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2770 pub fn make_ascii_lowercase(&mut self) {
2772 byte.make_ascii_lowercase();
2777 #[stable(feature = "rust1", since = "1.0.0")]
2778 impl<T, I> ops::Index<I> for [T]
2782 type Output = I::Output;
2785 fn index(&self, index: I) -> &I::Output {
2790 #[stable(feature = "rust1", since = "1.0.0")]
2791 impl<T, I> ops::IndexMut<I> for [T]
2796 fn index_mut(&mut self, index: I) -> &mut I::Output {
2797 index.index_mut(self)
2804 fn slice_index_len_fail(index: usize, len: usize) -> ! {
2805 panic!("index {} out of range for slice of length {}", index, len);
2811 fn slice_index_order_fail(index: usize, end: usize) -> ! {
2812 panic!("slice index starts at {} but ends at {}", index, end);
2818 fn slice_index_overflow_fail() -> ! {
2819 panic!("attempted to index slice up to maximum usize");
2822 mod private_slice_index {
2824 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2827 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2828 impl Sealed for usize {}
2829 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2830 impl Sealed for ops::Range<usize> {}
2831 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2832 impl Sealed for ops::RangeTo<usize> {}
2833 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2834 impl Sealed for ops::RangeFrom<usize> {}
2835 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2836 impl Sealed for ops::RangeFull {}
2837 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2838 impl Sealed for ops::RangeInclusive<usize> {}
2839 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2840 impl Sealed for ops::RangeToInclusive<usize> {}
2843 /// A helper trait used for indexing operations.
2844 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2845 #[rustc_on_unimplemented(
2846 on(T = "str", label = "string indices are ranges of `usize`",),
2848 all(any(T = "str", T = "&str", T = "std::string::String"), _Self = "{integer}"),
2849 note = "you can use `.chars().nth()` or `.bytes().nth()`
2850 see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
2852 message = "the type `{T}` cannot be indexed by `{Self}`",
2853 label = "slice indices are of type `usize` or ranges of `usize`"
2855 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2856 /// The output type returned by methods.
2857 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2858 type Output: ?Sized;
2860 /// Returns a shared reference to the output at this location, if in
2862 #[unstable(feature = "slice_index_methods", issue = "none")]
2863 fn get(self, slice: &T) -> Option<&Self::Output>;
2865 /// Returns a mutable reference to the output at this location, if in
2867 #[unstable(feature = "slice_index_methods", issue = "none")]
2868 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2870 /// Returns a shared reference to the output at this location, without
2871 /// performing any bounds checking.
2872 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
2873 /// even if the resulting reference is not used.
2874 /// [undefined behavior]: ../../reference/behavior-considered-undefined.html
2875 #[unstable(feature = "slice_index_methods", issue = "none")]
2876 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2878 /// Returns a mutable reference to the output at this location, without
2879 /// performing any bounds checking.
2880 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
2881 /// even if the resulting reference is not used.
2882 /// [undefined behavior]: ../../reference/behavior-considered-undefined.html
2883 #[unstable(feature = "slice_index_methods", issue = "none")]
2884 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2886 /// Returns a shared reference to the output at this location, panicking
2887 /// if out of bounds.
2888 #[unstable(feature = "slice_index_methods", issue = "none")]
2890 fn index(self, slice: &T) -> &Self::Output;
2892 /// Returns a mutable reference to the output at this location, panicking
2893 /// if out of bounds.
2894 #[unstable(feature = "slice_index_methods", issue = "none")]
2896 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
2899 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2900 impl<T> SliceIndex<[T]> for usize {
2904 fn get(self, slice: &[T]) -> Option<&T> {
2905 if self < slice.len() { unsafe { Some(self.get_unchecked(slice)) } } else { None }
2909 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2910 if self < slice.len() { unsafe { Some(self.get_unchecked_mut(slice)) } } else { None }
2914 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2915 &*slice.as_ptr().add(self)
2919 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2920 &mut *slice.as_mut_ptr().add(self)
2924 fn index(self, slice: &[T]) -> &T {
2925 // N.B., use intrinsic indexing
2930 fn index_mut(self, slice: &mut [T]) -> &mut T {
2931 // N.B., use intrinsic indexing
2936 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2937 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2941 fn get(self, slice: &[T]) -> Option<&[T]> {
2942 if self.start > self.end || self.end > slice.len() {
2945 unsafe { Some(self.get_unchecked(slice)) }
2950 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2951 if self.start > self.end || self.end > slice.len() {
2954 unsafe { Some(self.get_unchecked_mut(slice)) }
2959 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2960 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2964 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2965 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
2969 fn index(self, slice: &[T]) -> &[T] {
2970 if self.start > self.end {
2971 slice_index_order_fail(self.start, self.end);
2972 } else if self.end > slice.len() {
2973 slice_index_len_fail(self.end, slice.len());
2975 unsafe { self.get_unchecked(slice) }
2979 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2980 if self.start > self.end {
2981 slice_index_order_fail(self.start, self.end);
2982 } else if self.end > slice.len() {
2983 slice_index_len_fail(self.end, slice.len());
2985 unsafe { self.get_unchecked_mut(slice) }
2989 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2990 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2994 fn get(self, slice: &[T]) -> Option<&[T]> {
2995 (0..self.end).get(slice)
2999 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3000 (0..self.end).get_mut(slice)
3004 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
3005 (0..self.end).get_unchecked(slice)
3009 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
3010 (0..self.end).get_unchecked_mut(slice)
3014 fn index(self, slice: &[T]) -> &[T] {
3015 (0..self.end).index(slice)
3019 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3020 (0..self.end).index_mut(slice)
3024 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
3025 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
3029 fn get(self, slice: &[T]) -> Option<&[T]> {
3030 (self.start..slice.len()).get(slice)
3034 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3035 (self.start..slice.len()).get_mut(slice)
3039 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
3040 (self.start..slice.len()).get_unchecked(slice)
3044 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
3045 (self.start..slice.len()).get_unchecked_mut(slice)
3049 fn index(self, slice: &[T]) -> &[T] {
3050 (self.start..slice.len()).index(slice)
3054 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3055 (self.start..slice.len()).index_mut(slice)
3059 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
3060 impl<T> SliceIndex<[T]> for ops::RangeFull {
3064 fn get(self, slice: &[T]) -> Option<&[T]> {
3069 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3074 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
3079 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
3084 fn index(self, slice: &[T]) -> &[T] {
3089 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3094 #[stable(feature = "inclusive_range", since = "1.26.0")]
3095 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
3099 fn get(self, slice: &[T]) -> Option<&[T]> {
3100 if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) }
3104 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3105 if *self.end() == usize::MAX {
3108 (*self.start()..self.end() + 1).get_mut(slice)
3113 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
3114 (*self.start()..self.end() + 1).get_unchecked(slice)
3118 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
3119 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
3123 fn index(self, slice: &[T]) -> &[T] {
3124 if *self.end() == usize::MAX {
3125 slice_index_overflow_fail();
3127 (*self.start()..self.end() + 1).index(slice)
3131 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3132 if *self.end() == usize::MAX {
3133 slice_index_overflow_fail();
3135 (*self.start()..self.end() + 1).index_mut(slice)
3139 #[stable(feature = "inclusive_range", since = "1.26.0")]
3140 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
3144 fn get(self, slice: &[T]) -> Option<&[T]> {
3145 (0..=self.end).get(slice)
3149 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3150 (0..=self.end).get_mut(slice)
3154 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
3155 (0..=self.end).get_unchecked(slice)
3159 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
3160 (0..=self.end).get_unchecked_mut(slice)
3164 fn index(self, slice: &[T]) -> &[T] {
3165 (0..=self.end).index(slice)
3169 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3170 (0..=self.end).index_mut(slice)
3174 ////////////////////////////////////////////////////////////////////////////////
3176 ////////////////////////////////////////////////////////////////////////////////
3178 #[stable(feature = "rust1", since = "1.0.0")]
3179 impl<T> Default for &[T] {
3180 /// Creates an empty slice.
3181 fn default() -> Self {
3186 #[stable(feature = "mut_slice_default", since = "1.5.0")]
3187 impl<T> Default for &mut [T] {
3188 /// Creates a mutable empty slice.
3189 fn default() -> Self {
3198 #[stable(feature = "rust1", since = "1.0.0")]
3199 impl<'a, T> IntoIterator for &'a [T] {
3201 type IntoIter = Iter<'a, T>;
3203 fn into_iter(self) -> Iter<'a, T> {
3208 #[stable(feature = "rust1", since = "1.0.0")]
3209 impl<'a, T> IntoIterator for &'a mut [T] {
3210 type Item = &'a mut T;
3211 type IntoIter = IterMut<'a, T>;
3213 fn into_iter(self) -> IterMut<'a, T> {
3218 // Macro helper functions
3220 fn size_from_ptr<T>(_: *const T) -> usize {
3224 // Inlining is_empty and len makes a huge performance difference
3225 macro_rules! is_empty {
3226 // The way we encode the length of a ZST iterator, this works both for ZST
3229 $self.ptr.as_ptr() as *const T == $self.end
3233 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
3234 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
3236 ($self: ident) => {{
3237 #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
3239 let start = $self.ptr;
3240 let size = size_from_ptr(start.as_ptr());
3242 // This _cannot_ use `unchecked_sub` because we depend on wrapping
3243 // to represent the length of long ZST slice iterators.
3244 ($self.end as usize).wrapping_sub(start.as_ptr() as usize)
3246 // We know that `start <= end`, so can do better than `offset_from`,
3247 // which needs to deal in signed. By setting appropriate flags here
3248 // we can tell LLVM this, which helps it remove bounds checks.
3249 // SAFETY: By the type invariant, `start <= end`
3250 let diff = unsafe { unchecked_sub($self.end as usize, start.as_ptr() as usize) };
3251 // By also telling LLVM that the pointers are apart by an exact
3252 // multiple of the type size, it can optimize `len() == 0` down to
3253 // `start == end` instead of `(end - start) < size`.
3254 // SAFETY: By the type invariant, the pointers are aligned so the
3255 // distance between them must be a multiple of pointee size
3256 unsafe { exact_div(diff, size) }
3261 // The shared definition of the `Iter` and `IterMut` iterators
3262 macro_rules! iterator {
3264 struct $name:ident -> $ptr:ty,
3270 // Returns the first element and moves the start of the iterator forwards by 1.
3271 // Greatly improves performance compared to an inlined function. The iterator
3272 // must not be empty.
3273 macro_rules! next_unchecked {
3274 ($self: ident) => {& $( $mut_ )* *$self.post_inc_start(1)}
3277 // Returns the last element and moves the end of the iterator backwards by 1.
3278 // Greatly improves performance compared to an inlined function. The iterator
3279 // must not be empty.
3280 macro_rules! next_back_unchecked {
3281 ($self: ident) => {& $( $mut_ )* *$self.pre_dec_end(1)}
3284 // Shrinks the iterator when T is a ZST, by moving the end of the iterator
3285 // backwards by `n`. `n` must not exceed `self.len()`.
3286 macro_rules! zst_shrink {
3287 ($self: ident, $n: ident) => {
3288 $self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T;
3292 impl<'a, T> $name<'a, T> {
3293 // Helper function for creating a slice from the iterator.
3295 fn make_slice(&self) -> &'a [T] {
3296 unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) }
3299 // Helper function for moving the start of the iterator forwards by `offset` elements,
3300 // returning the old start.
3301 // Unsafe because the offset must not exceed `self.len()`.
3303 unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
3304 if mem::size_of::<T>() == 0 {
3305 zst_shrink!(self, offset);
3308 let old = self.ptr.as_ptr();
3309 self.ptr = NonNull::new_unchecked(self.ptr.as_ptr().offset(offset));
3314 // Helper function for moving the end of the iterator backwards by `offset` elements,
3315 // returning the new end.
3316 // Unsafe because the offset must not exceed `self.len()`.
3318 unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
3319 if mem::size_of::<T>() == 0 {
3320 zst_shrink!(self, offset);
3323 self.end = self.end.offset(-offset);
3329 #[stable(feature = "rust1", since = "1.0.0")]
3330 impl<T> ExactSizeIterator for $name<'_, T> {
3332 fn len(&self) -> usize {
3337 fn is_empty(&self) -> bool {
3342 #[stable(feature = "rust1", since = "1.0.0")]
3343 impl<'a, T> Iterator for $name<'a, T> {
3347 fn next(&mut self) -> Option<$elem> {
3348 // could be implemented with slices, but this avoids bounds checks
3350 assume(!self.ptr.as_ptr().is_null());
3351 if mem::size_of::<T>() != 0 {
3352 assume(!self.end.is_null());
3354 if is_empty!(self) {
3357 Some(next_unchecked!(self))
3363 fn size_hint(&self) -> (usize, Option<usize>) {
3364 let exact = len!(self);
3365 (exact, Some(exact))
3369 fn count(self) -> usize {
3374 fn nth(&mut self, n: usize) -> Option<$elem> {
3375 if n >= len!(self) {
3376 // This iterator is now empty.
3377 if mem::size_of::<T>() == 0 {
3378 // We have to do it this way as `ptr` may never be 0, but `end`
3379 // could be (due to wrapping).
3380 self.end = self.ptr.as_ptr();
3383 // End can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
3384 self.ptr = NonNull::new_unchecked(self.end as *mut T);
3389 // We are in bounds. `post_inc_start` does the right thing even for ZSTs.
3391 self.post_inc_start(n as isize);
3392 Some(next_unchecked!(self))
3397 fn last(mut self) -> Option<$elem> {
3401 // We override the default implementation, which uses `try_fold`,
3402 // because this simple implementation generates less LLVM IR and is
3403 // faster to compile.
3405 fn for_each<F>(mut self, mut f: F)
3408 F: FnMut(Self::Item),
3410 while let Some(x) = self.next() {
3415 // We override the default implementation, which uses `try_fold`,
3416 // because this simple implementation generates less LLVM IR and is
3417 // faster to compile.
3419 fn all<F>(&mut self, mut f: F) -> bool
3422 F: FnMut(Self::Item) -> bool,
3424 while let Some(x) = self.next() {
3432 // We override the default implementation, which uses `try_fold`,
3433 // because this simple implementation generates less LLVM IR and is
3434 // faster to compile.
3436 fn any<F>(&mut self, mut f: F) -> bool
3439 F: FnMut(Self::Item) -> bool,
3441 while let Some(x) = self.next() {
3449 // We override the default implementation, which uses `try_fold`,
3450 // because this simple implementation generates less LLVM IR and is
3451 // faster to compile.
3453 fn find<P>(&mut self, mut predicate: P) -> Option<Self::Item>
3456 P: FnMut(&Self::Item) -> bool,
3458 while let Some(x) = self.next() {
3466 // We override the default implementation, which uses `try_fold`,
3467 // because this simple implementation generates less LLVM IR and is
3468 // faster to compile.
3470 fn find_map<B, F>(&mut self, mut f: F) -> Option<B>
3473 F: FnMut(Self::Item) -> Option<B>,
3475 while let Some(x) = self.next() {
3476 if let Some(y) = f(x) {
3483 // We override the default implementation, which uses `try_fold`,
3484 // because this simple implementation generates less LLVM IR and is
3485 // faster to compile. Also, the `assume` avoids a bounds check.
3487 #[rustc_inherit_overflow_checks]
3488 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
3490 P: FnMut(Self::Item) -> bool,
3494 while let Some(x) = self.next() {
3496 unsafe { assume(i < n) };
3504 // We override the default implementation, which uses `try_fold`,
3505 // because this simple implementation generates less LLVM IR and is
3506 // faster to compile. Also, the `assume` avoids a bounds check.
3508 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
3509 P: FnMut(Self::Item) -> bool,
3510 Self: Sized + ExactSizeIterator + DoubleEndedIterator
3514 while let Some(x) = self.next_back() {
3517 unsafe { assume(i < n) };
3527 #[stable(feature = "rust1", since = "1.0.0")]
3528 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
3530 fn next_back(&mut self) -> Option<$elem> {
3531 // could be implemented with slices, but this avoids bounds checks
3533 assume(!self.ptr.as_ptr().is_null());
3534 if mem::size_of::<T>() != 0 {
3535 assume(!self.end.is_null());
3537 if is_empty!(self) {
3540 Some(next_back_unchecked!(self))
3546 fn nth_back(&mut self, n: usize) -> Option<$elem> {
3547 if n >= len!(self) {
3548 // This iterator is now empty.
3549 self.end = self.ptr.as_ptr();
3552 // We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
3554 self.pre_dec_end(n as isize);
3555 Some(next_back_unchecked!(self))
3560 #[stable(feature = "fused", since = "1.26.0")]
3561 impl<T> FusedIterator for $name<'_, T> {}
3563 #[unstable(feature = "trusted_len", issue = "37572")]
3564 unsafe impl<T> TrustedLen for $name<'_, T> {}
3568 /// Immutable slice iterator
3570 /// This struct is created by the [`iter`] method on [slices].
3577 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3578 /// let slice = &[1, 2, 3];
3580 /// // Then, we iterate over it:
3581 /// for element in slice.iter() {
3582 /// println!("{}", element);
3586 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3587 /// [slices]: ../../std/primitive.slice.html
3588 #[stable(feature = "rust1", since = "1.0.0")]
3589 pub struct Iter<'a, T: 'a> {
// NOTE(review): the full source also has a `ptr: NonNull<T>` cursor field ahead
// of `end` — elided in this excerpt (the `Clone` impl below copies `self.ptr`).
3591 end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3592 // ptr == end is a quick test for the Iterator being empty, that works
3593 // for both ZST and non-ZST.
3594 _marker: marker::PhantomData<&'a T>,
3597 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3598 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
3599 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Debug prints the not-yet-consumed remainder of the slice.
3600 f.debug_tuple("Iter").field(&self.as_slice()).finish()
// `Iter` only ever hands out `&T` (see the `iterator!` invocation below), so
// both sharing and sending it across threads amount to sharing `&T`: each
// therefore requires `T: Sync`.
3604 #[stable(feature = "rust1", since = "1.0.0")]
3605 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
3606 #[stable(feature = "rust1", since = "1.0.0")]
3607 unsafe impl<T: Sync> Send for Iter<'_, T> {}
3609 impl<'a, T> Iter<'a, T> {
3610 /// Views the underlying data as a subslice of the original data.
3612 /// This has the same lifetime as the original slice, and so the
3613 /// iterator can continue to be used while this exists.
3620 /// // First, we declare a type which has the `iter` method to get the `Iter`
3621 /// // struct (&[usize here]):
3622 /// let slice = &[1, 2, 3];
3624 /// // Then, we get the iterator:
3625 /// let mut iter = slice.iter();
3626 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
3627 /// println!("{:?}", iter.as_slice());
3629 /// // Next, we move to the second element of the slice:
3631 /// // Now `as_slice` returns "[2, 3]":
3632 /// println!("{:?}", iter.as_slice());
3634 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3635 pub fn as_slice(&self) -> &'a [T] {
// The `iterator!` macro expands the shared Iterator/DoubleEndedIterator
// machinery for `Iter`; the trailing block adds `Iter`-only methods.
3640 iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
3641 fn is_sorted_by<F>(self, mut compare: F) -> bool
3644 F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
3646 self.as_slice().windows(2).all(|w| {
// Incomparable neighbours (`compare` returns `None`) count as unsorted.
3647 compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
// Manual `Clone` so `T: Clone` is not required (cloning an `Iter` only
// copies the two raw pointers, never the elements).
3652 #[stable(feature = "rust1", since = "1.0.0")]
3653 impl<T> Clone for Iter<'_, T> {
3654 fn clone(&self) -> Self {
3655 Iter { ptr: self.ptr, end: self.end, _marker: self._marker }
3659 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
3660 impl<T> AsRef<[T]> for Iter<'_, T> {
3661 fn as_ref(&self) -> &[T] {
3666 /// Mutable slice iterator.
3668 /// This struct is created by the [`iter_mut`] method on [slices].
3675 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3676 /// // struct (&[usize here]):
3677 /// let mut slice = &mut [1, 2, 3];
3679 /// // Then, we iterate over it and increment each element value:
3680 /// for element in slice.iter_mut() {
3684 /// // We now have "[2, 3, 4]":
3685 /// println!("{:?}", slice);
3688 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
3689 /// [slices]: ../../std/primitive.slice.html
3690 #[stable(feature = "rust1", since = "1.0.0")]
3691 pub struct IterMut<'a, T: 'a> {
// NOTE(review): a `ptr: NonNull<T>` cursor field precedes `end` in the full
// source — elided in this excerpt (`into_slice` below reads `self.ptr`).
3693 end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3694 // ptr == end is a quick test for the Iterator being empty, that works
3695 // for both ZST and non-ZST.
3696 _marker: marker::PhantomData<&'a mut T>,
3699 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3700 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
3701 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// `make_slice` (from the `iterator!` expansion) yields a shared view of the
// remaining elements without consuming the iterator.
3702 f.debug_tuple("IterMut").field(&self.make_slice()).finish()
// `IterMut` yields `&mut T`, so sharing it shares `&mut T` (needs `T: Sync`)
// and sending it sends `&mut T` (needs `T: Send`).
3706 #[stable(feature = "rust1", since = "1.0.0")]
3707 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
3708 #[stable(feature = "rust1", since = "1.0.0")]
3709 unsafe impl<T: Send> Send for IterMut<'_, T> {}
3711 impl<'a, T> IterMut<'a, T> {
3712 /// Views the underlying data as a subslice of the original data.
3714 /// To avoid creating `&mut` references that alias, this is forced
3715 /// to consume the iterator.
3722 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3723 /// // struct (&[usize here]):
3724 /// let mut slice = &mut [1, 2, 3];
3727 /// // Then, we get the iterator:
3728 /// let mut iter = slice.iter_mut();
3729 /// // We move to next element:
3731 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
3732 /// println!("{:?}", iter.into_slice());
3735 /// // Now let's modify a value of the slice:
3737 /// // First we get back the iterator:
3738 /// let mut iter = slice.iter_mut();
3739 /// // We change the value of the first element of the slice returned by the `next` method:
3740 /// *iter.next().unwrap() += 1;
3742 /// // Now slice is "[2, 2, 3]":
3743 /// println!("{:?}", slice);
3745 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3746 pub fn into_slice(self) -> &'a mut [T] {
3747 unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) }
3750 /// Views the underlying data as a subslice of the original data.
3752 /// To avoid creating `&mut [T]` references that alias, the returned slice
3753 /// borrows its lifetime from the iterator the method is applied on.
3760 /// # #![feature(slice_iter_mut_as_slice)]
3761 /// let mut slice: &mut [usize] = &mut [1, 2, 3];
3763 /// // First, we get the iterator:
3764 /// let mut iter = slice.iter_mut();
3765 /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
3766 /// assert_eq!(iter.as_slice(), &[1, 2, 3]);
3768 /// // Next, we move to the second element of the slice:
3770 /// // Now `as_slice` returns "[2, 3]":
3771 /// assert_eq!(iter.as_slice(), &[2, 3]);
3773 #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
3774 pub fn as_slice(&self) -> &[T] {
// Expand the shared iterator machinery for `IterMut`; no extra methods here.
3779 iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
3781 /// An internal abstraction over the splitting iterators, so that
3782 /// splitn, splitn_mut etc can be implemented once.
3784 trait SplitIter: DoubleEndedIterator {
3785 /// Marks the underlying iterator as complete, extracting the remaining
3786 /// portion of the slice.
3787 fn finish(&mut self) -> Option<Self::Item>;
3790 /// An iterator over subslices separated by elements that match a predicate
3793 /// This struct is created by the [`split`] method on [slices].
3795 /// [`split`]: ../../std/primitive.slice.html#method.split
3796 /// [slices]: ../../std/primitive.slice.html
3797 #[stable(feature = "rust1", since = "1.0.0")]
3798 pub struct Split<'a, T: 'a, P>
3800 P: FnMut(&T) -> bool,
3807 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3808 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P>
3810 P: FnMut(&T) -> bool,
3812 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// `pred` is deliberately omitted: closures are not `Debug`.
3813 f.debug_struct("Split").field("v", &self.v).field("finished", &self.finished).finish()
3817 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3818 #[stable(feature = "rust1", since = "1.0.0")]
3819 impl<T, P> Clone for Split<'_, T, P>
3821 P: Clone + FnMut(&T) -> bool,
3823 fn clone(&self) -> Self {
3824 Split { v: self.v, pred: self.pred.clone(), finished: self.finished }
3828 #[stable(feature = "rust1", since = "1.0.0")]
3829 impl<'a, T, P> Iterator for Split<'a, T, P>
3831 P: FnMut(&T) -> bool,
3833 type Item = &'a [T];
3836 fn next(&mut self) -> Option<&'a [T]> {
3841 match self.v.iter().position(|x| (self.pred)(x)) {
3842 None => self.finish(),
// Yield everything before the separator; the matched element itself is
// consumed (skipped by the `idx + 1`), never yielded.
3844 let ret = Some(&self.v[..idx]);
3845 self.v = &self.v[idx + 1..];
3852 fn size_hint(&self) -> (usize, Option<usize>) {
// At least one subslice remains; at most len+1 if every element matches.
3853 if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
3857 #[stable(feature = "rust1", since = "1.0.0")]
3858 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P>
3860 P: FnMut(&T) -> bool,
3863 fn next_back(&mut self) -> Option<&'a [T]> {
// Mirror of `next`: search from the back, yield the tail after the match.
3868 match self.v.iter().rposition(|x| (self.pred)(x)) {
3869 None => self.finish(),
3871 let ret = Some(&self.v[idx + 1..]);
3872 self.v = &self.v[..idx];
3879 impl<'a, T, P> SplitIter for Split<'a, T, P>
3881 P: FnMut(&T) -> bool,
3884 fn finish(&mut self) -> Option<&'a [T]> {
3888 self.finished = true;
3894 #[stable(feature = "fused", since = "1.26.0")]
3895 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
3897 /// An iterator over subslices separated by elements that match a predicate
3898 /// function. Unlike `Split`, it contains the matched part as a terminator
3899 /// of the subslice.
3901 /// This struct is created by the [`split_inclusive`] method on [slices].
3903 /// [`split_inclusive`]: ../../std/primitive.slice.html#method.split_inclusive
3904 /// [slices]: ../../std/primitive.slice.html
3905 #[unstable(feature = "split_inclusive", issue = "72360")]
3906 pub struct SplitInclusive<'a, T: 'a, P>
3908 P: FnMut(&T) -> bool,
3915 #[unstable(feature = "split_inclusive", issue = "72360")]
3916 impl<T: fmt::Debug, P> fmt::Debug for SplitInclusive<'_, T, P>
3918 P: FnMut(&T) -> bool,
3920 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// `pred` is omitted: closures are not `Debug`.
3921 f.debug_struct("SplitInclusive")
3922 .field("v", &self.v)
3923 .field("finished", &self.finished)
3928 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3929 #[unstable(feature = "split_inclusive", issue = "72360")]
3930 impl<T, P> Clone for SplitInclusive<'_, T, P>
3932 P: Clone + FnMut(&T) -> bool,
3934 fn clone(&self) -> Self {
3935 SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished }
3939 #[unstable(feature = "split_inclusive", issue = "72360")]
3940 impl<'a, T, P> Iterator for SplitInclusive<'a, T, P>
3942 P: FnMut(&T) -> bool,
3944 type Item = &'a [T];
3947 fn next(&mut self) -> Option<&'a [T]> {
// `idx` is one past the matched separator, or `len` when no separator
// remains — so the terminator stays inside the yielded slice.
3953 self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len());
3954 if idx == self.v.len() {
3955 self.finished = true;
3957 let ret = Some(&self.v[..idx]);
3958 self.v = &self.v[idx..];
3963 fn size_hint(&self) -> (usize, Option<usize>) {
3964 if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
3968 #[unstable(feature = "split_inclusive", issue = "72360")]
3969 impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P>
3971 P: FnMut(&T) -> bool,
3974 fn next_back(&mut self) -> Option<&'a [T]> {
3979 // The last index of self.v is already checked and found to match
3980 // by the last iteration, so we start searching a new match
3981 // one index to the left.
3982 let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] };
3983 let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
3985 self.finished = true;
3987 let ret = Some(&self.v[idx..]);
3988 self.v = &self.v[..idx];
3993 #[unstable(feature = "split_inclusive", issue = "72360")]
3994 impl<T, P> FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {}
3996 /// An iterator over the mutable subslices of the vector which are separated
3997 /// by elements that match `pred`.
3999 /// This struct is created by the [`split_mut`] method on [slices].
4001 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
4002 /// [slices]: ../../std/primitive.slice.html
4003 #[stable(feature = "rust1", since = "1.0.0")]
4004 pub struct SplitMut<'a, T: 'a, P>
4006 P: FnMut(&T) -> bool,
4013 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4014 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P>
4016 P: FnMut(&T) -> bool,
4018 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4019 f.debug_struct("SplitMut").field("v", &self.v).field("finished", &self.finished).finish()
4023 impl<'a, T, P> SplitIter for SplitMut<'a, T, P>
4025 P: FnMut(&T) -> bool,
4028 fn finish(&mut self) -> Option<&'a mut [T]> {
4032 self.finished = true;
// Move the remainder out, leaving an empty slice behind, so a `&'a mut`
// can be returned without aliasing `self.v`.
4033 Some(mem::replace(&mut self.v, &mut []))
4038 #[stable(feature = "rust1", since = "1.0.0")]
4039 impl<'a, T, P> Iterator for SplitMut<'a, T, P>
4041 P: FnMut(&T) -> bool,
4043 type Item = &'a mut [T];
4046 fn next(&mut self) -> Option<&'a mut [T]> {
4052 // work around borrowck limitations
4053 let pred = &mut self.pred;
4054 self.v.iter().position(|x| (*pred)(x))
4057 None => self.finish(),
// Take the slice out of `self.v`, split at the separator, yield the head;
// `tail[1..]` drops the matched element itself.
4059 let tmp = mem::replace(&mut self.v, &mut []);
4060 let (head, tail) = tmp.split_at_mut(idx);
4061 self.v = &mut tail[1..];
4068 fn size_hint(&self) -> (usize, Option<usize>) {
4072 // if the predicate doesn't match anything, we yield one slice
4073 // if it matches every element, we yield len+1 empty slices.
4074 (1, Some(self.v.len() + 1))
4079 #[stable(feature = "rust1", since = "1.0.0")]
4080 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P>
4082 P: FnMut(&T) -> bool,
4085 fn next_back(&mut self) -> Option<&'a mut [T]> {
4091 // work around borrowck limitations
4092 let pred = &mut self.pred;
4093 self.v.iter().rposition(|x| (*pred)(x))
4096 None => self.finish(),
4098 let tmp = mem::replace(&mut self.v, &mut []);
4099 let (head, tail) = tmp.split_at_mut(idx);
4101 Some(&mut tail[1..])
4107 #[stable(feature = "fused", since = "1.26.0")]
4108 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
4110 /// An iterator over the mutable subslices of the vector which are separated
4111 /// by elements that match `pred`. Unlike `SplitMut`, it contains the matched
4112 /// parts in the ends of the subslices.
4114 /// This struct is created by the [`split_inclusive_mut`] method on [slices].
4116 /// [`split_inclusive_mut`]: ../../std/primitive.slice.html#method.split_inclusive_mut
4117 /// [slices]: ../../std/primitive.slice.html
4118 #[unstable(feature = "split_inclusive", issue = "72360")]
4119 pub struct SplitInclusiveMut<'a, T: 'a, P>
4121 P: FnMut(&T) -> bool,
4128 #[unstable(feature = "split_inclusive", issue = "72360")]
4129 impl<T: fmt::Debug, P> fmt::Debug for SplitInclusiveMut<'_, T, P>
4131 P: FnMut(&T) -> bool,
4133 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4134 f.debug_struct("SplitInclusiveMut")
4135 .field("v", &self.v)
4136 .field("finished", &self.finished)
4141 #[unstable(feature = "split_inclusive", issue = "72360")]
4142 impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P>
4144 P: FnMut(&T) -> bool,
4146 type Item = &'a mut [T];
4149 fn next(&mut self) -> Option<&'a mut [T]> {
4155 // work around borrowck limitations
4156 let pred = &mut self.pred;
4157 self.v.iter().position(|x| (*pred)(x))
// One past the separator (so it stays in the yielded slice), or `len`
// when the rest of the slice has no separator.
4159 let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len());
4160 if idx == self.v.len() {
4161 self.finished = true;
4163 let tmp = mem::replace(&mut self.v, &mut []);
4164 let (head, tail) = tmp.split_at_mut(idx);
4170 fn size_hint(&self) -> (usize, Option<usize>) {
4174 // if the predicate doesn't match anything, we yield one slice
4175 // if it matches every element, we yield len+1 empty slices.
4176 (1, Some(self.v.len() + 1))
4181 #[unstable(feature = "split_inclusive", issue = "72360")]
4182 impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P>
4184 P: FnMut(&T) -> bool,
4187 fn next_back(&mut self) -> Option<&'a mut [T]> {
4192 let idx_opt = if self.v.is_empty() {
4195 // work around borrowck limitations
4196 let pred = &mut self.pred;
4198 // The last index of self.v is already checked and found to match
4199 // by the last iteration, so we start searching a new match
4200 // one index to the left.
4201 let remainder = &self.v[..(self.v.len() - 1)];
4202 remainder.iter().rposition(|x| (*pred)(x))
4204 let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0);
4206 self.finished = true;
4208 let tmp = mem::replace(&mut self.v, &mut []);
4209 let (head, tail) = tmp.split_at_mut(idx);
4215 #[unstable(feature = "split_inclusive", issue = "72360")]
4216 impl<T, P> FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {}
4218 /// An iterator over subslices separated by elements that match a predicate
4219 /// function, starting from the end of the slice.
4221 /// This struct is created by the [`rsplit`] method on [slices].
4223 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
4224 /// [slices]: ../../std/primitive.slice.html
4225 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4226 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
4227 pub struct RSplit<'a, T: 'a, P>
4229 P: FnMut(&T) -> bool,
// `RSplit` is `Split` with both iteration directions swapped.
4231 inner: Split<'a, T, P>,
4234 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4235 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P>
4237 P: FnMut(&T) -> bool,
4239 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4240 f.debug_struct("RSplit")
4241 .field("v", &self.inner.v)
4242 .field("finished", &self.inner.finished)
4247 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4248 impl<'a, T, P> Iterator for RSplit<'a, T, P>
4250 P: FnMut(&T) -> bool,
4252 type Item = &'a [T];
4255 fn next(&mut self) -> Option<&'a [T]> {
// Forward iteration delegates to the inner iterator's back end.
4256 self.inner.next_back()
4260 fn size_hint(&self) -> (usize, Option<usize>) {
4261 self.inner.size_hint()
4265 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4266 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P>
4268 P: FnMut(&T) -> bool,
4271 fn next_back(&mut self) -> Option<&'a [T]> {
4276 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4277 impl<'a, T, P> SplitIter for RSplit<'a, T, P>
4279 P: FnMut(&T) -> bool,
4282 fn finish(&mut self) -> Option<&'a [T]> {
4287 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4288 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
4290 /// An iterator over the subslices of the vector which are separated
4291 /// by elements that match `pred`, starting from the end of the slice.
4293 /// This struct is created by the [`rsplit_mut`] method on [slices].
4295 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
4296 /// [slices]: ../../std/primitive.slice.html
4297 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4298 pub struct RSplitMut<'a, T: 'a, P>
4300 P: FnMut(&T) -> bool,
// `RSplitMut` is `SplitMut` with both iteration directions swapped.
4302 inner: SplitMut<'a, T, P>,
4305 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4306 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P>
4308 P: FnMut(&T) -> bool,
4310 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4311 f.debug_struct("RSplitMut")
4312 .field("v", &self.inner.v)
4313 .field("finished", &self.inner.finished)
4318 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4319 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P>
4321 P: FnMut(&T) -> bool,
4324 fn finish(&mut self) -> Option<&'a mut [T]> {
4329 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4330 impl<'a, T, P> Iterator for RSplitMut<'a, T, P>
4332 P: FnMut(&T) -> bool,
4334 type Item = &'a mut [T];
4337 fn next(&mut self) -> Option<&'a mut [T]> {
// Forward iteration delegates to the inner iterator's back end.
4338 self.inner.next_back()
4342 fn size_hint(&self) -> (usize, Option<usize>) {
4343 self.inner.size_hint()
4347 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4348 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P>
4350 P: FnMut(&T) -> bool,
4353 fn next_back(&mut self) -> Option<&'a mut [T]> {
4358 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4359 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
4361 /// An private iterator over subslices separated by elements that
4362 /// match a predicate function, splitting at most a fixed number of
// Wraps any `SplitIter` and caps the number of subslices it may yield.
4365 struct GenericSplitN<I> {
4370 impl<T, I: SplitIter<Item = T>> Iterator for GenericSplitN<I> {
4374 fn next(&mut self) -> Option<T> {
4389 fn size_hint(&self) -> (usize, Option<usize>) {
4390 let (lower, upper_opt) = self.iter.size_hint();
// The upper bound can never exceed the number of splits still allowed.
4391 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
4395 /// An iterator over subslices separated by elements that match a predicate
4396 /// function, limited to a given number of splits.
4398 /// This struct is created by the [`splitn`] method on [slices].
4400 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
4401 /// [slices]: ../../std/primitive.slice.html
4402 #[stable(feature = "rust1", since = "1.0.0")]
4403 pub struct SplitN<'a, T: 'a, P>
4405 P: FnMut(&T) -> bool,
4407 inner: GenericSplitN<Split<'a, T, P>>,
4410 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4411 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P>
4413 P: FnMut(&T) -> bool,
4415 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4416 f.debug_struct("SplitN").field("inner", &self.inner).finish()
4420 /// An iterator over subslices separated by elements that match a
4421 /// predicate function, limited to a given number of splits, starting
4422 /// from the end of the slice.
4424 /// This struct is created by the [`rsplitn`] method on [slices].
4426 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
4427 /// [slices]: ../../std/primitive.slice.html
4428 #[stable(feature = "rust1", since = "1.0.0")]
4429 pub struct RSplitN<'a, T: 'a, P>
4431 P: FnMut(&T) -> bool,
4433 inner: GenericSplitN<RSplit<'a, T, P>>,
4436 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4437 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P>
4439 P: FnMut(&T) -> bool,
4441 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4442 f.debug_struct("RSplitN").field("inner", &self.inner).finish()
4446 /// An iterator over subslices separated by elements that match a predicate
4447 /// function, limited to a given number of splits.
4449 /// This struct is created by the [`splitn_mut`] method on [slices].
4451 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
4452 /// [slices]: ../../std/primitive.slice.html
4453 #[stable(feature = "rust1", since = "1.0.0")]
4454 pub struct SplitNMut<'a, T: 'a, P>
4456 P: FnMut(&T) -> bool,
4458 inner: GenericSplitN<SplitMut<'a, T, P>>,
4461 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4462 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P>
4464 P: FnMut(&T) -> bool,
4466 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4467 f.debug_struct("SplitNMut").field("inner", &self.inner).finish()
4471 /// An iterator over subslices separated by elements that match a
4472 /// predicate function, limited to a given number of splits, starting
4473 /// from the end of the slice.
4475 /// This struct is created by the [`rsplitn_mut`] method on [slices].
4477 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
4478 /// [slices]: ../../std/primitive.slice.html
4479 #[stable(feature = "rust1", since = "1.0.0")]
4480 pub struct RSplitNMut<'a, T: 'a, P>
4482 P: FnMut(&T) -> bool,
4484 inner: GenericSplitN<RSplitMut<'a, T, P>>,
4487 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4488 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P>
4490 P: FnMut(&T) -> bool,
4492 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4493 f.debug_struct("RSplitNMut").field("inner", &self.inner).finish()
// Expands `Iterator`/`FusedIterator` impls for the four `*SplitN*` wrappers,
// each forwarding to its `inner: GenericSplitN<...>`.
4497 macro_rules! forward_iterator {
4498 ($name:ident: $elem:ident, $iter_of:ty) => {
4499 #[stable(feature = "rust1", since = "1.0.0")]
4500 impl<'a, $elem, P> Iterator for $name<'a, $elem, P>
4502 P: FnMut(&T) -> bool,
4504 type Item = $iter_of;
4507 fn next(&mut self) -> Option<$iter_of> {
4512 fn size_hint(&self) -> (usize, Option<usize>) {
4513 self.inner.size_hint()
4517 #[stable(feature = "fused", since = "1.26.0")]
4518 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {}
4522 forward_iterator! { SplitN: T, &'a [T] }
4523 forward_iterator! { RSplitN: T, &'a [T] }
4524 forward_iterator! { SplitNMut: T, &'a mut [T] }
4525 forward_iterator! { RSplitNMut: T, &'a mut [T] }
4527 /// An iterator over overlapping subslices of length `size`.
4529 /// This struct is created by the [`windows`] method on [slices].
4531 /// [`windows`]: ../../std/primitive.slice.html#method.windows
4532 /// [slices]: ../../std/primitive.slice.html
4534 #[stable(feature = "rust1", since = "1.0.0")]
4535 pub struct Windows<'a, T: 'a> {
4540 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4541 #[stable(feature = "rust1", since = "1.0.0")]
4542 impl<T> Clone for Windows<'_, T> {
4543 fn clone(&self) -> Self {
4544 Windows { v: self.v, size: self.size }
4548 #[stable(feature = "rust1", since = "1.0.0")]
4549 impl<'a, T> Iterator for Windows<'a, T> {
4550 type Item = &'a [T];
4553 fn next(&mut self) -> Option<&'a [T]> {
4554 if self.size > self.v.len() {
// Windows overlap: yield the first `size` elements, then advance by one.
4557 let ret = Some(&self.v[..self.size]);
4558 self.v = &self.v[1..];
4564 fn size_hint(&self) -> (usize, Option<usize>) {
4565 if self.size > self.v.len() {
// Exactly len - size + 1 windows remain.
4568 let size = self.v.len() - self.size + 1;
4574 fn count(self) -> usize {
4579 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `overflowing_add` guards against `size + n` wrapping around.
4580 let (end, overflow) = self.size.overflowing_add(n);
4581 if end > self.v.len() || overflow {
4585 let nth = &self.v[n..end];
4586 self.v = &self.v[n + 1..];
4592 fn last(self) -> Option<Self::Item> {
4593 if self.size > self.v.len() {
4596 let start = self.v.len() - self.size;
4597 Some(&self.v[start..])
4602 #[stable(feature = "rust1", since = "1.0.0")]
4603 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
4605 fn next_back(&mut self) -> Option<&'a [T]> {
4606 if self.size > self.v.len() {
// Yield the last window, then shrink the slice by one from the back.
4609 let ret = Some(&self.v[self.v.len() - self.size..]);
4610 self.v = &self.v[..self.v.len() - 1];
4616 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
// `overflowing_sub` guards against `len - n` underflowing.
4617 let (end, overflow) = self.v.len().overflowing_sub(n);
4618 if end < self.size || overflow {
4622 let ret = &self.v[end - self.size..end];
4623 self.v = &self.v[..end - 1];
4629 #[stable(feature = "rust1", since = "1.0.0")]
4630 impl<T> ExactSizeIterator for Windows<'_, T> {}
4632 #[unstable(feature = "trusted_len", issue = "37572")]
4633 unsafe impl<T> TrustedLen for Windows<'_, T> {}
4635 #[stable(feature = "fused", since = "1.26.0")]
4636 impl<T> FusedIterator for Windows<'_, T> {}
4639 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
4640 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
// Caller must guarantee `i` is in bounds; window `i` starts at offset `i`.
4641 from_raw_parts(self.v.as_ptr().add(i), self.size)
4643 fn may_have_side_effect() -> bool {
4648 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4649 /// time), starting at the beginning of the slice.
4651 /// When the slice len is not evenly divided by the chunk size, the last slice
4652 /// of the iteration will be the remainder.
4654 /// This struct is created by the [`chunks`] method on [slices].
4656 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
4657 /// [slices]: ../../std/primitive.slice.html
4659 #[stable(feature = "rust1", since = "1.0.0")]
4660 pub struct Chunks<'a, T: 'a> {
4665 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4666 #[stable(feature = "rust1", since = "1.0.0")]
4667 impl<T> Clone for Chunks<'_, T> {
4668 fn clone(&self) -> Self {
4669 Chunks { v: self.v, chunk_size: self.chunk_size }
4673 #[stable(feature = "rust1", since = "1.0.0")]
4674 impl<'a, T> Iterator for Chunks<'a, T> {
4675 type Item = &'a [T];
4678 fn next(&mut self) -> Option<&'a [T]> {
4679 if self.v.is_empty() {
// The final chunk may be shorter than `chunk_size`: take the minimum.
4682 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4683 let (fst, snd) = self.v.split_at(chunksz);
4690 fn size_hint(&self) -> (usize, Option<usize>) {
4691 if self.v.is_empty() {
// Round up: a non-empty remainder counts as one more chunk.
4694 let n = self.v.len() / self.chunk_size;
4695 let rem = self.v.len() % self.chunk_size;
4696 let n = if rem > 0 { n + 1 } else { n };
4702 fn count(self) -> usize {
4707 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `overflowing_mul` guards against `n * chunk_size` wrapping around.
4708 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4709 if start >= self.v.len() || overflow {
4713 let end = match start.checked_add(self.chunk_size) {
4714 Some(sum) => cmp::min(self.v.len(), sum),
4715 None => self.v.len(),
4717 let nth = &self.v[start..end];
4718 self.v = &self.v[end..];
4724 fn last(self) -> Option<Self::Item> {
4725 if self.v.is_empty() {
// Start of the last chunk, rounded down to a chunk boundary.
4728 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4729 Some(&self.v[start..])
4734 #[stable(feature = "rust1", since = "1.0.0")]
4735 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
4737 fn next_back(&mut self) -> Option<&'a [T]> {
4738 if self.v.is_empty() {
// From the back, the (possibly short) remainder chunk comes first.
4741 let remainder = self.v.len() % self.chunk_size;
4742 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4743 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4750 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4751 let len = self.len();
4756 let start = (len - 1 - n) * self.chunk_size;
4757 let end = match start.checked_add(self.chunk_size) {
4758 Some(res) => cmp::min(res, self.v.len()),
4759 None => self.v.len(),
4761 let nth_back = &self.v[start..end];
4762 self.v = &self.v[..start];
4768 #[stable(feature = "rust1", since = "1.0.0")]
4769 impl<T> ExactSizeIterator for Chunks<'_, T> {}
4771 #[unstable(feature = "trusted_len", issue = "37572")]
4772 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
4774 #[stable(feature = "fused", since = "1.26.0")]
4775 impl<T> FusedIterator for Chunks<'_, T> {}
4778 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
4779 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
// Caller must guarantee `i` is a valid chunk index.
4780 let start = i * self.chunk_size;
4781 let end = match start.checked_add(self.chunk_size) {
4782 None => self.v.len(),
4783 Some(end) => cmp::min(end, self.v.len()),
4785 from_raw_parts(self.v.as_ptr().add(start), end - start)
4787 fn may_have_side_effect() -> bool {
4792 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4793 /// elements at a time), starting at the beginning of the slice.
4795 /// When the slice len is not evenly divided by the chunk size, the last slice
4796 /// of the iteration will be the remainder.
4798 /// This struct is created by the [`chunks_mut`] method on [slices].
4800 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
4801 /// [slices]: ../../std/primitive.slice.html
4803 #[stable(feature = "rust1", since = "1.0.0")]
4804 pub struct ChunksMut<'a, T: 'a> {
4809 #[stable(feature = "rust1", since = "1.0.0")]
4810 impl<'a, T> Iterator for ChunksMut<'a, T> {
4811 type Item = &'a mut [T];
4814 fn next(&mut self) -> Option<&'a mut [T]> {
4815 if self.v.is_empty() {
4818 let sz = cmp::min(self.v.len(), self.chunk_size);
4819 let tmp = mem::replace(&mut self.v, &mut []);
4820 let (head, tail) = tmp.split_at_mut(sz);
4827 fn size_hint(&self) -> (usize, Option<usize>) {
4828 if self.v.is_empty() {
4831 let n = self.v.len() / self.chunk_size;
4832 let rem = self.v.len() % self.chunk_size;
4833 let n = if rem > 0 { n + 1 } else { n };
4839 fn count(self) -> usize {
4844 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4845 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4846 if start >= self.v.len() || overflow {
4850 let end = match start.checked_add(self.chunk_size) {
4851 Some(sum) => cmp::min(self.v.len(), sum),
4852 None => self.v.len(),
4854 let tmp = mem::replace(&mut self.v, &mut []);
4855 let (head, tail) = tmp.split_at_mut(end);
4856 let (_, nth) = head.split_at_mut(start);
4863 fn last(self) -> Option<Self::Item> {
4864 if self.v.is_empty() {
4867 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4868 Some(&mut self.v[start..])
4873 #[stable(feature = "rust1", since = "1.0.0")]
4874 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
4876 fn next_back(&mut self) -> Option<&'a mut [T]> {
4877 if self.v.is_empty() {
4880 let remainder = self.v.len() % self.chunk_size;
4881 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4882 let tmp = mem::replace(&mut self.v, &mut []);
4883 let tmp_len = tmp.len();
4884 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4891 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4892 let len = self.len();
4897 let start = (len - 1 - n) * self.chunk_size;
4898 let end = match start.checked_add(self.chunk_size) {
4899 Some(res) => cmp::min(res, self.v.len()),
4900 None => self.v.len(),
4902 let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
4903 let (head, nth_back) = temp.split_at_mut(start);
4910 #[stable(feature = "rust1", since = "1.0.0")]
4911 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
4913 #[unstable(feature = "trusted_len", issue = "37572")]
4914 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
4916 #[stable(feature = "fused", since = "1.26.0")]
4917 impl<T> FusedIterator for ChunksMut<'_, T> {}
4920 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4921 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4922 let start = i * self.chunk_size;
4923 let end = match start.checked_add(self.chunk_size) {
4924 None => self.v.len(),
4925 Some(end) => cmp::min(end, self.v.len()),
4927 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4929 fn may_have_side_effect() -> bool {
4934 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4935 /// time), starting at the beginning of the slice.
4937 /// When the slice len is not evenly divided by the chunk size, the last
4938 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4939 /// the [`remainder`] function from the iterator.
4941 /// This struct is created by the [`chunks_exact`] method on [slices].
4943 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4944 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4945 /// [slices]: ../../std/primitive.slice.html
4947 #[stable(feature = "chunks_exact", since = "1.31.0")]
4948 pub struct ChunksExact<'a, T: 'a> {
4954 impl<'a, T> ChunksExact<'a, T> {
4955 /// Returns the remainder of the original slice that is not going to be
4956 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4958 #[stable(feature = "chunks_exact", since = "1.31.0")]
4959 pub fn remainder(&self) -> &'a [T] {
4964 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4965 #[stable(feature = "chunks_exact", since = "1.31.0")]
4966 impl<T> Clone for ChunksExact<'_, T> {
4967 fn clone(&self) -> Self {
4968 ChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
4972 #[stable(feature = "chunks_exact", since = "1.31.0")]
4973 impl<'a, T> Iterator for ChunksExact<'a, T> {
4974 type Item = &'a [T];
4977 fn next(&mut self) -> Option<&'a [T]> {
4978 if self.v.len() < self.chunk_size {
4981 let (fst, snd) = self.v.split_at(self.chunk_size);
4988 fn size_hint(&self) -> (usize, Option<usize>) {
4989 let n = self.v.len() / self.chunk_size;
4994 fn count(self) -> usize {
4999 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5000 let (start, overflow) = n.overflowing_mul(self.chunk_size);
5001 if start >= self.v.len() || overflow {
5005 let (_, snd) = self.v.split_at(start);
5012 fn last(mut self) -> Option<Self::Item> {
5017 #[stable(feature = "chunks_exact", since = "1.31.0")]
5018 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
5020 fn next_back(&mut self) -> Option<&'a [T]> {
5021 if self.v.len() < self.chunk_size {
5024 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
5031 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5032 let len = self.len();
5037 let start = (len - 1 - n) * self.chunk_size;
5038 let end = start + self.chunk_size;
5039 let nth_back = &self.v[start..end];
5040 self.v = &self.v[..start];
5046 #[stable(feature = "chunks_exact", since = "1.31.0")]
5047 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
5048 fn is_empty(&self) -> bool {
5053 #[unstable(feature = "trusted_len", issue = "37572")]
5054 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
5056 #[stable(feature = "chunks_exact", since = "1.31.0")]
5057 impl<T> FusedIterator for ChunksExact<'_, T> {}
5060 #[stable(feature = "chunks_exact", since = "1.31.0")]
5061 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
5062 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5063 let start = i * self.chunk_size;
5064 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
5066 fn may_have_side_effect() -> bool {
5071 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5072 /// elements at a time), starting at the beginning of the slice.
5074 /// When the slice len is not evenly divided by the chunk size, the last up to
5075 /// `chunk_size-1` elements will be omitted but can be retrieved from the
5076 /// [`into_remainder`] function from the iterator.
5078 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
5080 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
5081 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
5082 /// [slices]: ../../std/primitive.slice.html
5084 #[stable(feature = "chunks_exact", since = "1.31.0")]
5085 pub struct ChunksExactMut<'a, T: 'a> {
5091 impl<'a, T> ChunksExactMut<'a, T> {
5092 /// Returns the remainder of the original slice that is not going to be
5093 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5095 #[stable(feature = "chunks_exact", since = "1.31.0")]
5096 pub fn into_remainder(self) -> &'a mut [T] {
5101 #[stable(feature = "chunks_exact", since = "1.31.0")]
5102 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
5103 type Item = &'a mut [T];
5106 fn next(&mut self) -> Option<&'a mut [T]> {
5107 if self.v.len() < self.chunk_size {
5110 let tmp = mem::replace(&mut self.v, &mut []);
5111 let (head, tail) = tmp.split_at_mut(self.chunk_size);
5118 fn size_hint(&self) -> (usize, Option<usize>) {
5119 let n = self.v.len() / self.chunk_size;
5124 fn count(self) -> usize {
5129 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5130 let (start, overflow) = n.overflowing_mul(self.chunk_size);
5131 if start >= self.v.len() || overflow {
5135 let tmp = mem::replace(&mut self.v, &mut []);
5136 let (_, snd) = tmp.split_at_mut(start);
5143 fn last(mut self) -> Option<Self::Item> {
5148 #[stable(feature = "chunks_exact", since = "1.31.0")]
5149 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
5151 fn next_back(&mut self) -> Option<&'a mut [T]> {
5152 if self.v.len() < self.chunk_size {
5155 let tmp = mem::replace(&mut self.v, &mut []);
5156 let tmp_len = tmp.len();
5157 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
5164 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5165 let len = self.len();
5170 let start = (len - 1 - n) * self.chunk_size;
5171 let end = start + self.chunk_size;
5172 let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5173 let (head, nth_back) = temp.split_at_mut(start);
5180 #[stable(feature = "chunks_exact", since = "1.31.0")]
5181 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
5182 fn is_empty(&self) -> bool {
5187 #[unstable(feature = "trusted_len", issue = "37572")]
5188 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
5190 #[stable(feature = "chunks_exact", since = "1.31.0")]
5191 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
5194 #[stable(feature = "chunks_exact", since = "1.31.0")]
5195 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
5196 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5197 let start = i * self.chunk_size;
5198 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
5200 fn may_have_side_effect() -> bool {
5205 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
5206 /// time), starting at the end of the slice.
5208 /// When the slice len is not evenly divided by the chunk size, the last slice
5209 /// of the iteration will be the remainder.
5211 /// This struct is created by the [`rchunks`] method on [slices].
5213 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
5214 /// [slices]: ../../std/primitive.slice.html
5216 #[stable(feature = "rchunks", since = "1.31.0")]
5217 pub struct RChunks<'a, T: 'a> {
5222 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5223 #[stable(feature = "rchunks", since = "1.31.0")]
5224 impl<T> Clone for RChunks<'_, T> {
5225 fn clone(&self) -> Self {
5226 RChunks { v: self.v, chunk_size: self.chunk_size }
5230 #[stable(feature = "rchunks", since = "1.31.0")]
5231 impl<'a, T> Iterator for RChunks<'a, T> {
5232 type Item = &'a [T];
5235 fn next(&mut self) -> Option<&'a [T]> {
5236 if self.v.is_empty() {
5239 let chunksz = cmp::min(self.v.len(), self.chunk_size);
5240 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
5247 fn size_hint(&self) -> (usize, Option<usize>) {
5248 if self.v.is_empty() {
5251 let n = self.v.len() / self.chunk_size;
5252 let rem = self.v.len() % self.chunk_size;
5253 let n = if rem > 0 { n + 1 } else { n };
5259 fn count(self) -> usize {
5264 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5265 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5266 if end >= self.v.len() || overflow {
5270 // Can't underflow because of the check above
5271 let end = self.v.len() - end;
5272 let start = match end.checked_sub(self.chunk_size) {
5276 let nth = &self.v[start..end];
5277 self.v = &self.v[0..start];
5283 fn last(self) -> Option<Self::Item> {
5284 if self.v.is_empty() {
5287 let rem = self.v.len() % self.chunk_size;
5288 let end = if rem == 0 { self.chunk_size } else { rem };
5289 Some(&self.v[0..end])
5294 #[stable(feature = "rchunks", since = "1.31.0")]
5295 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
5297 fn next_back(&mut self) -> Option<&'a [T]> {
5298 if self.v.is_empty() {
5301 let remainder = self.v.len() % self.chunk_size;
5302 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
5303 let (fst, snd) = self.v.split_at(chunksz);
5310 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5311 let len = self.len();
5316 // can't underflow because `n < len`
5317 let offset_from_end = (len - 1 - n) * self.chunk_size;
5318 let end = self.v.len() - offset_from_end;
5319 let start = end.saturating_sub(self.chunk_size);
5320 let nth_back = &self.v[start..end];
5321 self.v = &self.v[end..];
5327 #[stable(feature = "rchunks", since = "1.31.0")]
5328 impl<T> ExactSizeIterator for RChunks<'_, T> {}
5330 #[unstable(feature = "trusted_len", issue = "37572")]
5331 unsafe impl<T> TrustedLen for RChunks<'_, T> {}
5333 #[stable(feature = "rchunks", since = "1.31.0")]
5334 impl<T> FusedIterator for RChunks<'_, T> {}
5337 #[stable(feature = "rchunks", since = "1.31.0")]
5338 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
5339 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5340 let end = self.v.len() - i * self.chunk_size;
5341 let start = match end.checked_sub(self.chunk_size) {
5343 Some(start) => start,
5345 from_raw_parts(self.v.as_ptr().add(start), end - start)
5347 fn may_have_side_effect() -> bool {
5352 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5353 /// elements at a time), starting at the end of the slice.
5355 /// When the slice len is not evenly divided by the chunk size, the last slice
5356 /// of the iteration will be the remainder.
5358 /// This struct is created by the [`rchunks_mut`] method on [slices].
5360 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
5361 /// [slices]: ../../std/primitive.slice.html
5363 #[stable(feature = "rchunks", since = "1.31.0")]
5364 pub struct RChunksMut<'a, T: 'a> {
5369 #[stable(feature = "rchunks", since = "1.31.0")]
5370 impl<'a, T> Iterator for RChunksMut<'a, T> {
5371 type Item = &'a mut [T];
5374 fn next(&mut self) -> Option<&'a mut [T]> {
5375 if self.v.is_empty() {
5378 let sz = cmp::min(self.v.len(), self.chunk_size);
5379 let tmp = mem::replace(&mut self.v, &mut []);
5380 let tmp_len = tmp.len();
5381 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
5388 fn size_hint(&self) -> (usize, Option<usize>) {
5389 if self.v.is_empty() {
5392 let n = self.v.len() / self.chunk_size;
5393 let rem = self.v.len() % self.chunk_size;
5394 let n = if rem > 0 { n + 1 } else { n };
5400 fn count(self) -> usize {
5405 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5406 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5407 if end >= self.v.len() || overflow {
5411 // Can't underflow because of the check above
5412 let end = self.v.len() - end;
5413 let start = match end.checked_sub(self.chunk_size) {
5417 let tmp = mem::replace(&mut self.v, &mut []);
5418 let (head, tail) = tmp.split_at_mut(start);
5419 let (nth, _) = tail.split_at_mut(end - start);
5426 fn last(self) -> Option<Self::Item> {
5427 if self.v.is_empty() {
5430 let rem = self.v.len() % self.chunk_size;
5431 let end = if rem == 0 { self.chunk_size } else { rem };
5432 Some(&mut self.v[0..end])
5437 #[stable(feature = "rchunks", since = "1.31.0")]
5438 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
5440 fn next_back(&mut self) -> Option<&'a mut [T]> {
5441 if self.v.is_empty() {
5444 let remainder = self.v.len() % self.chunk_size;
5445 let sz = if remainder != 0 { remainder } else { self.chunk_size };
5446 let tmp = mem::replace(&mut self.v, &mut []);
5447 let (head, tail) = tmp.split_at_mut(sz);
5454 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5455 let len = self.len();
5460 // can't underflow because `n < len`
5461 let offset_from_end = (len - 1 - n) * self.chunk_size;
5462 let end = self.v.len() - offset_from_end;
5463 let start = end.saturating_sub(self.chunk_size);
5464 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5465 let (_, nth_back) = tmp.split_at_mut(start);
5472 #[stable(feature = "rchunks", since = "1.31.0")]
5473 impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
5475 #[unstable(feature = "trusted_len", issue = "37572")]
5476 unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
5478 #[stable(feature = "rchunks", since = "1.31.0")]
5479 impl<T> FusedIterator for RChunksMut<'_, T> {}
5482 #[stable(feature = "rchunks", since = "1.31.0")]
5483 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
5484 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5485 let end = self.v.len() - i * self.chunk_size;
5486 let start = match end.checked_sub(self.chunk_size) {
5488 Some(start) => start,
5490 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
5492 fn may_have_side_effect() -> bool {
5497 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
5498 /// time), starting at the end of the slice.
5500 /// When the slice len is not evenly divided by the chunk size, the last
5501 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
5502 /// the [`remainder`] function from the iterator.
5504 /// This struct is created by the [`rchunks_exact`] method on [slices].
5506 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
5507 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
5508 /// [slices]: ../../std/primitive.slice.html
5510 #[stable(feature = "rchunks", since = "1.31.0")]
5511 pub struct RChunksExact<'a, T: 'a> {
5517 impl<'a, T> RChunksExact<'a, T> {
5518 /// Returns the remainder of the original slice that is not going to be
5519 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5521 #[stable(feature = "rchunks", since = "1.31.0")]
5522 pub fn remainder(&self) -> &'a [T] {
5527 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5528 #[stable(feature = "rchunks", since = "1.31.0")]
5529 impl<'a, T> Clone for RChunksExact<'a, T> {
5530 fn clone(&self) -> RChunksExact<'a, T> {
5531 RChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
5535 #[stable(feature = "rchunks", since = "1.31.0")]
5536 impl<'a, T> Iterator for RChunksExact<'a, T> {
5537 type Item = &'a [T];
5540 fn next(&mut self) -> Option<&'a [T]> {
5541 if self.v.len() < self.chunk_size {
5544 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
5551 fn size_hint(&self) -> (usize, Option<usize>) {
5552 let n = self.v.len() / self.chunk_size;
5557 fn count(self) -> usize {
5562 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5563 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5564 if end >= self.v.len() || overflow {
5568 let (fst, _) = self.v.split_at(self.v.len() - end);
5575 fn last(mut self) -> Option<Self::Item> {
5580 #[stable(feature = "rchunks", since = "1.31.0")]
5581 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
5583 fn next_back(&mut self) -> Option<&'a [T]> {
5584 if self.v.len() < self.chunk_size {
5587 let (fst, snd) = self.v.split_at(self.chunk_size);
5594 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5595 let len = self.len();
5600 // now that we know that `n` corresponds to a chunk,
5601 // none of these operations can underflow/overflow
5602 let offset = (len - n) * self.chunk_size;
5603 let start = self.v.len() - offset;
5604 let end = start + self.chunk_size;
5605 let nth_back = &self.v[start..end];
5606 self.v = &self.v[end..];
5612 #[stable(feature = "rchunks", since = "1.31.0")]
5613 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
5614 fn is_empty(&self) -> bool {
5619 #[unstable(feature = "trusted_len", issue = "37572")]
5620 unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
5622 #[stable(feature = "rchunks", since = "1.31.0")]
5623 impl<T> FusedIterator for RChunksExact<'_, T> {}
5626 #[stable(feature = "rchunks", since = "1.31.0")]
5627 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
5628 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5629 let end = self.v.len() - i * self.chunk_size;
5630 let start = end - self.chunk_size;
5631 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
5633 fn may_have_side_effect() -> bool {
5638 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5639 /// elements at a time), starting at the end of the slice.
5641 /// When the slice len is not evenly divided by the chunk size, the last up to
5642 /// `chunk_size-1` elements will be omitted but can be retrieved from the
5643 /// [`into_remainder`] function from the iterator.
5645 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
5647 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
5648 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
5649 /// [slices]: ../../std/primitive.slice.html
5651 #[stable(feature = "rchunks", since = "1.31.0")]
5652 pub struct RChunksExactMut<'a, T: 'a> {
5658 impl<'a, T> RChunksExactMut<'a, T> {
5659 /// Returns the remainder of the original slice that is not going to be
5660 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5662 #[stable(feature = "rchunks", since = "1.31.0")]
5663 pub fn into_remainder(self) -> &'a mut [T] {
5668 #[stable(feature = "rchunks", since = "1.31.0")]
5669 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
5670 type Item = &'a mut [T];
5673 fn next(&mut self) -> Option<&'a mut [T]> {
5674 if self.v.len() < self.chunk_size {
5677 let tmp = mem::replace(&mut self.v, &mut []);
5678 let tmp_len = tmp.len();
5679 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
5686 fn size_hint(&self) -> (usize, Option<usize>) {
5687 let n = self.v.len() / self.chunk_size;
5692 fn count(self) -> usize {
5697 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5698 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5699 if end >= self.v.len() || overflow {
5703 let tmp = mem::replace(&mut self.v, &mut []);
5704 let tmp_len = tmp.len();
5705 let (fst, _) = tmp.split_at_mut(tmp_len - end);
5712 fn last(mut self) -> Option<Self::Item> {
5717 #[stable(feature = "rchunks", since = "1.31.0")]
5718 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
5720 fn next_back(&mut self) -> Option<&'a mut [T]> {
5721 if self.v.len() < self.chunk_size {
5724 let tmp = mem::replace(&mut self.v, &mut []);
5725 let (head, tail) = tmp.split_at_mut(self.chunk_size);
5732 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5733 let len = self.len();
5738 // now that we know that `n` corresponds to a chunk,
5739 // none of these operations can underflow/overflow
5740 let offset = (len - n) * self.chunk_size;
5741 let start = self.v.len() - offset;
5742 let end = start + self.chunk_size;
5743 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5744 let (_, nth_back) = tmp.split_at_mut(start);
5751 #[stable(feature = "rchunks", since = "1.31.0")]
5752 impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
5753 fn is_empty(&self) -> bool {
5758 #[unstable(feature = "trusted_len", issue = "37572")]
5759 unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
5761 #[stable(feature = "rchunks", since = "1.31.0")]
5762 impl<T> FusedIterator for RChunksExactMut<'_, T> {}
5765 #[stable(feature = "rchunks", since = "1.31.0")]
5766 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
5767 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5768 let end = self.v.len() - i * self.chunk_size;
5769 let start = end - self.chunk_size;
5770 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
5772 fn may_have_side_effect() -> bool {
5781 /// Forms a slice from a pointer and a length.
5783 /// The `len` argument is the number of **elements**, not the number of bytes.
5787 /// Behavior is undefined if any of the following conditions are violated:
5789 /// * `data` must be [valid] for reads for `len * mem::size_of::<T>()` many bytes,
5790 /// and it must be properly aligned. This means in particular:
5792 /// * The entire memory range of this slice must be contained within a single allocated object!
5793 /// Slices can never span across multiple allocated objects. See [below](#incorrect-usage)
5794 /// for an example incorrectly not taking this into account.
5795 /// * `data` must be non-null and aligned even for zero-length slices. One
5796 /// reason for this is that enum layout optimizations may rely on references
5797 /// (including slices of any length) being aligned and non-null to distinguish
5798 /// them from other data. You can obtain a pointer that is usable as `data`
5799 /// for zero-length slices using [`NonNull::dangling()`].
5801 /// * The memory referenced by the returned slice must not be mutated for the duration
5802 /// of lifetime `'a`, except inside an `UnsafeCell`.
5804 /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
5805 /// See the safety documentation of [`pointer::offset`].
5809 /// The lifetime for the returned slice is inferred from its usage. To
5810 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
5811 /// source lifetime is safe in the context, such as by providing a helper
5812 /// function taking the lifetime of a host value for the slice, or by explicit
5820 /// // manifest a slice for a single element
5822 /// let ptr = &x as *const _;
5823 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
5824 /// assert_eq!(slice[0], 42);
5827 /// ### Incorrect usage
5829 /// The following `join_slices` function is **unsound** ⚠️
5834 /// fn join_slices<'a, T>(fst: &'a [T], snd: &'a [T]) -> &'a [T] {
5835 /// let fst_end = fst.as_ptr().wrapping_add(fst.len());
5836 /// let snd_start = snd.as_ptr();
5837 /// assert_eq!(fst_end, snd_start, "Slices must be contiguous!");
5839 /// // The assertion above ensures `fst` and `snd` are contiguous, but they might
5840 /// // still be contained within _different allocated objects_, in which case
5841 /// // creating this slice is undefined behavior.
5842 /// slice::from_raw_parts(fst.as_ptr(), fst.len() + snd.len())
5847 /// // `a` and `b` are different allocated objects...
5850 /// // ... which may nevertheless be laid out contiguously in memory: | a | b |
5851 /// let _ = join_slices(slice::from_ref(&a), slice::from_ref(&b)); // UB
5855 /// [valid]: ../../std/ptr/index.html#safety
5856 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
5857 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
5859 #[stable(feature = "rust1", since = "1.0.0")]
5860 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
5861 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
5863 mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5864 "attempt to create slice covering at least half the address space"
5866 &*ptr::slice_from_raw_parts(data, len)
5869 /// Performs the same functionality as [`from_raw_parts`], except that a
5870 /// mutable slice is returned.
5874 /// Behavior is undefined if any of the following conditions are violated:
5876 /// * `data` must be [valid] for writes for `len * mem::size_of::<T>()` many bytes,
5877 /// and it must be properly aligned. This means in particular:
5879 /// * The entire memory range of this slice must be contained within a single allocated object!
5880 /// Slices can never span across multiple allocated objects.
5881 /// * `data` must be non-null and aligned even for zero-length slices. One
5882 /// reason for this is that enum layout optimizations may rely on references
5883 /// (including slices of any length) being aligned and non-null to distinguish
5884 /// them from other data. You can obtain a pointer that is usable as `data`
5885 /// for zero-length slices using [`NonNull::dangling()`].
5887 /// * The memory referenced by the returned slice must not be accessed through any other pointer
5888 /// (not derived from the return value) for the duration of lifetime `'a`.
5889 /// Both read and write accesses are forbidden.
5891 /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
5892 /// See the safety documentation of [`pointer::offset`].
5894 /// [valid]: ../../std/ptr/index.html#safety
5895 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
5896 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
5897 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
5899 #[stable(feature = "rust1", since = "1.0.0")]
5900 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
5901 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
5903 mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5904 "attempt to create slice covering at least half the address space"
5906 &mut *ptr::slice_from_raw_parts_mut(data, len)
5909 /// Converts a reference to T into a slice of length 1 (without copying).
5910 #[stable(feature = "from_ref", since = "1.28.0")]
5911 pub fn from_ref<T>(s: &T) -> &[T] {
5912 unsafe { from_raw_parts(s, 1) }
5915 /// Converts a reference to T into a slice of length 1 (without copying).
5916 #[stable(feature = "from_ref", since = "1.28.0")]
5917 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
5918 unsafe { from_raw_parts_mut(s, 1) }
5921 // This function is public only because there is no other way to unit test heapsort.
5922 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "none")]
5924 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
5926 F: FnMut(&T, &T) -> bool,
5928 sort::heapsort(v, &mut is_less);
//
// Comparison traits
//

extern "C" {
    /// Calls implementation provided memcmp.
    ///
    /// Interprets the data as u8.
    ///
    /// Returns 0 for equal, < 0 for less than and > 0 for greater
    /// than.
    // FIXME(#32610): Return type should be c_int
    fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
5946 #[stable(feature = "rust1", since = "1.0.0")]
5947 impl<A, B> PartialEq<[B]> for [A]
5951 fn eq(&self, other: &[B]) -> bool {
5952 SlicePartialEq::equal(self, other)
5955 fn ne(&self, other: &[B]) -> bool {
5956 SlicePartialEq::not_equal(self, other)
5960 #[stable(feature = "rust1", since = "1.0.0")]
5961 impl<T: Eq> Eq for [T] {}
5963 /// Implements comparison of vectors lexicographically.
5964 #[stable(feature = "rust1", since = "1.0.0")]
5965 impl<T: Ord> Ord for [T] {
5966 fn cmp(&self, other: &[T]) -> Ordering {
5967 SliceOrd::compare(self, other)
5971 /// Implements comparison of vectors lexicographically.
5972 #[stable(feature = "rust1", since = "1.0.0")]
5973 impl<T: PartialOrd> PartialOrd for [T] {
5974 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
5975 SlicePartialOrd::partial_compare(self, other)
5980 // intermediate trait for specialization of slice's PartialEq
5981 trait SlicePartialEq<B> {
5982 fn equal(&self, other: &[B]) -> bool;
// Provided method: specializing impls only need to supply `equal`.
// (Default body presumably negates `equal`; not visible in this excerpt —
// NOTE(review): confirm against the full source.)
5984 fn not_equal(&self, other: &[B]) -> bool {
5989 // Generic slice equality
5990 impl<A, B> SlicePartialEq<B> for [A]
// Fallback: O(n) element-by-element comparison.
5994 default fn equal(&self, other: &[B]) -> bool {
// Slices of different lengths can never be equal, so bail out early.
5995 if self.len() != other.len() {
5999 self.iter().zip(other.iter()).all(|(x, y)| x == y)
6003 // Use an equal-pointer optimization when types are `Eq`
6004 impl<A> SlicePartialEq<A> for [A]
6006 A: PartialEq<A> + Eq,
6008 default fn equal(&self, other: &[A]) -> bool {
6009 if self.len() != other.len() {
// Same starting pointer + same length means the slices alias the same
// memory. This shortcut is only sound under `A: Eq`, which guarantees
// equality is reflexive (a float-like type could have `x != x`).
6013 if self.as_ptr() == other.as_ptr() {
6017 self.iter().zip(other.iter()).all(|(x, y)| x == y)
6021 // Use memcmp for bytewise equality when the types allow
6022 impl<A> SlicePartialEq<A> for [A]
6024 A: PartialEq<A> + BytewiseEquality,
6026 fn equal(&self, other: &[A]) -> bool {
// Length mismatch: cannot be equal.
6027 if self.len() != other.len() {
// Aliasing slices of equal length are trivially equal.
6030 if self.as_ptr() == other.as_ptr() {
// Total byte length of `self`; since the lengths match, comparing this
// many bytes of both slices compares every element.
6034 let size = mem::size_of_val(self);
6035 memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
6041 // intermediate trait for specialization of slice's PartialOrd
// Implemented on the *element* type; compares two slices of `Self`.
6042 trait SlicePartialOrd: Sized {
6043 fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
// Fallback: lexicographic comparison via element-wise `partial_cmp`.
6046 impl<A: PartialOrd> SlicePartialOrd for A {
6047 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
// Only the common prefix can be compared element-wise.
6048 let l = cmp::min(left.len(), right.len());
6050 // Slice to the loop iteration range to enable bound check
6051 // elimination in the compiler
6052 let lhs = &left[..l];
6053 let rhs = &right[..l];
// First non-equal pair (or an incomparable pair, `None`) decides.
6056 match lhs[i].partial_cmp(&rhs[i]) {
6057 Some(Ordering::Equal) => (),
6058 non_eq => return non_eq,
// Entire common prefix tied: the shorter slice orders first, so the
// result falls back to comparing lengths.
6062 left.len().partial_cmp(&right.len())
6066 // This is the impl that we would like to have. Unfortunately it's not sound.
6067 // See `partial_ord_slice.rs`.
// NOTE(review): this impl appears to be disabled upstream (kept inside a
// block comment whose delimiters are not visible in this excerpt) — it must
// remain inactive; the `AlwaysApplicableOrd`-bounded impl below is the
// sound replacement. Confirm against the full source.
6069 impl<A> SlicePartialOrd for A
6073 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
6074 Some(SliceOrd::compare(left, right))
// For types whose ordering is total and independent of type parameters,
// `partial_compare` can reuse the total-order `compare` and always succeed.
6079 impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
6080 fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
6081 Some(SliceOrd::compare(left, right))
// Marker for types whose `Ord` impl applies unconditionally, so the
// specialization above is sound for them.
6085 trait AlwaysApplicableOrd: SliceOrd + Ord {}
// Helper macro: each `[generics] type` pair expands to a marker impl.
6087 macro_rules! always_applicable_ord {
6088 ($([$($p:tt)*] $t:ty,)*) => {
6089 $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
// Primitive integers, pointers, and references/Options thereof.
6093 always_applicable_ord! {
6094 [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
6095 [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
6097 [T: ?Sized] *const T, [T: ?Sized] *mut T,
6098 [T: AlwaysApplicableOrd] &T,
6099 [T: AlwaysApplicableOrd] &mut T,
6100 [T: AlwaysApplicableOrd] Option<T>,
6104 // intermediate trait for specialization of slice's Ord
// Implemented on the *element* type; compares two slices of `Self`.
6105 trait SliceOrd: Sized {
6106 fn compare(left: &[Self], right: &[Self]) -> Ordering;
// Fallback: lexicographic comparison via element-wise `cmp`.
6109 impl<A: Ord> SliceOrd for A {
6110 default fn compare(left: &[Self], right: &[Self]) -> Ordering {
// Only the common prefix can be compared element-wise.
6111 let l = cmp::min(left.len(), right.len());
6113 // Slice to the loop iteration range to enable bound check
6114 // elimination in the compiler
6115 let lhs = &left[..l];
6116 let rhs = &right[..l];
// First non-equal pair decides the ordering.
6119 match lhs[i].cmp(&rhs[i]) {
6120 Ordering::Equal => (),
6121 non_eq => return non_eq,
// Entire common prefix tied: the shorter slice orders first.
6125 left.len().cmp(&right.len())
6129 // memcmp compares a sequence of unsigned bytes lexicographically.
6130 // this matches the order we want for [u8], but no others (not even [i8]).
6131 impl SliceOrd for u8 {
6133 fn compare(left: &[Self], right: &[Self]) -> Ordering {
// SAFETY: both pointers are valid for reads of min(left.len(), right.len())
// bytes, which is exactly how many bytes memcmp inspects.
6135 unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
// Common prefix tied (memcmp returned 0): lengths break the tie.
6137 left.len().cmp(&right.len())
6138 } else if order < 0 {
6147 /// Trait implemented for types that can be compared for equality using
6148 /// their bytewise representation
// Marker bound used by the memcmp-based `SlicePartialEq` specialization;
// `Eq + Copy` rules out interior padding surprises from non-trivial types.
6149 trait BytewiseEquality: Eq + Copy {}
// Helper macro: one empty marker impl per listed type.
6151 macro_rules! impl_marker_for {
6152 ($traitname:ident, $($ty:ty)*) => {
6154 impl $traitname for $ty { }
6159 impl_marker_for!(BytewiseEquality,
6160 u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
// Unchecked indexed access for the shared-reference slice iterator.
6163 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
// SAFETY (caller contract): `i` must be within the iterator's remaining
// elements — no bounds check is performed on the raw pointer offset.
6164 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
6165 &*self.ptr.as_ptr().add(i)
6167 fn may_have_side_effect() -> bool {
// Unchecked indexed access for the mutable slice iterator.
6173 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
// SAFETY (caller contract): `i` must be within the iterator's remaining
// elements, and each index may be handed out at most once so the returned
// `&'a mut T` stays unique.
6174 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
6175 &mut *self.ptr.as_ptr().add(i)
6177 fn may_have_side_effect() -> bool {
// Specialization helper for `[T]::contains`: `self` is the needle, `x` the
// haystack slice searched for it.
6182 trait SliceContains: Sized {
6183 fn slice_contains(&self, x: &[Self]) -> bool;
// Fallback: linear scan using the element type's `==`.
6186 impl<T> SliceContains for T
6190 default fn slice_contains(&self, x: &[Self]) -> bool {
6191 x.iter().any(|y| *y == *self)
// Byte haystacks use the optimized memchr search instead of a naive scan.
6195 impl SliceContains for u8 {
6196 fn slice_contains(&self, x: &[Self]) -> bool {
6197 memchr::memchr(*self, x).is_some()
// `i8` reuses the `u8` memchr path by reinterpreting the bytes: equality of
// `i8` values coincides with equality of their `u8` reinterpretation.
6201 impl SliceContains for i8 {
6202 fn slice_contains(&self, x: &[Self]) -> bool {
6203 let byte = *self as u8;
// SAFETY: `i8` and `u8` have identical size and alignment, and the new
// slice covers exactly the same `x.len()` bytes as `x` for reads only.
6204 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
6205 memchr::memchr(byte, bytes).is_some()