1 // Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Slice management and manipulation
13 //! For more details see [`std::slice`].
15 //! [`std::slice`]: ../../std/slice/index.html
17 #![stable(feature = "rust1", since = "1.0.0")]
19 // How this module is organized.
21 // The library infrastructure for slices is fairly messy. There's
22 // a lot of stuff defined here. Let's keep it clean.
24 // The layout of this file is thus:
26 // * Inherent methods. This is where most of the slice API resides.
27 // * Implementations of a few common traits with important slice ops.
28 // * Definitions of a bunch of iterators.
30 // * The `raw` and `bytes` submodules.
31 // * Boilerplate trait implementations.
33 use cmp::Ordering::{self, Less, Equal, Greater};
36 use intrinsics::assume;
38 use ops::{FnMut, Try, self};
40 use option::Option::{None, Some};
42 use result::Result::{Ok, Err};
45 use marker::{Copy, Send, Sync, Sized, self};
46 use iter_private::TrustedRandomAccess;
48 #[unstable(feature = "slice_internals", issue = "0",
49 reason = "exposed from core to be reused in std; use the memchr crate")]
50 /// Pure rust memchr implementation, taken from rust-memchr
57 union Repr<'a, T: 'a> {
59 rust_mut: &'a mut [T],
76 /// Returns the number of elements in the slice.
81 /// let a = [1, 2, 3];
82 /// assert_eq!(a.len(), 3);
84 #[stable(feature = "rust1", since = "1.0.0")]
86 #[rustc_const_unstable(feature = "const_slice_len")]
87 pub const fn len(&self) -> usize {
89 Repr { rust: self }.raw.len
93 /// Returns `true` if the slice has a length of 0.
98 /// let a = [1, 2, 3];
99 /// assert!(!a.is_empty());
101 #[stable(feature = "rust1", since = "1.0.0")]
103 #[rustc_const_unstable(feature = "const_slice_len")]
104 pub const fn is_empty(&self) -> bool {
108 /// Returns the first element of the slice, or `None` if it is empty.
113 /// let v = [10, 40, 30];
114 /// assert_eq!(Some(&10), v.first());
116 /// let w: &[i32] = &[];
117 /// assert_eq!(None, w.first());
119 #[stable(feature = "rust1", since = "1.0.0")]
121 pub fn first(&self) -> Option<&T> {
125 /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty.
130 /// let x = &mut [0, 1, 2];
132 /// if let Some(first) = x.first_mut() {
135 /// assert_eq!(x, &[5, 1, 2]);
137 #[stable(feature = "rust1", since = "1.0.0")]
139 pub fn first_mut(&mut self) -> Option<&mut T> {
143 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
148 /// let x = &[0, 1, 2];
150 /// if let Some((first, elements)) = x.split_first() {
151 /// assert_eq!(first, &0);
152 /// assert_eq!(elements, &[1, 2]);
155 #[stable(feature = "slice_splits", since = "1.5.0")]
157 pub fn split_first(&self) -> Option<(&T, &[T])> {
158 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
161 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
166 /// let x = &mut [0, 1, 2];
168 /// if let Some((first, elements)) = x.split_first_mut() {
173 /// assert_eq!(x, &[3, 4, 5]);
175 #[stable(feature = "slice_splits", since = "1.5.0")]
177 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
178 if self.is_empty() { None } else {
179 let split = self.split_at_mut(1);
180 Some((&mut split.0[0], split.1))
184 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
189 /// let x = &[0, 1, 2];
191 /// if let Some((last, elements)) = x.split_last() {
192 /// assert_eq!(last, &2);
193 /// assert_eq!(elements, &[0, 1]);
196 #[stable(feature = "slice_splits", since = "1.5.0")]
198 pub fn split_last(&self) -> Option<(&T, &[T])> {
199 let len = self.len();
200 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
203 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
208 /// let x = &mut [0, 1, 2];
210 /// if let Some((last, elements)) = x.split_last_mut() {
215 /// assert_eq!(x, &[4, 5, 3]);
217 #[stable(feature = "slice_splits", since = "1.5.0")]
219 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
220 let len = self.len();
221 if len == 0 { None } else {
222 let split = self.split_at_mut(len - 1);
223 Some((&mut split.1[0], split.0))
228 /// Returns the last element of the slice, or `None` if it is empty.
233 /// let v = [10, 40, 30];
234 /// assert_eq!(Some(&30), v.last());
236 /// let w: &[i32] = &[];
237 /// assert_eq!(None, w.last());
239 #[stable(feature = "rust1", since = "1.0.0")]
241 pub fn last(&self) -> Option<&T> {
242 let last_idx = self.len().checked_sub(1)?;
246 /// Returns a mutable pointer to the last item in the slice.
251 /// let x = &mut [0, 1, 2];
253 /// if let Some(last) = x.last_mut() {
256 /// assert_eq!(x, &[0, 1, 10]);
258 #[stable(feature = "rust1", since = "1.0.0")]
260 pub fn last_mut(&mut self) -> Option<&mut T> {
261 let last_idx = self.len().checked_sub(1)?;
262 self.get_mut(last_idx)
265 /// Returns a reference to an element or subslice depending on the type of
268 /// - If given a position, returns a reference to the element at that
269 /// position or `None` if out of bounds.
270 /// - If given a range, returns the subslice corresponding to that range,
271 /// or `None` if out of bounds.
276 /// let v = [10, 40, 30];
277 /// assert_eq!(Some(&40), v.get(1));
278 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
279 /// assert_eq!(None, v.get(3));
280 /// assert_eq!(None, v.get(0..4));
282 #[stable(feature = "rust1", since = "1.0.0")]
284 pub fn get<I>(&self, index: I) -> Option<&I::Output>
285 where I: SliceIndex<Self>
290 /// Returns a mutable reference to an element or subslice depending on the
291 /// type of index (see [`get`]) or `None` if the index is out of bounds.
293 /// [`get`]: #method.get
298 /// let x = &mut [0, 1, 2];
300 /// if let Some(elem) = x.get_mut(1) {
303 /// assert_eq!(x, &[0, 42, 2]);
305 #[stable(feature = "rust1", since = "1.0.0")]
307 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
308 where I: SliceIndex<Self>
313 /// Returns a reference to an element or subslice, without doing bounds
316 /// This is generally not recommended, use with caution! For a safe
317 /// alternative see [`get`].
319 /// [`get`]: #method.get
324 /// let x = &[1, 2, 4];
327 /// assert_eq!(x.get_unchecked(1), &2);
330 #[stable(feature = "rust1", since = "1.0.0")]
332 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
333 where I: SliceIndex<Self>
335 index.get_unchecked(self)
338 /// Returns a mutable reference to an element or subslice, without doing
341 /// This is generally not recommended, use with caution! For a safe
342 /// alternative see [`get_mut`].
344 /// [`get_mut`]: #method.get_mut
349 /// let x = &mut [1, 2, 4];
352 /// let elem = x.get_unchecked_mut(1);
355 /// assert_eq!(x, &[1, 13, 4]);
357 #[stable(feature = "rust1", since = "1.0.0")]
359 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
360 where I: SliceIndex<Self>
362 index.get_unchecked_mut(self)
365 /// Returns a raw pointer to the slice's buffer.
367 /// The caller must ensure that the slice outlives the pointer this
368 /// function returns, or else it will end up pointing to garbage.
370 /// Modifying the container referenced by this slice may cause its buffer
371 /// to be reallocated, which would also make any pointers to it invalid.
376 /// let x = &[1, 2, 4];
377 /// let x_ptr = x.as_ptr();
380 /// for i in 0..x.len() {
381 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
385 #[stable(feature = "rust1", since = "1.0.0")]
387 #[rustc_const_unstable(feature = "const_slice_as_ptr")]
388 pub const fn as_ptr(&self) -> *const T {
389 self as *const [T] as *const T
392 /// Returns an unsafe mutable pointer to the slice's buffer.
394 /// The caller must ensure that the slice outlives the pointer this
395 /// function returns, or else it will end up pointing to garbage.
397 /// Modifying the container referenced by this slice may cause its buffer
398 /// to be reallocated, which would also make any pointers to it invalid.
403 /// let x = &mut [1, 2, 4];
404 /// let x_ptr = x.as_mut_ptr();
407 /// for i in 0..x.len() {
408 /// *x_ptr.add(i) += 2;
411 /// assert_eq!(x, &[3, 4, 6]);
413 #[stable(feature = "rust1", since = "1.0.0")]
415 pub fn as_mut_ptr(&mut self) -> *mut T {
416 self as *mut [T] as *mut T
419 /// Swaps two elements in the slice.
423 /// * a - The index of the first element
424 /// * b - The index of the second element
428 /// Panics if `a` or `b` are out of bounds.
433 /// let mut v = ["a", "b", "c", "d"];
435 /// assert!(v == ["a", "d", "c", "b"]);
437 #[stable(feature = "rust1", since = "1.0.0")]
439 pub fn swap(&mut self, a: usize, b: usize) {
441 // Can't take two mutable loans from one vector, so instead just cast
442 // them to their raw pointers to do the swap
443 let pa: *mut T = &mut self[a];
444 let pb: *mut T = &mut self[b];
449 /// Reverses the order of elements in the slice, in place.
454 /// let mut v = [1, 2, 3];
456 /// assert!(v == [3, 2, 1]);
458 #[stable(feature = "rust1", since = "1.0.0")]
460 pub fn reverse(&mut self) {
461 let mut i: usize = 0;
464 // For very small types, all the individual reads in the normal
465 // path perform poorly. We can do better, given efficient unaligned
466 // load/store, by loading a larger chunk and reversing a register.
468 // Ideally LLVM would do this for us, as it knows better than we do
469 // whether unaligned reads are efficient (since that changes between
470 // different ARM versions, for example) and what the best chunk size
471 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
472 // the loop, so we need to do this ourselves. (Hypothesis: reverse
473 // is troublesome because the sides can be aligned differently --
474 // will be, when the length is odd -- so there's no way of emitting
475 // pre- and postludes to use fully-aligned SIMD in the middle.)
478 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
480 if fast_unaligned && mem::size_of::<T>() == 1 {
481 // Use the llvm.bswap intrinsic to reverse u8s in a usize
482 let chunk = mem::size_of::<usize>();
483 while i + chunk - 1 < ln / 2 {
485 let pa: *mut T = self.get_unchecked_mut(i);
486 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
487 let va = ptr::read_unaligned(pa as *mut usize);
488 let vb = ptr::read_unaligned(pb as *mut usize);
489 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
490 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
496 if fast_unaligned && mem::size_of::<T>() == 2 {
497 // Use rotate-by-16 to reverse u16s in a u32
498 let chunk = mem::size_of::<u32>() / 2;
499 while i + chunk - 1 < ln / 2 {
501 let pa: *mut T = self.get_unchecked_mut(i);
502 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
503 let va = ptr::read_unaligned(pa as *mut u32);
504 let vb = ptr::read_unaligned(pb as *mut u32);
505 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
506 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
513 // Unsafe swap to avoid the bounds check in safe swap.
515 let pa: *mut T = self.get_unchecked_mut(i);
516 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
523 /// Returns an iterator over the slice.
528 /// let x = &[1, 2, 4];
529 /// let mut iterator = x.iter();
531 /// assert_eq!(iterator.next(), Some(&1));
532 /// assert_eq!(iterator.next(), Some(&2));
533 /// assert_eq!(iterator.next(), Some(&4));
534 /// assert_eq!(iterator.next(), None);
536 #[stable(feature = "rust1", since = "1.0.0")]
538 pub fn iter(&self) -> Iter<T> {
540 let ptr = self.as_ptr();
541 assume(!ptr.is_null());
543 let end = if mem::size_of::<T>() == 0 {
544 (ptr as *const u8).wrapping_add(self.len()) as *const T
552 _marker: marker::PhantomData
557 /// Returns an iterator that allows modifying each value.
562 /// let x = &mut [1, 2, 4];
563 /// for elem in x.iter_mut() {
566 /// assert_eq!(x, &[3, 4, 6]);
568 #[stable(feature = "rust1", since = "1.0.0")]
570 pub fn iter_mut(&mut self) -> IterMut<T> {
572 let ptr = self.as_mut_ptr();
573 assume(!ptr.is_null());
575 let end = if mem::size_of::<T>() == 0 {
576 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
584 _marker: marker::PhantomData
589 /// Returns an iterator over all contiguous windows of length
590 /// `size`. The windows overlap. If the slice is shorter than
591 /// `size`, the iterator returns no values.
595 /// Panics if `size` is 0.
600 /// let slice = ['r', 'u', 's', 't'];
601 /// let mut iter = slice.windows(2);
602 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
603 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
604 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
605 /// assert!(iter.next().is_none());
608 /// If the slice is shorter than `size`:
611 /// let slice = ['f', 'o', 'o'];
612 /// let mut iter = slice.windows(4);
613 /// assert!(iter.next().is_none());
615 #[stable(feature = "rust1", since = "1.0.0")]
617 pub fn windows(&self, size: usize) -> Windows<T> {
619 Windows { v: self, size }
622 /// Returns an iterator over `chunk_size` elements of the slice at a
623 /// time. The chunks are slices and do not overlap. If `chunk_size` does
624 /// not divide the length of the slice, then the last chunk will
625 /// not have length `chunk_size`.
627 /// See [`chunks_exact`] for a variant of this iterator that returns chunks
628 /// of always exactly `chunk_size` elements.
632 /// Panics if `chunk_size` is 0.
637 /// let slice = ['l', 'o', 'r', 'e', 'm'];
638 /// let mut iter = slice.chunks(2);
639 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
640 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
641 /// assert_eq!(iter.next().unwrap(), &['m']);
642 /// assert!(iter.next().is_none());
645 /// [`chunks_exact`]: #method.chunks_exact
646 #[stable(feature = "rust1", since = "1.0.0")]
648 pub fn chunks(&self, chunk_size: usize) -> Chunks<T> {
649 assert!(chunk_size != 0);
650 Chunks { v: self, chunk_size }
653 /// Returns an iterator over `chunk_size` elements of the slice at a time.
654 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does
655 /// not divide the length of the slice, then the last chunk will not
656 /// have length `chunk_size`.
658 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks
659 /// of always exactly `chunk_size` elements.
663 /// Panics if `chunk_size` is 0.
668 /// let v = &mut [0, 0, 0, 0, 0];
669 /// let mut count = 1;
671 /// for chunk in v.chunks_mut(2) {
672 /// for elem in chunk.iter_mut() {
677 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
680 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
681 #[stable(feature = "rust1", since = "1.0.0")]
683 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
684 assert!(chunk_size != 0);
685 ChunksMut { v: self, chunk_size }
688 /// Returns an iterator over `chunk_size` elements of the slice at a
689 /// time. The chunks are slices and do not overlap. If `chunk_size` does
690 /// not divide the length of the slice, then the last up to `chunk_size-1`
691 /// elements will be omitted and can be retrieved from the `remainder`
692 /// function of the iterator.
694 /// Due to each chunk having exactly `chunk_size` elements, the compiler
695 /// can often optimize the resulting code better than in the case of
700 /// Panics if `chunk_size` is 0.
705 /// #![feature(chunks_exact)]
707 /// let slice = ['l', 'o', 'r', 'e', 'm'];
708 /// let mut iter = slice.chunks_exact(2);
709 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
710 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
711 /// assert!(iter.next().is_none());
714 /// [`chunks`]: #method.chunks
715 #[unstable(feature = "chunks_exact", issue = "47115")]
717 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<T> {
718 assert!(chunk_size != 0);
719 let rem = self.len() % chunk_size;
720 let len = self.len() - rem;
721 let (fst, snd) = self.split_at(len);
722 ChunksExact { v: fst, rem: snd, chunk_size }
725 /// Returns an iterator over `chunk_size` elements of the slice at a time.
726 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does
727 /// not divide the length of the slice, then the last up to `chunk_size-1`
728 /// elements will be omitted and can be retrieved from the `into_remainder`
729 /// function of the iterator.
731 /// Due to each chunk having exactly `chunk_size` elements, the compiler
732 /// can often optimize the resulting code better than in the case of
737 /// Panics if `chunk_size` is 0.
742 /// #![feature(chunks_exact)]
744 /// let v = &mut [0, 0, 0, 0, 0];
745 /// let mut count = 1;
747 /// for chunk in v.chunks_exact_mut(2) {
748 /// for elem in chunk.iter_mut() {
753 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
756 /// [`chunks_mut`]: #method.chunks_mut
757 #[unstable(feature = "chunks_exact", issue = "47115")]
759 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<T> {
760 assert!(chunk_size != 0);
761 let rem = self.len() % chunk_size;
762 let len = self.len() - rem;
763 let (fst, snd) = self.split_at_mut(len);
764 ChunksExactMut { v: fst, rem: snd, chunk_size }
767 /// Divides one slice into two at an index.
769 /// The first will contain all indices from `[0, mid)` (excluding
770 /// the index `mid` itself) and the second will contain all
771 /// indices from `[mid, len)` (excluding the index `len` itself).
775 /// Panics if `mid > len`.
780 /// let v = [1, 2, 3, 4, 5, 6];
783 /// let (left, right) = v.split_at(0);
784 /// assert!(left == []);
785 /// assert!(right == [1, 2, 3, 4, 5, 6]);
789 /// let (left, right) = v.split_at(2);
790 /// assert!(left == [1, 2]);
791 /// assert!(right == [3, 4, 5, 6]);
795 /// let (left, right) = v.split_at(6);
796 /// assert!(left == [1, 2, 3, 4, 5, 6]);
797 /// assert!(right == []);
800 #[stable(feature = "rust1", since = "1.0.0")]
802 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
803 (&self[..mid], &self[mid..])
806 /// Divides one mutable slice into two at an index.
808 /// The first will contain all indices from `[0, mid)` (excluding
809 /// the index `mid` itself) and the second will contain all
810 /// indices from `[mid, len)` (excluding the index `len` itself).
814 /// Panics if `mid > len`.
819 /// let mut v = [1, 0, 3, 0, 5, 6];
820 /// // scoped to restrict the lifetime of the borrows
822 /// let (left, right) = v.split_at_mut(2);
823 /// assert!(left == [1, 0]);
824 /// assert!(right == [3, 0, 5, 6]);
828 /// assert!(v == [1, 2, 3, 4, 5, 6]);
830 #[stable(feature = "rust1", since = "1.0.0")]
832 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
833 let len = self.len();
834 let ptr = self.as_mut_ptr();
839 (from_raw_parts_mut(ptr, mid),
840 from_raw_parts_mut(ptr.add(mid), len - mid))
844 /// Returns an iterator over subslices separated by elements that match
845 /// `pred`. The matched element is not contained in the subslices.
850 /// let slice = [10, 40, 33, 20];
851 /// let mut iter = slice.split(|num| num % 3 == 0);
853 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
854 /// assert_eq!(iter.next().unwrap(), &[20]);
855 /// assert!(iter.next().is_none());
858 /// If the first element is matched, an empty slice will be the first item
859 /// returned by the iterator. Similarly, if the last element in the slice
860 /// is matched, an empty slice will be the last item returned by the
864 /// let slice = [10, 40, 33];
865 /// let mut iter = slice.split(|num| num % 3 == 0);
867 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
868 /// assert_eq!(iter.next().unwrap(), &[]);
869 /// assert!(iter.next().is_none());
872 /// If two matched elements are directly adjacent, an empty slice will be
873 /// present between them:
876 /// let slice = [10, 6, 33, 20];
877 /// let mut iter = slice.split(|num| num % 3 == 0);
879 /// assert_eq!(iter.next().unwrap(), &[10]);
880 /// assert_eq!(iter.next().unwrap(), &[]);
881 /// assert_eq!(iter.next().unwrap(), &[20]);
882 /// assert!(iter.next().is_none());
884 #[stable(feature = "rust1", since = "1.0.0")]
886 pub fn split<F>(&self, pred: F) -> Split<T, F>
887 where F: FnMut(&T) -> bool
896 /// Returns an iterator over mutable subslices separated by elements that
897 /// match `pred`. The matched element is not contained in the subslices.
902 /// let mut v = [10, 40, 30, 20, 60, 50];
904 /// for group in v.split_mut(|num| *num % 3 == 0) {
907 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
909 #[stable(feature = "rust1", since = "1.0.0")]
911 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<T, F>
912 where F: FnMut(&T) -> bool
914 SplitMut { v: self, pred, finished: false }
917 /// Returns an iterator over subslices separated by elements that match
918 /// `pred`, starting at the end of the slice and working backwards.
919 /// The matched element is not contained in the subslices.
924 /// let slice = [11, 22, 33, 0, 44, 55];
925 /// let mut iter = slice.rsplit(|num| *num == 0);
927 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
928 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
929 /// assert_eq!(iter.next(), None);
932 /// As with `split()`, if the first or last element is matched, an empty
933 /// slice will be the first (or last) item returned by the iterator.
936 /// let v = &[0, 1, 1, 2, 3, 5, 8];
937 /// let mut it = v.rsplit(|n| *n % 2 == 0);
938 /// assert_eq!(it.next().unwrap(), &[]);
939 /// assert_eq!(it.next().unwrap(), &[3, 5]);
940 /// assert_eq!(it.next().unwrap(), &[1, 1]);
941 /// assert_eq!(it.next().unwrap(), &[]);
942 /// assert_eq!(it.next(), None);
944 #[stable(feature = "slice_rsplit", since = "1.27.0")]
946 pub fn rsplit<F>(&self, pred: F) -> RSplit<T, F>
947 where F: FnMut(&T) -> bool
949 RSplit { inner: self.split(pred) }
952 /// Returns an iterator over mutable subslices separated by elements that
953 /// match `pred`, starting at the end of the slice and working
954 /// backwards. The matched element is not contained in the subslices.
959 /// let mut v = [100, 400, 300, 200, 600, 500];
961 /// let mut count = 0;
962 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
964 /// group[0] = count;
966 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
969 #[stable(feature = "slice_rsplit", since = "1.27.0")]
971 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<T, F>
972 where F: FnMut(&T) -> bool
974 RSplitMut { inner: self.split_mut(pred) }
977 /// Returns an iterator over subslices separated by elements that match
978 /// `pred`, limited to returning at most `n` items. The matched element is
979 /// not contained in the subslices.
981 /// The last element returned, if any, will contain the remainder of the
986 /// Print the slice split once by numbers divisible by 3 (i.e. `[10, 40]`,
990 /// let v = [10, 40, 30, 20, 60, 50];
992 /// for group in v.splitn(2, |num| *num % 3 == 0) {
993 /// println!("{:?}", group);
996 #[stable(feature = "rust1", since = "1.0.0")]
998 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<T, F>
999 where F: FnMut(&T) -> bool
1002 inner: GenericSplitN {
1003 iter: self.split(pred),
1009 /// Returns an iterator over subslices separated by elements that match
1010 /// `pred`, limited to returning at most `n` items. The matched element is
1011 /// not contained in the subslices.
1013 /// The last element returned, if any, will contain the remainder of the
1019 /// let mut v = [10, 40, 30, 20, 60, 50];
1021 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1024 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1026 #[stable(feature = "rust1", since = "1.0.0")]
1028 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<T, F>
1029 where F: FnMut(&T) -> bool
1032 inner: GenericSplitN {
1033 iter: self.split_mut(pred),
1039 /// Returns an iterator over subslices separated by elements that match
1040 /// `pred` limited to returning at most `n` items. This starts at the end of
1041 /// the slice and works backwards. The matched element is not contained in
1044 /// The last element returned, if any, will contain the remainder of the
1049 /// Print the slice split once, starting from the end, by numbers divisible
1050 /// by 3 (i.e. `[50]`, `[10, 40, 30, 20]`):
1053 /// let v = [10, 40, 30, 20, 60, 50];
1055 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1056 /// println!("{:?}", group);
1059 #[stable(feature = "rust1", since = "1.0.0")]
1061 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<T, F>
1062 where F: FnMut(&T) -> bool
1065 inner: GenericSplitN {
1066 iter: self.rsplit(pred),
1072 /// Returns an iterator over subslices separated by elements that match
1073 /// `pred` limited to returning at most `n` items. This starts at the end of
1074 /// the slice and works backwards. The matched element is not contained in
1077 /// The last element returned, if any, will contain the remainder of the
1083 /// let mut s = [10, 40, 30, 20, 60, 50];
1085 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1088 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1090 #[stable(feature = "rust1", since = "1.0.0")]
1092 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<T, F>
1093 where F: FnMut(&T) -> bool
1096 inner: GenericSplitN {
1097 iter: self.rsplit_mut(pred),
1103 /// Returns `true` if the slice contains an element with the given value.
1108 /// let v = [10, 40, 30];
1109 /// assert!(v.contains(&30));
1110 /// assert!(!v.contains(&50));
1112 #[stable(feature = "rust1", since = "1.0.0")]
1113 pub fn contains(&self, x: &T) -> bool
1116 x.slice_contains(self)
1119 /// Returns `true` if `needle` is a prefix of the slice.
1124 /// let v = [10, 40, 30];
1125 /// assert!(v.starts_with(&[10]));
1126 /// assert!(v.starts_with(&[10, 40]));
1127 /// assert!(!v.starts_with(&[50]));
1128 /// assert!(!v.starts_with(&[10, 50]));
1131 /// Always returns `true` if `needle` is an empty slice:
1134 /// let v = &[10, 40, 30];
1135 /// assert!(v.starts_with(&[]));
1136 /// let v: &[u8] = &[];
1137 /// assert!(v.starts_with(&[]));
1139 #[stable(feature = "rust1", since = "1.0.0")]
1140 pub fn starts_with(&self, needle: &[T]) -> bool
1143 let n = needle.len();
1144 self.len() >= n && needle == &self[..n]
1147 /// Returns `true` if `needle` is a suffix of the slice.
1152 /// let v = [10, 40, 30];
1153 /// assert!(v.ends_with(&[30]));
1154 /// assert!(v.ends_with(&[40, 30]));
1155 /// assert!(!v.ends_with(&[50]));
1156 /// assert!(!v.ends_with(&[50, 30]));
1159 /// Always returns `true` if `needle` is an empty slice:
1162 /// let v = &[10, 40, 30];
1163 /// assert!(v.ends_with(&[]));
1164 /// let v: &[u8] = &[];
1165 /// assert!(v.ends_with(&[]));
1167 #[stable(feature = "rust1", since = "1.0.0")]
1168 pub fn ends_with(&self, needle: &[T]) -> bool
1171 let (m, n) = (self.len(), needle.len());
1172 m >= n && needle == &self[m-n..]
1175 /// Binary searches this sorted slice for a given element.
1177 /// If the value is found then [`Result::Ok`] is returned, containing the
1178 /// index of the matching element. If there are multiple matches, then any
1179 /// one of the matches could be returned. If the value is not found then
1180 /// [`Result::Err`] is returned, containing the index where a matching
1181 /// element could be inserted while maintaining sorted order.
1185 /// Looks up a series of four elements. The first is found, with a
1186 /// uniquely determined position; the second and third are not
1187 /// found; the fourth could match any position in `[1, 4]`.
1190 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1192 /// assert_eq!(s.binary_search(&13), Ok(9));
1193 /// assert_eq!(s.binary_search(&4), Err(7));
1194 /// assert_eq!(s.binary_search(&100), Err(13));
1195 /// let r = s.binary_search(&1);
1196 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1198 #[stable(feature = "rust1", since = "1.0.0")]
1199 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
1202 self.binary_search_by(|p| p.cmp(x))
1205 /// Binary searches this sorted slice with a comparator function.
1207 /// The comparator function should implement an order consistent
1208 /// with the sort order of the underlying slice, returning an
1209 /// order code that indicates whether its argument is `Less`,
1210 /// `Equal` or `Greater` the desired target.
1212 /// If the value is found then [`Result::Ok`] is returned, containing the
1213 /// index of the matching element. If there are multiple matches, then any
1214 /// one of the matches could be returned. If the value is not found then
1215 /// [`Result::Err`] is returned, containing the index where a matching
1216 /// element could be inserted while maintaining sorted order.
1220 /// Looks up a series of four elements. The first is found, with a
1221 /// uniquely determined position; the second and third are not
1222 /// found; the fourth could match any position in `[1, 4]`.
1225 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1228 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1230 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1232 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1234 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1235 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1237 #[stable(feature = "rust1", since = "1.0.0")]
1239 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1240 where F: FnMut(&'a T) -> Ordering
1243 let mut size = s.len();
1247 let mut base = 0usize;
1249 let half = size / 2;
1250 let mid = base + half;
1251 // mid is always in [0, size), that means mid is >= 0 and < size.
1252 // mid >= 0: by definition
1253 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
1254 let cmp = f(unsafe { s.get_unchecked(mid) });
1255 base = if cmp == Greater { base } else { mid };
1258 // base is always in [0, size) because base <= mid.
1259 let cmp = f(unsafe { s.get_unchecked(base) });
1260 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1264 /// Binary searches this sorted slice with a key extraction function.
1266 /// Assumes that the slice is sorted by the key, for instance with
1267 /// [`sort_by_key`] using the same key extraction function.
1269 /// If the value is found then [`Result::Ok`] is returned, containing the
1270 /// index of the matching element. If there are multiple matches, then any
1271 /// one of the matches could be returned. If the value is not found then
1272 /// [`Result::Err`] is returned, containing the index where a matching
1273 /// element could be inserted while maintaining sorted order.
1275 /// [`sort_by_key`]: #method.sort_by_key
1279 /// Looks up a series of four elements in a slice of pairs sorted by
1280 /// their second elements. The first is found, with a uniquely
1281 /// determined position; the second and third are not found; the
1282 /// fourth could match any position in `[1, 4]`.
1285 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1286 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1287 /// (1, 21), (2, 34), (4, 55)];
1289 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1290 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1291 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1292 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1293 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1295 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1297 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1298 where F: FnMut(&'a T) -> B,
1301 self.binary_search_by(|k| f(k).cmp(b))
1304 /// Sorts the slice, but may not preserve the order of equal elements.
1306 /// This sort is unstable (i.e. may reorder equal elements), in-place (i.e. does not allocate),
1307 /// and `O(n log n)` worst-case.
1309 /// # Current implementation
1311 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1312 /// which combines the fast average case of randomized quicksort with the fast worst case of
1313 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1314 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1315 /// deterministic behavior.
1317 /// It is typically faster than stable sorting, except in a few special cases, e.g. when the
1318 /// slice consists of several concatenated sorted sequences.
1323 /// let mut v = [-5, 4, 1, -3, 2];
1325 /// v.sort_unstable();
1326 /// assert!(v == [-5, -3, 1, 2, 4]);
1329 /// [pdqsort]: https://github.com/orlp/pdqsort
1330 #[stable(feature = "sort_unstable", since = "1.20.0")]
1332 pub fn sort_unstable(&mut self)
// Delegates to the pdqsort implementation in the `sort` module; the closure
// defines the "is-less" relation via `PartialOrd::lt`.
1335 sort::quicksort(self, |a, b| a.lt(b));
1338 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1341 /// This sort is unstable (i.e. may reorder equal elements), in-place (i.e. does not allocate),
1342 /// and `O(n log n)` worst-case.
1344 /// # Current implementation
1346 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1347 /// which combines the fast average case of randomized quicksort with the fast worst case of
1348 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1349 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1350 /// deterministic behavior.
1352 /// It is typically faster than stable sorting, except in a few special cases, e.g. when the
1353 /// slice consists of several concatenated sorted sequences.
1358 /// let mut v = [5, 4, 1, 3, 2];
1359 /// v.sort_unstable_by(|a, b| a.cmp(b));
1360 /// assert!(v == [1, 2, 3, 4, 5]);
1362 /// // reverse sorting
1363 /// v.sort_unstable_by(|a, b| b.cmp(a));
1364 /// assert!(v == [5, 4, 3, 2, 1]);
1367 /// [pdqsort]: https://github.com/orlp/pdqsort
1368 #[stable(feature = "sort_unstable", since = "1.20.0")]
1370 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1371 where F: FnMut(&T, &T) -> Ordering
// The user's three-way comparator is reduced to the boolean "is-less"
// predicate that the quicksort core expects.
1373 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1376 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1379 /// This sort is unstable (i.e. may reorder equal elements), in-place (i.e. does not allocate),
1380 /// and `O(m n log(m n))` worst-case, where the key function is `O(m)`.
1382 /// # Current implementation
1384 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1385 /// which combines the fast average case of randomized quicksort with the fast worst case of
1386 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1387 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1388 /// deterministic behavior.
1393 /// let mut v = [-5i32, 4, 1, -3, 2];
1395 /// v.sort_unstable_by_key(|k| k.abs());
1396 /// assert!(v == [1, 2, -3, 4, -5]);
1399 /// [pdqsort]: https://github.com/orlp/pdqsort
1400 #[stable(feature = "sort_unstable", since = "1.20.0")]
1402 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1403 where F: FnMut(&T) -> K, K: Ord
// Note: `f` is re-invoked on both operands for every comparison (keys are
// not cached), which is why the documented cost carries the `m` factor.
1405 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1408 /// Moves all consecutive repeated elements to the end of the slice according to the
1409 /// [`PartialEq`] trait implementation.
1411 /// Returns two slices. The first contains no consecutive repeated elements.
1412 /// The second contains all the duplicates in no specified order.
1414 /// If the slice is sorted, the first returned slice contains no duplicates.
1419 /// #![feature(slice_partition_dedup)]
1421 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1423 /// let (dedup, duplicates) = slice.partition_dedup();
1425 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1426 /// assert_eq!(duplicates, [2, 3, 1]);
1428 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1430 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
// Convenience wrapper: "same bucket" is plain `==` from `PartialEq`.
1433 self.partition_dedup_by(|a, b| a == b)
1436 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1437 /// a given equality relation.
1439 /// Returns two slices. The first contains no consecutive repeated elements.
1440 /// The second contains all the duplicates in no specified order.
1442 /// The `same_bucket` function is passed references to two elements from the slice and
1443 /// must determine if the elements compare equal. The elements are passed in opposite order
1444 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1445 /// at the end of the slice.
1447 /// If the slice is sorted, the first returned slice contains no duplicates.
1452 /// #![feature(slice_partition_dedup)]
1454 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1456 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1458 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1459 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1461 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1463 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1464 where F: FnMut(&mut T, &mut T) -> bool
1466 // Although we have a mutable reference to `self`, we cannot make
1467 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1468 // must ensure that the slice is in a valid state at all times.
1470 // The way that we handle this is by using swaps; we iterate
1471 // over all the elements, swapping as we go so that at the end
1472 // the elements we wish to keep are in the front, and those we
1473 // wish to reject are at the back. We can then split the slice.
1474 // This operation is still O(n).
1476 // Example: We start in this state, where `r` represents "next
1477 // read" and `w` represents "next write".
1480 // +---+---+---+---+---+---+
1481 // | 0 | 1 | 1 | 2 | 3 | 3 |
1482 // +---+---+---+---+---+---+
1485 // Comparing self[r] against self[w-1], this is not a duplicate, so
1486 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1487 // r and w, leaving us with:
1490 // +---+---+---+---+---+---+
1491 // | 0 | 1 | 1 | 2 | 3 | 3 |
1492 // +---+---+---+---+---+---+
1495 // Comparing self[r] against self[w-1], this value is a duplicate,
1496 // so we increment `r` but leave everything else unchanged:
1499 // +---+---+---+---+---+---+
1500 // | 0 | 1 | 1 | 2 | 3 | 3 |
1501 // +---+---+---+---+---+---+
1504 // Comparing self[r] against self[w-1], this is not a duplicate,
1505 // so swap self[r] and self[w] and advance r and w:
1508 // +---+---+---+---+---+---+
1509 // | 0 | 1 | 2 | 1 | 3 | 3 |
1510 // +---+---+---+---+---+---+
1513 // Not a duplicate, repeat:
1516 // +---+---+---+---+---+---+
1517 // | 0 | 1 | 2 | 3 | 1 | 3 |
1518 // +---+---+---+---+---+---+
1521 // Duplicate, advance r. End of slice. Split at w.
1523 let len = self.len();
// Empty and single-element slices have no consecutive pairs to compare.
1525 return (self, &mut [])
1528 let ptr = self.as_mut_ptr();
1529 let mut next_read: usize = 1;
1530 let mut next_write: usize = 1;
1533 // Avoid bounds checks by using raw pointers.
// SAFETY invariant (review): `next_read < len` is the loop guard, and
// `next_write` only advances together with `next_read`, so
// `next_write - 1 < len` — both raw accesses stay in bounds.
1534 while next_read < len {
1535 let ptr_read = ptr.add(next_read);
1536 let prev_ptr_write = ptr.add(next_write - 1);
1537 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1538 if next_read != next_write {
1539 let ptr_write = prev_ptr_write.offset(1);
1540 mem::swap(&mut *ptr_read, &mut *ptr_write);
// Everything before `next_write` is deduplicated; the rest are rejects.
1548 self.split_at_mut(next_write)
1551 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1552 /// to the same key.
1554 /// Returns two slices. The first contains no consecutive repeated elements.
1555 /// The second contains all the duplicates in no specified order.
1557 /// If the slice is sorted, the first returned slice contains no duplicates.
1562 /// #![feature(slice_partition_dedup)]
1564 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1566 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1568 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1569 /// assert_eq!(duplicates, [21, 30, 13]);
1571 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1573 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1574 where F: FnMut(&mut T) -> K,
// Wrapper: two elements are "the same" when their extracted keys compare equal.
1577 self.partition_dedup_by(|a, b| key(a) == key(b))
1580 /// Rotates the slice in-place such that the first `mid` elements of the
1581 /// slice move to the end while the last `self.len() - mid` elements move to
1582 /// the front. After calling `rotate_left`, the element previously at index
1583 /// `mid` will become the first element in the slice.
1587 /// This function will panic if `mid` is greater than the length of the
1588 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1593 /// Takes linear (in `self.len()`) time.
1598 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1599 /// a.rotate_left(2);
1600 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1603 /// Rotating a subslice:
1606 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1607 /// a[1..5].rotate_left(1);
1608 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1610 #[stable(feature = "slice_rotate", since = "1.26.0")]
1611 pub fn rotate_left(&mut self, mid: usize) {
1612 assert!(mid <= self.len());
// `k` is the length of the tail that ends up at the front.
1613 let k = self.len() - mid;
1616 let p = self.as_mut_ptr();
// The rotation core takes (left-length, pivot pointer, right-length);
// the assert above guarantees `p.add(mid)` is in bounds.
1617 rotate::ptr_rotate(mid, p.add(mid), k);
1621 /// Rotates the slice in-place such that the first `self.len() - k`
1622 /// elements of the slice move to the end while the last `k` elements move
1623 /// to the front. After calling `rotate_right`, the element previously at
1624 /// index `self.len() - k` will become the first element in the slice.
1628 /// This function will panic if `k` is greater than the length of the
1629 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1634 /// Takes linear (in `self.len()`) time.
1639 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1640 /// a.rotate_right(2);
1641 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1644 /// Rotate a subslice:
1647 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1648 /// a[1..5].rotate_right(1);
1649 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1651 #[stable(feature = "slice_rotate", since = "1.26.0")]
1652 pub fn rotate_right(&mut self, k: usize) {
1653 assert!(k <= self.len());
// A right-rotation by `k` is a left-rotation by `len - k`; reuse the
// same pointer-rotation core with the pivot at `mid`.
1654 let mid = self.len() - k;
1657 let p = self.as_mut_ptr();
1658 rotate::ptr_rotate(mid, p.add(mid), k);
1662 /// Copies the elements from `src` into `self`.
1664 /// The length of `src` must be the same as `self`.
1666 /// If `src` implements `Copy`, it can be more performant to use
1667 /// [`copy_from_slice`].
1671 /// This function will panic if the two slices have different lengths.
1675 /// Cloning two elements from a slice into another:
1678 /// let src = [1, 2, 3, 4];
1679 /// let mut dst = [0, 0];
1681 /// // Because the slices have to be the same length,
1682 /// // we slice the source slice from four elements
1683 /// // to two. It will panic if we don't do this.
1684 /// dst.clone_from_slice(&src[2..]);
1686 /// assert_eq!(src, [1, 2, 3, 4]);
1687 /// assert_eq!(dst, [3, 4]);
1690 /// Rust enforces that there can only be one mutable reference with no
1691 /// immutable references to a particular piece of data in a particular
1692 /// scope. Because of this, attempting to use `clone_from_slice` on a
1693 /// single slice will result in a compile failure:
1696 /// let mut slice = [1, 2, 3, 4, 5];
1698 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
1701 /// To work around this, we can use [`split_at_mut`] to create two distinct
1702 /// sub-slices from a slice:
1705 /// let mut slice = [1, 2, 3, 4, 5];
1708 /// let (left, right) = slice.split_at_mut(2);
1709 /// left.clone_from_slice(&right[1..]);
1712 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1715 /// [`copy_from_slice`]: #method.copy_from_slice
1716 /// [`split_at_mut`]: #method.split_at_mut
1717 #[stable(feature = "clone_from_slice", since = "1.7.0")]
1718 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
1719 assert!(self.len() == src.len(),
1720 "destination and source slices have different lengths");
1721 // NOTE: We need to explicitly slice them to the same length
1722 // for bounds checking to be elided, and the optimizer will
1723 // generate memcpy for simple cases (for example T = u8).
1724 let len = self.len();
1725 let src = &src[..len];
// Element-wise `clone_from` lets `T` reuse existing allocations where
// its `Clone` impl supports that.
1727 self[i].clone_from(&src[i]);
1732 /// Copies all elements from `src` into `self`, using a memcpy.
1734 /// The length of `src` must be the same as `self`.
1736 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
1740 /// This function will panic if the two slices have different lengths.
1744 /// Copying two elements from a slice into another:
1747 /// let src = [1, 2, 3, 4];
1748 /// let mut dst = [0, 0];
1750 /// // Because the slices have to be the same length,
1751 /// // we slice the source slice from four elements
1752 /// // to two. It will panic if we don't do this.
1753 /// dst.copy_from_slice(&src[2..]);
1755 /// assert_eq!(src, [1, 2, 3, 4]);
1756 /// assert_eq!(dst, [3, 4]);
1759 /// Rust enforces that there can only be one mutable reference with no
1760 /// immutable references to a particular piece of data in a particular
1761 /// scope. Because of this, attempting to use `copy_from_slice` on a
1762 /// single slice will result in a compile failure:
1765 /// let mut slice = [1, 2, 3, 4, 5];
1767 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
1770 /// To work around this, we can use [`split_at_mut`] to create two distinct
1771 /// sub-slices from a slice:
1774 /// let mut slice = [1, 2, 3, 4, 5];
1777 /// let (left, right) = slice.split_at_mut(2);
1778 /// left.copy_from_slice(&right[1..]);
1781 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1784 /// [`clone_from_slice`]: #method.clone_from_slice
1785 /// [`split_at_mut`]: #method.split_at_mut
1786 #[stable(feature = "copy_from_slice", since = "1.9.0")]
1787 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
1788 assert_eq!(self.len(), src.len(),
1789 "destination and source slices have different lengths");
// SAFETY invariant (review): the two slices were just asserted to have
// equal length, and distinct `&mut self`/`&src` borrows cannot overlap,
// which is what `copy_nonoverlapping` requires.
1791 ptr::copy_nonoverlapping(
1792 src.as_ptr(), self.as_mut_ptr(), self.len());
1796 /// Copies elements from one part of the slice to another part of itself,
1797 /// using a memmove.
1799 /// `src` is the range within `self` to copy from. `dest` is the starting
1800 /// index of the range within `self` to copy to, which will have the same
1801 /// length as `src`. The two ranges may overlap. The ends of the two ranges
1802 /// must be less than or equal to `self.len()`.
1806 /// This function will panic if either range exceeds the end of the slice,
1807 /// or if the end of `src` is before the start.
1811 /// Copying four bytes within a slice:
1814 /// # #![feature(copy_within)]
1815 /// let mut bytes = *b"Hello, World!";
1817 /// bytes.copy_within(1..5, 8);
1819 /// assert_eq!(&bytes, b"Hello, Wello!");
1821 #[unstable(feature = "copy_within", issue = "54236")]
1822 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
// Resolve the generic range bounds into concrete start/end indices.
// NOTE(review): the excluded-start / included-end arms feed into
// `.unwrap_or_else(|| slice_index_overflow_fail())` — the intermediate
// `.checked_add(1)` step appears to have been elided by extraction here;
// confirm against the upstream source before relying on this text.
1826 let src_start = match src.start_bound() {
1827 ops::Bound::Included(&n) => n,
1828 ops::Bound::Excluded(&n) => n
1830 .unwrap_or_else(|| slice_index_overflow_fail()),
1831 ops::Bound::Unbounded => 0,
1833 let src_end = match src.end_bound() {
1834 ops::Bound::Included(&n) => n
1836 .unwrap_or_else(|| slice_index_overflow_fail()),
1837 ops::Bound::Excluded(&n) => n,
1838 ops::Bound::Unbounded => self.len(),
// Validate both ranges before touching memory; messages match the docs.
1840 assert!(src_start <= src_end, "src end is before src start");
1841 assert!(src_end <= self.len(), "src is out of bounds");
1842 let count = src_end - src_start;
1843 assert!(dest <= self.len() - count, "dest is out of bounds");
// Overlap is allowed, hence a memmove-style copy of `count` elements.
1846 self.get_unchecked(src_start),
1847 self.get_unchecked_mut(dest),
1853 /// Swaps all elements in `self` with those in `other`.
1855 /// The length of `other` must be the same as `self`.
1859 /// This function will panic if the two slices have different lengths.
1863 /// Swapping two elements across slices:
1866 /// let mut slice1 = [0, 0];
1867 /// let mut slice2 = [1, 2, 3, 4];
1869 /// slice1.swap_with_slice(&mut slice2[2..]);
1871 /// assert_eq!(slice1, [3, 4]);
1872 /// assert_eq!(slice2, [1, 2, 0, 0]);
1875 /// Rust enforces that there can only be one mutable reference to a
1876 /// particular piece of data in a particular scope. Because of this,
1877 /// attempting to use `swap_with_slice` on a single slice will result in
1878 /// a compile failure:
1881 /// let mut slice = [1, 2, 3, 4, 5];
1882 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
1885 /// To work around this, we can use [`split_at_mut`] to create two distinct
1886 /// mutable sub-slices from a slice:
1889 /// let mut slice = [1, 2, 3, 4, 5];
1892 /// let (left, right) = slice.split_at_mut(2);
1893 /// left.swap_with_slice(&mut right[1..]);
1896 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
1899 /// [`split_at_mut`]: #method.split_at_mut
1900 #[stable(feature = "swap_with_slice", since = "1.27.0")]
1901 pub fn swap_with_slice(&mut self, other: &mut [T]) {
1902 assert!(self.len() == other.len(),
1903 "destination and source slices have different lengths");
// SAFETY invariant (review): equal lengths are asserted above, and two
// live `&mut` slices cannot alias, satisfying `swap_nonoverlapping`.
1905 ptr::swap_nonoverlapping(
1906 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
1910 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
1911 fn align_to_offsets<U>(&self) -> (usize, usize) {
1912 // The goal for `rest` is to figure out how many `U`s fit in the
1913 // lowest number of `T`s, and how many `T`s each such "multiple" needs.
1915 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
1916 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
1917 // place of every 3 Ts in the `rest` slice. A bit more complicated.
1919 // Formula to calculate this is:
1921 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
1922 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
1924 // Expanded and simplified:
1926 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
1927 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
1929 // Luckily since all this is constant-evaluated... performance here matters not!
// Local GCD helper; only reachable with nonzero sizes from the callers,
// but it still handles the zero cases explicitly.
1931 fn gcd(a: usize, b: usize) -> usize {
1932 // iterative stein’s algorithm
1933 // We should still make this `const fn` (and revert to recursive algorithm if we do)
1934 // because relying on llvm to consteval all this is… well, it makes me
// `cttz_nonzero` has UB on zero, hence the early returns inside the block.
1935 let (ctz_a, mut ctz_b) = unsafe {
1936 if a == 0 { return b; }
1937 if b == 0 { return a; }
1938 (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b))
1940 let k = ctz_a.min(ctz_b);
1941 let mut a = a >> ctz_a;
1944 // remove all factors of 2 from b
1947 ::mem::swap(&mut a, &mut b);
1954 ctz_b = ::intrinsics::cttz_nonzero(b);
1959 let gcd: usize = gcd(::mem::size_of::<T>(), ::mem::size_of::<U>());
1960 let ts: usize = ::mem::size_of::<U>() / gcd;
1961 let us: usize = ::mem::size_of::<T>() / gcd;
1963 // Armed with this knowledge, we can find how many `U`s we can fit!
1964 let us_len = self.len() / ts * us;
1965 // And how many `T`s will be in the trailing slice!
1966 let ts_len = self.len() % ts;
1970 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
1973 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
1974 /// slice of a new type, and the suffix slice. The method does a best effort to make the
1975 /// middle slice the greatest length possible for a given type and input slice, but only
1976 /// your algorithm's performance should depend on that, not its correctness.
1978 /// This method has no purpose when either input element `T` or output element `U` are
1979 /// zero-sized and will return the original slice without splitting anything.
1983 /// This method is essentially a `transmute` with respect to the elements in the returned
1984 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
1992 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
1993 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
1994 /// // less_efficient_algorithm_for_bytes(prefix);
1995 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
1996 /// // less_efficient_algorithm_for_bytes(suffix);
1999 #[stable(feature = "slice_align_to", since = "1.30.0")]
2000 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2001 // Note that most of this function will be constant-evaluated,
2002 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2003 // handle ZSTs specially, which is – don't handle them at all.
2004 return (self, &[], &[]);
2007 // First, find at what point do we split between the first and 2nd slice. Easy with
2008 // ptr.align_offset.
2009 let ptr = self.as_ptr();
2010 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
// If alignment can't be reached within the slice, everything is prefix.
2011 if offset > self.len() {
2014 let (left, rest) = self.split_at(offset);
2015 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2016 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// Middle view reinterprets `rest` as `U`s; the `T` suffix starts after
// the last whole group of `U`s.
2018 from_raw_parts(rest.as_ptr() as *const U, us_len),
2019 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2023 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2026 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2027 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2028 /// middle slice the greatest length possible for a given type and input slice, but only
2029 /// your algorithm's performance should depend on that, not its correctness.
2031 /// This method has no purpose when either input element `T` or output element `U` are
2032 /// zero-sized and will return the original slice without splitting anything.
2036 /// This method is essentially a `transmute` with respect to the elements in the returned
2037 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2045 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2046 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2047 /// // less_efficient_algorithm_for_bytes(prefix);
2048 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2049 /// // less_efficient_algorithm_for_bytes(suffix);
2052 #[stable(feature = "slice_align_to", since = "1.30.0")]
// Mutable twin of `align_to`; same split logic, but hands out `&mut` views
// built with `from_raw_parts_mut`.
2053 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2054 // Note that most of this function will be constant-evaluated,
2055 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2056 // handle ZSTs specially, which is – don't handle them at all.
2057 return (self, &mut [], &mut []);
2060 // First, find at what point do we split between the first and 2nd slice. Easy with
2061 // ptr.align_offset.
2062 let ptr = self.as_ptr();
2063 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
2064 if offset > self.len() {
2065 (self, &mut [], &mut [])
2067 let (left, rest) = self.split_at_mut(offset);
2068 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2069 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// Raw pointer taken once so the three returned slices don't alias through
// an intermediate `&mut` reborrow.
2070 let mut_ptr = rest.as_mut_ptr();
2072 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2073 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
// Inherent methods specific to `[u8]` (lang-item hookup for byte slices).
2078 #[lang = "slice_u8"]
2081 /// Checks if all bytes in this slice are within the ASCII range.
2082 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2084 pub fn is_ascii(&self) -> bool {
// True for the empty slice (`all` is vacuously true).
2085 self.iter().all(|b| b.is_ascii())
2088 /// Checks that two slices are an ASCII case-insensitive match.
2090 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2091 /// but without allocating and copying temporaries.
2092 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2094 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
// Length check first so `zip` can't silently truncate a longer slice.
2095 self.len() == other.len() &&
2096 self.iter().zip(other).all(|(a, b)| {
2097 a.eq_ignore_ascii_case(b)
2101 /// Converts this slice to its ASCII upper case equivalent in-place.
2103 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2104 /// but non-ASCII letters are unchanged.
2106 /// To return a new uppercased value without modifying the existing one, use
2107 /// [`to_ascii_uppercase`].
2109 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2110 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2112 pub fn make_ascii_uppercase(&mut self) {
// Per-byte in-place conversion; no allocation.
2114 byte.make_ascii_uppercase();
2118 /// Converts this slice to its ASCII lower case equivalent in-place.
2120 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2121 /// but non-ASCII letters are unchanged.
2123 /// To return a new lowercased value without modifying the existing one, use
2124 /// [`to_ascii_lowercase`].
2126 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2127 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2129 pub fn make_ascii_lowercase(&mut self) {
// Per-byte in-place conversion; mirror of `make_ascii_uppercase`.
2131 byte.make_ascii_lowercase();
// `slice[index]` syntax: forwards to the `SliceIndex` machinery below, so a
// single set of impls covers `usize` and every range flavor.
2137 #[stable(feature = "rust1", since = "1.0.0")]
2138 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2139 impl<T, I> ops::Index<I> for [T]
2140 where I: SliceIndex<[T]>
2142 type Output = I::Output;
2145 fn index(&self, index: I) -> &I::Output {
// Mutable `slice[index]` syntax; same delegation pattern as `Index` above.
2150 #[stable(feature = "rust1", since = "1.0.0")]
2151 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2152 impl<T, I> ops::IndexMut<I> for [T]
2153 where I: SliceIndex<[T]>
2156 fn index_mut(&mut self, index: I) -> &mut I::Output {
2157 index.index_mut(self)
// Cold out-of-line panic path for an index past the end of the slice.
2163 fn slice_index_len_fail(index: usize, len: usize) -> ! {
2164 panic!("index {} out of range for slice of length {}", index, len);
// Cold out-of-line panic path for a range whose start exceeds its end.
2169 fn slice_index_order_fail(index: usize, end: usize) -> ! {
2170 panic!("slice index starts at {} but ends at {}", index, end);
// Cold out-of-line panic path for inclusive ranges ending at `usize::MAX`,
// whose exclusive end would overflow.
2175 fn slice_index_overflow_fail() -> ! {
2176 panic!("attempted to index slice up to maximum usize");
// Sealed-trait pattern: `SliceIndex` can only be implemented for the types
// listed here, so downstream crates cannot add their own index types.
2179 mod private_slice_index {
2181 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2184 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2185 impl Sealed for usize {}
2186 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2187 impl Sealed for ops::Range<usize> {}
2188 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2189 impl Sealed for ops::RangeTo<usize> {}
2190 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2191 impl Sealed for ops::RangeFrom<usize> {}
2192 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2193 impl Sealed for ops::RangeFull {}
2194 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2195 impl Sealed for ops::RangeInclusive<usize> {}
2196 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2197 impl Sealed for ops::RangeToInclusive<usize> {}
2200 /// A helper trait used for indexing operations.
2201 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2202 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2203 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2204 /// The output type returned by methods.
2205 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2206 type Output: ?Sized;
2208 /// Returns a shared reference to the output at this location, if in
2210 #[unstable(feature = "slice_index_methods", issue = "0")]
2211 fn get(self, slice: &T) -> Option<&Self::Output>;
2213 /// Returns a mutable reference to the output at this location, if in
2215 #[unstable(feature = "slice_index_methods", issue = "0")]
2216 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2218 /// Returns a shared reference to the output at this location, without
2219 /// performing any bounds checking.
2220 #[unstable(feature = "slice_index_methods", issue = "0")]
2221 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2223 /// Returns a mutable reference to the output at this location, without
2224 /// performing any bounds checking.
2225 #[unstable(feature = "slice_index_methods", issue = "0")]
2226 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2228 /// Returns a shared reference to the output at this location, panicking
2229 /// if out of bounds.
2230 #[unstable(feature = "slice_index_methods", issue = "0")]
2231 fn index(self, slice: &T) -> &Self::Output;
2233 /// Returns a mutable reference to the output at this location, panicking
2234 /// if out of bounds.
2235 #[unstable(feature = "slice_index_methods", issue = "0")]
2236 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
// Single-element indexing: `Output = T`, bounds-checked in `get`/`get_mut`,
// unchecked variants are raw pointer offsets.
2239 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2240 impl<T> SliceIndex<[T]> for usize {
2244 fn get(self, slice: &[T]) -> Option<&T> {
2245 if self < slice.len() {
2247 Some(self.get_unchecked(slice))
2255 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2256 if self < slice.len() {
2258 Some(self.get_unchecked_mut(slice))
// Caller must guarantee `self < slice.len()`; no check performed.
2266 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2267 &*slice.as_ptr().add(self)
2271 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2272 &mut *slice.as_mut_ptr().add(self)
2276 fn index(self, slice: &[T]) -> &T {
2277 // NB: use intrinsic indexing
2282 fn index_mut(self, slice: &mut [T]) -> &mut T {
2283 // NB: use intrinsic indexing
// Half-open range indexing (`a..b`): the workhorse impl — every other range
// type below normalizes itself into a `Range` and delegates here.
2288 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2289 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2293 fn get(self, slice: &[T]) -> Option<&[T]> {
2294 if self.start > self.end || self.end > slice.len() {
2298 Some(self.get_unchecked(slice))
2304 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2305 if self.start > self.end || self.end > slice.len() {
2309 Some(self.get_unchecked_mut(slice))
// Caller must guarantee `start <= end <= slice.len()`.
2315 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2316 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2320 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2321 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
// Panicking variants report *which* invariant failed via the dedicated
// cold panic helpers, then reuse the unchecked accessors.
2325 fn index(self, slice: &[T]) -> &[T] {
2326 if self.start > self.end {
2327 slice_index_order_fail(self.start, self.end);
2328 } else if self.end > slice.len() {
2329 slice_index_len_fail(self.end, slice.len());
2332 self.get_unchecked(slice)
2337 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2338 if self.start > self.end {
2339 slice_index_order_fail(self.start, self.end);
2340 } else if self.end > slice.len() {
2341 slice_index_len_fail(self.end, slice.len());
2344 self.get_unchecked_mut(slice)
// `..end` indexing: equivalent to `0..end`; every method delegates to the
// `Range<usize>` impl with the start pinned at zero.
2349 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2350 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2354 fn get(self, slice: &[T]) -> Option<&[T]> {
2355 (0..self.end).get(slice)
2359 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2360 (0..self.end).get_mut(slice)
2364 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2365 (0..self.end).get_unchecked(slice)
2369 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2370 (0..self.end).get_unchecked_mut(slice)
2374 fn index(self, slice: &[T]) -> &[T] {
2375 (0..self.end).index(slice)
2379 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2380 (0..self.end).index_mut(slice)
// `start..` indexing: equivalent to `start..slice.len()`; delegates to the
// `Range<usize>` impl with the end pinned at the slice length.
2384 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2385 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2389 fn get(self, slice: &[T]) -> Option<&[T]> {
2390 (self.start..slice.len()).get(slice)
2394 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2395 (self.start..slice.len()).get_mut(slice)
2399 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2400 (self.start..slice.len()).get_unchecked(slice)
2404 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2405 (self.start..slice.len()).get_unchecked_mut(slice)
2409 fn index(self, slice: &[T]) -> &[T] {
2410 (self.start..slice.len()).index(slice)
2414 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2415 (self.start..slice.len()).index_mut(slice)
2419 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2420 impl<T> SliceIndex<[T]> for ops::RangeFull {
2424 fn get(self, slice: &[T]) -> Option<&[T]> {
2429 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2434 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2439 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2444 fn index(self, slice: &[T]) -> &[T] {
2449 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2455 #[stable(feature = "inclusive_range", since = "1.26.0")]
2456 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2460 fn get(self, slice: &[T]) -> Option<&[T]> {
2461 if *self.end() == usize::max_value() { None }
2462 else { (*self.start()..self.end() + 1).get(slice) }
2466 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2467 if *self.end() == usize::max_value() { None }
2468 else { (*self.start()..self.end() + 1).get_mut(slice) }
2472 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2473 (*self.start()..self.end() + 1).get_unchecked(slice)
2477 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2478 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2482 fn index(self, slice: &[T]) -> &[T] {
2483 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2484 (*self.start()..self.end() + 1).index(slice)
2488 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2489 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2490 (*self.start()..self.end() + 1).index_mut(slice)
2494 #[stable(feature = "inclusive_range", since = "1.26.0")]
2495 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
2499 fn get(self, slice: &[T]) -> Option<&[T]> {
2500 (0..=self.end).get(slice)
2504 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2505 (0..=self.end).get_mut(slice)
2509 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2510 (0..=self.end).get_unchecked(slice)
2514 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2515 (0..=self.end).get_unchecked_mut(slice)
2519 fn index(self, slice: &[T]) -> &[T] {
2520 (0..=self.end).index(slice)
2524 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2525 (0..=self.end).index_mut(slice)
2529 ////////////////////////////////////////////////////////////////////////////////
2531 ////////////////////////////////////////////////////////////////////////////////
// Common trait implementations for slices.
//
// `Default` for shared and mutable slice references: both produce a reference
// to an empty slice, which is valid for any lifetime.
2533 #[stable(feature = "rust1", since = "1.0.0")]
2534 impl<'a, T> Default for &'a [T] {
2535 /// Creates an empty slice.
2536 fn default() -> &'a [T] { &[] }
2539 #[stable(feature = "mut_slice_default", since = "1.5.0")]
2540 impl<'a, T> Default for &'a mut [T] {
2541 /// Creates a mutable empty slice.
2542 fn default() -> &'a mut [T] { &mut [] }
// `IntoIterator` for slice references, enabling `for x in &slice` /
// `for x in &mut slice`. Bodies (presumably `self.iter()` / `self.iter_mut()`)
// fall on lines missing from this extraction — TODO confirm against upstream.
2549 #[stable(feature = "rust1", since = "1.0.0")]
2550 impl<'a, T> IntoIterator for &'a [T] {
2552 type IntoIter = Iter<'a, T>;
2554 fn into_iter(self) -> Iter<'a, T> {
2559 #[stable(feature = "rust1", since = "1.0.0")]
2560 impl<'a, T> IntoIterator for &'a mut [T] {
2561 type Item = &'a mut T;
2562 type IntoIter = IterMut<'a, T>;
2564 fn into_iter(self) -> IterMut<'a, T> {
2569 // Macro helper functions
// Returns `size_of::<T>()` without needing to name `T` at the call site —
// the type is inferred from the pointer argument. (The body line is missing
// from this extraction; presumably `mem::size_of::<T>()`.)
2571 fn size_from_ptr<T>(_: *const T) -> usize {
2575 // Inlining is_empty and len makes a huge performance difference
2576 macro_rules! is_empty {
2577 // The way we encode the length of a ZST iterator, this works both for ZST
// `ptr == end` is the emptiness test for both ZST and non-ZST iterators:
// for ZSTs, `end` is encoded as `ptr + len`, so equality still means empty.
2579 ($self: ident) => {$self.ptr == $self.end}
2581 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
2582 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
// NOTE(review): the `macro_rules! len` header and the tail of this arm (the
// ZST branch and the division) are on lines missing from this extraction.
2584 ($self: ident) => {{
2585 let start = $self.ptr;
// Wrapping subtraction of raw addresses: for ZSTs `end` is `ptr + len`, so
// the difference IS the length; for sized T it is the byte distance.
2586 let diff = ($self.end as usize).wrapping_sub(start as usize);
2587 let size = size_from_ptr(start);
2591 // Using division instead of `offset_from` helps LLVM remove bounds checks
2597 // The shared definition of the `Iter` and `IterMut` iterators
// This macro stamps out the full iterator implementation for both `Iter`
// (`$raw_mut` = `const`, no `$mut_`) and `IterMut` (`$raw_mut` = `mut`,
// `$mut_` = `mut`), so the two stay in sync. NOTE(review): many closing
// braces and some lines are missing from this decimated view; code is left
// byte-identical.
2598 macro_rules! iterator {
2599 (struct $name:ident -> $ptr:ty, $elem:ty, $raw_mut:tt, $( $mut_:tt )*) => {
2600 impl<'a, T> $name<'a, T> {
2601 // Helper function for creating a slice from the iterator.
2603 fn make_slice(&self) -> &'a [T] {
2604 unsafe { from_raw_parts(self.ptr, len!(self)) }
2607 // Helper function for moving the start of the iterator forwards by `offset` elements,
2608 // returning the old start.
2609 // Unsafe because the offset must be in-bounds or one-past-the-end.
2611 unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
2612 if mem::size_of::<T>() == 0 {
2613 // This is *reducing* the length. `ptr` never changes with ZST.
// For ZSTs the length lives in `end` (= ptr + len), so shrinking from the
// front means decrementing `end`, not advancing `ptr`.
2614 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2618 self.ptr = self.ptr.offset(offset);
2623 // Helper function for moving the end of the iterator backwards by `offset` elements,
2624 // returning the new end.
2625 // Unsafe because the offset must be in-bounds or one-past-the-end.
2627 unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
2628 if mem::size_of::<T>() == 0 {
2629 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2632 self.end = self.end.offset(-offset);
2638 #[stable(feature = "rust1", since = "1.0.0")]
2639 impl<'a, T> ExactSizeIterator for $name<'a, T> {
2641 fn len(&self) -> usize {
2646 fn is_empty(&self) -> bool {
2651 #[stable(feature = "rust1", since = "1.0.0")]
2652 impl<'a, T> Iterator for $name<'a, T> {
2656 fn next(&mut self) -> Option<$elem> {
2657 // could be implemented with slices, but this avoids bounds checks
// `assume` feeds LLVM the non-null invariants so it can drop redundant
// null checks in the generated code.
2659 assume(!self.ptr.is_null());
2660 if mem::size_of::<T>() != 0 {
2661 assume(!self.end.is_null());
2663 if is_empty!(self) {
2666 Some(& $( $mut_ )* *self.post_inc_start(1))
2672 fn size_hint(&self) -> (usize, Option<usize>) {
// Exact size is always known, so lower and upper bound coincide.
2673 let exact = len!(self);
2674 (exact, Some(exact))
2678 fn count(self) -> usize {
2683 fn nth(&mut self, n: usize) -> Option<$elem> {
2684 if n >= len!(self) {
2685 // This iterator is now empty.
2686 if mem::size_of::<T>() == 0 {
2687 // We have to do it this way as `ptr` may never be 0, but `end`
2688 // could be (due to wrapping).
2689 self.end = self.ptr;
2691 self.ptr = self.end;
2695 // We are in bounds. `offset` does the right thing even for ZSTs.
2697 let elem = Some(& $( $mut_ )* *self.ptr.add(n));
2698 self.post_inc_start((n as isize).wrapping_add(1));
2704 fn last(mut self) -> Option<$elem> {
2709 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
2710 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
2712 // manual unrolling is needed when there are conditional exits from the loop
2713 let mut accum = init;
// Process four elements per check while at least four remain, then drain
// the tail one at a time.
2715 while len!(self) >= 4 {
2716 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2717 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2718 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2719 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2721 while !is_empty!(self) {
2722 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2729 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
2730 where Fold: FnMut(Acc, Self::Item) -> Acc,
2732 // Let LLVM unroll this, rather than using the default
2733 // impl that would force the manual unrolling above
2734 let mut accum = init;
2735 while let Some(x) = self.next() {
2736 accum = f(accum, x);
2742 #[rustc_inherit_overflow_checks]
2743 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
2745 P: FnMut(Self::Item) -> bool,
2747 // The addition might panic on overflow.
// `try_fold` with `Err` as the early-exit carrier: the found index is
// smuggled out through the error channel.
2749 self.try_fold(0, move |i, x| {
2750 if predicate(x) { Err(i) }
// `assume(i < n)` lets LLVM elide a bounds check when the caller indexes
// with the returned position (see codegen/slice-position-bounds-check).
2754 unsafe { assume(i < n) };
2760 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
2761 P: FnMut(Self::Item) -> bool,
2762 Self: Sized + ExactSizeIterator + DoubleEndedIterator
2764 // No need for an overflow check here, because `ExactSizeIterator`
2766 self.try_rfold(n, move |i, x| {
2768 if predicate(x) { Err(i) }
2772 unsafe { assume(i < n) };
2778 #[stable(feature = "rust1", since = "1.0.0")]
2779 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
2781 fn next_back(&mut self) -> Option<$elem> {
2782 // could be implemented with slices, but this avoids bounds checks
2784 assume(!self.ptr.is_null());
2785 if mem::size_of::<T>() != 0 {
2786 assume(!self.end.is_null());
2788 if is_empty!(self) {
2791 Some(& $( $mut_ )* *self.pre_dec_end(1))
2797 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
2798 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
2800 // manual unrolling is needed when there are conditional exits from the loop
2801 let mut accum = init;
2803 while len!(self) >= 4 {
2804 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2805 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2806 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2807 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2809 // inlining is_empty everywhere makes a huge performance difference
2810 while !is_empty!(self) {
2811 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
2818 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
2819 where Fold: FnMut(Acc, Self::Item) -> Acc,
2821 // Let LLVM unroll this, rather than using the default
2822 // impl that would force the manual unrolling above
2823 let mut accum = init;
2824 while let Some(x) = self.next_back() {
2825 accum = f(accum, x);
2831 #[stable(feature = "fused", since = "1.26.0")]
2832 impl<'a, T> FusedIterator for $name<'a, T> {}
// TrustedLen is sound here because `size_hint` returns the exact length.
2834 #[unstable(feature = "trusted_len", issue = "37572")]
2835 unsafe impl<'a, T> TrustedLen for $name<'a, T> {}
2839 /// Immutable slice iterator
2841 /// This struct is created by the [`iter`] method on [slices].
2848 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
2849 /// let slice = &[1, 2, 3];
2851 /// // Then, we iterate over it:
2852 /// for element in slice.iter() {
2853 /// println!("{}", element);
2857 /// [`iter`]: ../../std/primitive.slice.html#method.iter
2858 /// [slices]: ../../std/primitive.slice.html
2859 #[stable(feature = "rust1", since = "1.0.0")]
// Two raw pointers delimit the remaining elements; the `ptr` field itself is
// on a line missing from this extraction.
2860 pub struct Iter<'a, T: 'a> {
2862 end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
2863 // ptr == end is a quick test for the Iterator being empty, that works
2864 // for both ZST and non-ZST.
// PhantomData ties the raw pointers to the borrowed slice's lifetime.
2865 _marker: marker::PhantomData<&'a T>,
2868 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// Debug shows the remaining elements as a slice rather than raw pointers.
2869 impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
2870 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2871 f.debug_tuple("Iter")
2872 .field(&self.as_slice())
// Shared references are fine to send and share across threads whenever the
// referent is `Sync` — hence `T: Sync` for both impls.
2877 #[stable(feature = "rust1", since = "1.0.0")]
2878 unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
2879 #[stable(feature = "rust1", since = "1.0.0")]
2880 unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
2882 impl<'a, T> Iter<'a, T> {
2883 /// View the underlying data as a subslice of the original data.
2885 /// This has the same lifetime as the original slice, and so the
2886 /// iterator can continue to be used while this exists.
2893 /// // First, we declare a type which has the `iter` method to get the `Iter`
2894 /// // struct (&[usize here]):
2895 /// let slice = &[1, 2, 3];
2897 /// // Then, we get the iterator:
2898 /// let mut iter = slice.iter();
2899 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
2900 /// println!("{:?}", iter.as_slice());
2902 /// // Next, we move to the second element of the slice:
2904 /// // Now `as_slice` returns "[2, 3]":
2905 /// println!("{:?}", iter.as_slice());
2907 #[stable(feature = "iter_to_slice", since = "1.4.0")]
// Body (presumably `self.make_slice()`) is on a missing line — TODO confirm.
2908 pub fn as_slice(&self) -> &'a [T] {
// Instantiate the shared iterator implementation for the immutable case.
2913 iterator!{struct Iter -> *const T, &'a T, const, /* no mut */}
2915 #[stable(feature = "rust1", since = "1.0.0")]
// Manual Clone: a field-wise copy, avoiding a `T: Clone` bound that
// `#[derive(Clone)]` would impose.
2916 impl<'a, T> Clone for Iter<'a, T> {
2917 fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
2920 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
2921 impl<'a, T> AsRef<[T]> for Iter<'a, T> {
2922 fn as_ref(&self) -> &[T] {
2927 /// Mutable slice iterator.
2929 /// This struct is created by the [`iter_mut`] method on [slices].
2936 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
2937 /// // struct (&[usize here]):
2938 /// let mut slice = &mut [1, 2, 3];
2940 /// // Then, we iterate over it and increment each element value:
2941 /// for element in slice.iter_mut() {
2945 /// // We now have "[2, 3, 4]":
2946 /// println!("{:?}", slice);
2949 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
2950 /// [slices]: ../../std/primitive.slice.html
2951 #[stable(feature = "rust1", since = "1.0.0")]
// Mirror of `Iter` with `*mut` pointers; the `ptr` field line is missing
// from this extraction.
2952 pub struct IterMut<'a, T: 'a> {
2954 end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
2955 // ptr == end is a quick test for the Iterator being empty, that works
2956 // for both ZST and non-ZST.
// PhantomData models ownership of a `&'a mut T`, giving the correct
// variance and auto-trait behavior for a mutable borrow.
2957 _marker: marker::PhantomData<&'a mut T>,
2960 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// Uses `make_slice` (from the iterator! macro) because a consuming
// `into_slice` cannot be called from `&self`.
2961 impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
2962 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2963 f.debug_tuple("IterMut")
2964 .field(&self.make_slice())
// A `&mut T` is `Sync` when `T: Sync` and `Send` when `T: Send`, so the
// bounds differ between the two impls (unlike `Iter`).
2969 #[stable(feature = "rust1", since = "1.0.0")]
2970 unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
2971 #[stable(feature = "rust1", since = "1.0.0")]
2972 unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
2974 impl<'a, T> IterMut<'a, T> {
2975 /// View the underlying data as a subslice of the original data.
2977 /// To avoid creating `&mut` references that alias, this is forced
2978 /// to consume the iterator.
2985 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
2986 /// // struct (&[usize here]):
2987 /// let mut slice = &mut [1, 2, 3];
2990 /// // Then, we get the iterator:
2991 /// let mut iter = slice.iter_mut();
2992 /// // We move to next element:
2994 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
2995 /// println!("{:?}", iter.into_slice());
2998 /// // Now let's modify a value of the slice:
3000 /// // First we get back the iterator:
3001 /// let mut iter = slice.iter_mut();
3002 /// // We change the value of the first element of the slice returned by the `next` method:
3003 /// *iter.next().unwrap() += 1;
3005 /// // Now slice is "[2, 2, 3]":
3006 /// println!("{:?}", slice);
3008 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3009 pub fn into_slice(self) -> &'a mut [T] {
3010 unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
// Instantiate the shared iterator implementation for the mutable case.
3014 iterator!{struct IterMut -> *mut T, &'a mut T, mut, mut}
3016 /// An internal abstraction over the splitting iterators, so that
3017 /// splitn, splitn_mut etc can be implemented once.
3019 trait SplitIter: DoubleEndedIterator {
3020 /// Marks the underlying iterator as complete, extracting the remaining
3021 /// portion of the slice.
3022 fn finish(&mut self) -> Option<Self::Item>;
3025 /// An iterator over subslices separated by elements that match a predicate
3028 /// This struct is created by the [`split`] method on [slices].
3030 /// [`split`]: ../../std/primitive.slice.html#method.split
3031 /// [slices]: ../../std/primitive.slice.html
3032 #[stable(feature = "rust1", since = "1.0.0")]
// Fields `v` (remaining slice), `pred`, and `finished` are referenced below;
// their declaration lines are missing from this extraction.
3033 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
3039 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// The predicate closure is not `Debug`, so only `v` and `finished` are shown.
3040 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for Split<'a, T, P> where P: FnMut(&T) -> bool {
3041 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3042 f.debug_struct("Split")
3043 .field("v", &self.v)
3044 .field("finished", &self.finished)
3049 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3050 #[stable(feature = "rust1", since = "1.0.0")]
3051 impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool {
3052 fn clone(&self) -> Split<'a, T, P> {
3055 pred: self.pred.clone(),
3056 finished: self.finished,
3061 #[stable(feature = "rust1", since = "1.0.0")]
3062 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3063 type Item = &'a [T];
3066 fn next(&mut self) -> Option<&'a [T]> {
3067 if self.finished { return None; }
// Yield everything before the next separator; the separator itself
// (`idx`) is skipped via `idx + 1`.
3069 match self.v.iter().position(|x| (self.pred)(x)) {
3070 None => self.finish(),
3072 let ret = Some(&self.v[..idx]);
3073 self.v = &self.v[idx + 1..];
3080 fn size_hint(&self) -> (usize, Option<usize>) {
// At most one subslice per element plus one trailing piece.
3084 (1, Some(self.v.len() + 1))
3089 #[stable(feature = "rust1", since = "1.0.0")]
3090 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3092 fn next_back(&mut self) -> Option<&'a [T]> {
3093 if self.finished { return None; }
// Same as `next`, but scanning from the back with `rposition`.
3095 match self.v.iter().rposition(|x| (self.pred)(x)) {
3096 None => self.finish(),
3098 let ret = Some(&self.v[idx + 1..]);
3099 self.v = &self.v[..idx];
3106 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
3108 fn finish(&mut self) -> Option<&'a [T]> {
// Emit the final remainder exactly once, then stay exhausted.
3109 if self.finished { None } else { self.finished = true; Some(self.v) }
3113 #[stable(feature = "fused", since = "1.26.0")]
3114 impl<'a, T, P> FusedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {}
3116 /// An iterator over the subslices of the vector which are separated
3117 /// by elements that match `pred`.
3119 /// This struct is created by the [`split_mut`] method on [slices].
3121 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
3122 /// [slices]: ../../std/primitive.slice.html
3123 #[stable(feature = "rust1", since = "1.0.0")]
// Mutable counterpart of `Split`; field declarations (`v`, `pred`,
// `finished`) are on lines missing from this extraction.
3124 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3130 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3131 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3132 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3133 f.debug_struct("SplitMut")
3134 .field("v", &self.v)
3135 .field("finished", &self.finished)
3140 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3142 fn finish(&mut self) -> Option<&'a mut [T]> {
3146 self.finished = true;
// `mem::replace` moves the remaining slice out while leaving an empty one
// behind — required because a `&mut` slice cannot be copied out of `self`.
3147 Some(mem::replace(&mut self.v, &mut []))
3152 #[stable(feature = "rust1", since = "1.0.0")]
3153 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3154 type Item = &'a mut [T];
3157 fn next(&mut self) -> Option<&'a mut [T]> {
3158 if self.finished { return None; }
3160 let idx_opt = { // work around borrowck limitations
3161 let pred = &mut self.pred;
3162 self.v.iter().position(|x| (*pred)(x))
3165 None => self.finish(),
// Take ownership of the slice, split at the separator, keep the tail
// (minus the separator element) for subsequent iterations.
3167 let tmp = mem::replace(&mut self.v, &mut []);
3168 let (head, tail) = tmp.split_at_mut(idx);
3169 self.v = &mut tail[1..];
3176 fn size_hint(&self) -> (usize, Option<usize>) {
3180 // if the predicate doesn't match anything, we yield one slice
3181 // if it matches every element, we yield len+1 empty slices.
3182 (1, Some(self.v.len() + 1))
3187 #[stable(feature = "rust1", since = "1.0.0")]
3188 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
3189 P: FnMut(&T) -> bool,
3192 fn next_back(&mut self) -> Option<&'a mut [T]> {
3193 if self.finished { return None; }
3195 let idx_opt = { // work around borrowck limitations
3196 let pred = &mut self.pred;
3197 self.v.iter().rposition(|x| (*pred)(x))
3200 None => self.finish(),
// Back-to-front variant: keep the head, yield the tail after the separator.
3202 let tmp = mem::replace(&mut self.v, &mut []);
3203 let (head, tail) = tmp.split_at_mut(idx);
3205 Some(&mut tail[1..])
3211 #[stable(feature = "fused", since = "1.26.0")]
3212 impl<'a, T, P> FusedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
3214 /// An iterator over subslices separated by elements that match a predicate
3215 /// function, starting from the end of the slice.
3217 /// This struct is created by the [`rsplit`] method on [slices].
3219 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
3220 /// [slices]: ../../std/primitive.slice.html
3221 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Reversed splitter: wraps `Split` and swaps the iteration directions
// (each `next` delegates to the inner `next_back` and vice versa).
3222 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
3223 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
3224 inner: Split<'a, T, P>
3227 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Debug reaches through to the inner Split's fields so the output matches
// the other splitting iterators.
3228 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3229 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3230 f.debug_struct("RSplit")
3231 .field("v", &self.inner.v)
3232 .field("finished", &self.inner.finished)
3237 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3238 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3239 type Item = &'a [T];
3242 fn next(&mut self) -> Option<&'a [T]> {
// Direction swap: forward iteration of RSplit is backward iteration of Split.
3243 self.inner.next_back()
3247 fn size_hint(&self) -> (usize, Option<usize>) {
3248 self.inner.size_hint()
3252 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3253 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
// Body (presumably `self.inner.next()`) is on a missing line — TODO confirm.
3255 fn next_back(&mut self) -> Option<&'a [T]> {
3260 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3261 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3263 fn finish(&mut self) -> Option<&'a [T]> {
3268 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3269 impl<'a, T, P> FusedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {}
3271 /// An iterator over the subslices of the vector which are separated
3272 /// by elements that match `pred`, starting from the end of the slice.
3274 /// This struct is created by the [`rsplit_mut`] method on [slices].
3276 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
3277 /// [slices]: ../../std/primitive.slice.html
3278 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Mutable counterpart: same direction-swapping wrapper around `SplitMut`.
3279 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3280 inner: SplitMut<'a, T, P>
3283 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3284 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3285 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3286 f.debug_struct("RSplitMut")
3287 .field("v", &self.inner.v)
3288 .field("finished", &self.inner.finished)
3293 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3294 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3296 fn finish(&mut self) -> Option<&'a mut [T]> {
3301 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3302 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3303 type Item = &'a mut [T];
3306 fn next(&mut self) -> Option<&'a mut [T]> {
3307 self.inner.next_back()
3311 fn size_hint(&self) -> (usize, Option<usize>) {
3312 self.inner.size_hint()
3316 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3317 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
3318 P: FnMut(&T) -> bool,
3321 fn next_back(&mut self) -> Option<&'a mut [T]> {
3326 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3327 impl<'a, T, P> FusedIterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
3329 /// An private iterator over subslices separated by elements that
3330 /// match a predicate function, splitting at most a fixed number of
// Shared engine for splitn/rsplitn (and their _mut variants): wraps any
// `SplitIter` and caps the number of pieces yielded. Field declarations
// (`iter`, `count`) are on lines missing from this extraction.
3333 struct GenericSplitN<I> {
3338 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
3342 fn next(&mut self) -> Option<T> {
// With exactly one split left, `finish()` yields the whole remainder
// unsplit; otherwise delegate to the inner splitter. (The `0 =>` arm and
// the surrounding `match` line are missing from this view.)
3345 1 => { self.count -= 1; self.iter.finish() }
3346 _ => { self.count -= 1; self.iter.next() }
3351 fn size_hint(&self) -> (usize, Option<usize>) {
// The upper bound can never exceed the remaining split budget.
3352 let (lower, upper_opt) = self.iter.size_hint();
3353 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
3357 /// An iterator over subslices separated by elements that match a predicate
3358 /// function, limited to a given number of splits.
3360 /// This struct is created by the [`splitn`] method on [slices].
3362 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3363 /// [slices]: ../../std/primitive.slice.html
3364 #[stable(feature = "rust1", since = "1.0.0")]
3365 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3366 inner: GenericSplitN<Split<'a, T, P>>
3369 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3370 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitN<'a, T, P> where P: FnMut(&T) -> bool {
3371 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3372 f.debug_struct("SplitN")
3373 .field("inner", &self.inner)
3378 /// An iterator over subslices separated by elements that match a
3379 /// predicate function, limited to a given number of splits, starting
3380 /// from the end of the slice.
3382 /// This struct is created by the [`rsplitn`] method on [slices].
3384 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3385 /// [slices]: ../../std/primitive.slice.html
3386 #[stable(feature = "rust1", since = "1.0.0")]
3387 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3388 inner: GenericSplitN<RSplit<'a, T, P>>
3391 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3392 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitN<'a, T, P> where P: FnMut(&T) -> bool {
3393 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3394 f.debug_struct("RSplitN")
3395 .field("inner", &self.inner)
3400 /// An iterator over subslices separated by elements that match a predicate
3401 /// function, limited to a given number of splits.
3403 /// This struct is created by the [`splitn_mut`] method on [slices].
3405 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3406 /// [slices]: ../../std/primitive.slice.html
3407 #[stable(feature = "rust1", since = "1.0.0")]
3408 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3409 inner: GenericSplitN<SplitMut<'a, T, P>>
3412 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3413 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
3414 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3415 f.debug_struct("SplitNMut")
3416 .field("inner", &self.inner)
3421 /// An iterator over subslices separated by elements that match a
3422 /// predicate function, limited to a given number of splits, starting
3423 /// from the end of the slice.
3425 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3427 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3428 /// [slices]: ../../std/primitive.slice.html
3429 #[stable(feature = "rust1", since = "1.0.0")]
3430 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3431 inner: GenericSplitN<RSplitMut<'a, T, P>>
3434 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3435 impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
3436 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3437 f.debug_struct("RSplitNMut")
3438 .field("inner", &self.inner)
// Generates the `Iterator` and `FusedIterator` impls for the four
// `GenericSplitN`-based wrappers, forwarding everything to `self.inner`.
3443 macro_rules! forward_iterator {
3444 ($name:ident: $elem:ident, $iter_of:ty) => {
3445 #[stable(feature = "rust1", since = "1.0.0")]
3446 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
3447 P: FnMut(&T) -> bool
3449 type Item = $iter_of;
// Body (presumably `self.inner.next()`) is on a missing line — TODO confirm.
3452 fn next(&mut self) -> Option<$iter_of> {
3457 fn size_hint(&self) -> (usize, Option<usize>) {
3458 self.inner.size_hint()
3462 #[stable(feature = "fused", since = "1.26.0")]
3463 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
3464 where P: FnMut(&T) -> bool {}
// Stamp out the impls for all four bounded splitters.
3468 forward_iterator! { SplitN: T, &'a [T] }
3469 forward_iterator! { RSplitN: T, &'a [T] }
3470 forward_iterator! { SplitNMut: T, &'a mut [T] }
3471 forward_iterator! { RSplitNMut: T, &'a mut [T] }
3473 /// An iterator over overlapping subslices of length `size`.
3475 /// This struct is created by the [`windows`] method on [slices].
3477 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3478 /// [slices]: ../../std/primitive.slice.html
3480 #[stable(feature = "rust1", since = "1.0.0")]
// Fields `v` (remaining slice) and `size` (window length) are referenced
// below; their declaration lines are missing from this extraction.
3481 pub struct Windows<'a, T:'a> {
3486 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3487 #[stable(feature = "rust1", since = "1.0.0")]
3488 impl<'a, T> Clone for Windows<'a, T> {
3489 fn clone(&self) -> Windows<'a, T> {
3497 #[stable(feature = "rust1", since = "1.0.0")]
3498 impl<'a, T> Iterator for Windows<'a, T> {
3499 type Item = &'a [T];
3502 fn next(&mut self) -> Option<&'a [T]> {
3503 if self.size > self.v.len() {
// Yield the leading window, then advance by one element — windows overlap.
3506 let ret = Some(&self.v[..self.size]);
3507 self.v = &self.v[1..];
3513 fn size_hint(&self) -> (usize, Option<usize>) {
3514 if self.size > self.v.len() {
// Exact count of remaining windows: len - size + 1.
3517 let size = self.v.len() - self.size + 1;
3523 fn count(self) -> usize {
3528 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `overflowing_add` guards against `size + n` wrapping around.
3529 let (end, overflow) = self.size.overflowing_add(n);
3530 if end > self.v.len() || overflow {
3534 let nth = &self.v[n..end];
3535 self.v = &self.v[n+1..];
3541 fn last(self) -> Option<Self::Item> {
3542 if self.size > self.v.len() {
3545 let start = self.v.len() - self.size;
3546 Some(&self.v[start..])
3551 #[stable(feature = "rust1", since = "1.0.0")]
3552 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
3554 fn next_back(&mut self) -> Option<&'a [T]> {
3555 if self.size > self.v.len() {
// Yield the trailing window, then shrink from the back by one element.
3558 let ret = Some(&self.v[self.v.len()-self.size..]);
3559 self.v = &self.v[..self.v.len()-1];
3565 #[stable(feature = "rust1", since = "1.0.0")]
3566 impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
3568 #[unstable(feature = "trusted_len", issue = "37572")]
3569 unsafe impl<'a, T> TrustedLen for Windows<'a, T> {}
3571 #[stable(feature = "fused", since = "1.26.0")]
3572 impl<'a, T> FusedIterator for Windows<'a, T> {}
// Allows zip/enumerate-style consumers to index windows without rechecking
// bounds; caller must keep `i` in range per the trait's contract.
3575 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
3576 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3577 from_raw_parts(self.v.as_ptr().add(i), self.size)
3579 fn may_have_side_effect() -> bool { false }
3582 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
3585 /// When the slice len is not evenly divided by the chunk size, the last slice
3586 /// of the iteration will be the remainder.
3588 /// This struct is created by the [`chunks`] method on [slices].
3590 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
3591 /// [slices]: ../../std/primitive.slice.html
3593 #[stable(feature = "rust1", since = "1.0.0")]
3594 pub struct Chunks<'a, T:'a> {
3599 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3600 #[stable(feature = "rust1", since = "1.0.0")]
3601 impl<'a, T> Clone for Chunks<'a, T> {
3602 fn clone(&self) -> Chunks<'a, T> {
3605 chunk_size: self.chunk_size,
3610 #[stable(feature = "rust1", since = "1.0.0")]
3611 impl<'a, T> Iterator for Chunks<'a, T> {
3612 type Item = &'a [T];
3615 fn next(&mut self) -> Option<&'a [T]> {
3616 if self.v.is_empty() {
3619 let chunksz = cmp::min(self.v.len(), self.chunk_size);
3620 let (fst, snd) = self.v.split_at(chunksz);
3627 fn size_hint(&self) -> (usize, Option<usize>) {
3628 if self.v.is_empty() {
3631 let n = self.v.len() / self.chunk_size;
3632 let rem = self.v.len() % self.chunk_size;
3633 let n = if rem > 0 { n+1 } else { n };
3639 fn count(self) -> usize {
3644 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3645 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3646 if start >= self.v.len() || overflow {
3650 let end = match start.checked_add(self.chunk_size) {
3651 Some(sum) => cmp::min(self.v.len(), sum),
3652 None => self.v.len(),
3654 let nth = &self.v[start..end];
3655 self.v = &self.v[end..];
3661 fn last(self) -> Option<Self::Item> {
3662 if self.v.is_empty() {
3665 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3666 Some(&self.v[start..])
// DoubleEndedIterator for Chunks: consumes chunks from the back.
3671 #[stable(feature = "rust1", since = "1.0.0")]
3672 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
3674 fn next_back(&mut self) -> Option<&'a [T]> {
3675 if self.v.is_empty() {
// The trailing chunk is the short remainder (len % chunk_size) when the
// length is not a multiple of chunk_size, otherwise a full chunk.
3678 let remainder = self.v.len() % self.chunk_size;
3679 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
3680 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
// Marker/optimization impls for Chunks: exact length, trusted length, fused.
3687 #[stable(feature = "rust1", since = "1.0.0")]
3688 impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
3690 #[unstable(feature = "trusted_len", issue = "37572")]
3691 unsafe impl<'a, T> TrustedLen for Chunks<'a, T> {}
3693 #[stable(feature = "fused", since = "1.26.0")]
3694 impl<'a, T> FusedIterator for Chunks<'a, T> {}
// SAFETY contract of get_unchecked (unsafe fn): the caller must supply a
// valid chunk index `i`; `end` is clamped to the slice length, so the final
// chunk may be shorter than chunk_size.
3697 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
3698 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3699 let start = i * self.chunk_size;
3700 let end = match start.checked_add(self.chunk_size) {
3701 None => self.v.len(),
3702 Some(end) => cmp::min(end, self.v.len()),
3704 from_raw_parts(self.v.as_ptr().add(start), end - start)
3706 fn may_have_side_effect() -> bool { false }
// --- ChunksMut<'a, T>: mutable counterpart of Chunks ---
3709 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
3710 /// elements at a time). When the slice len is not evenly divided by the chunk
3711 /// size, the last slice of the iteration will be the remainder.
3713 /// This struct is created by the [`chunks_mut`] method on [slices].
3715 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
3716 /// [slices]: ../../std/primitive.slice.html
3718 #[stable(feature = "rust1", since = "1.0.0")]
3719 pub struct ChunksMut<'a, T:'a> {
// (fields elided in this listing; the impls below use `self.v` — a mutable
// slice borrow — and `self.chunk_size`.)
// Iterator impl for ChunksMut. Mirrors Iterator for Chunks, with a
// borrow-splitting trick: mem::replace swaps `self.v` for an empty slice so
// the splits carry lifetime 'a instead of borrowing from `&mut self`.
3724 #[stable(feature = "rust1", since = "1.0.0")]
3725 impl<'a, T> Iterator for ChunksMut<'a, T> {
3726 type Item = &'a mut [T];
3729 fn next(&mut self) -> Option<&'a mut [T]> {
3730 if self.v.is_empty() {
3733 let sz = cmp::min(self.v.len(), self.chunk_size);
3734 let tmp = mem::replace(&mut self.v, &mut []);
3735 let (head, tail) = tmp.split_at_mut(sz);
// size_hint: exact remaining count is ceil(len / chunk_size).
3742 fn size_hint(&self) -> (usize, Option<usize>) {
3743 if self.v.is_empty() {
3746 let n = self.v.len() / self.chunk_size;
3747 let rem = self.v.len() % self.chunk_size;
3748 let n = if rem > 0 { n + 1 } else { n };
3754 fn count(self) -> usize {
// nth: skip n chunks, then split twice to carve out the nth chunk mutably.
3759 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
3760 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3761 if start >= self.v.len() || overflow {
3765 let end = match start.checked_add(self.chunk_size) {
3766 Some(sum) => cmp::min(self.v.len(), sum),
3767 None => self.v.len(),
3769 let tmp = mem::replace(&mut self.v, &mut []);
3770 let (head, tail) = tmp.split_at_mut(end);
3771 let (_, nth) = head.split_at_mut(start);
// last: start index of the final (possibly short) chunk.
3778 fn last(self) -> Option<Self::Item> {
3779 if self.v.is_empty() {
3782 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3783 Some(&mut self.v[start..])
// DoubleEndedIterator for ChunksMut: trailing chunk first. Same remainder
// sizing as Chunks::next_back, plus the mem::replace borrow-splitting trick.
3788 #[stable(feature = "rust1", since = "1.0.0")]
3789 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
3791 fn next_back(&mut self) -> Option<&'a mut [T]> {
3792 if self.v.is_empty() {
3795 let remainder = self.v.len() % self.chunk_size;
3796 let sz = if remainder != 0 { remainder } else { self.chunk_size };
3797 let tmp = mem::replace(&mut self.v, &mut []);
// tmp.len() must be read before split_at_mut consumes `tmp`.
3798 let tmp_len = tmp.len();
3799 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
// Marker/optimization impls for ChunksMut, parallel to those of Chunks.
3806 #[stable(feature = "rust1", since = "1.0.0")]
3807 impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
3809 #[unstable(feature = "trusted_len", issue = "37572")]
3810 unsafe impl<'a, T> TrustedLen for ChunksMut<'a, T> {}
3812 #[stable(feature = "fused", since = "1.26.0")]
3813 impl<'a, T> FusedIterator for ChunksMut<'a, T> {}
// SAFETY contract: caller supplies an in-bounds chunk index; `end` is
// clamped so the last chunk may be shorter than chunk_size.
3816 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
3817 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
3818 let start = i * self.chunk_size;
3819 let end = match start.checked_add(self.chunk_size) {
3820 None => self.v.len(),
3821 Some(end) => cmp::min(end, self.v.len()),
3823 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
3825 fn may_have_side_effect() -> bool { false }
// --- ChunksExact<'a, T>: like Chunks, but never yields a short chunk; the
// leftover elements are exposed separately via remainder() ---
3828 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
// NOTE(review): the continuation of this sentence (original lines 3829-3830)
// is elided from this listing.
3831 /// When the slice len is not evenly divided by the chunk size, the last
3832 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
3833 /// the [`remainder`] function from the iterator.
3835 /// This struct is created by the [`chunks_exact`] method on [slices].
3837 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
3838 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
3839 /// [slices]: ../../std/primitive.slice.html
3841 #[unstable(feature = "chunks_exact", issue = "47115")]
3842 pub struct ChunksExact<'a, T:'a> {
// (fields elided; impls below reference `self.v`, `self.chunk_size`, and —
// presumably — a stored remainder slice. TODO confirm against full source.)
3848 #[unstable(feature = "chunks_exact", issue = "47115")]
3849 impl<'a, T> ChunksExact<'a, T> {
3850 /// Return the remainder of the original slice that is not going to be
3851 /// returned by the iterator. The returned slice has at most `chunk_size-1`
3853 pub fn remainder(&self) -> &'a [T] {
3858 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3859 #[unstable(feature = "chunks_exact", issue = "47115")]
3860 impl<'a, T> Clone for ChunksExact<'a, T> {
// Manual Clone, field-by-field, as for Chunks above.
3861 fn clone(&self) -> ChunksExact<'a, T> {
3865 chunk_size: self.chunk_size,
// Iterator impl for ChunksExact.
3870 #[unstable(feature = "chunks_exact", issue = "47115")]
3871 impl<'a, T> Iterator for ChunksExact<'a, T> {
3872 type Item = &'a [T];
// next: stops as soon as fewer than chunk_size elements remain, so every
// yielded chunk has exactly chunk_size elements (no clamping anywhere).
3875 fn next(&mut self) -> Option<&'a [T]> {
3876 if self.v.len() < self.chunk_size {
3879 let (fst, snd) = self.v.split_at(self.chunk_size);
// size_hint: exact count is floor(len / chunk_size).
3886 fn size_hint(&self) -> (usize, Option<usize>) {
3887 let n = self.v.len() / self.chunk_size;
3892 fn count(self) -> usize {
// nth: skip past n whole chunks, then continue iterating from there.
3897 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3898 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3899 if start >= self.v.len() || overflow {
3903 let (_, snd) = self.v.split_at(start);
3910 fn last(mut self) -> Option<Self::Item> {
// DoubleEndedIterator for ChunksExact: takes a full chunk_size chunk off the
// back; never yields a short chunk.
3915 #[unstable(feature = "chunks_exact", issue = "47115")]
3916 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
3918 fn next_back(&mut self) -> Option<&'a [T]> {
3919 if self.v.len() < self.chunk_size {
3922 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
// Marker/optimization impls for ChunksExact. ExactSizeIterator overrides
// is_empty (body elided in this listing).
3929 #[unstable(feature = "chunks_exact", issue = "47115")]
3930 impl<'a, T> ExactSizeIterator for ChunksExact<'a, T> {
3931 fn is_empty(&self) -> bool {
3936 #[unstable(feature = "trusted_len", issue = "37572")]
3937 unsafe impl<'a, T> TrustedLen for ChunksExact<'a, T> {}
3939 #[unstable(feature = "chunks_exact", issue = "47115")]
3940 impl<'a, T> FusedIterator for ChunksExact<'a, T> {}
// SAFETY contract: caller supplies an in-bounds chunk index. No clamping is
// needed — every chunk of a ChunksExact is exactly chunk_size long.
3943 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
3944 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3945 let start = i * self.chunk_size;
3946 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
3948 fn may_have_side_effect() -> bool { false }
// --- ChunksExactMut<'a, T>: mutable counterpart of ChunksExact; leftover is
// recovered by consuming the iterator via into_remainder() ---
3951 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
3952 /// elements at a time).
3954 /// When the slice len is not evenly divided by the chunk size, the last up to
3955 /// `chunk_size-1` elements will be omitted but can be retrieved from the
3956 /// [`into_remainder`] function from the iterator.
3958 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
3960 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
3961 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
3962 /// [slices]: ../../std/primitive.slice.html
3964 #[unstable(feature = "chunks_exact", issue = "47115")]
3965 pub struct ChunksExactMut<'a, T:'a> {
// (fields elided in this listing.)
3971 #[unstable(feature = "chunks_exact", issue = "47115")]
3972 impl<'a, T> ChunksExactMut<'a, T> {
3973 /// Return the remainder of the original slice that is not going to be
3974 /// returned by the iterator. The returned slice has at most `chunk_size-1`
// into_remainder takes `self` by value: a &mut remainder can only be handed
// out by consuming the iterator, unlike ChunksExact::remainder(&self).
3976 pub fn into_remainder(self) -> &'a mut [T] {
// Iterator impl for ChunksExactMut: exact-size chunks only, with the
// mem::replace borrow-splitting trick used by ChunksMut.
3981 #[unstable(feature = "chunks_exact", issue = "47115")]
3982 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
3983 type Item = &'a mut [T];
3986 fn next(&mut self) -> Option<&'a mut [T]> {
3987 if self.v.len() < self.chunk_size {
3990 let tmp = mem::replace(&mut self.v, &mut []);
3991 let (head, tail) = tmp.split_at_mut(self.chunk_size);
// size_hint: exact count is floor(len / chunk_size).
3998 fn size_hint(&self) -> (usize, Option<usize>) {
3999 let n = self.v.len() / self.chunk_size;
4004 fn count(self) -> usize {
// nth: skip past n whole chunks (overflow-checked), then keep iterating.
4009 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4010 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4011 if start >= self.v.len() || overflow {
4015 let tmp = mem::replace(&mut self.v, &mut []);
4016 let (_, snd) = tmp.split_at_mut(start);
4023 fn last(mut self) -> Option<Self::Item> {
// DoubleEndedIterator + marker impls + TrustedRandomAccess for ChunksExactMut.
4028 #[unstable(feature = "chunks_exact", issue = "47115")]
4029 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4031 fn next_back(&mut self) -> Option<&'a mut [T]> {
4032 if self.v.len() < self.chunk_size {
4035 let tmp = mem::replace(&mut self.v, &mut []);
// tmp.len() must be read before split_at_mut consumes `tmp`.
4036 let tmp_len = tmp.len();
4037 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4044 #[unstable(feature = "chunks_exact", issue = "47115")]
4045 impl<'a, T> ExactSizeIterator for ChunksExactMut<'a, T> {
4046 fn is_empty(&self) -> bool {
4051 #[unstable(feature = "trusted_len", issue = "37572")]
4052 unsafe impl<'a, T> TrustedLen for ChunksExactMut<'a, T> {}
4054 #[unstable(feature = "chunks_exact", issue = "47115")]
4055 impl<'a, T> FusedIterator for ChunksExactMut<'a, T> {}
// SAFETY contract: caller supplies an in-bounds chunk index; chunks are
// always exactly chunk_size long, so no end clamping is required.
4058 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4059 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4060 let start = i * self.chunk_size;
4061 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4063 fn may_have_side_effect() -> bool { false }
// --- Free function: construct a shared slice from a raw (pointer, length) ---
4070 /// Forms a slice from a pointer and a length.
4072 /// The `len` argument is the number of **elements**, not the number of bytes.
4076 /// This function is unsafe as there is no guarantee that the given pointer is
4077 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
4078 /// lifetime for the returned slice.
4080 /// `data` must be non-null and aligned, even for zero-length slices. One
4081 /// reason for this is that enum layout optimizations may rely on references
4082 /// (including slices of any length) being aligned and non-null to distinguish
4083 /// them from other data. You can obtain a pointer that is usable as `data`
4084 /// for zero-length slices using [`NonNull::dangling()`].
4088 /// The lifetime for the returned slice is inferred from its usage. To
4089 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
4090 /// source lifetime is safe in the context, such as by providing a helper
4091 /// function taking the lifetime of a host value for the slice, or by explicit
4099 /// // manifest a slice for a single element
4101 /// let ptr = &x as *const _;
4102 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
4103 /// assert_eq!(slice[0], 42);
4106 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
4108 #[stable(feature = "rust1", since = "1.0.0")]
4109 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// Alignment is only checked in debug builds; release builds trust the caller
// to uphold the safety contract documented above.
4110 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
// Reinterprets a FatPtr { data, len } as a slice reference via the Repr
// union declared near the top of this module.
4111 Repr { raw: FatPtr { data, len } }.rust
// Mutable counterpart of from_raw_parts; identical mechanism, plus the
// caller must also guarantee the returned &mut slice is not aliased.
4114 /// Performs the same functionality as [`from_raw_parts`], except that a
4115 /// mutable slice is returned.
4117 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
4118 /// as not being able to provide a non-aliasing guarantee of the returned
4119 /// mutable slice. `data` must be non-null and aligned even for zero-length
4120 /// slices as with [`from_raw_parts`]. See the documentation of
4121 /// [`from_raw_parts`] for more details.
4123 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
4125 #[stable(feature = "rust1", since = "1.0.0")]
4126 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
// Debug-only alignment check, as in from_raw_parts.
4127 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice")
4128 Repr { raw: FatPtr { data, len} }.rust_mut
// Safe wrapper: a &T is always valid and aligned for exactly one element, so
// from_raw_parts(s, 1) is sound here.
4131 /// Converts a reference to T into a slice of length 1 (without copying).
4132 #[stable(feature = "from_ref", since = "1.28.0")]
4133 pub fn from_ref<T>(s: &T) -> &[T] {
// NOTE(review): the enclosing `unsafe { .. }` delimiters (original line
// 4134 and the closing lines) are elided in this listing.
4135 from_raw_parts(s, 1)
// Safe wrapper, mutable counterpart of from_ref; the &mut T guarantees both
// validity for one element and exclusivity of the returned slice.
4139 /// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
4140 #[stable(feature = "from_ref", since = "1.28.0")]
4141 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// NOTE(review): the enclosing `unsafe { .. }` delimiters are elided here.
4143 from_raw_parts_mut(s, 1)
// Thin public shim over the private sort module's heapsort, exposed solely
// so it can be unit-tested (see the comment below).
4147 // This function is public only because there is no other way to unit test heapsort.
4148 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
4150 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
4151 where F: FnMut(&T, &T) -> bool
// `is_less` is the comparator: returns true when the first argument orders
// before the second.
4153 sort::heapsort(v, &mut is_less);
// --- Comparison traits section ---
4157 // Comparison traits
4161 /// Calls implementation provided memcmp.
4163 /// Interprets the data as u8.
4165 /// Returns 0 for equal, < 0 for less than and > 0 for greater
4167 // FIXME(#32610): Return type should be c_int
// FFI declaration of C memcmp; presumably inside an `extern { .. }` block
// whose delimiter lines are elided from this listing — confirm against the
// full source. Callers must only pass pointers valid for `n` bytes.
4168 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
// Public comparison impls for slices. All delegate to the private
// specialization helper traits defined below (SlicePartialEq, SliceOrd,
// SlicePartialOrd) so that byte-comparable element types can take fast paths.
4171 #[stable(feature = "rust1", since = "1.0.0")]
4172 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
4173 fn eq(&self, other: &[B]) -> bool {
4174 SlicePartialEq::equal(self, other)
4177 fn ne(&self, other: &[B]) -> bool {
4178 SlicePartialEq::not_equal(self, other)
4182 #[stable(feature = "rust1", since = "1.0.0")]
4183 impl<T: Eq> Eq for [T] {}
4185 /// Implements comparison of vectors lexicographically.
4186 #[stable(feature = "rust1", since = "1.0.0")]
4187 impl<T: Ord> Ord for [T] {
4188 fn cmp(&self, other: &[T]) -> Ordering {
4189 SliceOrd::compare(self, other)
4193 /// Implements comparison of vectors lexicographically.
4194 #[stable(feature = "rust1", since = "1.0.0")]
4195 impl<T: PartialOrd> PartialOrd for [T] {
4196 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
4197 SlicePartialOrd::partial_compare(self, other)
// Specialization helper for slice equality. The generic impl below uses
// `default fn` so the memcmp-based impl further down can override it.
4202 // intermediate trait for specialization of slice's PartialEq
4203 trait SlicePartialEq<B> {
4204 fn equal(&self, other: &[B]) -> bool;
// not_equal has a default body in terms of equal.
4206 fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
4209 // Generic slice equality
4210 impl<A, B> SlicePartialEq<B> for [A]
4211 where A: PartialEq<B>
4213 default fn equal(&self, other: &[B]) -> bool {
// Slices of different lengths are never equal; otherwise compare
// element-by-element, bailing out at the first mismatch.
4214 if self.len() != other.len() {
4218 for i in 0..self.len() {
4219 if !self[i].eq(&other[i]) {
// Specialized equality for BytewiseEquality element types: equal lengths plus
// identical data pointers short-circuit; otherwise a single memcmp over the
// whole byte range decides.
4228 // Use memcmp for bytewise equality when the types allow
4229 impl<A> SlicePartialEq<A> for [A]
4230 where A: PartialEq<A> + BytewiseEquality
4232 fn equal(&self, other: &[A]) -> bool {
4233 if self.len() != other.len() {
4236 if self.as_ptr() == other.as_ptr() {
// size_of_val gives the slice's total length in bytes for memcmp.
4240 let size = mem::size_of_val(self);
// NOTE(review): the `unsafe { .. }` delimiters around this FFI call appear
// to be elided from this listing — confirm against the full source.
4241 memcmp(self.as_ptr() as *const u8,
4242 other.as_ptr() as *const u8, size) == 0
// Specialization helper for slice partial ordering.
4248 // intermediate trait for specialization of slice's PartialOrd
4249 trait SlicePartialOrd<B> {
4250 fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
// Generic lexicographic partial compare. NOTE(review): the `where` bound
// lines (original 4254-4255) are elided — presumably A: PartialOrd<A>.
4253 impl<A> SlicePartialOrd<A> for [A]
4256 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
4257 let l = cmp::min(self.len(), other.len());
4259 // Slice to the loop iteration range to enable bound check
4260 // elimination in the compiler
4261 let lhs = &self[..l];
4262 let rhs = &other[..l];
// First non-equal element pair decides the result; if the common prefix is
// entirely equal, the shorter slice orders first (length comparison below).
4265 match lhs[i].partial_cmp(&rhs[i]) {
4266 Some(Ordering::Equal) => (),
4267 non_eq => return non_eq,
4271 self.len().partial_cmp(&other.len())
// Override for totally ordered elements: delegate to SliceOrd and wrap in
// Some. NOTE(review): the `where` bound lines (original 4276-4277) are
// elided — presumably A: Ord.
4275 impl<A> SlicePartialOrd<A> for [A]
4278 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
4279 Some(SliceOrd::compare(self, other))
// Specialization helper for total ordering of slices.
// NOTE(review): the `trait SliceOrd<B> { ..` header line (original 4285) is
// elided from this listing; only the method signature is visible.
4284 // intermediate trait for specialization of slice's Ord
4286 fn compare(&self, other: &[B]) -> Ordering;
// Generic lexicographic compare (default fn, overridable for [u8] below).
4289 impl<A> SliceOrd<A> for [A]
4292 default fn compare(&self, other: &[A]) -> Ordering {
4293 let l = cmp::min(self.len(), other.len());
4295 // Slice to the loop iteration range to enable bound check
4296 // elimination in the compiler
4297 let lhs = &self[..l];
4298 let rhs = &other[..l];
// First differing element decides; an all-equal common prefix falls through
// to the length comparison below.
4301 match lhs[i].cmp(&rhs[i]) {
4302 Ordering::Equal => (),
4303 non_eq => return non_eq,
4307 self.len().cmp(&other.len())
// [u8] fast path: memcmp's unsigned-byte lexicographic order matches [u8]
// (and only [u8] — not even [i8], per the comment below).
4311 // memcmp compares a sequence of unsigned bytes lexicographically.
4312 // this matches the order we want for [u8], but no others (not even [i8]).
4313 impl SliceOrd<u8> for [u8] {
4315 fn compare(&self, other: &[u8]) -> Ordering {
4316 let order = unsafe {
4317 memcmp(self.as_ptr(), other.as_ptr(),
4318 cmp::min(self.len(), other.len()))
// order == 0 over the common prefix: fall back to comparing lengths.
4321 self.len().cmp(&other.len())
4322 } else if order < 0 {
// NOTE(review): the branches returning the Less/Greater results (original
// lines after 4322) are elided from this listing.
// Marker trait + helper macro: element types whose equality can be decided
// by comparing their raw bytes (enabling the memcmp fast paths above).
4331 /// Trait implemented for types that can be compared for equality using
4332 /// their bytewise representation
4333 trait BytewiseEquality { }
// impl_marker_for!: expands to an empty impl of the given marker trait for
// each listed type.
4335 macro_rules! impl_marker_for {
4336 ($traitname:ident, $($ty:ty)*) => {
4338 impl $traitname for $ty { }
// Applied to the fixed-width integers plus usize/isize, char and bool.
4343 impl_marker_for!(BytewiseEquality,
4344 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
// TrustedRandomAccess for the basic slice iterators: unchecked element access
// by index, with no observable side effects.
4347 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
4348 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
// NOTE(review): the body line (original 4349, presumably a pointer offset
// and dereference like the IterMut one below) is elided from this listing.
4351 fn may_have_side_effect() -> bool { false }
4355 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
// SAFETY contract: caller must pass an in-bounds index i.
4356 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
4357 &mut *self.ptr.add(i)
4359 fn may_have_side_effect() -> bool { false }
// SliceContains: specialization hook behind slice membership testing, with a
// dedicated byte-search path for u8 (and, past the end of this view, i8).
4362 trait SliceContains: Sized {
4363 fn slice_contains(&self, x: &[Self]) -> bool;
4366 impl<T> SliceContains for T where T: PartialEq {
// Generic fallback: linear scan using PartialEq.
4367 default fn slice_contains(&self, x: &[Self]) -> bool {
4368 x.iter().any(|y| *y == *self)
4372 impl SliceContains for u8 {
// Specialized byte search via this module's memchr implementation.
4373 fn slice_contains(&self, x: &[Self]) -> bool {
4374 memchr::memchr(*self, x).is_some()
4378 impl SliceContains for i8 {
4379 fn slice_contains(&self, x: &[Self]) -> bool {
4380 let byte = *self as u8;
4381 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
4382 memchr::memchr(byte, bytes).is_some()