1 use crate::arena::Arena;
2 use rustc_serialize::{Encodable, Encoder};
3 use std::alloc::Layout;
4 use std::cmp::Ordering;
6 use std::hash::{Hash, Hasher};
13 /// `List<T>` is a bit like `&[T]`, but with some critical differences.
14 /// - IMPORTANT: Every `List<T>` is *required* to have unique contents. The
15 /// type's correctness relies on this, *but it does not enforce it*.
16 /// Therefore, any code that creates a `List<T>` must ensure uniqueness
17 /// itself. In practice this is achieved by interning.
18 /// - The length is stored within the `List<T>`, so `&List<Ty>` is a thin
20 /// - Because of this, you cannot get a `List<T>` that is a sub-list of another
21 /// `List<T>`. You can get a sub-slice `&[T]`, however.
22 /// - `List<T>` can be used with `CopyTaggedPtr`, which is useful within
23 /// structs whose size must be minimized.
24 /// - Because of the uniqueness assumption, we can use the address of a
25 /// `List<T>` for faster equality comparisons and hashing.
26 /// - `T` must be `Copy`. This lets `List<T>` be stored in a dropless arena and
27 /// iterators return a `T` rather than a `&T`.
28 /// - `T` must not be zero-sized.
33 /// Although this claims to be a zero-length array, in practice `len`
34 /// elements are actually present.
// Zero-sized opaque tail field: paired with the `OpaqueListContents`
// extern type, it makes `List` unsized while keeping `&List<T>` a thin
// pointer (no length word in the reference itself).
37 opaque: OpaqueListContents,
41 /// A dummy type used to force `List` to be unsized while not requiring
42 /// references to it be wide pointers.
// An `extern type`: its size and alignment are unknown to the compiler,
// so a struct ending in it is `!Sized`, yet pointers to it remain thin.
43 type OpaqueListContents;
47 /// Returns a reference to the (unique, static) empty list.
49 pub fn empty<'a>() -> &'a List<T> {
// `T` must not be more aligned than `MaxAlign`, otherwise reinterpreting
// `EMPTY_SLICE` below could yield a misaligned `&List<T>`.
53 assert!(mem::align_of::<T>() <= mem::align_of::<MaxAlign>());
// Helper tuple struct fixing the field order: length first, then padding.
// (Its attributes, e.g. a repr annotation, are elided from this view.)
56 struct InOrder<T, U>(T, U);
58 // The empty slice is static and contains a single `0` usize (for the
59 // length) that is 64-byte aligned, thus featuring the necessary
60 // trailing padding for elements with up to 64-byte alignment.
61 static EMPTY_SLICE: InOrder<usize, MaxAlign> = InOrder(0, MaxAlign);
// SAFETY: `EMPTY_SLICE` begins with `len == 0` and is sufficiently
// aligned for `T` (checked by the assert above), so viewing it as an
// empty `List<T>` is sound; the static lifetime outlives any `'a`.
62 unsafe { &*(&EMPTY_SLICE as *const _ as *const List<T>) }
// Number of elements stored in this list.
65 pub fn len(&self) -> usize {
70 impl<T: Copy> List<T> {
71 /// Allocates a list from `arena` and copies the contents of `slice` into it.
73 /// WARNING: the contents *must be unique*, such that no list with these
74 /// contents has been previously created. If not, operations such as `eq`
75 /// and `hash` might give incorrect results.
77 /// Panics if `T` is `Drop`, or `T` is zero-sized, or the slice is empty
78 /// (because the empty list exists statically, and is available via
81 pub(super) fn from_arena<'tcx>(arena: &'tcx Arena<'tcx>, slice: &[T]) -> &'tcx List<T> {
// The dropless arena never runs destructors, so `T` must not need one.
82 assert!(!mem::needs_drop::<T>());
83 assert!(mem::size_of::<T>() != 0);
// The unique empty list is the static one from `empty()`; allocating a
// second empty list here would break the uniqueness assumption.
84 assert!(!slice.is_empty());
// Layout: a `usize` length header immediately followed by the `[T]`
// payload (with whatever padding `Layout::extend` inserts between them).
86 let (layout, _offset) =
87 Layout::new::<usize>().extend(Layout::for_value::<[T]>(slice)).unwrap();
88 let mem = arena.dropless.alloc_raw(layout) as *mut List<T>;
// Write the length header first (raw write: `*mem` is uninitialized, so
// a plain assignment would be UB)...
91 ptr::addr_of_mut!((*mem).len).write(slice.len());
// ...then copy the elements into the trailing storage. `T: Copy`, so a
// nonoverlapping byte copy fully initializes them.
94 ptr::addr_of_mut!((*mem).data)
96 .copy_from_nonoverlapping(slice.as_ptr(), slice.len());
102 // If this method didn't exist, we would use `slice.iter` due to
105 // This would be weird, as `self.into_iter` iterates over `T` directly.
107 pub fn iter(&self) -> <&'_ List<T> as IntoIterator>::IntoIter {
112 impl<T: fmt::Debug> fmt::Debug for List<T> {
// NOTE(review): the body is elided from this view; presumably it formats
// the list as its underlying `[T]` slice (via `Deref`) — confirm.
113 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
118 impl<S: Encoder, T: Encodable<S>> Encodable<S> for List<T> {
// Encodes the list's contents (bodies elided from this view).
120 fn encode(&self, s: &mut S) -> Result<(), S::Error> {
125 impl<S: Encoder, T: Encodable<S>> Encodable<S> for &List<T> {
// Convenience impl so a `&List<T>` can be encoded directly; presumably
// forwards to the by-value impl above — confirm.
127 fn encode(&self, s: &mut S) -> Result<(), S::Error> {
132 impl<T: PartialEq> PartialEq for List<T> {
134 fn eq(&self, other: &List<T>) -> bool {
135 // Pointer equality implies list equality (due to the unique contents
// Marker impl: no methods to add; `Eq` just promises `eq` is a total
// equivalence relation when `T: Eq`.
141 impl<T: Eq> Eq for List<T> {}
143 impl<T> Ord for List<T>
// (The `where` clause is elided from this view; presumably it bounds the
// element ordering, matching the `<[T] as Ord>` call below — confirm.)
147 fn cmp(&self, other: &List<T>) -> Ordering {
148 // Pointer equality implies list equality (due to the unique contents
149 // assumption), but the contents must be compared otherwise.
// Fast path: same address means same interned list; otherwise fall back
// to element-wise slice comparison.
150 if self == other { Ordering::Equal } else { <[T] as Ord>::cmp(&**self, &**other) }
154 impl<T> PartialOrd for List<T>
// (The `where` clause is elided from this view; presumably it bounds the
// element ordering, matching the `<[T] as PartialOrd>` call below.)
158 fn partial_cmp(&self, other: &List<T>) -> Option<Ordering> {
159 // Pointer equality implies list equality (due to the unique contents
160 // assumption), but the contents must be compared otherwise.
// Fast path on address equality; slow path compares the slices.
162 Some(Ordering::Equal)
164 <[T] as PartialOrd>::partial_cmp(&**self, &**other)
169 impl<T> Hash for List<T> {
171 fn hash<H: Hasher>(&self, s: &mut H) {
172 // Pointer hashing is sufficient (due to the unique contents
// Hash the address rather than the elements: O(1) instead of O(len),
// and no `T: Hash` bound is needed.
174 (self as *const List<T>).hash(s)
178 impl<T> Deref for List<T> {
// `Target = [T]` (the associated-type line is elided from this view, but
// the return type below fixes it); lets `List` be used like a slice.
181 fn deref(&self) -> &[T] {
186 impl<T> AsRef<[T]> for List<T> {
188 fn as_ref(&self) -> &[T] {
// SAFETY: `data` holds `len` initialized, contiguous elements (the field
// docs state this, and `from_arena` writes exactly that many), so
// forming a slice over them for the lifetime of `&self` is sound.
189 unsafe { slice::from_raw_parts(self.data.as_ptr(), self.len) }
193 impl<'a, T: Copy> IntoIterator for &'a List<T> {
// Iterating yields `T` by value (copied), not `&T` — see the type docs.
195 type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
197 fn into_iter(self) -> Self::IntoIter {
// Slice the whole list via `Deref`/`Index`, then copy each element out.
198 self[..].iter().copied()
// SAFETY: NOTE(review) — this relies on interned lists never being
// mutated after creation, so sharing `&List<T>` across threads is safe
// whenever `T: Sync`; the immutability invariant is upheld elsewhere
// (interning) rather than enforced here — confirm.
202 unsafe impl<T: Sync> Sync for List<T> {}
204 unsafe impl<'a, T: 'a> rustc_data_structures::tagged_ptr::Pointer for &'a List<T> {
// Number of low pointer bits usable as a tag: a list starts with a
// `usize` length field, so its address is at least `usize`-aligned and
// that many low bits are always zero.
205 const BITS: usize = std::mem::align_of::<usize>().trailing_zeros() as usize;
208 fn into_usize(self) -> usize {
209 self as *const List<T> as usize
// SAFETY (caller contract): `ptr` must be a value previously produced by
// `into_usize` from a reference valid for `'a`, with any tag bits
// already cleared.
213 unsafe fn from_usize(ptr: usize) -> &'a List<T> {
214 &*(ptr as *const List<T>)
217 unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
218 // `Self` is `&'a List<T>` which impls `Copy`, so this is fine.
219 let ptr = Self::from_usize(ptr);