1 use crate::arena::Arena;
2 use rustc_serialize::{Encodable, Encoder};
3 use std::alloc::Layout;
4 use std::cmp::Ordering;
6 use std::hash::{Hash, Hasher};
/// `List<T>` is a bit like `&[T]`, but with some critical differences.
/// - IMPORTANT: Every `List<T>` is *required* to have unique contents. The
///   type's correctness relies on this, *but it does not enforce it*.
///   Therefore, any code that creates a `List<T>` must ensure uniqueness
///   itself. In practice this is achieved by interning.
/// - The length is stored within the `List<T>`, so `&List<Ty>` is a thin
///   pointer.
/// - Because of this, you cannot get a `List<T>` that is a sub-list of another
///   `List<T>`. You can get a sub-slice `&[T]`, however.
/// - `List<T>` can be used with `CopyTaggedPtr`, which is useful within
///   structs whose size must be minimized.
/// - Because of the uniqueness assumption, we can use the address of a
///   `List<T>` for faster equality comparisons and hashing.
/// - `T` must be `Copy`. This lets `List<T>` be stored in a dropless arena and
///   iterators return a `T` rather than a `&T`.
/// - `T` must not be zero-sized.
/// Although this claims to be a zero-length array, in practice `len`
/// elements are actually present.
// NOTE(review): the zero-length `data` array this doc refers to, along with
// the struct header and the `len` field, is elided from this excerpt.
// `opaque` exists only to make `List<T>` unsized (see `OpaqueListContents`).
opaque: OpaqueListContents,
/// A dummy type used to force `List` to be unsized while not requiring
/// references to it be wide pointers.
// NOTE(review): presumably declared inside an `extern {}` block (wrapper not
// visible in this excerpt); an extern type carries no layout metadata, which
// is what keeps `&List<T>` a single (thin) pointer.
type OpaqueListContents;
    /// Returns a reference to the (unique, static) empty list.
    pub fn empty<'a>() -> &'a List<T> {
        // If `T` were more aligned than `MaxAlign`, the pointer cast below
        // would yield a misaligned `&List<T>`.
        // NOTE(review): `MaxAlign`'s definition is not visible in this
        // excerpt — presumably a 64-byte-aligned ZST; confirm.
        assert!(mem::align_of::<T>() <= mem::align_of::<MaxAlign>());

        // Lays out a length header followed by alignment padding, mirroring
        // `List`'s own layout (a `usize` `len`, then the elements).
        struct InOrder<T, U>(T, U);

        // The empty slice is static and contains a single `0` usize (for the
        // length) that is 64-byte aligned, thus featuring the necessary
        // trailing padding for elements with up to 64-byte alignment.
        static EMPTY_SLICE: InOrder<usize, MaxAlign> = InOrder(0, MaxAlign);
        // SAFETY: the static starts with a zero `usize`, matching `List`'s
        // `len` header; with `len == 0` no element is ever read, and the
        // alignment requirement was asserted above.
        unsafe { &*(&EMPTY_SLICE as *const _ as *const List<T>) }
impl<T: Copy> List<T> {
    /// Allocates a list from `arena` and copies the contents of `slice` into it.
    ///
    /// WARNING: the contents *must be unique*, such that no list with these
    /// contents has been previously created. If not, operations such as `eq`
    /// and `hash` might give incorrect results.
    ///
    /// Panics if `T` is `Drop`, or `T` is zero-sized, or the slice is empty
    /// (because the empty list exists statically, and is available via
    /// `empty()`).
    pub(super) fn from_arena<'tcx>(arena: &'tcx Arena<'tcx>, slice: &[T]) -> &'tcx List<T> {
        // Enforce the invariants documented on `List<T>`: no destructors
        // (the dropless arena never runs drop glue), no ZSTs, and the empty
        // list must come from the static `empty()` instead.
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        // A `usize` length header, extended with the slice's own layout;
        // the resulting alignment is max(align_of::<usize>(), align_of::<T>()).
        let (layout, _offset) =
            Layout::new::<usize>().extend(Layout::for_value::<[T]>(slice)).unwrap();
        let mem = arena.dropless.alloc_raw(layout) as *mut List<T>;
        // Initialize the length header, then copy the elements in after it.
        // NOTE(review): in the full source these raw-pointer writes sit
        // inside an `unsafe` block (elided from this excerpt).
        ptr::addr_of_mut!((*mem).len).write(slice.len());
        ptr::addr_of_mut!((*mem).data)
            .copy_from_nonoverlapping(slice.as_ptr(), slice.len());
    // If this method didn't exist, we would use `slice.iter` due to
    // `Deref` coercion.
    //
    // This would be weird, as `self.into_iter` iterates over `T` directly.
    pub fn iter(&self) -> <&'_ List<T> as IntoIterator>::IntoIter {
impl<T: fmt::Debug> fmt::Debug for List<T> {
    // NOTE(review): body elided in this excerpt; presumably formats the
    // underlying slice obtained via `Deref` — confirm against full source.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for List<T> {
    // NOTE(review): body elided; presumably encodes the contents as a slice.
    fn encode(&self, s: &mut S) -> Result<(), S::Error> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for &List<T> {
    // Lets a `&List<T>` be encoded directly.
    // NOTE(review): body elided; presumably forwards to the `List<T>` impl.
    fn encode(&self, s: &mut S) -> Result<(), S::Error> {
impl<T: PartialEq> PartialEq for List<T> {
    fn eq(&self, other: &List<T>) -> bool {
        // Pointer equality implies list equality (due to the unique contents
        // assumption), so comparing the two addresses suffices.
// Address-based equality (above) is a total equivalence relation under the
// unique-contents assumption, so `Eq` is sound.
impl<T: Eq> Eq for List<T> {}
impl<T> Ord for List<T>
// NOTE(review): the `where T: Ord` bound is elided from this excerpt.
    fn cmp(&self, other: &List<T>) -> Ordering {
        // Pointer equality implies list equality (due to the unique contents
        // assumption), but the contents must be compared otherwise.
        // (`==` here is the cheap address comparison defined above.)
        if self == other { Ordering::Equal } else { <[T] as Ord>::cmp(&**self, &**other) }
impl<T> PartialOrd for List<T>
// NOTE(review): the `where T: PartialOrd` bound is elided from this excerpt.
    fn partial_cmp(&self, other: &List<T>) -> Option<Ordering> {
        // Pointer equality implies list equality (due to the unique contents
        // assumption), but the contents must be compared otherwise.
        // NOTE(review): the surrounding `if self == other { ... } else { ... }`
        // branches are elided from this excerpt.
        Some(Ordering::Equal)
        <[T] as PartialOrd>::partial_cmp(&**self, &**other)
impl<T> Hash for List<T> {
    fn hash<H: Hasher>(&self, s: &mut H) {
        // Pointer hashing is sufficient (due to the unique contents
        // assumption): equal lists are the same allocation, so hashing the
        // address stays consistent with `eq`.
        (self as *const List<T>).hash(s)
impl<T> Deref for List<T> {
    // NOTE(review): `type Target = [T];` and the body are elided from this
    // excerpt; presumably delegates to `as_ref` below.
    fn deref(&self) -> &[T] {
impl<T> AsRef<[T]> for List<T> {
    fn as_ref(&self) -> &[T] {
        // SAFETY: `from_arena` writes `len` and then copies exactly `len`
        // contiguous elements starting at `data`, so this view covers fully
        // initialized memory for the lifetime of the arena allocation.
        unsafe { slice::from_raw_parts(self.data.as_ptr(), self.len) }
impl<'a, T: Copy> IntoIterator for &'a List<T> {
    // Yields `T` by value rather than `&T`; `T: Copy` per `List`'s contract.
    // NOTE(review): the `type Item = T;` line is elided from this excerpt.
    type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;

    fn into_iter(self) -> Self::IntoIter {
        // Full-range indexing yields the backing `&[T]`, which is then
        // iterated with each element copied out.
        self[..].iter().copied()
// SAFETY: a shared `List<T>` only ever hands out `&T` access (`Deref`,
// `AsRef`, iteration) and is never mutated after creation, so it may be
// shared across threads whenever `T: Sync`.
unsafe impl<T: Sync> Sync for List<T> {}
unsafe impl<'a, T: 'a> rustc_data_structures::tagged_ptr::Pointer for &'a List<T> {
    // Number of low address bits guaranteed zero and thus usable for tags.
    // NOTE(review): derived from `usize`'s alignment rather than `List<T>`'s;
    // sound because every list starts with a `usize` `len` header, so the
    // `Layout::extend` in `from_arena` makes allocations at least
    // usize-aligned — confirm against full source.
    const BITS: usize = std::mem::align_of::<usize>().trailing_zeros() as usize;

    fn into_usize(self) -> usize {
        // Reference-to-address round-trip partner of `from_usize`.
        self as *const List<T> as usize

    unsafe fn from_usize(ptr: usize) -> &'a List<T> {
        // SAFETY (caller contract): `ptr` must have been produced by
        // `into_usize` and its referent must still be live for `'a`.
        &*(ptr as *const List<T>)

    unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
        // `Self` is `&'a List<T>` which impls `Copy`, so this is fine.
        let ptr = Self::from_usize(ptr);