use crate::arena::Arena;
use rustc_serialize::{Encodable, Encoder};
use std::alloc::Layout;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter;
use std::mem;
use std::ops::Deref;
use std::ptr;
use std::slice;
/// `List<T>` is a bit like `&[T]`, but with some critical differences.
/// - IMPORTANT: Every `List<T>` is *required* to have unique contents. The
///   type's correctness relies on this, *but it does not enforce it*.
///   Therefore, any code that creates a `List<T>` must ensure uniqueness
///   itself. In practice this is achieved by interning.
/// - The length is stored within the `List<T>`, so `&List<Ty>` is a thin
///   pointer.
/// - Because of this, you cannot get a `List<T>` that is a sub-list of another
///   `List<T>`. You can get a sub-slice `&[T]`, however.
/// - `List<T>` can be used with `CopyTaggedPtr`, which is useful within
///   structs whose size must be minimized.
/// - Because of the uniqueness assumption, we can use the address of a
///   `List<T>` for faster equality comparisons and hashing.
/// - `T` must be `Copy`. This lets `List<T>` be stored in a dropless arena and
///   iterators return a `T` rather than a `&T`.
/// - `T` must not be zero-sized.
33 /// Although this claims to be a zero-length array, in practice `len`
34 /// elements are actually present.
37 opaque: OpaqueListContents,
41 /// A dummy type used to force `List` to be unsized while not requiring
42 /// references to it be wide pointers.
43 type OpaqueListContents;
47 /// Returns a reference to the (unique, static) empty list.
49 pub fn empty<'a>() -> &'a List<T> {
53 assert!(mem::align_of::<T>() <= mem::align_of::<MaxAlign>());
56 struct InOrder<T, U>(T, U);
58 // The empty slice is static and contains a single `0` usize (for the
59 // length) that is 64-byte aligned, thus featuring the necessary
60 // trailing padding for elements with up to 64-byte alignment.
61 static EMPTY_SLICE: InOrder<usize, MaxAlign> = InOrder(0, MaxAlign);
62 unsafe { &*(&EMPTY_SLICE as *const _ as *const List<T>) }
65 pub fn len(&self) -> usize {
69 pub fn as_slice(&self) -> &[T] {
74 impl<T: Copy> List<T> {
75 /// Allocates a list from `arena` and copies the contents of `slice` into it.
77 /// WARNING: the contents *must be unique*, such that no list with these
78 /// contents has been previously created. If not, operations such as `eq`
79 /// and `hash` might give incorrect results.
81 /// Panics if `T` is `Drop`, or `T` is zero-sized, or the slice is empty
82 /// (because the empty list exists statically, and is available via
85 pub(super) fn from_arena<'tcx>(arena: &'tcx Arena<'tcx>, slice: &[T]) -> &'tcx List<T> {
86 assert!(!mem::needs_drop::<T>());
87 assert!(mem::size_of::<T>() != 0);
88 assert!(!slice.is_empty());
90 let (layout, _offset) =
91 Layout::new::<usize>().extend(Layout::for_value::<[T]>(slice)).unwrap();
92 let mem = arena.dropless.alloc_raw(layout) as *mut List<T>;
95 ptr::addr_of_mut!((*mem).len).write(slice.len());
98 ptr::addr_of_mut!((*mem).data)
100 .copy_from_nonoverlapping(slice.as_ptr(), slice.len());
106 // If this method didn't exist, we would use `slice.iter` due to
109 // This would be weird, as `self.into_iter` iterates over `T` directly.
111 pub fn iter(&self) -> <&'_ List<T> as IntoIterator>::IntoIter {
116 impl<T: fmt::Debug> fmt::Debug for List<T> {
117 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
122 impl<S: Encoder, T: Encodable<S>> Encodable<S> for List<T> {
124 fn encode(&self, s: &mut S) {
129 impl<T: PartialEq> PartialEq for List<T> {
131 fn eq(&self, other: &List<T>) -> bool {
132 // Pointer equality implies list equality (due to the unique contents
138 impl<T: Eq> Eq for List<T> {}
140 impl<T> Ord for List<T>
144 fn cmp(&self, other: &List<T>) -> Ordering {
145 // Pointer equality implies list equality (due to the unique contents
146 // assumption), but the contents must be compared otherwise.
147 if self == other { Ordering::Equal } else { <[T] as Ord>::cmp(&**self, &**other) }
151 impl<T> PartialOrd for List<T>
155 fn partial_cmp(&self, other: &List<T>) -> Option<Ordering> {
156 // Pointer equality implies list equality (due to the unique contents
157 // assumption), but the contents must be compared otherwise.
159 Some(Ordering::Equal)
161 <[T] as PartialOrd>::partial_cmp(&**self, &**other)
166 impl<T> Hash for List<T> {
168 fn hash<H: Hasher>(&self, s: &mut H) {
169 // Pointer hashing is sufficient (due to the unique contents
171 (self as *const List<T>).hash(s)
175 impl<T> Deref for List<T> {
178 fn deref(&self) -> &[T] {
183 impl<T> AsRef<[T]> for List<T> {
185 fn as_ref(&self) -> &[T] {
186 unsafe { slice::from_raw_parts(self.data.as_ptr(), self.len) }
190 impl<'a, T: Copy> IntoIterator for &'a List<T> {
192 type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
194 fn into_iter(self) -> Self::IntoIter {
195 self[..].iter().copied()
199 unsafe impl<T: Sync> Sync for List<T> {}
201 unsafe impl<'a, T: 'a> rustc_data_structures::tagged_ptr::Pointer for &'a List<T> {
202 const BITS: usize = std::mem::align_of::<usize>().trailing_zeros() as usize;
205 fn into_usize(self) -> usize {
206 self as *const List<T> as usize
210 unsafe fn from_usize(ptr: usize) -> &'a List<T> {
211 &*(ptr as *const List<T>)
214 unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
215 // `Self` is `&'a List<T>` which impls `Copy`, so this is fine.
216 let ptr = Self::from_usize(ptr);