use core::fmt;
use core::cmp::Ordering;
use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val};
use core::ops::Deref;
use core::ops::CoerceUnsized;
use core::ptr::{self, NonNull};
/// A soft limit on the amount of references that may be made to an `Arc`.
///
/// Going above this limit will abort your program (although not
/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+/// A sentinel value that is used for the pointer of `Weak::new()`.
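+///
+/// This value can never be the address of a live `ArcInner<T>`: the inner
+/// struct holds two `AtomicUsize` counters, so every real allocation is
+/// aligned to at least `mem::align_of::<usize>()`, which is greater than 1.
+/// And since the sentinel is non-zero, it can still be stored in a `NonNull`.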
+const WEAK_EMPTY: usize = 1;
+
/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
/// Reference Counted'.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums
+    // (e.g. `Option<Weak<T>>` is the same size as `Weak<T>`), but it is not
+    // necessarily a valid pointer: `Weak::new()` sets it to the `WEAK_EMPTY`
+    // sentinel instead of allocating space on the heap.
ptr: NonNull<ArcInner<T>>,
}
impl<T> Weak<T> {
- /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
- /// it. Calling [`upgrade`] on the return value always gives [`None`].
+ /// Constructs a new `Weak<T>`, without allocating any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
///
/// [`upgrade`]: struct.Weak.html#method.upgrade
/// [`None`]: ../../std/option/enum.Option.html#variant.None
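    ///
    /// # Examples
    ///
    /// A minimal check of the documented behavior: a fresh `Weak` can never
    /// be upgraded.
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```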
pub fn new() -> Weak<T> {
unsafe {
Weak {
- ptr: Box::into_raw_non_null(box ArcInner {
- strong: atomic::AtomicUsize::new(0),
- weak: atomic::AtomicUsize::new(1),
- data: uninitialized(),
- }),
+ ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
}
}
}
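    /// Attempts to upgrade the `Weak` pointer to an `Arc`, extending
    /// the lifetime of the value if successful.
    ///
    /// Returns `None` if the value has since been dropped.
    ///
    /// # Examples
    ///
    /// A sketch of the round trip through `Arc::downgrade`:
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// // A strong `Arc` is still alive, so the upgrade succeeds.
    /// assert!(weak_five.upgrade().is_some());
    ///
    /// // Once every strong reference is gone, the upgrade fails.
    /// drop(five);
    /// assert!(weak_five.upgrade().is_none());
    /// ```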
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a
// fetch_add because once the count hits 0 it must never be above 0.
- let inner = self.inner();
+ let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+ return None;
+ } else {
+ unsafe { self.ptr.as_ref() }
+ };
        // Relaxed load because any write of 0 that we can observe
        // leaves the field in a permanently zero state (so a
        // Relaxed load is valid here for the same reason it is in
        // Arc's Clone impl).
        let mut n = inner.strong.load(Relaxed);

        loop {
            if n == 0 {
                return None;
            }

            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
            if n > MAX_REFCOUNT {
                unsafe { abort(); }
            }

match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
- Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
+ Ok(_) => return Some(Arc {
+ // null checked above
+ ptr: self.ptr,
+ phantom: PhantomData,
+ }),
Err(old) => n = old,
}
}
}
-
- #[inline]
- fn inner(&self) -> &ArcInner<T> {
- // See comments above for why this is "safe"
- unsafe { self.ptr.as_ref() }
- }
}
#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Clone for Weak<T> {
    /// Makes a clone of the `Weak` pointer that points to the same value.
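    ///
    /// # Examples
    ///
    /// A minimal sketch of cloning a `Weak` obtained via `Arc::downgrade`:
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// let weak_five = Arc::downgrade(&Arc::new(5));
    ///
    /// let _also_weak = Weak::clone(&weak_five);
    /// ```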
#[inline]
fn clone(&self) -> Weak<T> {
+ let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+ return Weak { ptr: self.ptr };
+ } else {
+ unsafe { self.ptr.as_ref() }
+ };
// See comments in Arc::clone() for why this is relaxed. This can use a
// fetch_add (ignoring the lock) because the weak count is only locked
        // when there are *no other* weak pointers in existence. (So we can't be
// running this code in that case).
- let old_size = self.inner().weak.fetch_add(1, Relaxed);
+ let old_size = inner.weak.fetch_add(1, Relaxed);
// See comments in Arc::clone() for why we do this (for mem::forget).
        if old_size > MAX_REFCOUNT {
            unsafe {
                abort();
            }
        }

        Weak { ptr: self.ptr }
    }
}

#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
- /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
- /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
///
/// [`upgrade`]: struct.Weak.html#method.upgrade
/// [`None`]: ../../std/option/enum.Option.html#variant.None
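    ///
    /// # Examples
    ///
    /// A minimal check that `default` behaves like `Weak::new`:
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Weak::default();
    /// assert!(empty.upgrade().is_none());
    /// ```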
    fn default() -> Weak<T> {
        Weak::new()
    }
}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then it's time
        // to deallocate the data entirely. See the discussion in Arc::drop()
        // about the memory orderings.
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run ON that remaining weak
        // ref, which can only happen after the lock is released.
- if self.inner().weak.fetch_sub(1, Release) == 1 {
+ let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+ return;
+ } else {
+ unsafe { self.ptr.as_ref() }
+ };
+
+ if inner.weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
            }
        }
    }
}