use core::any::Any;
use core::array::LengthAtMost32;
-use core::sync::atomic;
-use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use core::borrow;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
use core::fmt;
-use core::cmp::{self, Ordering};
-use core::iter;
+use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
+use core::iter;
+use core::marker::{PhantomData, Unpin, Unsize};
use core::mem::{self, align_of, align_of_val, size_of_val};
-use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
-use core::marker::{Unpin, Unsize, PhantomData};
-use core::hash::{Hash, Hasher};
-use core::{isize, usize};
-use core::convert::{From, TryFrom};
use core::slice::{self, from_raw_parts_mut};
+use core::sync::atomic;
+use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
+use core::{isize, usize};
-use crate::alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
+use crate::alloc::{box_free, handle_alloc_error, Alloc, Global, Layout};
use crate::boxed::Box;
use crate::rc::is_dangling;
use crate::string::String;
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
-#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
impl<T: ?Sized> Arc<T> {
fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
- Self {
- ptr,
- phantom: PhantomData,
- }
+ Self { ptr, phantom: PhantomData }
}
unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
-#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
#[stable(feature = "arc_weak", since = "1.4.0")]
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit() -> Arc<mem::MaybeUninit<T>> {
unsafe {
- Arc::from_ptr(Arc::allocate_for_layout(
- Layout::new::<T>(),
- |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
- ))
+ Arc::from_ptr(Arc::allocate_for_layout(Layout::new::<T>(), |mem| {
+ mem as *mut ArcInner<mem::MaybeUninit<T>>
+ }))
}
}
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
- unsafe {
- Arc::from_ptr(Arc::allocate_for_slice(len))
- }
+ unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) }
}
}
/// and must return a (potentially fat) pointer to the `ArcInner<T>`.
unsafe fn allocate_for_layout(
value_layout: Layout,
- mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>
+ mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
) -> *mut ArcInner<T> {
// Calculate layout using the given value layout.
// Previously, layout was calculated on the expression
// `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
// reference (see #54908).
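// Worked example (illustrative, on a 64-bit target): `ArcInner<()>` holds the
// two `AtomicUsize` counters, so its layout is size 16, align 8. Extending it
// with a value layout of size 20 / align 4 places the data at offset 16 and
// yields size 36; `pad_to_align` then rounds the size up to 40, the next
// multiple of align 8.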
- let layout = Layout::new::<ArcInner<()>>()
- .extend(value_layout).unwrap().0
- .pad_to_align();
+ let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
- let mem = Global.alloc(layout)
- .unwrap_or_else(|_| handle_alloc_error(layout));
+ let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
// Initialize the ArcInner
let inner = mem_to_arcinner(mem.as_ptr());
/// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
// Allocate for the `ArcInner<T>` using the given value.
- Self::allocate_for_layout(
- Layout::for_value(&*ptr),
- |mem| set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>,
- )
+ Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+ set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
+ })
}
fn from_box(v: Box<T>) -> Arc<T> {
ptr::copy_nonoverlapping(
bptr as *const T as *const u8,
&mut (*ptr).data as *mut _ as *mut u8,
- value_size);
+ value_size,
+ );
// Free the allocation without dropping its contents
box_free(box_unique);
impl<T> Arc<[T]> {
/// Allocates an `ArcInner<[T]>` with the given length.
unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
- Self::allocate_for_layout(
- Layout::array::<T>(len).unwrap(),
- |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
- )
+ Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+ ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
+ })
}
}
unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
let ptr = Self::allocate_for_slice(v.len());
- ptr::copy_nonoverlapping(
- v.as_ptr(),
- &mut (*ptr).data as *mut [T] as *mut T,
- v.len());
+ ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
Self::from_ptr(ptr)
}
// Pointer to first element
let elems = &mut (*ptr).data as *mut [T] as *mut T;
- let mut guard = Guard {
- mem: NonNull::new_unchecked(mem),
- elems,
- layout,
- n_elems: 0,
- };
+ let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
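// Panic safety: if `iter` panics below, `guard` is dropped during unwinding;
// its `Drop` impl drops the `n_elems` items already written and frees the
// allocation, so a partially-initialized buffer is never leaked.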
for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item);
impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
#[inline]
default fn from_slice(v: &[T]) -> Self {
- unsafe {
- Self::from_iter_exact(v.iter().cloned(), v.len())
- }
+ unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
}
}
}
}
-#[unstable(feature = "receiver_trait", issue = "0")]
+#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Arc<T> {}
impl<T: Clone> Arc<T> {
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
- unsafe {
- &mut this.ptr.as_mut().data
- }
+ unsafe { &mut this.ptr.as_mut().data }
}
}
// reference count is guaranteed to be 1 at this point, and we required
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
- unsafe {
- Some(Arc::get_mut_unchecked(this))
- }
+ unsafe { Some(Arc::get_mut_unchecked(this)) }
} else {
None
}
/// ```
#[stable(feature = "downgraded_weak", since = "1.10.0")]
pub fn new() -> Weak<T> {
- Weak {
- ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0"),
- }
+ Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
}
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut ArcInner<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
- Weak {
- ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
- }
+ Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
}
}
}
/// If `self` was created using [`Weak::new`], this will return 0.
///
/// [`Weak::new`]: #method.new
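///
/// A minimal usage sketch (illustrative):
///
/// ```
/// use std::sync::{Arc, Weak};
///
/// let five = Arc::new(5);
/// let weak_five = Arc::downgrade(&five);
/// assert_eq!(weak_five.strong_count(), 1);
///
/// // A `Weak` created with `Weak::new` never had an allocation to count.
/// assert_eq!(Weak::<i32>::new().strong_count(), 0);
/// ```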
- #[unstable(feature = "weak_counts", issue = "57977")]
+ #[stable(feature = "weak_counts", since = "1.41.0")]
pub fn strong_count(&self) -> usize {
- if let Some(inner) = self.inner() {
- inner.strong.load(SeqCst)
- } else {
- 0
- }
+ if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
}
/// Gets an approximation of the number of `Weak` pointers pointing to this
/// allocation.
///
- /// If `self` was created using [`Weak::new`], this will return 0. If not,
- /// the returned value is at least 1, since `self` still points to the
- /// allocation.
+ /// If `self` was created using [`Weak::new`], or if there are no remaining
+ /// strong pointers, this will return 0.
///
/// # Accuracy
///
/// `Weak`s pointing to the same allocation.
///
/// [`Weak::new`]: #method.new
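///
/// A minimal usage sketch (illustrative of the stabilized return type):
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
/// let weak_five = Arc::downgrade(&five);
/// assert_eq!(weak_five.weak_count(), 1);
///
/// // Once the last strong pointer is gone, the count reads as 0.
/// drop(five);
/// assert_eq!(weak_five.weak_count(), 0);
/// ```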
- #[unstable(feature = "weak_counts", issue = "57977")]
- pub fn weak_count(&self) -> Option<usize> {
- // Due to the implicit weak pointer added when any strong pointers are
- // around, we cannot implement `weak_count` correctly since it
- // necessarily requires accessing the strong count and weak count in an
- // unsynchronized fashion. So this version is a bit racy.
- self.inner().map(|inner| {
- let strong = inner.strong.load(SeqCst);
- let weak = inner.weak.load(SeqCst);
- if strong == 0 {
- // If the last `Arc` has *just* been dropped, it might not yet
- // have removed the implicit weak count, so the value we get
- // here might be 1 too high.
- weak
- } else {
- // As long as there's still at least 1 `Arc` around, subtract
- // the implicit weak pointer.
- // Note that the last `Arc` might get dropped between the 2
- // loads we do above, removing the implicit weak pointer. This
- // means that the value might be 1 too low here. In order to not
- // return 0 here (which would happen if we're the only weak
- // pointer), we guard against that specifically.
- cmp::max(1, weak - 1)
- }
- })
+ #[stable(feature = "weak_counts", since = "1.41.0")]
+ pub fn weak_count(&self) -> usize {
+ self.inner()
+ .map(|inner| {
+ let weak = inner.weak.load(SeqCst);
+ let strong = inner.strong.load(SeqCst);
+ if strong == 0 {
+ 0
+ } else {
+ // Since we observed that there was at least one strong pointer
+ // after reading the weak count, we know that the implicit weak
+ // reference (present whenever any strong references are alive)
+ // was still around when we observed the weak count, and can
+ // therefore safely subtract it.
+ weak - 1
+ }
+ })
+ .unwrap_or(0)
}
/// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`
/// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
fn inner(&self) -> Option<&ArcInner<T>> {
- if is_dangling(self.ptr) {
- None
- } else {
- Some(unsafe { self.ptr.as_ref() })
- }
+ if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
}
/// Returns `true` if the two `Weak`s point to the same allocation (similar to
// weak count can only be locked if there was precisely one weak ref,
// meaning that drop could only subsequently run ON that remaining weak
// ref, which can only happen after the lock is released.
- let inner = if let Some(inner) = self.inner() {
- inner
- } else {
- return
- };
+ let inner = if let Some(inner) = self.inner() { inner } else { return };
if inner.weak.fetch_sub(1, Release) == 1 {
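// This `Acquire` fence pairs with the `Release` decrement above: it ensures
// that all uses of the allocation by other weak owners happen-before the
// deallocation below.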
atomic::fence(Acquire);
- unsafe {
- Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
- }
+ unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
}
}
}
}
}
-#[unstable(feature = "boxed_slice_try_from", issue = "0")]
+#[unstable(feature = "boxed_slice_try_from", issue = "none")]
impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]>
where
[T; N]: LengthAtMost32,
let (low, high) = iter.size_hint();
if let Some(high) = high {
debug_assert_eq!(
- low, high,
+ low,
+ high,
"TrustedLen iterator's size hint is not exact: {:?}",
(low, high)
);
}
#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Arc<T> { }
+impl<T: ?Sized> Unpin for Arc<T> {}
/// Computes the offset of the data field within `ArcInner`.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Align the unsized value to the end of the `ArcInner`.
// Because it is `?Sized`, it will always be the last field in memory.
+ // Note: this relies on a detail of the current compiler implementation and
+ // is not guaranteed by the language. Do not rely on it outside of std.
data_offset_align(align_of_val(&*ptr))
}
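// For reference, a sketch of the `data_offset_align` helper used above,
// assuming the unstable `Layout::padding_needed_for`: it rounds the `ArcInner`
// header size up to the value's alignment to find where the data field begins.
#[inline]
fn data_offset_align(align: usize) -> isize {
    let layout = Layout::new::<ArcInner<()>>();
    (layout.size() + layout.padding_needed_for(align)) as isize
}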