use core::borrow;
use core::cell::Cell;
use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::iter;
-use core::marker::{self, Unpin, Unsize, PhantomData};
+use core::marker::{self, PhantomData, Unpin, Unsize};
use core::mem::{self, align_of, align_of_val, forget, size_of_val};
-use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
use core::slice::{self, from_raw_parts_mut};
-use core::convert::{From, TryFrom};
use core::usize;
-use crate::alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
+use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
use crate::string::String;
use crate::vec::Vec;
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
-#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
impl<T: ?Sized> Rc<T> {
fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
- Self {
- ptr,
- phantom: PhantomData,
- }
+ Self { ptr, phantom: PhantomData }
}
unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
unsafe {
- Rc::from_ptr(Rc::allocate_for_layout(
- Layout::new::<T>(),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
- ))
+ Rc::from_ptr(Rc::allocate_for_layout(Layout::new::<T>(), |mem| {
+ mem as *mut RcBox<mem::MaybeUninit<T>>
+ }))
}
}
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
- unsafe {
- Rc::from_ptr(Rc::allocate_for_slice(len))
- }
+ unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
}
}
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
- let ptr: *const T = &*this;
+ let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
+ let fake_ptr = ptr as *mut T;
mem::forget(this);
- ptr
+
+ // SAFETY: This cannot go through Deref::deref.
+ // Instead, we manually offset the pointer rather than manifesting a reference.
+ // This is so that the returned pointer retains the same provenance as our pointer.
+ // This is required so that e.g. `get_mut` can write through the pointer
+ // after the Rc is recovered through `from_raw`.
+ unsafe {
+ let offset = data_offset(&(*ptr).value);
+ set_data_ptr(fake_ptr, (ptr as *mut u8).offset(offset))
+ }
}
/// Constructs an `Rc` from a raw pointer.
#[inline]
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
- if Rc::is_unique(this) {
- unsafe {
- Some(Rc::get_mut_unchecked(this))
- }
- } else {
- None
- }
+ if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
}
/// Returns a mutable reference into the given `Rc`,
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the allocation.
- unsafe {
- &mut this.ptr.as_mut().value
- }
+ unsafe { &mut this.ptr.as_mut().value }
}
}
/// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
unsafe fn allocate_for_layout(
value_layout: Layout,
- mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>
+ mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
) -> *mut RcBox<T> {
// Calculate layout using the given value layout.
// Previously, layout was calculated on the expression
// `&*(ptr as *const RcBox<T>)`, but this created a misaligned
// reference (see #54908).
- let layout = Layout::new::<RcBox<()>>()
- .extend(value_layout).unwrap().0
- .pad_to_align();
+ let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
// Allocate for the layout.
- let mem = Global.alloc(layout)
- .unwrap_or_else(|_| handle_alloc_error(layout));
+ let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
// Initialize the RcBox
let inner = mem_to_rcbox(mem.as_ptr());
/// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
// Allocate for the `RcBox<T>` using the given value.
- Self::allocate_for_layout(
- Layout::for_value(&*ptr),
- |mem| set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>,
- )
+ Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+ set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
+ })
}
fn from_box(v: Box<T>) -> Rc<T> {
ptr::copy_nonoverlapping(
bptr as *const T as *const u8,
&mut (*ptr).value as *mut _ as *mut u8,
- value_size);
+ value_size,
+ );
// Free the allocation without dropping its contents
box_free(box_unique);
impl<T> Rc<[T]> {
/// Allocates an `RcBox<[T]>` with the given length.
unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
- Self::allocate_for_layout(
- Layout::array::<T>(len).unwrap(),
- |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
- )
+ Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+ ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
+ })
}
}
unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
let ptr = Self::allocate_for_slice(v.len());
- ptr::copy_nonoverlapping(
- v.as_ptr(),
- &mut (*ptr).value as *mut [T] as *mut T,
- v.len());
+ ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
Self::from_ptr(ptr)
}
// Pointer to first element
let elems = &mut (*ptr).value as *mut [T] as *mut T;
- let mut guard = Guard {
- mem: NonNull::new_unchecked(mem),
- elems,
- layout,
- n_elems: 0,
- };
+ let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
for (i, item) in iter.enumerate() {
ptr::write(elems.add(i), item);
impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
#[inline]
default fn from_slice(v: &[T]) -> Self {
- unsafe {
- Self::from_iter_exact(v.iter().cloned(), v.len())
- }
+ unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
}
}
}
}
-#[unstable(feature = "receiver_trait", issue = "0")]
+#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "boxed_slice_try_from", issue = "0")]
+#[unstable(feature = "boxed_slice_try_from", issue = "none")]
impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]>
where
[T; N]: LengthAtMost32,
}
}
impl<T, I: iter::TrustedLen<Item = T>> RcFromIter<T, I> for Rc<[T]> {
default fn from_iter(iter: I) -> Self {
// This is the case for a `TrustedLen` iterator.
let (low, high) = iter.size_hint();
if let Some(high) = high {
debug_assert_eq!(
- low, high,
+ low,
+ high,
"TrustedLen iterator's size hint is not exact: {:?}",
(low, high)
);
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
-#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
impl<T> Weak<T> {
/// ```
#[stable(feature = "downgraded_weak", since = "1.10.0")]
pub fn new() -> Weak<T> {
- Weak {
- ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
- }
+ Weak { ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0") }
}
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut RcBox<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
- Weak {
- ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
- }
+ Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
}
}
}
/// [`Weak::new`]: #method.new
#[stable(feature = "weak_counts", since = "1.41.0")]
pub fn strong_count(&self) -> usize {
- if let Some(inner) = self.inner() {
- inner.strong()
- } else {
- 0
- }
+ if let Some(inner) = self.inner() { inner.strong() } else { 0 }
}
/// Gets the number of `Weak` pointers pointing to this allocation.
/// If no strong pointers remain, this will return zero.
#[stable(feature = "weak_counts", since = "1.41.0")]
pub fn weak_count(&self) -> usize {
- self.inner().map(|inner| {
- if inner.strong() > 0 {
- inner.weak() - 1 // subtract the implicit weak ptr
- } else {
- 0
- }
- }).unwrap_or(0)
+ self.inner()
+ .map(|inner| {
+ if inner.strong() > 0 {
+ inner.weak() - 1 // subtract the implicit weak ptr
+ } else {
+ 0
+ }
+ })
+ .unwrap_or(0)
}
/// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
/// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
fn inner(&self) -> Option<&RcBox<T>> {
- if is_dangling(self.ptr) {
- None
- } else {
- Some(unsafe { self.ptr.as_ref() })
- }
+ if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
}
/// Returns `true` if the two `Weak`s point to the same allocation (similar to
// nevertheless, we insert an abort here to hint LLVM at
// an otherwise missed optimization.
if strong == 0 || strong == usize::max_value() {
- unsafe { abort(); }
+ unsafe {
+ abort();
+ }
}
self.inner().strong.set(strong + 1);
}
// nevertheless, we insert an abort here to hint LLVM at
// an otherwise missed optimization.
if weak == 0 || weak == usize::max_value() {
- unsafe { abort(); }
+ unsafe {
+ abort();
+ }
}
self.inner().weak.set(weak + 1);
}
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
- unsafe {
- self.ptr.as_ref()
- }
+ unsafe { self.ptr.as_ref() }
}
}
}
#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Rc<T> { }
+impl<T: ?Sized> Unpin for Rc<T> {}
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Align the unsized value to the end of the `RcBox`.
// Because it is ?Sized, it will always be the last field in memory.
+ // Note: This is a detail of the current implementation of the compiler,
+ // and is not a guaranteed language detail. Do not rely on it outside of std.
data_offset_align(align_of_val(&*ptr))
}