#[stable(feature = "arc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
- if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) }
+ if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
+ return Err(this)
+ }
atomic::fence(Acquire);
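// A minimal usage sketch of `try_unwrap` (hypothetical caller, not part of
// this patch): unwrapping succeeds only while this is the sole strong handle.
//
//     let x = Arc::new(3);
//     assert_eq!(Arc::try_unwrap(x), Ok(3));
//     let y = Arc::new(4);
//     let _y2 = y.clone();
//     assert!(Arc::try_unwrap(y).is_err());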
let cur = this.inner().weak.load(Relaxed);
// check if the weak counter is currently "locked"; if so, spin.
- if cur == usize::MAX { continue }
+ if cur == usize::MAX {
+ continue
+ }
// NOTE: this code currently ignores the possibility of overflow
// into usize::MAX; in general both Rc and Arc need to be adjusted
// to deal with overflow.
// We abort because such a program is incredibly degenerate, and we
// don't care to support it.
if old_size > MAX_REFCOUNT {
- unsafe { abort(); }
+ unsafe {
+ abort();
+ }
}
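// The guard above, sketched standalone against a bare `AtomicUsize` (names
// reuse this module's `MAX_REFCOUNT` and `abort`; purely illustrative):
//
//     fn bump(count: &AtomicUsize) {
//         if count.fetch_add(1, Relaxed) > MAX_REFCOUNT {
//             // saturated: aborting beats a use-after-free on overflow
//             unsafe { abort(); }
//         }
//     }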
Arc { _ptr: self._ptr }
// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
// same logic applies to the below `fetch_sub` to the `weak` count.
- if self.inner().strong.fetch_sub(1, Release) != 1 { return }
+ if self.inner().strong.fetch_sub(1, Release) != 1 {
+ return
+ }
// This fence is needed to prevent reordering of use of the data and
// deletion of the data. Because the decrement is marked `Release`, it
// synchronizes with this `Acquire` fence: all uses of the data happen
// before the final decrement, which happens before the deletion. [1]
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
- unsafe {
- self.drop_slow()
- }
+ unsafe { self.drop_slow() }
}
}
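// A standalone sketch of the release/acquire pairing used above (illustrative
// names, assuming a plain `AtomicUsize` refcount): every decrement is
// `Release`, and only the thread that sees the count hit zero pays for an
// `Acquire` fence before freeing.
//
//     fn release(count: &AtomicUsize) -> bool {
//         if count.fetch_sub(1, Release) != 1 {
//             return false; // other owners remain; no synchronization needed
//         }
//         atomic::fence(Acquire); // pairs with every `Release` decrement
//         true // sole owner: safe to destroy and free the data
//     }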
// "stale" read of 0 is fine), and any other value is
// confirmed via the CAS below.
let n = inner.strong.load(Relaxed);
- if n == 0 { return None }
+ if n == 0 {
+ return None
+ }
// Relaxed is valid for the same reason it is on Arc's Clone impl
let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
- if old == n { return Some(Arc { _ptr: self._ptr }) }
+ if old == n {
+ return Some(Arc { _ptr: self._ptr })
+ }
}
}
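// Usage sketch (hypothetical caller): `upgrade` yields a strong handle only
// while at least one `Arc` is still alive.
//
//     let strong = Arc::new(5);
//     let weak = Arc::downgrade(&strong);
//     assert!(weak.upgrade().is_some());
//     drop(strong);
//     assert!(weak.upgrade().is_none());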
// See comments in Arc::clone() for why we do this (for mem::forget).
if old_size > MAX_REFCOUNT {
- unsafe { abort(); }
+ unsafe {
+ abort();
+ }
}
return Weak { _ptr: self._ptr }
// It's not necessary to check for the locked state here, because the
// weak count can only be locked if there was precisely one weak ref,
// meaning that drop could only subsequently run ON that remaining weak
// ref, which can only happen after the lock is released.
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
- unsafe { deallocate(ptr as *mut u8,
- size_of_val(&*ptr),
- align_of_val(&*ptr)) }
+ unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) }
}
}
}
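// The strong pointers collectively hold one implicit weak reference, so the
// weak count reaching zero means no `Arc` or `Weak` handle remains and the
// allocation itself can finally go away.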
///
/// five == Arc::new(5);
/// ```
- fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
+ fn eq(&self, other: &Arc<T>) -> bool {
+ *(*self) == *(*other)
+ }
/// Inequality for two `Arc<T>`s.
///
///
/// five != Arc::new(5);
/// ```
- fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
+ fn ne(&self, other: &Arc<T>) -> bool {
+ *(*self) != *(*other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five < Arc::new(5);
/// ```
- fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
+ fn lt(&self, other: &Arc<T>) -> bool {
+ *(*self) < *(*other)
+ }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
///
/// five <= Arc::new(5);
/// ```
- fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
+ fn le(&self, other: &Arc<T>) -> bool {
+ *(*self) <= *(*other)
+ }
/// Greater-than comparison for two `Arc<T>`s.
///
///
/// five > Arc::new(5);
/// ```
- fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
+ fn gt(&self, other: &Arc<T>) -> bool {
+ *(*self) > *(*other)
+ }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
///
/// five >= Arc::new(5);
/// ```
- fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
+ fn ge(&self, other: &Arc<T>) -> bool {
+ *(*self) >= *(*other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Arc<T> {
- fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
+ fn cmp(&self, other: &Arc<T>) -> Ordering {
+ (**self).cmp(&**other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
- fn default() -> Arc<T> { Arc::new(Default::default()) }
+ fn default() -> Arc<T> {
+ Arc::new(Default::default())
+ }
}
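// Usage sketch (illustrative): the default `Arc<T>` simply wraps
// `T::default()`.
//
//     let x: Arc<i32> = Default::default();
//     assert_eq!(*x, 0);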
#[stable(feature = "rust1", since = "1.0.0")]
#[test]
fn weak_self_cyclic() {
struct Cycle {
- x: Mutex<Option<Weak<Cycle>>>
+ x: Mutex<Option<Weak<Cycle>>>,
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
// Make sure deriving works with Arc<T>
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
- struct Foo { inner: Arc<i32> }
+ struct Foo {
+ inner: Arc<i32>,
+ }
#[test]
fn test_unsized() {
}
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
- fn borrow(&self) -> &T { &**self }
+ fn borrow(&self) -> &T {
+ &**self
+ }
}
use core::ops::{CoerceUnsized, Deref, DerefMut};
use core::ops::{Placer, Boxed, Place, InPlace, BoxPlace};
use core::ptr::{self, Unique};
-use core::raw::{TraitObject};
+use core::raw::TraitObject;
/// A value that represents the heap. This is the default place that the `box`
/// keyword allocates into when no place is supplied.
reason = "may be renamed; uncertain about custom allocator design",
issue = "27779")]
#[derive(Copy, Clone)]
-pub struct ExchangeHeapSingleton { _force_singleton: () }
+pub struct ExchangeHeapSingleton {
+ _force_singleton: (),
+}
/// A pointer type for heap allocation.
///
#[unstable(feature = "placement_in",
reason = "placement box design is still being worked out.",
issue = "27779")]
-pub struct IntermediateBox<T: ?Sized>{
+pub struct IntermediateBox<T: ?Sized> {
ptr: *mut u8,
size: usize,
align: usize,
let p = if size == 0 {
heap::EMPTY as *mut u8
} else {
- let p = unsafe {
- heap::allocate(size, align)
- };
+ let p = unsafe { heap::allocate(size, align) };
if p.is_null() {
panic!("Box make_place allocation failure.");
}
}
impl<T> BoxPlace<T> for IntermediateBox<T> {
- fn make_place() -> IntermediateBox<T> { make_place() }
+ fn make_place() -> IntermediateBox<T> {
+ make_place()
+ }
}
impl<T> InPlace<T> for IntermediateBox<T> {
type Owner = Box<T>;
- unsafe fn finalize(self) -> Box<T> { finalize(self) }
+ unsafe fn finalize(self) -> Box<T> {
+ finalize(self)
+ }
}
impl<T> Boxed for Box<T> {
type Data = T;
type Place = IntermediateBox<T>;
- unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> { finalize(b) }
+ unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> {
+ finalize(b)
+ }
}
impl<T> Placer<T> for ExchangeHeapSingleton {
impl<T: ?Sized> Drop for IntermediateBox<T> {
fn drop(&mut self) {
if self.size > 0 {
- unsafe {
- heap::deallocate(self.ptr, self.size, self.align)
- }
+ unsafe { heap::deallocate(self.ptr, self.size, self.align) }
}
}
}
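// Note that the `size > 0` guard mirrors `make_place` above: zero-sized
// places never allocate (they reuse `heap::EMPTY`), so there is nothing to
// free here.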
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Box<T> {
#[stable(feature = "rust1", since = "1.0.0")]
- fn default() -> Box<T> { box Default::default() }
+ fn default() -> Box<T> {
+ box Default::default()
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Box<[T]> {
#[stable(feature = "rust1", since = "1.0.0")]
- fn default() -> Box<[T]> { Box::<[T; 0]>::new([]) }
+ fn default() -> Box<[T]> {
+ Box::<[T; 0]>::new([])
+ }
}
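// Usage sketch (illustrative): the boxed empty array coerces to a boxed
// slice, so the default `Box<[T]>` is empty and performs no real allocation.
//
//     let s: Box<[i32]> = Default::default();
//     assert_eq!(s.len(), 0);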
#[stable(feature = "rust1", since = "1.0.0")]
/// let y = x.clone();
/// ```
#[inline]
- fn clone(&self) -> Box<T> { box {(**self).clone()} }
+ fn clone(&self) -> Box<T> {
+ box {
+ (**self).clone()
+ }
+ }
/// Copies `source`'s contents into `self` without creating a new allocation.
///
/// # Examples
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
#[inline]
- fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) }
+ fn eq(&self, other: &Box<T>) -> bool {
+ PartialEq::eq(&**self, &**other)
+ }
#[inline]
- fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) }
+ fn ne(&self, other: &Box<T>) -> bool {
+ PartialEq::ne(&**self, &**other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
PartialOrd::partial_cmp(&**self, &**other)
}
#[inline]
- fn lt(&self, other: &Box<T>) -> bool { PartialOrd::lt(&**self, &**other) }
+ fn lt(&self, other: &Box<T>) -> bool {
+ PartialOrd::lt(&**self, &**other)
+ }
#[inline]
- fn le(&self, other: &Box<T>) -> bool { PartialOrd::le(&**self, &**other) }
+ fn le(&self, other: &Box<T>) -> bool {
+ PartialOrd::le(&**self, &**other)
+ }
#[inline]
- fn ge(&self, other: &Box<T>) -> bool { PartialOrd::ge(&**self, &**other) }
+ fn ge(&self, other: &Box<T>) -> bool {
+ PartialOrd::ge(&**self, &**other)
+ }
#[inline]
- fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) }
+ fn gt(&self, other: &Box<T>) -> bool {
+ PartialOrd::gt(&**self, &**other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Box<T> {
unsafe {
// Get the raw representation of the trait object
let raw = Box::into_raw(self);
- let to: TraitObject =
- mem::transmute::<*mut Any, TraitObject>(raw);
+ let to: TraitObject = mem::transmute::<*mut Any, TraitObject>(raw);
// Extract the data pointer
Ok(Box::from_raw(to.data as *mut T))
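// This relies on the (unstable) `core::raw::TraitObject` layout: a trait
// object pointer is a { data, vtable } pair, so `to.data` recovers the
// erased `*mut T` the box was created from.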
impl<T: ?Sized> Deref for Box<T> {
type Target = T;
- fn deref(&self) -> &T { &**self }
+ fn deref(&self) -> &T {
+ &**self
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> DerefMut for Box<T> {
- fn deref_mut(&mut self) -> &mut T { &mut **self }
+ fn deref_mut(&mut self) -> &mut T {
+ &mut **self
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized> Iterator for Box<I> {
type Item = I::Item;
- fn next(&mut self) -> Option<I::Item> { (**self).next() }
- fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
+ fn next(&mut self) -> Option<I::Item> {
+ (**self).next()
+ }
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (**self).size_hint()
+ }
}
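// Usage sketch (illustrative, in this era's pre-`dyn` syntax): a boxed
// iterator just forwards to the iterator it owns.
//
//     let mut it: Box<Iterator<Item = i32>> = Box::new(0..3);
//     assert_eq!(it.next(), Some(0));
//     assert_eq!(it.size_hint(), (2, Some(2)));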
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
- fn next_back(&mut self) -> Option<I::Item> { (**self).next_back() }
+ fn next_back(&mut self) -> Option<I::Item> {
+ (**self).next_back()
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {}
#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl<T: Clone> Clone for Box<[T]> {
fn clone(&self) -> Self {
- let mut new = BoxBuilder {
- data: RawVec::with_capacity(self.len()),
- len: 0
- };
+ let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0 };
let mut target = new.data.ptr();
}
impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
- fn borrow(&self) -> &T { &**self }
+ fn borrow(&self) -> &T {
+ &**self
+ }
}
impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
- fn borrow_mut(&mut self) -> &mut T { &mut **self }
+ fn borrow_mut(&mut self) -> &mut T {
+ &mut **self
+ }
}
let b = Box::new(Test) as Box<Any>;
match a.downcast::<i32>() {
- Ok(a) => { assert!(a == Box::new(8)); }
- Err(..) => panic!()
+ Ok(a) => {
+ assert!(a == Box::new(8));
+ }
+ Err(..) => panic!(),
}
match b.downcast::<Test>() {
- Ok(a) => { assert!(a == Box::new(Test)); }
- Err(..) => panic!()
+ Ok(a) => {
+ assert!(a == Box::new(Test));
+ }
+ Err(..) => panic!(),
}
let a = Box::new(8) as Box<Any>;
#[test]
fn deref() {
- fn homura<T: Deref<Target=i32>>(_: T) { }
+ fn homura<T: Deref<Target = i32>>(_: T) {
+ }
homura(Box::new(765));
}
#[allocator]
fn __rust_allocate(size: usize, align: usize) -> *mut u8;
fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
- fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,
- align: usize) -> *mut u8;
- fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
- align: usize) -> usize;
+ fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
+ fn __rust_reallocate_inplace(ptr: *mut u8,
+ old_size: usize,
+ size: usize,
+ align: usize)
+ -> usize;
fn __rust_usable_size(size: usize, align: usize) -> usize;
}
/// The `old_size` and `align` parameters are the parameters that were used to
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in range_inclusive(requested_size, usable_size).
#[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
- align: usize) -> usize {
+pub unsafe fn reallocate_inplace(ptr: *mut u8,
+ old_size: usize,
+ size: usize,
+ align: usize)
+ -> usize {
check_size_and_alignment(size, align);
__rust_reallocate_inplace(ptr, old_size, size, align)
}
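// One way a caller might use the returned usable size (illustrative sketch,
// not a documented contract of this patch):
//
//     let usable = reallocate_inplace(ptr, old_size, new_size, align);
//     if usable < new_size {
//         // could not satisfy the request in place; fall back to `reallocate`
//     }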
EMPTY as *mut u8
} else {
let ptr = allocate(size, align);
- if ptr.is_null() { ::oom() }
+ if ptr.is_null() {
+ ::oom()
+ }
ptr
}
}
unsafe {
let size = 4000;
let ptr = heap::allocate(size, 8);
- if ptr.is_null() { ::oom() }
+ if ptr.is_null() {
+ ::oom()
+ }
let ret = heap::reallocate_inplace(ptr, size, size, 8);
heap::deallocate(ptr, size, 8);
assert_eq!(ret, heap::usable_size(size, 8));
// Allow testing this library
-#[cfg(test)] #[macro_use] extern crate std;
-#[cfg(test)] #[macro_use] extern crate log;
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+#[cfg(test)]
+#[macro_use]
+extern crate log;
// Heaps provided for low-level allocation strategies
#[cfg(not(test))]
pub mod boxed;
#[cfg(test)]
-mod boxed { pub use std::boxed::{Box, HEAP}; }
+mod boxed {
+ pub use std::boxed::{Box, HEAP};
+}
#[cfg(test)]
mod boxed_test;
pub mod arc;
pub fn new() -> Self {
unsafe {
// !0 is usize::MAX. This branch should be stripped at compile time.
- let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+ let cap = if mem::size_of::<T>() == 0 {
+ !0
+ } else {
+ 0
+ };
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
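// With `cap` set to `usize::MAX`, a zero-sized `T` never appears to run out
// of capacity, so the growth paths below are never taken for ZSTs; since the
// branch is on `size_of::<T>()`, a compile-time constant, it costs nothing.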
} else {
let align = mem::align_of::<T>();
let ptr = heap::allocate(alloc_size, align);
- if ptr.is_null() { oom() }
+ if ptr.is_null() {
+ oom()
+ }
ptr
};
///
/// This will always be `usize::MAX` if `T` is zero-sized.
pub fn cap(&self) -> usize {
- if mem::size_of::<T>() == 0 { !0 } else { self.cap }
+ if mem::size_of::<T>() == 0 {
+ !0
+ } else {
+ self.cap
+ }
}
/// Doubles the size of the type's backing allocation. This is common enough
let (new_cap, ptr) = if self.cap == 0 {
// skip to 4 because tiny Vecs are dumb; but not if that would cause overflow
- let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
+ let new_cap = if elem_size > (!0) / 8 {
+ 1
+ } else {
+ 4
+ };
let ptr = heap::allocate(new_cap * elem_size, align);
(new_cap, ptr)
} else {
};
// If allocate or reallocate fails, we'll get `null` back
- if ptr.is_null() { oom() }
+ if ptr.is_null() {
+ oom()
+ }
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
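// Doubling keeps pushes amortized O(1); starting at 4 avoids a run of tiny
// reallocations, and the `elem_size > (!0) / 8` check above guarantees that
// `4 * elem_size` cannot overflow. Illustrative growth trace: cap goes
// 0 -> 4 -> 8 -> 16 -> ...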
// Don't actually need any more capacity.
// Wrapping in case they gave a bad `used_cap`.
- if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
+ if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+ return;
+ }
// Nothing we can really do about these checks :(
let new_cap = used_cap.checked_add(needed_extra_cap).expect("capacity overflow");
let ptr = if self.cap == 0 {
heap::allocate(new_alloc_size, align)
} else {
- heap::reallocate(self.ptr() as *mut _,
- self.cap * elem_size,
- new_alloc_size,
- align)
+ heap::reallocate(self.ptr() as *mut _, self.cap * elem_size, new_alloc_size, align)
};
// If allocate or reallocate fails, we'll get `null` back
- if ptr.is_null() { oom() }
+ if ptr.is_null() {
+ oom()
+ }
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
// Don't actually need any more capacity.
// Wrapping in case they gave a bad `used_cap`.
- if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
+ if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+ return;
+ }
// Nothing we can really do about these checks :(
let new_cap = used_cap.checked_add(needed_extra_cap)
let ptr = if self.cap == 0 {
heap::allocate(new_alloc_size, align)
} else {
- heap::reallocate(self.ptr() as *mut _,
- self.cap * elem_size,
- new_alloc_size,
- align)
+ heap::reallocate(self.ptr() as *mut _, self.cap * elem_size, new_alloc_size, align)
};
// If allocate or reallocate fails, we'll get `null` back
- if ptr.is_null() { oom() }
+ if ptr.is_null() {
+ oom()
+ }
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
self.cap * elem_size,
amount * elem_size,
align);
- if ptr.is_null() { oom() }
+ if ptr.is_null() {
+ oom()
+ }
self.ptr = Unique::new(ptr as *mut _);
}
self.cap = amount;
_ptr: NonZero::new(Box::into_raw(box RcBox {
strong: Cell::new(1),
weak: Cell::new(1),
- value: value
+ value: value,
})),
}
}
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
- pub fn weak_count(this: &Self) -> usize { this.weak() - 1 }
+ pub fn weak_count(this: &Self) -> usize {
+ this.weak() - 1
+ }
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
- pub fn strong_count(this: &Self) -> usize { this.strong() }
+ pub fn strong_count(this: &Self) -> usize {
+ this.strong()
+ }
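// Usage sketch (illustrative; `rc_counts` is feature-gated here): note that
// `weak_count` excludes the implicit weak reference held collectively by the
// strong pointers, hence the `- 1` above.
//
//     let five = Rc::new(5);
//     let _weak_five = Rc::downgrade(&five);
//     assert_eq!(Rc::strong_count(&five), 1);
//     assert_eq!(Rc::weak_count(&five), 1);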
/// Returns true if there are no other `Rc` or `Weak<T>` values that share
/// the same inner value.
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
- ptr as *const () as usize != mem::POST_DROP_USIZE {
+ ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
self.dec_weak();
if self.weak() == 0 {
- deallocate(ptr as *mut u8,
- size_of_val(&*ptr),
- align_of_val(&*ptr))
+ deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
/// five == Rc::new(5);
/// ```
#[inline(always)]
- fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
+ fn eq(&self, other: &Rc<T>) -> bool {
+ **self == **other
+ }
/// Inequality for two `Rc<T>`s.
///
/// five != Rc::new(5);
/// ```
#[inline(always)]
- fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
+ fn ne(&self, other: &Rc<T>) -> bool {
+ **self != **other
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
/// five < Rc::new(5);
/// ```
#[inline(always)]
- fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
+ fn lt(&self, other: &Rc<T>) -> bool {
+ **self < **other
+ }
/// 'Less-than or equal to' comparison for two `Rc<T>`s.
///
/// five <= Rc::new(5);
/// ```
#[inline(always)]
- fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
+ fn le(&self, other: &Rc<T>) -> bool {
+ **self <= **other
+ }
/// Greater-than comparison for two `Rc<T>`s.
///
/// five > Rc::new(5);
/// ```
#[inline(always)]
- fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
+ fn gt(&self, other: &Rc<T>) -> bool {
+ **self > **other
+ }
/// 'Greater-than or equal to' comparison for two `Rc<T>`s.
///
/// five >= Rc::new(5);
/// ```
#[inline(always)]
- fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
+ fn ge(&self, other: &Rc<T>) -> bool {
+ **self >= **other
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
/// five.partial_cmp(&Rc::new(5));
/// ```
#[inline]
- fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
+ fn cmp(&self, other: &Rc<T>) -> Ordering {
+ (**self).cmp(&**other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
- ptr as *const () as usize != mem::POST_DROP_USIZE {
+ ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
- deallocate(ptr as *mut u8, size_of_val(&*ptr),
- align_of_val(&*ptr))
+ deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
fn inner(&self) -> &RcBox<T>;
#[inline]
- fn strong(&self) -> usize { self.inner().strong.get() }
+ fn strong(&self) -> usize {
+ self.inner().strong.get()
+ }
#[inline]
fn inc_strong(&self) {
}
#[inline]
- fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
+ fn dec_strong(&self) {
+ self.inner().strong.set(self.strong() - 1);
+ }
#[inline]
- fn weak(&self) -> usize { self.inner().weak.get() }
+ fn weak(&self) -> usize {
+ self.inner().weak.get()
+ }
#[inline]
fn inc_weak(&self) {
}
#[inline]
- fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
+ fn dec_weak(&self) {
+ self.inner().weak.set(self.weak() - 1);
+ }
}
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
#[test]
fn weak_self_cyclic() {
struct Cycle {
- x: RefCell<Option<Weak<Cycle>>>
+ x: RefCell<Option<Weak<Cycle>>>,
}
let a = Rc::new(Cycle { x: RefCell::new(None) });
}
impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
- fn borrow(&self) -> &T { &**self }
+ fn borrow(&self) -> &T {
+ &**self
+ }
}