From 8f51c8d687cb6fd7e98f68b93f40445ecd4690fa Mon Sep 17 00:00:00 2001 From: Nick Cameron Date: Thu, 24 Sep 2015 10:00:54 +1200 Subject: [PATCH] rustfmt liballoc --- src/liballoc/arc.rs | 78 +++++++++++++++++++--------- src/liballoc/boxed.rs | 102 ++++++++++++++++++++++++------------- src/liballoc/boxed_test.rs | 15 ++++-- src/liballoc/heap.rs | 25 ++++++--- src/liballoc/lib.rs | 12 +++-- src/liballoc/raw_vec.rs | 56 +++++++++++++------- src/liballoc/rc.rs | 71 +++++++++++++++++--------- 7 files changed, 244 insertions(+), 115 deletions(-) diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 78821403de0..4e95e383f41 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -214,7 +214,9 @@ pub fn new(data: T) -> Arc { #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { // See `drop` for why all these atomics are like this - if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) } + if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { + return Err(this) + } atomic::fence(Acquire); @@ -251,7 +253,9 @@ pub fn downgrade(this: &Self) -> Weak { let cur = this.inner().weak.load(Relaxed); // check if the weak counter is currently "locked"; if so, spin. - if cur == usize::MAX { continue } + if cur == usize::MAX { + continue + } // NOTE: this code currently ignores the possibility of overflow // into usize::MAX; in general both Rc and Arc need to be adjusted @@ -348,7 +352,9 @@ fn clone(&self) -> Arc { // We abort because such a program is incredibly degenerate, and we // don't care to support it. if old_size > MAX_REFCOUNT { - unsafe { abort(); } + unsafe { + abort(); + } } Arc { _ptr: self._ptr } @@ -556,7 +562,9 @@ fn drop(&mut self) { // Because `fetch_sub` is already atomic, we do not need to synchronize // with other threads unless we are going to delete the object. This // same logic applies to the below `fetch_sub` to the `weak` count. - if self.inner().strong.fetch_sub(1, Release) != 1 { return } + if self.inner().strong.fetch_sub(1, Release) != 1 { + return + } // This fence is needed to prevent reordering of use of the data and // deletion of the data. Because it is marked `Release`, the decreasing @@ -577,9 +585,7 @@ fn drop(&mut self) { // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) atomic::fence(Acquire); - unsafe { - self.drop_slow() - } + unsafe { self.drop_slow() } } } @@ -613,11 +619,15 @@ pub fn upgrade(&self) -> Option> { // "stale" read of 0 is fine), and any other value is // confirmed via the CAS below. let n = inner.strong.load(Relaxed); - if n == 0 { return None } + if n == 0 { + return None + } // Relaxed is valid for the same reason it is on Arc's Clone impl let old = inner.strong.compare_and_swap(n, n + 1, Relaxed); - if old == n { return Some(Arc { _ptr: self._ptr }) } + if old == n { + return Some(Arc { _ptr: self._ptr }) + } } } @@ -653,7 +663,9 @@ fn clone(&self) -> Weak { // See comments in Arc::clone() for why we do this (for mem::forget). if old_size > MAX_REFCOUNT { - unsafe { abort(); } + unsafe { + abort(); + } } return Weak { _ptr: self._ptr } @@ -705,9 +717,7 @@ fn drop(&mut self) { // ref, which can only happen after the lock is released. 
if self.inner().weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); - unsafe { deallocate(ptr as *mut u8, - size_of_val(&*ptr), - align_of_val(&*ptr)) } + unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) } } } } @@ -727,7 +737,9 @@ impl PartialEq for Arc { /// /// five == Arc::new(5); /// ``` - fn eq(&self, other: &Arc) -> bool { *(*self) == *(*other) } + fn eq(&self, other: &Arc) -> bool { + *(*self) == *(*other) + } /// Inequality for two `Arc`s. /// @@ -742,7 +754,9 @@ fn eq(&self, other: &Arc) -> bool { *(*self) == *(*other) } /// /// five != Arc::new(5); /// ``` - fn ne(&self, other: &Arc) -> bool { *(*self) != *(*other) } + fn ne(&self, other: &Arc) -> bool { + *(*self) != *(*other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Arc { @@ -776,7 +790,9 @@ fn partial_cmp(&self, other: &Arc) -> Option { /// /// five < Arc::new(5); /// ``` - fn lt(&self, other: &Arc) -> bool { *(*self) < *(*other) } + fn lt(&self, other: &Arc) -> bool { + *(*self) < *(*other) + } /// 'Less-than or equal to' comparison for two `Arc`s. /// @@ -791,7 +807,9 @@ fn lt(&self, other: &Arc) -> bool { *(*self) < *(*other) } /// /// five <= Arc::new(5); /// ``` - fn le(&self, other: &Arc) -> bool { *(*self) <= *(*other) } + fn le(&self, other: &Arc) -> bool { + *(*self) <= *(*other) + } /// Greater-than comparison for two `Arc`s. /// @@ -806,7 +824,9 @@ fn le(&self, other: &Arc) -> bool { *(*self) <= *(*other) } /// /// five > Arc::new(5); /// ``` - fn gt(&self, other: &Arc) -> bool { *(*self) > *(*other) } + fn gt(&self, other: &Arc) -> bool { + *(*self) > *(*other) + } /// 'Greater-than or equal to' comparison for two `Arc`s. /// @@ -821,11 +841,15 @@ fn gt(&self, other: &Arc) -> bool { *(*self) > *(*other) } /// /// five >= Arc::new(5); /// ``` - fn ge(&self, other: &Arc) -> bool { *(*self) >= *(*other) } + fn ge(&self, other: &Arc) -> bool { + *(*self) >= *(*other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Arc { - fn cmp(&self, other: &Arc) -> Ordering { (**self).cmp(&**other) } + fn cmp(&self, other: &Arc) -> Ordering { + (**self).cmp(&**other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for Arc {} @@ -854,7 +878,9 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { #[stable(feature = "rust1", since = "1.0.0")] impl Default for Arc { #[stable(feature = "rust1", since = "1.0.0")] - fn default() -> Arc { Arc::new(Default::default()) } + fn default() -> Arc { + Arc::new(Default::default()) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1015,7 +1041,7 @@ fn test_dead() { #[test] fn weak_self_cyclic() { struct Cycle { - x: Mutex>> + x: Mutex>>, } let a = Arc::new(Cycle { x: Mutex::new(None) }); @@ -1095,7 +1121,9 @@ fn show_arc() { // Make sure deriving works with Arc #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)] - struct Foo { inner: Arc } + struct Foo { + inner: Arc, + } #[test] fn test_unsized() { @@ -1108,5 +1136,7 @@ fn test_unsized() { } impl borrow::Borrow for Arc { - fn borrow(&self) -> &T { &**self } + fn borrow(&self) -> &T { + &**self + } } diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 4293b4765e1..18301b9d32c 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -66,7 +66,7 @@ use core::ops::{CoerceUnsized, Deref, DerefMut}; use core::ops::{Placer, Boxed, Place, InPlace, BoxPlace}; use core::ptr::{self, Unique}; -use core::raw::{TraitObject}; +use core::raw::TraitObject; /// A value that represents the heap. 
This is the default place that the `box` /// keyword allocates into when no place is supplied. @@ -96,7 +96,9 @@ reason = "may be renamed; uncertain about custom allocator design", issue = "27779")] #[derive(Copy, Clone)] -pub struct ExchangeHeapSingleton { _force_singleton: () } +pub struct ExchangeHeapSingleton { + _force_singleton: (), +} /// A pointer type for heap allocation. /// @@ -127,7 +129,7 @@ pub struct ExchangeHeapSingleton { _force_singleton: () } #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] -pub struct IntermediateBox{ +pub struct IntermediateBox { ptr: *mut u8, size: usize, align: usize, @@ -153,9 +155,7 @@ fn make_place() -> IntermediateBox { let p = if size == 0 { heap::EMPTY as *mut u8 } else { - let p = unsafe { - heap::allocate(size, align) - }; + let p = unsafe { heap::allocate(size, align) }; if p.is_null() { panic!("Box make_place allocation failure."); } @@ -166,18 +166,24 @@ fn make_place() -> IntermediateBox { } impl BoxPlace for IntermediateBox { - fn make_place() -> IntermediateBox { make_place() } + fn make_place() -> IntermediateBox { + make_place() + } } impl InPlace for IntermediateBox { type Owner = Box; - unsafe fn finalize(self) -> Box { finalize(self) } + unsafe fn finalize(self) -> Box { + finalize(self) + } } impl Boxed for Box { type Data = T; type Place = IntermediateBox; - unsafe fn finalize(b: IntermediateBox) -> Box { finalize(b) } + unsafe fn finalize(b: IntermediateBox) -> Box { + finalize(b) + } } impl Placer for ExchangeHeapSingleton { @@ -191,9 +197,7 @@ fn make_place(self) -> IntermediateBox { impl Drop for IntermediateBox { fn drop(&mut self) { if self.size > 0 { - unsafe { - heap::deallocate(self.ptr, self.size, self.align) - } + unsafe { heap::deallocate(self.ptr, self.size, self.align) } } } } @@ -257,13 +261,17 @@ pub fn into_raw(b: Box) -> *mut T { #[stable(feature = "rust1", since = "1.0.0")] impl Default for Box { #[stable(feature = "rust1", since = "1.0.0")] - fn default() -> Box { box Default::default() } + fn default() -> Box { + box Default::default() + } } #[stable(feature = "rust1", since = "1.0.0")] impl Default for Box<[T]> { #[stable(feature = "rust1", since = "1.0.0")] - fn default() -> Box<[T]> { Box::<[T; 0]>::new([]) } + fn default() -> Box<[T]> { + Box::<[T; 0]>::new([]) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -277,7 +285,11 @@ impl Clone for Box { /// let y = x.clone(); /// ``` #[inline] - fn clone(&self) -> Box { box {(**self).clone()} } + fn clone(&self) -> Box { + box { + (**self).clone() + } + } /// Copies `source`'s contents into `self` without creating a new allocation. 
/// /// # Examples @@ -312,9 +324,13 @@ fn clone(&self) -> Self { #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Box { #[inline] - fn eq(&self, other: &Box) -> bool { PartialEq::eq(&**self, &**other) } + fn eq(&self, other: &Box) -> bool { + PartialEq::eq(&**self, &**other) + } #[inline] - fn ne(&self, other: &Box) -> bool { PartialEq::ne(&**self, &**other) } + fn ne(&self, other: &Box) -> bool { + PartialEq::ne(&**self, &**other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Box { @@ -323,13 +339,21 @@ fn partial_cmp(&self, other: &Box) -> Option { PartialOrd::partial_cmp(&**self, &**other) } #[inline] - fn lt(&self, other: &Box) -> bool { PartialOrd::lt(&**self, &**other) } + fn lt(&self, other: &Box) -> bool { + PartialOrd::lt(&**self, &**other) + } #[inline] - fn le(&self, other: &Box) -> bool { PartialOrd::le(&**self, &**other) } + fn le(&self, other: &Box) -> bool { + PartialOrd::le(&**self, &**other) + } #[inline] - fn ge(&self, other: &Box) -> bool { PartialOrd::ge(&**self, &**other) } + fn ge(&self, other: &Box) -> bool { + PartialOrd::ge(&**self, &**other) + } #[inline] - fn gt(&self, other: &Box) -> bool { PartialOrd::gt(&**self, &**other) } + fn gt(&self, other: &Box) -> bool { + PartialOrd::gt(&**self, &**other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Box { @@ -357,8 +381,7 @@ pub fn downcast(self) -> Result, Box> { unsafe { // Get the raw representation of the trait object let raw = Box::into_raw(self); - let to: TraitObject = - mem::transmute::<*mut Any, TraitObject>(raw); + let to: TraitObject = mem::transmute::<*mut Any, TraitObject>(raw); // Extract the data pointer Ok(Box::from_raw(to.data as *mut T)) @@ -409,23 +432,33 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { impl Deref for Box { type Target = T; - fn deref(&self) -> &T { &**self } + fn deref(&self) -> &T { + &**self + } } #[stable(feature = "rust1", since = "1.0.0")] impl DerefMut for Box { - fn deref_mut(&mut self) -> &mut T { &mut **self } + fn deref_mut(&mut self) -> &mut T { + &mut **self + } } #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for Box { type Item = I::Item; - fn next(&mut self) -> Option { (**self).next() } - fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } + fn next(&mut self) -> Option { + (**self).next() + } + fn size_hint(&self) -> (usize, Option) { + (**self).size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for Box { - fn next_back(&mut self) -> Option { (**self).next_back() } + fn next_back(&mut self) -> Option { + (**self).next_back() + } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Box {} @@ -507,10 +540,7 @@ impl, U: ?Sized> CoerceUnsized> for Box {} #[stable(feature = "box_slice_clone", since = "1.3.0")] impl Clone for Box<[T]> { fn clone(&self) -> Self { - let mut new = BoxBuilder { - data: RawVec::with_capacity(self.len()), - len: 0 - }; + let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0 }; let mut target = new.data.ptr(); @@ -556,9 +586,13 @@ fn drop(&mut self) { } impl borrow::Borrow for Box { - fn borrow(&self) -> &T { &**self } + fn borrow(&self) -> &T { + &**self + } } impl borrow::BorrowMut for Box { - fn borrow_mut(&mut self) -> &mut T { &mut **self } + fn borrow_mut(&mut self) -> &mut T { + &mut **self + } } diff --git a/src/liballoc/boxed_test.rs b/src/liballoc/boxed_test.rs index 2ef23b26a56..7f3dadcf24d 100644 --- a/src/liballoc/boxed_test.rs +++ 
b/src/liballoc/boxed_test.rs @@ -34,12 +34,16 @@ fn any_move() { let b = Box::new(Test) as Box; match a.downcast::() { - Ok(a) => { assert!(a == Box::new(8)); } - Err(..) => panic!() + Ok(a) => { + assert!(a == Box::new(8)); + } + Err(..) => panic!(), } match b.downcast::() { - Ok(a) => { assert!(a == Box::new(Test)); } - Err(..) => panic!() + Ok(a) => { + assert!(a == Box::new(Test)); + } + Err(..) => panic!(), } let a = Box::new(8) as Box; @@ -70,7 +74,8 @@ fn test_show() { #[test] fn deref() { - fn homura>(_: T) { } + fn homura>(_: T) { + } homura(Box::new(765)); } diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs index 10cb84d1da1..de934807e18 100644 --- a/src/liballoc/heap.rs +++ b/src/liballoc/heap.rs @@ -22,10 +22,12 @@ #[allocator] fn __rust_allocate(size: usize, align: usize) -> *mut u8; fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize); - fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, - align: usize) -> *mut u8; - fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, - align: usize) -> usize; + fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8; + fn __rust_reallocate_inplace(ptr: *mut u8, + old_size: usize, + size: usize, + align: usize) + -> usize; fn __rust_usable_size(size: usize, align: usize) -> usize; } @@ -84,8 +86,11 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usiz /// create the allocation referenced by `ptr`. The `old_size` parameter may be /// any value in range_inclusive(requested_size, usable_size). #[inline] -pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, - align: usize) -> usize { +pub unsafe fn reallocate_inplace(ptr: *mut u8, + old_size: usize, + size: usize, + align: usize) + -> usize { check_size_and_alignment(size, align); __rust_reallocate_inplace(ptr, old_size, size, align) } @@ -124,7 +129,9 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { EMPTY as *mut u8 } else { let ptr = allocate(size, align); - if ptr.is_null() { ::oom() } + if ptr.is_null() { + ::oom() + } ptr } } @@ -148,7 +155,9 @@ fn basic_reallocate_inplace_noop() { unsafe { let size = 4000; let ptr = heap::allocate(size, 8); - if ptr.is_null() { ::oom() } + if ptr.is_null() { + ::oom() + } let ret = heap::reallocate_inplace(ptr, size, size, 8); heap::deallocate(ptr, size, 8); assert_eq!(ret, heap::usable_size(size, 8)); diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 66de5d7bea8..98c729aaba4 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -107,8 +107,12 @@ // Allow testing this library -#[cfg(test)] #[macro_use] extern crate std; -#[cfg(test)] #[macro_use] extern crate log; +#[cfg(test)] +#[macro_use] +extern crate std; +#[cfg(test)] +#[macro_use] +extern crate log; // Heaps provided for low-level allocation strategies @@ -123,7 +127,9 @@ #[cfg(not(test))] pub mod boxed; #[cfg(test)] -mod boxed { pub use std::boxed::{Box, HEAP}; } +mod boxed { + pub use std::boxed::{Box, HEAP}; +} #[cfg(test)] mod boxed_test; pub mod arc; diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 97acd0db524..5c9ca63a109 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -58,7 +58,11 @@ impl RawVec { pub fn new() -> Self { unsafe { // !0 is usize::MAX. This branch should be stripped at compile time. 
- let cap = if mem::size_of::() == 0 { !0 } else { 0 }; + let cap = if mem::size_of::() == 0 { + !0 + } else { + 0 + }; // heap::EMPTY doubles as "unallocated" and "zero-sized allocation" RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap } @@ -92,7 +96,9 @@ pub fn with_capacity(cap: usize) -> Self { } else { let align = mem::align_of::(); let ptr = heap::allocate(alloc_size, align); - if ptr.is_null() { oom() } + if ptr.is_null() { + oom() + } ptr }; @@ -133,7 +139,11 @@ pub fn ptr(&self) -> *mut T { /// /// This will always be `usize::MAX` if `T` is zero-sized. pub fn cap(&self) -> usize { - if mem::size_of::() == 0 { !0 } else { self.cap } + if mem::size_of::() == 0 { + !0 + } else { + self.cap + } } /// Doubles the size of the type's backing allocation. This is common enough @@ -190,7 +200,11 @@ pub fn double(&mut self) { let (new_cap, ptr) = if self.cap == 0 { // skip to 4 because tiny Vec's are dumb; but not if that would cause overflow - let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 }; + let new_cap = if elem_size > (!0) / 8 { + 1 + } else { + 4 + }; let ptr = heap::allocate(new_cap * elem_size, align); (new_cap, ptr) } else { @@ -207,7 +221,9 @@ pub fn double(&mut self) { }; // If allocate or reallocate fail, we'll get `null` back - if ptr.is_null() { oom() } + if ptr.is_null() { + oom() + } self.ptr = Unique::new(ptr as *mut _); self.cap = new_cap; @@ -246,7 +262,9 @@ pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) { // Don't actually need any more capacity. // Wrapping in case they gave a bad `used_cap`. - if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; } + if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { + return; + } // Nothing we can really do about these checks :( let new_cap = used_cap.checked_add(needed_extra_cap).expect("capacity overflow"); @@ -256,14 +274,13 @@ pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) { let ptr = if self.cap == 0 { heap::allocate(new_alloc_size, align) } else { - heap::reallocate(self.ptr() as *mut _, - self.cap * elem_size, - new_alloc_size, - align) + heap::reallocate(self.ptr() as *mut _, self.cap * elem_size, new_alloc_size, align) }; // If allocate or reallocate fail, we'll get `null` back - if ptr.is_null() { oom() } + if ptr.is_null() { + oom() + } self.ptr = Unique::new(ptr as *mut _); self.cap = new_cap; @@ -326,7 +343,9 @@ pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) { // Don't actually need any more capacity. 
// Wrapping in case they give a bas `used_cap` - if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; } + if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { + return; + } // Nothing we can really do about these checks :( let new_cap = used_cap.checked_add(needed_extra_cap) @@ -339,14 +358,13 @@ pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) { let ptr = if self.cap == 0 { heap::allocate(new_alloc_size, align) } else { - heap::reallocate(self.ptr() as *mut _, - self.cap * elem_size, - new_alloc_size, - align) + heap::reallocate(self.ptr() as *mut _, self.cap * elem_size, new_alloc_size, align) }; // If allocate or reallocate fail, we'll get `null` back - if ptr.is_null() { oom() } + if ptr.is_null() { + oom() + } self.ptr = Unique::new(ptr as *mut _); self.cap = new_cap; @@ -386,7 +404,9 @@ pub fn shrink_to_fit(&mut self, amount: usize) { self.cap * elem_size, amount * elem_size, align); - if ptr.is_null() { oom() } + if ptr.is_null() { + oom() + } self.ptr = Unique::new(ptr as *mut _); } self.cap = amount; diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 4fe474cef0a..47db2e48f2d 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -213,7 +213,7 @@ pub fn new(value: T) -> Rc { _ptr: NonZero::new(Box::into_raw(box RcBox { strong: Cell::new(1), weak: Cell::new(1), - value: value + value: value, })), } } @@ -290,13 +290,17 @@ pub fn downgrade(this: &Self) -> Weak { #[inline] #[unstable(feature = "rc_counts", reason = "not clearly useful", issue = "28356")] - pub fn weak_count(this: &Self) -> usize { this.weak() - 1 } + pub fn weak_count(this: &Self) -> usize { + this.weak() - 1 + } /// Get the number of strong references to this value. #[inline] #[unstable(feature = "rc_counts", reason = "not clearly useful", issue = "28356")] - pub fn strong_count(this: &Self) -> usize { this.strong() } + pub fn strong_count(this: &Self) -> usize { + this.strong() + } /// Returns true if there are no other `Rc` or `Weak` values that share /// the same inner value. @@ -451,7 +455,7 @@ fn drop(&mut self) { unsafe { let ptr = *self._ptr; if !(*(&ptr as *const _ as *const *const ())).is_null() && - ptr as *const () as usize != mem::POST_DROP_USIZE { + ptr as *const () as usize != mem::POST_DROP_USIZE { self.dec_strong(); if self.strong() == 0 { // destroy the contained object @@ -462,9 +466,7 @@ fn drop(&mut self) { self.dec_weak(); if self.weak() == 0 { - deallocate(ptr as *mut u8, - size_of_val(&*ptr), - align_of_val(&*ptr)) + deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) } } } @@ -530,7 +532,9 @@ impl PartialEq for Rc { /// five == Rc::new(5); /// ``` #[inline(always)] - fn eq(&self, other: &Rc) -> bool { **self == **other } + fn eq(&self, other: &Rc) -> bool { + **self == **other + } /// Inequality for two `Rc`s. /// @@ -546,7 +550,9 @@ fn eq(&self, other: &Rc) -> bool { **self == **other } /// five != Rc::new(5); /// ``` #[inline(always)] - fn ne(&self, other: &Rc) -> bool { **self != **other } + fn ne(&self, other: &Rc) -> bool { + **self != **other + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -586,7 +592,9 @@ fn partial_cmp(&self, other: &Rc) -> Option { /// five < Rc::new(5); /// ``` #[inline(always)] - fn lt(&self, other: &Rc) -> bool { **self < **other } + fn lt(&self, other: &Rc) -> bool { + **self < **other + } /// 'Less-than or equal to' comparison for two `Rc`s. 
/// @@ -602,7 +610,9 @@ fn lt(&self, other: &Rc) -> bool { **self < **other } /// five <= Rc::new(5); /// ``` #[inline(always)] - fn le(&self, other: &Rc) -> bool { **self <= **other } + fn le(&self, other: &Rc) -> bool { + **self <= **other + } /// Greater-than comparison for two `Rc`s. /// @@ -618,7 +628,9 @@ fn le(&self, other: &Rc) -> bool { **self <= **other } /// five > Rc::new(5); /// ``` #[inline(always)] - fn gt(&self, other: &Rc) -> bool { **self > **other } + fn gt(&self, other: &Rc) -> bool { + **self > **other + } /// 'Greater-than or equal to' comparison for two `Rc`s. /// @@ -634,7 +646,9 @@ fn gt(&self, other: &Rc) -> bool { **self > **other } /// five >= Rc::new(5); /// ``` #[inline(always)] - fn ge(&self, other: &Rc) -> bool { **self >= **other } + fn ge(&self, other: &Rc) -> bool { + **self >= **other + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -653,7 +667,9 @@ impl Ord for Rc { /// five.partial_cmp(&Rc::new(5)); /// ``` #[inline] - fn cmp(&self, other: &Rc) -> Ordering { (**self).cmp(&**other) } + fn cmp(&self, other: &Rc) -> Ordering { + (**self).cmp(&**other) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -764,13 +780,12 @@ fn drop(&mut self) { unsafe { let ptr = *self._ptr; if !(*(&ptr as *const _ as *const *const ())).is_null() && - ptr as *const () as usize != mem::POST_DROP_USIZE { + ptr as *const () as usize != mem::POST_DROP_USIZE { self.dec_weak(); // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. if self.weak() == 0 { - deallocate(ptr as *mut u8, size_of_val(&*ptr), - align_of_val(&*ptr)) + deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) } } } @@ -821,7 +836,9 @@ trait RcBoxPtr { fn inner(&self) -> &RcBox; #[inline] - fn strong(&self) -> usize { self.inner().strong.get() } + fn strong(&self) -> usize { + self.inner().strong.get() + } #[inline] fn inc_strong(&self) { @@ -829,10 +846,14 @@ fn inc_strong(&self) { } #[inline] - fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); } + fn dec_strong(&self) { + self.inner().strong.set(self.strong() - 1); + } #[inline] - fn weak(&self) -> usize { self.inner().weak.get() } + fn weak(&self) -> usize { + self.inner().weak.get() + } #[inline] fn inc_weak(&self) { @@ -840,7 +861,9 @@ fn inc_weak(&self) { } #[inline] - fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); } + fn dec_weak(&self) { + self.inner().weak.set(self.weak() - 1); + } } impl RcBoxPtr for Rc { @@ -928,7 +951,7 @@ fn test_dead() { #[test] fn weak_self_cyclic() { struct Cycle { - x: RefCell>> + x: RefCell>>, } let a = Rc::new(Cycle { x: RefCell::new(None) }); @@ -1086,5 +1109,7 @@ fn test_unsized() { } impl borrow::Borrow for Rc { - fn borrow(&self) -> &T { &**self } + fn borrow(&self) -> &T { + &**self + } } -- 2.44.0