#[stable(feature = "arc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
- if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) }
+ if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
+     return Err(this)
+ }
atomic::fence(Acquire);
let cur = this.inner().weak.load(Relaxed);
// check if the weak counter is currently "locked"; if so, spin.
- if cur == usize::MAX { continue }
+ if cur == usize::MAX {
+     continue
+ }
// NOTE: this code currently ignores the possibility of overflow
// into usize::MAX; in general both Rc and Arc need to be adjusted
// to deal with overflow.
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
- deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
+ deallocate(ptr as *mut u8,
+            size_of_val(&*ptr),
+            align_of_val(&*ptr))
}
}
}
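
For context: `try_unwrap` is the stable way to recover the inner value when the caller holds the only strong reference. A minimal usage sketch of the API whose formatting changes above:

    use std::sync::Arc;

    let x = Arc::new(3);
    assert_eq!(Arc::try_unwrap(x), Ok(3)); // sole owner: the value moves out

    let x = Arc::new(4);
    let _y = x.clone(); // a second strong handle exists,
    assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4); // so the Arc comes back
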
// We abort because such a program is incredibly degenerate, and we
// don't care to support it.
if old_size > MAX_REFCOUNT {
- unsafe { abort(); }
+ unsafe {
+     abort();
+ }
}
Arc { _ptr: self._ptr }
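
The `MAX_REFCOUNT` guard above exists because safe code can leak clones with `mem::forget`, growing the strong count without bound. A sketch of the degenerate pattern the abort defends against (not meant to be run):

    use std::mem;
    use std::sync::Arc;

    let a = Arc::new(0u32);
    loop {
        // Each iteration bumps the strong count and leaks the handle, so
        // the count only grows; once it passes MAX_REFCOUNT (isize::MAX
        // as usize here), the next clone() aborts rather than risk the
        // count wrapping to zero and a use-after-free.
        mem::forget(a.clone());
    }
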
// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
// same logic applies to the below `fetch_sub` to the `weak` count.
- if self.inner().strong.fetch_sub(1, Release) != 1 { return }
+ if self.inner().strong.fetch_sub(1, Release) != 1 {
+     return
+ }
// This fence is needed to prevent reordering of use of the data and
// deletion of the data. Because it is marked `Release`, the decreasing
// of the reference count synchronizes with this `Acquire` fence. This
// means that use of the data happens before decreasing the reference
// count, which happens before this fence, which happens before the
// deletion of the data.
atomic::fence(Acquire);
unsafe {
- self.drop_slow()
+ self.drop_slow();
}
}
}
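
The `Release` decrement paired with an `Acquire` fence on the final decrement is a general refcounting idiom, not something specific to `Arc`. A standalone sketch of the same pattern with a bare `AtomicUsize` (the `Shared` and `release` names are hypothetical):

    use std::sync::atomic::{fence, AtomicUsize, Ordering};

    struct Shared {
        refcount: AtomicUsize, // plus the shared data itself
    }

    fn release(shared: &Shared) {
        // `Release` makes every earlier use of the data happen-before the
        // decrement; only the thread that drops the count to zero pays for
        // the `Acquire` fence, which orders all those uses before teardown.
        if shared.refcount.fetch_sub(1, Ordering::Release) == 1 {
            fence(Ordering::Acquire);
            // ...destroy the shared data here...
        }
    }
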
// "stale" read of 0 is fine), and any other value is
// confirmed via the CAS below.
let n = inner.strong.load(Relaxed);
- if n == 0 { return None }
+ if n == 0 {
+     return None
+ }
// Relaxed is valid for the same reason it is on Arc's Clone impl
let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
- if old == n { return Some(Arc { _ptr: self._ptr }) }
+ if old == n {
+     return Some(Arc { _ptr: self._ptr })
+ }
}
}
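
The observable behavior of that CAS loop: `upgrade` succeeds while any strong reference is alive and returns `None` once the count has hit zero.

    use std::sync::Arc;

    let five = Arc::new(5);
    let weak_five = Arc::downgrade(&five);

    assert!(weak_five.upgrade().is_some()); // strong count is 1; CAS succeeds

    drop(five); // last strong reference gone
    assert!(weak_five.upgrade().is_none()); // the Relaxed load now reads 0
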
// See comments in Arc::clone() for why we do this (for mem::forget).
if old_size > MAX_REFCOUNT {
- unsafe { abort(); }
+ unsafe {
+     abort();
+ }
}
return Weak { _ptr: self._ptr }
// It's not necessary to check for the locked state here, because the
// weak count can only be locked if there was precisely one weak ref,
// meaning that drop could only subsequently run ON that remaining weak
// ref, which can only happen after the lock is released.
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
- unsafe { deallocate(ptr as *mut u8,
-                     size_of_val(&*ptr),
-                     align_of_val(&*ptr)) }
+ unsafe {
+     deallocate(ptr as *mut u8,
+                size_of_val(&*ptr),
+                align_of_val(&*ptr))
+ }
}
}
}
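
Note the division of labor: dropping the last strong reference destroys the value (via `drop_slow` above), while the `deallocate` here runs only when the last weak reference disappears, since the strong references collectively hold one implicit weak. A small lifecycle sketch:

    use std::sync::Arc;

    let strong = Arc::new(vec![1, 2, 3]);
    let weak = Arc::downgrade(&strong);

    drop(strong); // the Vec's destructor runs, but the allocation survives
    assert!(weak.upgrade().is_none());
    drop(weak); // last weak ref: this is where `deallocate` finally fires
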
///
/// five == Arc::new(5);
/// ```
- fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
+ fn eq(&self, other: &Arc<T>) -> bool {
+     *(*self) == *(*other)
+ }
/// Inequality for two `Arc<T>`s.
///
///
/// five != Arc::new(5);
/// ```
- fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
+ fn ne(&self, other: &Arc<T>) -> bool {
+     *(*self) != *(*other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five < Arc::new(5);
/// ```
- fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
+ fn lt(&self, other: &Arc<T>) -> bool {
+     *(*self) < *(*other)
+ }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
///
/// five <= Arc::new(5);
/// ```
- fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
+ fn le(&self, other: &Arc<T>) -> bool {
+     *(*self) <= *(*other)
+ }
/// Greater-than comparison for two `Arc<T>`s.
///
///
/// five > Arc::new(5);
/// ```
- fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
+ fn gt(&self, other: &Arc<T>) -> bool {
+     *(*self) > *(*other)
+ }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
///
/// five >= Arc::new(5);
/// ```
- fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
+ fn ge(&self, other: &Arc<T>) -> bool {
+     *(*self) >= *(*other)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Arc<T> {
- fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
+ fn cmp(&self, other: &Arc<T>) -> Ordering {
+     (**self).cmp(&**other)
+ }
}
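
All of these comparison impls delegate to the pointed-to values, so two independent allocations compare equal whenever their contents do:

    use std::cmp::Ordering;
    use std::sync::Arc;

    let five = Arc::new(5);
    let six = Arc::new(6);

    assert!(five == Arc::new(5)); // value equality, not pointer identity
    assert!(five < six);
    assert_eq!(five.cmp(&six), Ordering::Less);
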
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
- fn default() -> Arc<T> { Arc::new(Default::default()) }
+ fn default() -> Arc<T> {
+     Arc::new(Default::default())
+ }
}
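
So `Arc<T>`'s default is simply `T`'s default moved into a fresh allocation:

    use std::sync::Arc;

    let zero: Arc<i32> = Default::default();
    assert_eq!(*zero, 0);
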
#[stable(feature = "rust1", since = "1.0.0")]
#[test]
fn weak_self_cyclic() {
struct Cycle {
- x: Mutex<Option<Weak<Cycle>>>
+ x: Mutex<Option<Weak<Cycle>>>,
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
// Make sure deriving works with Arc<T>
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
- struct Foo { inner: Arc<i32> }
+ struct Foo {
+     inner: Arc<i32>,
+ }
#[test]
fn test_unsized() {
}
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
- fn borrow(&self) -> &T { &**self }
+ fn borrow(&self) -> &T {
+     &**self
+ }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsRef<T> for Arc<T> {
+ fn as_ref(&self) -> &T {
+     &**self
+ }
}
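
With `Borrow` and the newly stabilized `AsRef` impl, generic code written against `AsRef<U>` accepts an `Arc<U>` as readily as a plain reference. A small demonstration (the `total` helper is illustrative, not part of this change):

    use std::sync::Arc;

    fn total<V: AsRef<Vec<i32>>>(v: V) -> i32 {
        v.as_ref().iter().sum()
    }

    let data = Arc::new(vec![1, 2, 3]);
    assert_eq!(total(&data), 6); // &Arc<Vec<i32>> works via the blanket impl
    assert_eq!(total(data), 6); // and so does Arc<Vec<i32>> by value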