The underscore-prefixed field names (`_ptr`, `_value`, `_borrow`) were a workaround for issue #12808, which caused field accesses of the contained type via `Deref` to be intercepted by the wrapper's own fields; that issue is now fixed, allowing this rename.
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
- // FIXME #12808: strange name to try to avoid interfering with
- // field accesses of the contained type via Deref
- _ptr: Shared<ArcInner<T>>,
+ ptr: Shared<ArcInner<T>>,
}
#[stable(feature = "rust1", since = "1.0.0")]
#[unsafe_no_drop_flag]
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
- // FIXME #12808: strange name to try to avoid interfering with
- // field accesses of the contained type via Deref
- _ptr: Shared<ArcInner<T>>,
+ ptr: Shared<ArcInner<T>>,
}
#[stable(feature = "arc_weak", since = "1.4.0")]
weak: atomic::AtomicUsize::new(1),
data: data,
};
- Arc { _ptr: unsafe { Shared::new(Box::into_raw(x)) } }
+ Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } }
}
/// Unwraps the contained value if the `Arc<T>` has exactly one strong reference.
atomic::fence(Acquire);
unsafe {
- let ptr = *this._ptr;
+ let ptr = *this.ptr;
let elem = ptr::read(&(*ptr).data);
// Make a weak pointer to clean up the implicit strong-weak reference
- let _weak = Weak { _ptr: this._ptr };
+ let _weak = Weak { ptr: this.ptr };
mem::forget(this);
Ok(elem)
// synchronize with the write coming from `is_unique`, so that the
// events prior to that write happen before this read.
match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
- Ok(_) => return Weak { _ptr: this._ptr },
+ Ok(_) => return Weak { ptr: this.ptr },
Err(old) => cur = old,
}
}
// `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to these
// contents.
- unsafe { &**self._ptr }
+ unsafe { &**self.ptr }
}
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
- let ptr = *self._ptr;
+ let ptr = *self.ptr;
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
}
}
- Arc { _ptr: self._ptr }
+ Arc { ptr: self.ptr }
}
}
// Materialize our own implicit weak pointer, so that it can clean
// up the ArcInner as needed.
- let weak = Weak { _ptr: this._ptr };
+ let weak = Weak { ptr: this.ptr };
// mark the data itself as already deallocated
unsafe {
// here (due to zeroing) because data is no longer accessed by
// other threads (due to there being no more strong refs at this
// point).
- let mut swap = Arc::new(ptr::read(&(**weak._ptr).data));
+ let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
mem::swap(this, &mut swap);
mem::forget(swap);
}
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
- let inner = &mut **this._ptr;
+ let inner = &mut **this.ptr;
&mut inner.data
}
}
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
- let inner = &mut **this._ptr;
+ let inner = &mut **this.ptr;
Some(&mut inner.data)
}
} else {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once)
- let thin = *self._ptr as *const ();
+ let thin = *self.ptr as *const ();
if thin as usize == mem::POST_DROP_USIZE {
return;
// Relaxed is valid for the same reason it is on Arc's Clone impl
match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
- Ok(_) => return Some(Arc { _ptr: self._ptr }),
+ Ok(_) => return Some(Arc { ptr: self.ptr }),
Err(old) => n = old,
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
- unsafe { &**self._ptr }
+ unsafe { &**self.ptr }
}
}
}
}
- return Weak { _ptr: self._ptr };
+ return Weak { ptr: self.ptr };
}
}
/// } // implicit drop
/// ```
fn drop(&mut self) {
- let ptr = *self._ptr;
+ let ptr = *self.ptr;
let thin = ptr as *const ();
// see comments above for why this check is here
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Pointer::fmt(&*self._ptr, f)
+ fmt::Pointer::fmt(&*self.ptr, f)
}
}
issue = "30425")]
pub fn new() -> Weak<T> {
unsafe {
- Weak { _ptr: Shared::new(Box::into_raw(box ArcInner {
+ Weak { ptr: Shared::new(Box::into_raw(box ArcInner {
strong: atomic::AtomicUsize::new(0),
weak: atomic::AtomicUsize::new(1),
data: uninitialized(),
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
- // FIXME #12808: strange names to try to avoid interfering with field
- // accesses of the contained type via Deref
- _ptr: Shared<RcBox<T>>,
+ ptr: Shared<RcBox<T>>,
}
#[stable(feature = "rust1", since = "1.0.0")]
// pointers, which ensures that the weak destructor never frees
// the allocation while the strong destructor is running, even
// if the weak pointer is stored inside the strong one.
- _ptr: Shared::new(Box::into_raw(box RcBox {
+ ptr: Shared::new(Box::into_raw(box RcBox {
strong: Cell::new(1),
weak: Cell::new(1),
value: value,
// pointer while also handling drop logic by just crafting a
// fake Weak.
this.dec_strong();
- let _weak = Weak { _ptr: this._ptr };
+ let _weak = Weak { ptr: this.ptr };
forget(this);
Ok(val)
}
#[stable(feature = "rc_weak", since = "1.4.0")]
pub fn downgrade(this: &Self) -> Weak<T> {
this.inc_weak();
- Weak { _ptr: this._ptr }
+ Weak { ptr: this.ptr }
}
/// Get the number of weak references to this value.
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
- let inner = unsafe { &mut **this._ptr };
+ let inner = unsafe { &mut **this.ptr };
Some(&mut inner.value)
} else {
None
} else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks
unsafe {
- let mut swap = Rc::new(ptr::read(&(**this._ptr).value));
+ let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
mem::swap(this, &mut swap);
swap.dec_strong();
// Remove implicit strong-weak ref (no need to craft a fake
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
- let inner = unsafe { &mut **this._ptr };
+ let inner = unsafe { &mut **this.ptr };
&mut inner.value
}
}
#[unsafe_destructor_blind_to_params]
fn drop(&mut self) {
unsafe {
- let ptr = *self._ptr;
+ let ptr = *self.ptr;
let thin = ptr as *const ();
if thin as usize != mem::POST_DROP_USIZE {
#[inline]
fn clone(&self) -> Rc<T> {
self.inc_strong();
- Rc { _ptr: self._ptr }
+ Rc { ptr: self.ptr }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Pointer::fmt(&*self._ptr, f)
+ fmt::Pointer::fmt(&*self.ptr, f)
}
}
#[unsafe_no_drop_flag]
#[stable(feature = "rc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
- // FIXME #12808: strange names to try to avoid interfering with
- // field accesses of the contained type via Deref
- _ptr: Shared<RcBox<T>>,
+ ptr: Shared<RcBox<T>>,
}
#[stable(feature = "rc_weak", since = "1.4.0")]
None
} else {
self.inc_strong();
- Some(Rc { _ptr: self._ptr })
+ Some(Rc { ptr: self.ptr })
}
}
}
/// ```
fn drop(&mut self) {
unsafe {
- let ptr = *self._ptr;
+ let ptr = *self.ptr;
let thin = ptr as *const ();
if thin as usize != mem::POST_DROP_USIZE {
#[inline]
fn clone(&self) -> Weak<T> {
self.inc_weak();
- Weak { _ptr: self._ptr }
+ Weak { ptr: self.ptr }
}
}
pub fn new() -> Weak<T> {
unsafe {
Weak {
- _ptr: Shared::new(Box::into_raw(box RcBox {
+ ptr: Shared::new(Box::into_raw(box RcBox {
strong: Cell::new(0),
weak: Cell::new(1),
value: uninitialized(),
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
- assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
- &(**self._ptr)
+ assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
+ &(**self.ptr)
}
}
}
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
- assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
- &(**self._ptr)
+ assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
+ &(**self.ptr)
}
}
}
pub fn borrow(&self) -> Ref<T> {
match BorrowRef::new(&self.borrow) {
Some(b) => Ref {
- _value: unsafe { &*self.value.get() },
- _borrow: b,
+ value: unsafe { &*self.value.get() },
+ borrow: b,
},
None => panic!("RefCell<T> already mutably borrowed"),
}
pub fn borrow_mut(&self) -> RefMut<T> {
match BorrowRefMut::new(&self.borrow) {
Some(b) => RefMut {
- _value: unsafe { &mut *self.value.get() },
- _borrow: b,
+ value: unsafe { &mut *self.value.get() },
+ borrow: b,
},
None => panic!("RefCell<T> already borrowed"),
}
impl<T: ?Sized + Eq> Eq for RefCell<T> {}
struct BorrowRef<'b> {
- _borrow: &'b Cell<BorrowFlag>,
+ borrow: &'b Cell<BorrowFlag>,
}
impl<'b> BorrowRef<'b> {
WRITING => None,
b => {
borrow.set(b + 1);
- Some(BorrowRef { _borrow: borrow })
+ Some(BorrowRef { borrow: borrow })
},
}
}
impl<'b> Drop for BorrowRef<'b> {
#[inline]
fn drop(&mut self) {
- let borrow = self._borrow.get();
+ let borrow = self.borrow.get();
debug_assert!(borrow != WRITING && borrow != UNUSED);
- self._borrow.set(borrow - 1);
+ self.borrow.set(borrow - 1);
}
}
fn clone(&self) -> BorrowRef<'b> {
// Since this Ref exists, we know the borrow flag
// is not set to WRITING.
- let borrow = self._borrow.get();
+ let borrow = self.borrow.get();
debug_assert!(borrow != WRITING && borrow != UNUSED);
- self._borrow.set(borrow + 1);
- BorrowRef { _borrow: self._borrow }
+ self.borrow.set(borrow + 1);
+ BorrowRef { borrow: self.borrow }
}
}
/// See the [module-level documentation](index.html) for more.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Ref<'b, T: ?Sized + 'b> {
- // FIXME #12808: strange name to try to avoid interfering with
- // field accesses of the contained type via Deref
- _value: &'b T,
- _borrow: BorrowRef<'b>,
+ value: &'b T,
+ borrow: BorrowRef<'b>,
}
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn deref(&self) -> &T {
- self._value
+ self.value
}
}
#[inline]
pub fn clone(orig: &Ref<'b, T>) -> Ref<'b, T> {
Ref {
- _value: orig._value,
- _borrow: orig._borrow.clone(),
+ value: orig.value,
+ borrow: orig.borrow.clone(),
}
}
where F: FnOnce(&T) -> &U
{
Ref {
- _value: f(orig._value),
- _borrow: orig._borrow,
+ value: f(orig.value),
+ borrow: orig.borrow,
}
}
pub fn filter_map<U: ?Sized, F>(orig: Ref<'b, T>, f: F) -> Option<Ref<'b, U>>
where F: FnOnce(&T) -> Option<&U>
{
- f(orig._value).map(move |new| Ref {
- _value: new,
- _borrow: orig._borrow,
+ f(orig.value).map(move |new| Ref {
+ value: new,
+ borrow: orig.borrow,
})
}
}
where F: FnOnce(&mut T) -> &mut U
{
RefMut {
- _value: f(orig._value),
- _borrow: orig._borrow,
+ value: f(orig.value),
+ borrow: orig.borrow,
}
}
pub fn filter_map<U: ?Sized, F>(orig: RefMut<'b, T>, f: F) -> Option<RefMut<'b, U>>
where F: FnOnce(&mut T) -> Option<&mut U>
{
- let RefMut { _value, _borrow } = orig;
- f(_value).map(move |new| RefMut {
- _value: new,
- _borrow: _borrow,
+ let RefMut { value, borrow } = orig;
+ f(value).map(move |new| RefMut {
+ value: new,
+ borrow: borrow,
})
}
}
struct BorrowRefMut<'b> {
- _borrow: &'b Cell<BorrowFlag>,
+ borrow: &'b Cell<BorrowFlag>,
}
impl<'b> Drop for BorrowRefMut<'b> {
#[inline]
fn drop(&mut self) {
- let borrow = self._borrow.get();
+ let borrow = self.borrow.get();
debug_assert!(borrow == WRITING);
- self._borrow.set(UNUSED);
+ self.borrow.set(UNUSED);
}
}
match borrow.get() {
UNUSED => {
borrow.set(WRITING);
- Some(BorrowRefMut { _borrow: borrow })
+ Some(BorrowRefMut { borrow: borrow })
},
_ => None,
}
/// See the [module-level documentation](index.html) for more.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RefMut<'b, T: ?Sized + 'b> {
- // FIXME #12808: strange name to try to avoid interfering with
- // field accesses of the contained type via Deref
- _value: &'b mut T,
- _borrow: BorrowRefMut<'b>,
+ value: &'b mut T,
+ borrow: BorrowRefMut<'b>,
}
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn deref(&self) -> &T {
- self._value
+ self.value
}
}
impl<'b, T: ?Sized> DerefMut for RefMut<'b, T> {
#[inline]
fn deref_mut(&mut self) -> &mut T {
- self._value
+ self.value
}
}