From 466135bfef4d110213a9aeb46f8199fa89a5f267 Mon Sep 17 00:00:00 2001
From: Luqman Aden
Date: Thu, 4 Dec 2014 14:58:21 -0500
Subject: [PATCH] libcore: Make it unsafe to create NonZero and impl Deref.

---
 src/liballoc/arc.rs       | 15 +++++--------
 src/liballoc/rc.rs        | 29 +++++++-----------------
 src/libcollections/vec.rs | 46 +++++++++++++++++----------------------
 src/libcore/ptr.rs        | 24 +++++++++++++++++---
 4 files changed, 55 insertions(+), 59 deletions(-)

diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 290617535bb..47e7ddac07c 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -164,7 +164,7 @@ pub fn new(data: T) -> Arc<T> {
             weak: atomic::AtomicUint::new(1),
             data: data,
         };
-        Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) }
+        Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
     }
 
     /// Downgrades the `Arc<T>` to a `Weak<T>` reference.
@@ -193,8 +193,7 @@ fn inner(&self) -> &ArcInner<T> {
         // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
         // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
         // to these contents.
-        let NonZero(ptr) = self._ptr;
-        unsafe { &*ptr }
+        unsafe { &**self._ptr }
     }
 }
 
@@ -281,8 +280,7 @@ pub fn make_unique(&mut self) -> &mut T {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the Arc itself to be `mut`, so we're returning the only
         // possible reference to the inner data.
-        let NonZero(ptr) = self._ptr;
-        let inner = unsafe { &mut *ptr };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.data
     }
 }
@@ -317,7 +315,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
        // it is guaranteed to be zeroed after the first if it's run more than once)
-        let NonZero(ptr) = self._ptr;
+        let ptr = *self._ptr;
         if ptr.is_null() { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
@@ -388,8 +386,7 @@ pub fn upgrade(&self) -> Option<Arc<T>> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        let NonZero(ptr) = self._ptr;
-        unsafe { &*ptr }
+        unsafe { &**self._ptr }
    }
 }
 
@@ -445,7 +442,7 @@ impl<T: Sync + Send> Drop for Weak<T> {
     /// } // implicit drop
     /// ```
     fn drop(&mut self) {
-        let NonZero(ptr) = self._ptr;
+        let ptr = *self._ptr;
         // see comments above for why this check is here
         if ptr.is_null() { return }
 
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 41efa0468ac..3d73c64bf4d 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -195,7 +195,7 @@ pub fn new(value: T) -> Rc<T> {
                 // there is an implicit weak pointer owned by all the strong pointers, which
                 // ensures that the weak destructor never frees the allocation while the strong
                 // destructor is running, even if the weak pointer is stored inside the strong one.
-                _ptr: NonZero(transmute(box RcBox {
+                _ptr: NonZero::new(transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
@@ -280,8 +280,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
             let val = ptr::read(&*rc); // copy the contained object
             // destruct the box and skip our Drop
             // we can ignore the refcounts because we know we're unique
-            let NonZero(ptr) = rc._ptr;
-            deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
+            deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
                        min_align_of::<RcBox<T>>());
             forget(rc);
             Ok(val)
@@ -311,10 +310,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
 #[experimental]
 pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
     if is_unique(rc) {
-        let inner = unsafe {
-            let NonZero(ptr) = rc._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **rc._ptr };
         Some(&mut inner.value)
     } else {
         None
@@ -346,10 +342,7 @@ pub fn make_unique(&mut self) -> &mut T {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
         // possible reference to the inner value.
-        let inner = unsafe {
-            let NonZero(ptr) = self._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.value
     }
 }
@@ -397,7 +390,7 @@ impl<T> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
@@ -689,7 +682,7 @@ impl<T> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all the strong pointers
@@ -750,18 +743,12 @@ fn weak(&self) -> uint { self.inner().weak.get() }
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 #[cfg(test)]
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index 0739cef3353..6f618657280 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -176,7 +176,7 @@ pub fn new() -> Vec<T> {
         // non-null value which is fine since we never call deallocate on the ptr
         // if cap is 0. The reason for this is because the pointer of a slice
         // being NULL would break the null pointer optimization for enums.
-        Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: 0 }
+        Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
     }
 
     /// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -209,7 +209,7 @@ pub fn new() -> Vec<T> {
     #[stable]
     pub fn with_capacity(capacity: uint) -> Vec<T> {
         if mem::size_of::<T>() == 0 {
-            Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: uint::MAX }
+            Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
         } else if capacity == 0 {
             Vec::new()
         } else {
@@ -217,7 +217,7 @@ pub fn with_capacity(capacity: uint) -> Vec<T> {
                                .expect("capacity overflow");
             let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
             if ptr.is_null() { ::alloc::oom() }
-            Vec { ptr: NonZero(ptr as *mut T), len: 0, cap: capacity }
+            Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
         }
     }
 
@@ -284,7 +284,7 @@ pub fn from_fn<F>(length: uint, mut op: F) -> Vec<T> where F: FnMut(uint) -> T
     #[unstable = "needs finalization"]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
                                  capacity: uint) -> Vec<T> {
-        Vec { ptr: NonZero(ptr), len: length, cap: capacity }
+        Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
     }
 
     /// Creates a vector by copying the elements from a raw pointer.
@@ -792,11 +792,10 @@ pub fn reserve_exact(&mut self, additional: uint) {
     pub fn shrink_to_fit(&mut self) {
         if mem::size_of::<T>() == 0 { return }
 
-        let NonZero(ptr) = self.ptr;
         if self.len == 0 {
             if self.cap != 0 {
                 unsafe {
-                    dealloc(ptr, self.cap)
+                    dealloc(*self.ptr, self.cap)
                 }
                 self.cap = 0;
             }
@@ -804,12 +803,12 @@ pub fn shrink_to_fit(&mut self) {
             unsafe {
                 // Overflow check is unnecessary as the vector is already at
                 // least this large.
-                let ptr = reallocate(ptr as *mut u8,
+                let ptr = reallocate(*self.ptr as *mut u8,
                                      self.cap * mem::size_of::<T>(),
                                      self.len * mem::size_of::<T>(),
                                      mem::min_align_of::<T>()) as *mut T;
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = self.len;
         }
@@ -867,10 +866,9 @@ pub fn truncate(&mut self, len: uint) {
     #[inline]
     #[stable]
     pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
-        let NonZero(ptr) = self.ptr;
         unsafe {
             mem::transmute(RawSlice {
-                data: ptr as *const T,
+                data: *self.ptr as *const T,
                 len: self.len,
             })
         }
@@ -893,7 +891,7 @@ pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn into_iter(self) -> IntoIter<T> {
         unsafe {
-            let NonZero(ptr) = self.ptr;
+            let ptr = *self.ptr;
             let cap = self.cap;
             let begin = ptr as *const T;
             let end = if mem::size_of::<T>() == 0 {
@@ -1113,16 +1111,15 @@ pub fn push(&mut self, value: T) {
             let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
             if old_size > size { panic!("capacity overflow") }
             unsafe {
-                let NonZero(ptr) = self.ptr;
-                let ptr = alloc_or_realloc(ptr, old_size, size);
+                let ptr = alloc_or_realloc(*self.ptr, old_size, size);
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = max(self.cap, 2) * 2;
         }
 
         unsafe {
-            let NonZero(end) = self.ptr.offset(self.len as int);
+            let end = *self.ptr.offset(self.len as int);
             ptr::write(&mut *end, value);
             self.len += 1;
         }
@@ -1167,11 +1164,11 @@ pub fn pop(&mut self) -> Option<T> {
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
         unsafe {
-            let begin = self.ptr.0 as *const T;
+            let begin = *self.ptr as *const T;
             let end = if mem::size_of::<T>() == 0 {
-                (self.ptr.0 as uint + self.len()) as *const T
+                (*self.ptr as uint + self.len()) as *const T
             } else {
-                self.ptr.0.offset(self.len() as int) as *const T
+                (*self.ptr).offset(self.len() as int) as *const T
             };
             self.set_len(0);
             Drain {
@@ -1236,10 +1233,9 @@ fn grow_capacity(&mut self, capacity: uint) {
             let size = capacity.checked_mul(mem::size_of::<T>())
                                .expect("capacity overflow");
             unsafe {
-                let NonZero(ptr) = self.ptr;
-                let ptr = alloc_or_realloc(ptr, self.cap * mem::size_of::<T>(), size);
+                let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = capacity;
         }
@@ -1360,10 +1356,9 @@ impl<T> AsSlice<T> for Vec<T> {
     #[inline]
     #[stable]
     fn as_slice<'a>(&'a self) -> &'a [T] {
-        let NonZero(ptr) = self.ptr;
         unsafe {
             mem::transmute(RawSlice {
-                data: ptr as *const T,
+                data: *self.ptr as *const T,
                 len: self.len
             })
         }
@@ -1388,8 +1383,7 @@ fn drop(&mut self) {
             for x in self.iter() {
                 ptr::read(x);
             }
-            let NonZero(ptr) = self.ptr;
-            dealloc(ptr, self.cap)
+            dealloc(*self.ptr, self.cap)
         }
     }
 }
@@ -1427,7 +1421,7 @@ pub fn into_inner(mut self) -> Vec<T> {
         for _x in self { }
         let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
         mem::forget(self);
-        Vec { ptr: NonZero(allocation), cap: cap, len: 0 }
+        Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
     }
 }
 
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index 910204edf70..23eb117680a 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -90,10 +90,10 @@
 use mem;
 use clone::Clone;
 use intrinsics;
-use kinds::Copy;
+use kinds::{Copy, Send, Sync};
+use ops::Deref;
 use option::Option;
 use option::Option::{Some, None};
-use kinds::{Send, Sync};
 use cmp::{PartialEq, Eq, Ord, PartialOrd, Equiv};
 use cmp::Ordering;
 
@@ -115,7 +115,25 @@
 /// NULL or 0 that might allow certain optimizations.
 #[lang="non_zero"]
 #[deriving(Clone, PartialEq, Eq, PartialOrd)]
-pub struct NonZero<T>(pub T);
+#[experimental]
+pub struct NonZero<T>(T);
+
+impl<T> NonZero<T> {
+    /// Create an instance of NonZero with the provided value.
+    /// You must indeed ensure that the value is actually "non-zero".
+    #[inline(always)]
+    pub unsafe fn new(inner: T) -> NonZero<T> {
+        NonZero(inner)
+    }
+}
+
+impl<T> Deref<T> for NonZero<T> {
+    #[inline]
+    fn deref<'a>(&'a self) -> &'a T {
+        let NonZero(ref inner) = *self;
+        inner
+    }
+}
 
 impl<T> Copy for NonZero<T> {}
-- 
2.44.0
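
To see the API shape this patch establishes outside the diff: the sketch below is illustrative only, not the patch's code. It uses present-day Rust (associated-type Deref, no #[lang] item, no kinds/uint), and SketchNonZero is a hypothetical stand-in for libcore's NonZero; the 2014 code above instead implements the then-generic Deref<T> trait with fn deref<'a>(&'a self) -> &'a T.

    use std::ops::Deref;

    #[derive(Clone, Copy, PartialEq, Eq)]
    struct SketchNonZero<T>(T);

    impl<T> SketchNonZero<T> {
        // The patch's key change: the field is private and construction is
        // `unsafe`, so the caller, not the wrapper, vouches that the value
        // really is non-zero / non-null.
        unsafe fn new(inner: T) -> SketchNonZero<T> {
            SketchNonZero(inner)
        }
    }

    impl<T> Deref for SketchNonZero<T> {
        type Target = T;
        // Deref replaces the old `let NonZero(ptr) = ...` destructuring:
        // call sites just write `*wrapper`, as the patched `*self._ptr` does.
        fn deref(&self) -> &T {
            let SketchNonZero(ref inner) = *self;
            inner
        }
    }

    fn main() {
        let mut slot = 41i32;
        // A pointer derived from a live reference is necessarily non-null,
        // which is what justifies the `unsafe` constructor call here.
        let nz = unsafe { SketchNonZero::new(&mut slot as *mut i32) };
        // One `*` peels the wrapper via Deref, the second derefs the raw
        // pointer -- the same `**` pattern as `&**self._ptr` in the diff.
        unsafe { **&nz += 1 };
        assert_eq!(unsafe { **nz }, 42);
    }

The design point is visible in main: the proof obligation (non-nullness) moves to the single constructor call, while every read site gets the ergonomic deref form instead of pattern-matching the wrapper open, which is why the call-site churn in arc.rs, rc.rs, and vec.rs is almost entirely deletions.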