1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 /*! Task-local reference-counted boxes (`Rc` type)
13 The `Rc` type provides shared ownership of an immutable value. Destruction is deterministic, and
14 will occur as soon as the last owner is gone. It is marked as non-sendable because it avoids the
15 overhead of atomic reference counting.
17 The `downgrade` method can be used to create a non-owning `Weak` pointer to the box. A `Weak`
18 pointer can be upgraded to an `Rc` pointer, but will return `None` if the value has already been
19 destroyed.
21 For example, a tree with parent pointers can be represented by putting the nodes behind `Rc`
22 pointers, and then storing the parent pointers as `Weak` pointers.
23 */
use core::mem::transmute;
use core::cell::Cell;
use core::clone::Clone;
use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering};
use core::kinds::marker;
use core::ops::{Deref, Drop};
use core::option::{Option, Some, None};
use core::ptr;
use core::ptr::RawPtr;
use core::mem::{min_align_of, size_of};

use heap::deallocate;
45 /// Immutable reference counted pointer type
46 #[unsafe_no_drop_flag]
48 // FIXME #12808: strange names to try to avoid interfering with
49 // field accesses of the contained type via Deref
51 _nosend: marker::NoSend,
52 _noshare: marker::NoShare
56 /// Construct a new reference-counted box
57 pub fn new(value: T) -> Rc<T> {
60 // there is an implicit weak pointer owned by all the
61 // strong pointers, which ensures that the weak
62 // destructor never frees the allocation while the
63 // strong destructor is running, even if the weak
64 // pointer is stored inside the strong one.
65 _ptr: transmute(box RcBox {
70 _nosend: marker::NoSend,
71 _noshare: marker::NoShare
78 /// Downgrade the reference-counted pointer to a weak reference
79 pub fn downgrade(&self) -> Weak<T> {
83 _nosend: marker::NoSend,
84 _noshare: marker::NoShare
89 impl<T: Clone> Rc<T> {
90 /// Acquires a mutable pointer to the inner contents by guaranteeing that
91 /// the reference count is one (no sharing is possible).
93 /// This is also referred to as a copy-on-write operation because the inner
94 /// data is cloned if the reference count is greater than one.
97 pub fn make_unique<'a>(&'a mut self) -> &'a mut T {
98 // Note that we hold a strong reference, which also counts as
99 // a weak reference, so we only clone if there is an
100 // additional reference of either kind.
101 if self.strong() != 1 || self.weak() != 1 {
102 *self = Rc::new(self.deref().clone())
104 // This unsafety is ok because we're guaranteed that the pointer
105 // returned is the *only* pointer that will ever be returned to T. Our
106 // reference count is guaranteed to be 1 at this point, and we required
107 // the Rc itself to be `mut`, so we're returning the only possible
108 // reference to the inner data.
109 let inner = unsafe { &mut *self._ptr };
114 impl<T> Deref<T> for Rc<T> {
115 /// Borrow the value contained in the reference-counted box
117 fn deref<'a>(&'a self) -> &'a T {
123 impl<T> Drop for Rc<T> {
126 if !self._ptr.is_null() {
128 if self.strong() == 0 {
129 ptr::read(self.deref()); // destroy the contained object
131 // remove the implicit "strong weak" pointer now
132 // that we've destroyed the contents.
135 if self.weak() == 0 {
136 deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
137 min_align_of::<RcBox<T>>())
145 impl<T> Clone for Rc<T> {
147 fn clone(&self) -> Rc<T> {
149 Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
153 impl<T: PartialEq> PartialEq for Rc<T> {
155 fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
157 fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
// Marker impl: equality semantics come from PartialEq on the boxed value.
impl<T: Eq> Eq for Rc<T> {}
162 impl<T: PartialOrd> PartialOrd for Rc<T> {
164 fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
167 fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
170 fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
173 fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
176 impl<T: Ord> Ord for Rc<T> {
178 fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
181 /// Weak reference to a reference-counted box
182 #[unsafe_no_drop_flag]
184 // FIXME #12808: strange names to try to avoid interfering with
185 // field accesses of the contained type via Deref
187 _nosend: marker::NoSend,
188 _noshare: marker::NoShare
192 /// Upgrade a weak reference to a strong reference
193 pub fn upgrade(&self) -> Option<Rc<T>> {
194 if self.strong() == 0 {
198 Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
204 impl<T> Drop for Weak<T> {
207 if !self._ptr.is_null() {
209 // the weak count starts at 1, and will only go to
210 // zero if all the strong pointers have disappeared.
211 if self.weak() == 0 {
212 deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
213 min_align_of::<RcBox<T>>())
220 impl<T> Clone for Weak<T> {
222 fn clone(&self) -> Weak<T> {
224 Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
230 fn inner<'a>(&'a self) -> &'a RcBox<T>;
233 fn strong(&self) -> uint { self.inner().strong.get() }
236 fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
239 fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
242 fn weak(&self) -> uint { self.inner().weak.get() }
245 fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
248 fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
251 impl<T> RcBoxPtr<T> for Rc<T> {
253 fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
256 impl<T> RcBoxPtr<T> for Weak<T> {
258 fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
// NOTE(review): unit-test region. Large parts of the original module are
// not visible here (the `mod tests` header, `#[test]` attributes, several
// fn headers/bodies and closing braces), so the fragments below are kept
// byte-identical rather than reconstructed.
#[allow(experimental)]
use super::{Rc, Weak};
use std::cell::RefCell;
use std::option::{Option, Some, None};
use std::clone::Clone;
// shared mutation through one Rc handle observed through another
// (presumably `y` is a clone of `x` on an elided line — verify upstream)
let x = Rc::new(RefCell::new(5));
*x.borrow_mut() = 20;
assert_eq!(*y.borrow(), 20);
fn test_simple_clone() {
fn test_destructor() {
let x = Rc::new(box 5);
// a live strong pointer means upgrade succeeds
let y = x.downgrade();
assert!(y.upgrade().is_some());
// once the strong pointer is dropped, upgrade must fail
let y = x.downgrade();
assert!(y.upgrade().is_none());
// NOTE(review): `Gc` comes from an elided import — confirm against full file
let a = Rc::new(RefCell::new(Gc::new(1)));
assert!(a.try_borrow_mut().is_some());
fn weak_self_cyclic() {
x: RefCell<Option<Weak<Cycle>>>
let a = Rc::new(Cycle { x: RefCell::new(None) });
let b = a.clone().downgrade();
*a.x.borrow_mut() = Some(b);
// hopefully we don't double-free (or leak)...
fn test_cowrc_clone_make_unique() {
let mut cow0 = Rc::new(75u);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
// make_unique on shared handles must not disturb the value
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
// each write goes through a freshly-unshared copy
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
assert!(78 == *cow2);
// none should point to the same backing memory
assert!(*cow0 != *cow1);
assert!(*cow0 != *cow2);
assert!(*cow1 != *cow2);
fn test_cowrc_clone_unique2() {
let mut cow0 = Rc::new(75u);
let cow1 = cow0.clone();
let cow2 = cow1.clone();
assert!(75 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// only cow0 is written through make_unique; the others stay shared
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// cow1 and cow2 should share the same contents
// cow0 should have a unique reference
assert!(*cow0 != *cow1);
assert!(*cow0 != *cow2);
assert!(*cow1 == *cow2);
fn test_cowrc_clone_weak() {
let mut cow0 = Rc::new(75u);
let cow1_weak = cow0.downgrade();
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
// make_unique replaces the shared box, invalidating the weak pointer
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());