1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 /*! Task-local reference-counted boxes (`Rc` type)
13 The `Rc` type provides shared ownership of an immutable value. Destruction is deterministic, and
14 will occur as soon as the last owner is gone. It is marked as non-sendable because it avoids the
15 overhead of atomic reference counting.
17 The `downgrade` method can be used to create a non-owning `Weak` pointer to the box. A `Weak`
pointer can be upgraded to an `Rc` pointer, but will return `None` if the value has already been
destroyed.
For example, a tree with parent pointers can be represented by putting the nodes behind strong `Rc`
22 pointers, and then storing the parent pointers as `Weak` pointers.
26 use core::mem::transmute;
28 use core::clone::Clone;
29 use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering};
30 use core::kinds::marker;
31 use core::ops::{Deref, Drop};
32 use core::option::{Option, Some, None};
34 use core::ptr::RawPtr;
35 use core::mem::{min_align_of, size_of};
/// Immutable reference counted pointer type
// `#[unsafe_no_drop_flag]` removes the implicit drop-flag byte, so the `Drop`
// impl below must tolerate running on an already-zeroed value (hence the
// null-pointer check in `drop`).
#[unsafe_no_drop_flag]
// NOTE(review): the `pub struct Rc<T> {` header and its `_ptr` field are not
// visible in this listing — lines appear to be missing; confirm against the
// complete file.
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
    // Zero-sized markers: `Rc` is neither sendable nor sharable between
    // tasks, because the reference counts are not atomic (see module docs).
    _nosend: marker::NoSend,
    _noshare: marker::NoShare
    /// Construct a new reference-counted box
    ///
    /// Takes ownership of `value` and moves it into a heap allocation shared
    /// by all clones of the returned `Rc`.
    pub fn new(value: T) -> Rc<T> {
        // there is an implicit weak pointer owned by all the
        // strong pointers, which ensures that the weak
        // destructor never frees the allocation while the
        // strong destructor is running, even if the weak
        // pointer is stored inside the strong one.
        _ptr: transmute(box RcBox {
        // NOTE(review): the `RcBox` field initializers (value and the two
        // counts) and the closing braces are not visible in this listing —
        // lines appear to be missing.
        _nosend: marker::NoSend,
        _noshare: marker::NoShare
    /// Downgrade the reference-counted pointer to a weak reference
    ///
    /// The returned `Weak` does not keep the value alive; it must be
    /// upgraded back to an `Rc` (see `Weak::upgrade`) before use.
    pub fn downgrade(&self) -> Weak<T> {
        // NOTE(review): the body that bumps the weak count and constructs the
        // `Weak` value is not visible in this listing — lines appear to be
        // missing.
        _nosend: marker::NoSend,
        _noshare: marker::NoShare
impl<T: Clone> Rc<T> {
    /// Acquires a mutable pointer to the inner contents by guaranteeing that
    /// the reference count is one (no sharing is possible).
    ///
    /// This is also referred to as a copy-on-write operation because the inner
    /// data is cloned if the reference count is greater than one.
    pub fn make_unique<'a>(&'a mut self) -> &'a mut T {
        // Note that we hold a strong reference, which also counts as
        // a weak reference, so we only clone if there is an
        // additional reference of either kind.
        if self.strong() != 1 || self.weak() != 1 {
            // Replacing `*self` drops our old (shared) handle and leaves us
            // sole owner of a fresh copy of the data.
            *self = Rc::new(self.deref().clone())
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the Rc itself to be `mut`, so we're returning the only possible
        // reference to the inner data.
        let inner = unsafe { &mut *self._ptr };
        // NOTE(review): the closing brace of the `if` and the final borrow of
        // `inner`'s value are not visible in this listing — lines appear to be
        // missing.
impl<T> Deref<T> for Rc<T> {
    /// Borrow the value contained in the reference-counted box
    fn deref<'a>(&'a self) -> &'a T {
        // NOTE(review): the body (presumably a borrow of the value inside the
        // shared `RcBox`) is not visible in this listing — lines appear to be
        // missing.
impl<T> Drop for Rc<T> {
    // NOTE(review): the `fn drop(&mut self)` header (and any enclosing
    // `unsafe` block) is not visible in this listing — lines appear to be
    // missing.
        // Because of `#[unsafe_no_drop_flag]`, drop may run on a zeroed-out
        // value; a null `_ptr` means "already dropped", so do nothing.
        if !self._ptr.is_null() {
            // NOTE(review): a strong-count decrement presumably precedes this
            // check — not visible here; confirm against the full file.
            if self.strong() == 0 {
                // Destroy the contained object in place without freeing the
                // allocation, since weak pointers may still reference the box.
                ptr::read(self.deref()); // destroy the contained object
                // remove the implicit "strong weak" pointer now
                // that we've destroyed the contents.
                // NOTE(review): the weak-count decrement is not visible here;
                // lines appear to be missing before this check.
                if self.weak() == 0 {
                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                               min_align_of::<RcBox<T>>())
impl<T> Clone for Rc<T> {
    /// Returns a new `Rc` sharing the same allocation as `self`.
    fn clone(&self) -> Rc<T> {
        // NOTE(review): the strong-count increment expected before this
        // return is not visible in this listing — lines appear to be missing.
        Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
// All comparisons below deref through the box (`**self`) and compare the
// contained values — two `Rc`s compare equal by value, not by pointer.
// NOTE(review): the closing braces of these impl blocks are not visible in
// this listing.
impl<T: PartialEq> PartialEq for Rc<T> {
    fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
    fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
impl<T: Eq> Eq for Rc<T> {}
impl<T: PartialOrd> PartialOrd for Rc<T> {
    fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
    fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
    fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
    fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
impl<T: Ord> Ord for Rc<T> {
    fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
// Formatting delegates to the contained value. NOTE(review): the `use
// core::fmt;` import and this method's body are not visible in this listing.
impl<T: fmt::Show> fmt::Show for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
/// Weak reference to a reference-counted box
// Like `Rc`, this type opts out of the drop flag; its `Drop` impl null-checks
// `_ptr` before touching the counts.
#[unsafe_no_drop_flag]
// NOTE(review): the `pub struct Weak<T> {` header and its `_ptr` field are
// not visible in this listing — lines appear to be missing.
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
    // Same non-atomic-count markers as `Rc`: not sendable, not sharable.
    _nosend: marker::NoSend,
    _noshare: marker::NoShare
    /// Upgrade a weak reference to a strong reference
    ///
    /// Returns `None` when no strong pointers remain (the value has been
    /// destroyed); otherwise returns a new owning `Rc`.
    pub fn upgrade(&self) -> Option<Rc<T>> {
        if self.strong() == 0 {
            // NOTE(review): the `None` arm and the strong-count increment for
            // the success path are not visible in this listing — lines appear
            // to be missing.
        Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
impl<T> Drop for Weak<T> {
    // NOTE(review): the `fn drop(&mut self)` header (and any enclosing
    // `unsafe` block) is not visible in this listing — lines appear to be
    // missing.
        // Null `_ptr` means the value was already dropped (drop-flag-less
        // type); skip all count bookkeeping in that case.
        if !self._ptr.is_null() {
            // NOTE(review): a weak-count decrement presumably precedes this
            // comment — not visible here; confirm against the full file.
            // the weak count starts at 1, and will only go to
            // zero if all the strong pointers have disappeared.
            if self.weak() == 0 {
                deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                           min_align_of::<RcBox<T>>())
impl<T> Clone for Weak<T> {
    /// Returns a new `Weak` referring to the same allocation as `self`.
    fn clone(&self) -> Weak<T> {
        // NOTE(review): the weak-count increment expected before this return
        // is not visible in this listing — lines appear to be missing.
        Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
// NOTE(review): the `trait RcBoxPtr<T>` declaration header is not visible in
// this listing; the items below are its methods, shared by `Rc` and `Weak`.
    // Access to the shared heap box holding the value and both counts.
    fn inner<'a>(&'a self) -> &'a RcBox<T>;
    // Non-atomic get/increment/decrement helpers for the two reference
    // counts stored in the shared `RcBox` (`Cell`-style get/set — no
    // synchronization, which is why `Rc` is marked NoSend/NoShare).
    fn strong(&self) -> uint { self.inner().strong.get() }
    fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
    fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
    fn weak(&self) -> uint { self.inner().weak.get() }
    fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
    fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
// Both pointer types reach the shared box by dereferencing their raw `_ptr`.
// The unsafety relies on the count bookkeeping keeping the box alive.
impl<T> RcBoxPtr<T> for Rc<T> {
    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
impl<T> RcBoxPtr<T> for Weak<T> {
    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
// NOTE(review): the `#[cfg(test)] mod tests {` wrapper and the `#[test]`
// attributes / some `fn` headers are not visible in this listing — lines
// appear to be missing throughout this test section.
#[allow(experimental)]
    use super::{Rc, Weak};
    use std::cell::RefCell;
    use std::option::{Option, Some, None};
    use std::clone::Clone;

    // Clones of an Rc<RefCell<..>> share one cell: mutating through one
    // handle is visible through the other.
    // NOTE(review): the binding of `y` (presumably `let y = x.clone();`) is
    // not visible here.
        let x = Rc::new(RefCell::new(5i));
        *x.borrow_mut() = 20;
        assert_eq!(*y.borrow(), 20);

    fn test_simple_clone() {

    fn test_destructor() {
        let x = Rc::new(box 5i);

        // A weak pointer upgrades successfully while a strong owner exists...
        let y = x.downgrade();
        assert!(y.upgrade().is_some());

        // ...and fails to upgrade once the last strong owner is dropped.
        // NOTE(review): the drop of `x` is not visible between these lines.
        let y = x.downgrade();
        assert!(y.upgrade().is_none());

        // RefCell borrow bookkeeping works through an Rc.
        let a = Rc::new(RefCell::new(box(GC) 1i));
        assert!(a.try_borrow_mut().is_some());

    fn weak_self_cyclic() {
            x: RefCell<Option<Weak<Cycle>>>
        // A node that weakly references itself: the weak edge must not keep
        // the allocation alive or cause a double free on drop.
        let a = Rc::new(Cycle { x: RefCell::new(None) });
        let b = a.clone().downgrade();
        *a.x.borrow_mut() = Some(b);
        // hopefully we don't double-free (or leak)...
    // make_unique on shared handles must copy-on-write: after the first
    // round of calls each handle owns independent storage, so the later
    // mutations do not affect each other.
    fn test_cowrc_clone_make_unique() {
        let mut cow0 = Rc::new(75u);
        let mut cow1 = cow0.clone();
        let mut cow2 = cow1.clone();

        // Reading through make_unique still yields the shared value.
        assert!(75 == *cow0.make_unique());
        assert!(75 == *cow1.make_unique());
        assert!(75 == *cow2.make_unique());

        // Each write goes to that handle's private copy.
        *cow0.make_unique() += 1;
        *cow1.make_unique() += 2;
        *cow2.make_unique() += 3;

        assert!(76 == *cow0);
        assert!(77 == *cow1);
        assert!(78 == *cow2);

        // none should point to the same backing memory
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 != *cow2);
    // Only the handle that calls make_unique gets a private copy; the other
    // two clones keep sharing the original value.
    fn test_cowrc_clone_unique2() {
        let mut cow0 = Rc::new(75u);
        let cow1 = cow0.clone();
        let cow2 = cow1.clone();

        assert!(75 == *cow0);
        assert!(75 == *cow1);
        assert!(75 == *cow2);

        // Triggers the clone path: strong count is 3 here.
        *cow0.make_unique() += 1;

        assert!(76 == *cow0);
        assert!(75 == *cow1);
        assert!(75 == *cow2);

        // cow1 and cow2 should share the same contents
        // cow0 should have a unique reference
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 == *cow2);
    // A live weak pointer also forces make_unique to reallocate, which
    // disconnects the weak pointer: it must fail to upgrade afterwards.
    fn test_cowrc_clone_weak() {
        let mut cow0 = Rc::new(75u);
        let cow1_weak = cow0.downgrade();

        assert!(75 == *cow0);
        assert!(75 == *cow1_weak.upgrade().unwrap());

        // weak() != 1 here, so this clones into a fresh allocation and drops
        // the old strong handle, killing the old box's contents.
        *cow0.make_unique() += 1;

        assert!(76 == *cow0);
        assert!(cow1_weak.upgrade().is_none());