1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A pointer type for heap allocation.
13 //! `Box<T>`, casually referred to as a 'box', provides the simplest form of
14 //! heap allocation in Rust. Boxes provide ownership for this allocation, and
15 //! drop their contents when they go out of scope.
22 //! let x = Box::new(5);
25 //! Creating a recursive data structure:
30 //! Cons(T, Box<List<T>>),
35 //! let list: List<i32> = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil))));
36 //! println!("{:?}", list);
40 //! This will print `Cons(1, Cons(2, Nil))`.
42 //! Recursive structures must be boxed, because if the definition of `Cons`
45 //! ```compile_fail,E0072
51 //! It wouldn't work. This is because the size of a `List` depends on how many
52 //! elements are in the list, and so we don't know how much memory to allocate
53 //! for a `Cons`. By introducing a `Box`, which has a defined size, we know how
54 //! big `Cons` needs to be.
56 #![stable(feature = "rust1", since = "1.0.0")]
58 use heap::{Heap, Layout, Alloc};
63 use core::cmp::Ordering;
65 use core::hash::{self, Hash, Hasher};
66 use core::iter::FusedIterator;
67 use core::marker::{self, Unsize};
69 use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
70 use core::ops::{BoxPlace, Boxed, InPlace, Place, Placer};
71 use core::ptr::{self, NonNull, Unique};
72 use core::convert::From;
73 use str::from_boxed_utf8_unchecked;
75 /// A value that represents the heap. This is the default place that the `box`
76 /// keyword allocates into when no place is supplied.
78 /// The following two examples are equivalent:
81 /// #![feature(box_heap)]
83 /// #![feature(box_syntax, placement_in_syntax)]
84 /// use std::boxed::HEAP;
87 /// let foo: Box<i32> = in HEAP { 5 };
// The canonical placer instance for placement-`in`; see the `Placer<T>`
// impl for `ExchangeHeapSingleton` further down in this file.
91 #[unstable(feature = "box_heap",
92 reason = "may be renamed; uncertain about custom allocator design",
94 pub const HEAP: ExchangeHeapSingleton = ExchangeHeapSingleton { _force_singleton: () };
96 /// This is the singleton type used solely for `boxed::HEAP`.
97 #[unstable(feature = "box_heap",
98 reason = "may be renamed; uncertain about custom allocator design",
100 #[allow(missing_debug_implementations)]
101 #[derive(Copy, Clone)]
102 pub struct ExchangeHeapSingleton {
    // Private zero-sized field: code outside this module cannot construct
    // the type, so `HEAP` above is effectively the only instance.
103 _force_singleton: (),
106 /// A pointer type for heap allocation.
108 /// See the [module-level documentation](../../std/boxed/index.html) for more.
109 #[lang = "owned_box"]
111 #[stable(feature = "rust1", since = "1.0.0")]
// The pointer is stored as `Unique<T>` (from `core::ptr`), which expresses
// ownership of the pointee. Being the `owned_box` lang item, construction
// (`box` expressions) and destruction are special-cased by the compiler.
112 pub struct Box<T: ?Sized>(Unique<T>);
114 /// `IntermediateBox` represents uninitialized backing storage for `Box`.
116 /// FIXME (pnkfelix): Ideally we would just reuse `Box<T>` instead of
117 /// introducing a separate `IntermediateBox<T>`; but then you hit
118 /// issues when you e.g. attempt to destructure an instance of `Box`,
119 /// since it is a lang item and so it gets special handling by the
120 /// compiler. Easier just to make this parallel type for now.
122 /// FIXME (pnkfelix): Currently the `box` protocol only supports
123 /// creating instances of sized types. This IntermediateBox is
124 /// designed to be forward-compatible with a future protocol that
125 /// supports creating instances of unsized types; that is why the type
126 /// parameter has the `?Sized` generalization marker, and is also why
127 /// this carries an explicit size. However, it probably does not need
128 /// to carry the explicit alignment; that is just a work-around for
129 /// the fact that the `align_of` intrinsic currently requires the
130 /// input type to be Sized (which I do not think is strictly
132 #[unstable(feature = "placement_in",
133 reason = "placement box design is still being worked out.",
135 #[allow(missing_debug_implementations)]
136 pub struct IntermediateBox<T: ?Sized> {
    // NOTE(review): the `ptr` and `layout` fields that `finalize`/`Drop`
    // read (`b.ptr`, `self.layout`) are not visible in this excerpt —
    // confirm the full field list against the original file.
    // `PhantomData<*mut T>` ties the storage to `T` without owning a `T`.
139 marker: marker::PhantomData<*mut T>,
142 #[unstable(feature = "placement_in",
143 reason = "placement box design is still being worked out.",
// `Place<T>` exposes the raw destination pointer that the compiler writes
// the placed value into before `finalize` is called.
145 unsafe impl<T> Place<T> for IntermediateBox<T> {
146 fn pointer(&mut self) -> *mut T {
// Converts filled backing storage into a real `Box` without re-allocating.
// Unsafe: caller must guarantee the storage has been fully initialized.
151 unsafe fn finalize<T>(b: IntermediateBox<T>) -> Box<T> {
152 let p = b.ptr as *mut T;
// Allocates (uninitialized) backing storage for one `T`.
157 fn make_place<T>() -> IntermediateBox<T> {
158 let layout = Layout::new::<T>();
160 let p = if layout.size() == 0 {
        // Zero-sized type: no allocation; use the alignment itself as a
        // well-aligned dangling pointer.
161 mem::align_of::<T>() as *mut u8
        // Non-zero size: allocate from the global `Heap` allocator.
        // NOTE(review): the error arm's body (`unwrap_or_else`) is elided
        // in this excerpt — presumably it calls the OOM handler; confirm.
164 Heap.alloc(layout.clone()).unwrap_or_else(|err| {
173 marker: marker::PhantomData,
177 #[unstable(feature = "placement_in",
178 reason = "placement box design is still being worked out.",
// `BoxPlace` is the entry point the compiler uses for `box EXPR`.
180 impl<T> BoxPlace<T> for IntermediateBox<T> {
181 fn make_place() -> IntermediateBox<T> {
186 #[unstable(feature = "placement_in",
187 reason = "placement box design is still being worked out.",
// `InPlace` is used for `in PLACE { EXPR }` placement expressions.
189 impl<T> InPlace<T> for IntermediateBox<T> {
191 unsafe fn finalize(self) -> Box<T> {
196 #[unstable(feature = "placement_new_protocol", issue = "27779")]
197 impl<T> Boxed for Box<T> {
199 type Place = IntermediateBox<T>;
200 unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> {
205 #[unstable(feature = "placement_in",
206 reason = "placement box design is still being worked out.",
// This is what makes `in HEAP { ... }` work: the `HEAP` singleton hands
// out an `IntermediateBox` as the place to construct into.
208 impl<T> Placer<T> for ExchangeHeapSingleton {
209 type Place = IntermediateBox<T>;
211 fn make_place(self) -> IntermediateBox<T> {
216 #[unstable(feature = "placement_in",
217 reason = "placement box design is still being worked out.",
// If the place is dropped before being finalized, release the backing
// storage. The size check mirrors `make_place`: zero-sized layouts were
// never actually allocated, so there is nothing to deallocate.
219 impl<T: ?Sized> Drop for IntermediateBox<T> {
221 if self.layout.size() > 0 {
223 Heap.dealloc(self.ptr, self.layout.clone())
230 /// Allocates memory on the heap and then places `x` into it.
232 /// This doesn't actually allocate if `T` is zero-sized.
237 /// let five = Box::new(5);
// NOTE(review): the enclosing `impl<T> Box<T> {` header and this
// function's body (presumably `box x`) are elided in this excerpt.
239 #[stable(feature = "rust1", since = "1.0.0")]
241 pub fn new(x: T) -> Box<T> {
246 impl<T: ?Sized> Box<T> {
247 /// Constructs a box from a raw pointer.
249 /// After calling this function, the raw pointer is owned by the
250 /// resulting `Box`. Specifically, the `Box` destructor will call
251 /// the destructor of `T` and free the allocated memory. Since the
252 /// way `Box` allocates and releases memory is unspecified, the
253 /// only valid pointer to pass to this function is the one taken
254 /// from another `Box` via the [`Box::into_raw`] function.
256 /// This function is unsafe because improper use may lead to
257 /// memory problems. For example, a double-free may occur if the
258 /// function is called twice on the same raw pointer.
260 /// [`Box::into_raw`]: struct.Box.html#method.into_raw
265 /// let x = Box::new(5);
266 /// let ptr = Box::into_raw(x);
267 /// let x = unsafe { Box::from_raw(ptr) };
269 #[stable(feature = "box_raw", since = "1.4.0")]
271 pub unsafe fn from_raw(raw: *mut T) -> Self {
// SAFETY relies on the caller's contract above: `raw` came from
// `Box::into_raw`, hence is non-null and uniquely owned.
272 Box(Unique::new_unchecked(raw))
275 /// Consumes the `Box`, returning the wrapped raw pointer.
277 /// After calling this function, the caller is responsible for the
278 /// memory previously managed by the `Box`. In particular, the
279 /// caller should properly destroy `T` and release the memory. The
280 /// proper way to do so is to convert the raw pointer back into a
281 /// `Box` with the [`Box::from_raw`] function.
283 /// Note: this is an associated function, which means that you have
284 /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
285 /// is so that there is no conflict with a method on the inner type.
287 /// [`Box::from_raw`]: struct.Box.html#method.from_raw
292 /// let x = Box::new(5);
293 /// let ptr = Box::into_raw(x);
295 #[stable(feature = "box_raw", since = "1.4.0")]
297 pub fn into_raw(b: Box<T>) -> *mut T {
// Thin wrapper: all raw conversions funnel through `into_raw_non_null`
// (and ultimately `into_unique`), so ownership-release logic lives in
// one place.
298 Box::into_raw_non_null(b).as_ptr()
301 /// Consumes the `Box`, returning the wrapped pointer as `NonNull<T>`.
303 /// After calling this function, the caller is responsible for the
304 /// memory previously managed by the `Box`. In particular, the
305 /// caller should properly destroy `T` and release the memory. The
306 /// proper way to do so is to convert the `NonNull<T>` pointer
307 /// into a raw pointer and back into a `Box` with the [`Box::from_raw`]
310 /// Note: this is an associated function, which means that you have
311 /// to call it as `Box::into_raw_non_null(b)`
312 /// instead of `b.into_raw_non_null()`. This
313 /// is so that there is no conflict with a method on the inner type.
315 /// [`Box::from_raw`]: struct.Box.html#method.from_raw
320 /// #![feature(box_into_raw_non_null)]
323 /// let x = Box::new(5);
324 /// let ptr = Box::into_raw_non_null(x);
327 #[unstable(feature = "box_into_raw_non_null", issue = "47336")]
329 pub fn into_raw_non_null(b: Box<T>) -> NonNull<T> {
// `Unique<T>` converts into `NonNull<T>` via `Into`.
330 Box::into_unique(b).into()
// Internal primitive behind the raw-pointer conversions above.
// NOTE(review): the body is elided in this excerpt (presumably it takes
// `b.0` and `mem::forget`s the box) — confirm against the original file.
333 #[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")]
335 pub fn into_unique(b: Box<T>) -> Unique<T> {
341 /// Consumes and leaks the `Box`, returning a mutable reference,
342 /// `&'a mut T`. Here, the lifetime `'a` may be chosen to be `'static`.
344 /// This function is mainly useful for data that lives for the remainder of
345 /// the program's life. Dropping the returned reference will cause a memory
346 /// leak. If this is not acceptable, the reference should first be wrapped
347 /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can
348 /// then be dropped which will properly destroy `T` and release the
349 /// allocated memory.
351 /// Note: this is an associated function, which means that you have
352 /// to call it as `Box::leak(b)` instead of `b.leak()`. This
353 /// is so that there is no conflict with a method on the inner type.
355 /// [`Box::from_raw`]: struct.Box.html#method.from_raw
362 /// #![feature(box_leak)]
365 /// let x = Box::new(41);
366 /// let static_ref: &'static mut usize = Box::leak(x);
367 /// *static_ref += 1;
368 /// assert_eq!(*static_ref, 42);
375 /// #![feature(box_leak)]
378 /// let x = vec![1, 2, 3].into_boxed_slice();
379 /// let static_ref = Box::leak(x);
380 /// static_ref[0] = 4;
381 /// assert_eq!(*static_ref, [4, 2, 3]);
384 #[unstable(feature = "box_leak", reason = "needs an FCP to stabilize",
387 pub fn leak<'a>(b: Box<T>) -> &'a mut T
389 T: 'a // Technically not needed, but kept to be explicit.
// Sound because `into_raw` relinquishes ownership: the allocation is
// never freed, so the reference stays valid for any chosen `'a`.
391 unsafe { &mut *Box::into_raw(b) }
395 #[stable(feature = "rust1", since = "1.0.0")]
// `#[may_dangle]` is the unstable dropck escape hatch: it asserts this
// `Drop` impl does not access borrowed data of type `T` beyond dropping it.
// The body is intentionally empty — see the FIXME below.
396 unsafe impl<#[may_dangle] T: ?Sized> Drop for Box<T> {
398 // FIXME: Do nothing, drop is currently performed by compiler.
402 #[stable(feature = "rust1", since = "1.0.0")]
403 impl<T: Default> Default for Box<T> {
404 /// Creates a `Box<T>`, with the `Default` value for T.
405 fn default() -> Box<T> {
406 box Default::default()
410 #[stable(feature = "rust1", since = "1.0.0")]
// Default boxed slice is empty; boxing a `[T; 0]` performs no heap
// allocation for the elements (zero-sized value) and unsizes to `[T]`.
411 impl<T> Default for Box<[T]> {
412 fn default() -> Box<[T]> {
413 Box::<[T; 0]>::new([])
417 #[stable(feature = "default_box_extra", since = "1.17.0")]
// Default boxed str: reuse the empty `Box<[u8]>` default; an empty byte
// slice is trivially valid UTF-8, so the unchecked conversion is sound.
418 impl Default for Box<str> {
419 fn default() -> Box<str> {
420 unsafe { from_boxed_utf8_unchecked(Default::default()) }
424 #[stable(feature = "rust1", since = "1.0.0")]
425 impl<T: Clone> Clone for Box<T> {
426 /// Returns a new box with a `clone()` of this box's contents.
431 /// let x = Box::new(5);
432 /// let y = x.clone();
436 fn clone(&self) -> Box<T> {
437 box { (**self).clone() }
439 /// Copies `source`'s contents into `self` without creating a new allocation.
444 /// let x = Box::new(5);
445 /// let mut y = Box::new(10);
447 /// y.clone_from(&x);
449 /// assert_eq!(*y, 5);
// Delegates to the pointee's `clone_from` so the existing heap slot is
// reused instead of allocating a fresh box.
452 fn clone_from(&mut self, source: &Box<T>) {
453 (**self).clone_from(&(**source));
458 #[stable(feature = "box_slice_clone", since = "1.3.0")]
459 impl Clone for Box<str> {
460 fn clone(&self) -> Self {
461 let len = self.len();
462 let buf = RawVec::with_capacity(len);
// Byte-copy of the source string; the result is valid UTF-8 because the
// bytes come verbatim from an existing `str`.
464 ptr::copy_nonoverlapping(self.as_ptr(), buf.ptr(), len);
465 from_boxed_utf8_unchecked(buf.into_box())
// All of the comparison and hashing impls below simply delegate to the
// pointee (`&**self`), so a `Box<T>` compares and hashes exactly like `T`.
470 #[stable(feature = "rust1", since = "1.0.0")]
471 impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
473 fn eq(&self, other: &Box<T>) -> bool {
474 PartialEq::eq(&**self, &**other)
477 fn ne(&self, other: &Box<T>) -> bool {
478 PartialEq::ne(&**self, &**other)
481 #[stable(feature = "rust1", since = "1.0.0")]
482 impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
484 fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> {
485 PartialOrd::partial_cmp(&**self, &**other)
488 fn lt(&self, other: &Box<T>) -> bool {
489 PartialOrd::lt(&**self, &**other)
492 fn le(&self, other: &Box<T>) -> bool {
493 PartialOrd::le(&**self, &**other)
496 fn ge(&self, other: &Box<T>) -> bool {
497 PartialOrd::ge(&**self, &**other)
500 fn gt(&self, other: &Box<T>) -> bool {
501 PartialOrd::gt(&**self, &**other)
504 #[stable(feature = "rust1", since = "1.0.0")]
505 impl<T: ?Sized + Ord> Ord for Box<T> {
507 fn cmp(&self, other: &Box<T>) -> Ordering {
508 Ord::cmp(&**self, &**other)
511 #[stable(feature = "rust1", since = "1.0.0")]
512 impl<T: ?Sized + Eq> Eq for Box<T> {}
514 #[stable(feature = "rust1", since = "1.0.0")]
515 impl<T: ?Sized + Hash> Hash for Box<T> {
516 fn hash<H: hash::Hasher>(&self, state: &mut H) {
517 (**self).hash(state);
521 #[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
// A boxed hasher is itself a hasher: every method forwards to the boxed
// `Hasher`, so `Box<dyn Hasher>` can be used wherever a `Hasher` is needed.
522 impl<T: ?Sized + Hasher> Hasher for Box<T> {
523 fn finish(&self) -> u64 {
526 fn write(&mut self, bytes: &[u8]) {
527 (**self).write(bytes)
529 fn write_u8(&mut self, i: u8) {
532 fn write_u16(&mut self, i: u16) {
533 (**self).write_u16(i)
535 fn write_u32(&mut self, i: u32) {
536 (**self).write_u32(i)
538 fn write_u64(&mut self, i: u64) {
539 (**self).write_u64(i)
541 fn write_u128(&mut self, i: u128) {
542 (**self).write_u128(i)
544 fn write_usize(&mut self, i: usize) {
545 (**self).write_usize(i)
547 fn write_i8(&mut self, i: i8) {
550 fn write_i16(&mut self, i: i16) {
551 (**self).write_i16(i)
553 fn write_i32(&mut self, i: i32) {
554 (**self).write_i32(i)
556 fn write_i64(&mut self, i: i64) {
557 (**self).write_i64(i)
559 fn write_i128(&mut self, i: i128) {
560 (**self).write_i128(i)
562 fn write_isize(&mut self, i: isize) {
563 (**self).write_isize(i)
567 #[stable(feature = "from_for_ptrs", since = "1.6.0")]
// Moving any `T` into a box; equivalent to `Box::new(t)`.
568 impl<T> From<T> for Box<T> {
569 fn from(t: T) -> Self {
574 #[stable(feature = "box_from_slice", since = "1.17.0")]
// `T: Copy` allows a plain memcpy of the slice into freshly allocated
// (initially uninitialized) storage.
575 impl<'a, T: Copy> From<&'a [T]> for Box<[T]> {
576 fn from(slice: &'a [T]) -> Box<[T]> {
577 let mut boxed = unsafe { RawVec::with_capacity(slice.len()).into_box() };
578 boxed.copy_from_slice(slice);
583 #[stable(feature = "box_from_slice", since = "1.17.0")]
// Reuses the byte-slice impl above; the bytes come from a `str`, so the
// unchecked UTF-8 conversion is sound.
584 impl<'a> From<&'a str> for Box<str> {
585 fn from(s: &'a str) -> Box<str> {
586 unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
590 #[stable(feature = "boxed_str_conv", since = "1.19.0")]
// `str` and `[u8]` share a layout, so this is a pointer cast with no
// reallocation — ownership is transferred through into_raw/from_raw.
591 impl From<Box<str>> for Box<[u8]> {
592 fn from(s: Box<str>) -> Self {
593 unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) }
// NOTE(review): the enclosing `impl Box<Any> {` header is elided in this
// excerpt; the first `downcast` below belongs to it.
599 #[stable(feature = "rust1", since = "1.0.0")]
600 /// Attempt to downcast the box to a concrete type.
605 /// use std::any::Any;
607 /// fn print_if_string(value: Box<Any>) {
608 /// if let Ok(string) = value.downcast::<String>() {
609 /// println!("String ({}): {}", string.len(), string);
614 /// let my_string = "Hello World".to_string();
615 /// print_if_string(Box::new(my_string));
616 /// print_if_string(Box::new(0i8));
619 pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<Any>> {
// On success, re-wrap the same allocation at the concrete type; the
// preceding `is::<T>()` check (elided here) justifies the cast.
622 let raw: *mut Any = Box::into_raw(self);
623 Ok(Box::from_raw(raw as *mut T))
631 impl Box<Any + Send> {
633 #[stable(feature = "rust1", since = "1.0.0")]
634 /// Attempt to downcast the box to a concrete type.
639 /// use std::any::Any;
641 /// fn print_if_string(value: Box<Any + Send>) {
642 /// if let Ok(string) = value.downcast::<String>() {
643 /// println!("String ({}): {}", string.len(), string);
648 /// let my_string = "Hello World".to_string();
649 /// print_if_string(Box::new(my_string));
650 /// print_if_string(Box::new(0i8));
// Delegates to `Box<Any>::downcast`; the error arm restores the `Send`
// bound that the delegation erased (the value never changed, so it is
// still `Send`).
653 pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<Any + Send>> {
654 <Box<Any>>::downcast(self).map_err(|s| unsafe {
655 // reapply the Send marker
656 Box::from_raw(Box::into_raw(s) as *mut (Any + Send))
// Formatting impls: `Display` and `Debug` delegate to the pointee, so a
// box formats exactly like its contents; `Pointer` formats the address.
661 #[stable(feature = "rust1", since = "1.0.0")]
662 impl<T: fmt::Display + ?Sized> fmt::Display for Box<T> {
663 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
664 fmt::Display::fmt(&**self, f)
668 #[stable(feature = "rust1", since = "1.0.0")]
669 impl<T: fmt::Debug + ?Sized> fmt::Debug for Box<T> {
670 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
671 fmt::Debug::fmt(&**self, f)
675 #[stable(feature = "rust1", since = "1.0.0")]
676 impl<T: ?Sized> fmt::Pointer for Box<T> {
677 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
678 // It's not possible to extract the inner Uniq directly from the Box,
679 // instead we cast it to a *const which aliases the Unique
680 let ptr: *const T = &**self;
681 fmt::Pointer::fmt(&ptr, f)
// Deref/DerefMut give `Box<T>` its smart-pointer behavior (`*b`, auto-deref
// method calls). NOTE(review): bodies are elided in this excerpt —
// presumably `&**self` / `&mut **self`; confirm against the original file.
685 #[stable(feature = "rust1", since = "1.0.0")]
686 impl<T: ?Sized> Deref for Box<T> {
689 fn deref(&self) -> &T {
694 #[stable(feature = "rust1", since = "1.0.0")]
695 impl<T: ?Sized> DerefMut for Box<T> {
696 fn deref_mut(&mut self) -> &mut T {
// Iterator impls: a boxed iterator is itself an iterator, forwarding every
// method (including the `size_hint`/`nth` optimizations) to the pointee.
701 #[stable(feature = "rust1", since = "1.0.0")]
702 impl<I: Iterator + ?Sized> Iterator for Box<I> {
704 fn next(&mut self) -> Option<I::Item> {
707 fn size_hint(&self) -> (usize, Option<usize>) {
710 fn nth(&mut self, n: usize) -> Option<I::Item> {
714 #[stable(feature = "rust1", since = "1.0.0")]
715 impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
716 fn next_back(&mut self) -> Option<I::Item> {
720 #[stable(feature = "rust1", since = "1.0.0")]
721 impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
722 fn len(&self) -> usize {
725 fn is_empty(&self) -> bool {
// Marker trait: fused-ness of the inner iterator carries over to the box.
730 #[unstable(feature = "fused", issue = "35602")]
731 impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
734 /// `FnBox` is a version of the `FnOnce` intended for use with boxed
735 /// closure objects. The idea is that where one would normally store a
736 /// `Box<FnOnce()>` in a data structure, you should use
737 /// `Box<FnBox()>`. The two traits behave essentially the same, except
738 /// that a `FnBox` closure can only be called if it is boxed. (Note
739 /// that `FnBox` may be deprecated in the future if `Box<FnOnce()>`
740 /// closures become directly usable.)
744 /// Here is a snippet of code which creates a hashmap full of boxed
745 /// once closures and then removes them one by one, calling each
746 /// closure as it is removed. Note that the type of the closures
747 /// stored in the map is `Box<FnBox() -> i32>` and not `Box<FnOnce()
751 /// #![feature(fnbox)]
753 /// use std::boxed::FnBox;
754 /// use std::collections::HashMap;
756 /// fn make_map() -> HashMap<i32, Box<FnBox() -> i32>> {
757 /// let mut map: HashMap<i32, Box<FnBox() -> i32>> = HashMap::new();
758 /// map.insert(1, Box::new(|| 22));
759 /// map.insert(2, Box::new(|| 44));
764 /// let mut map = make_map();
765 /// for i in &[1, 2] {
766 /// let f = map.remove(&i).unwrap();
767 /// assert_eq!(f(), i * 22);
772 #[unstable(feature = "fnbox",
773 reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
// NOTE(review): the `pub trait FnBox<A> { type Output; ... }` header is
// elided in this excerpt; the method below is its single required method.
// Taking `self: Box<Self>` is what makes the trait object-safe to call.
777 fn call_box(self: Box<Self>, args: A) -> Self::Output;
780 #[unstable(feature = "fnbox",
781 reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
// Blanket impl: every `FnOnce` closure is an `FnBox` (the elided `where`
// clause presumably bounds `F: FnOnce<A>` — confirm in the original).
782 impl<A, F> FnBox<A> for F
785 type Output = F::Output;
787 fn call_box(self: Box<F>, args: A) -> F::Output {
792 #[unstable(feature = "fnbox",
793 reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
// These two impls are what make `Box<FnBox(...)>` directly callable with
// `f(...)` syntax, with and without the `Send` bound.
794 impl<'a, A, R> FnOnce<A> for Box<FnBox<A, Output = R> + 'a> {
797 extern "rust-call" fn call_once(self, args: A) -> R {
802 #[unstable(feature = "fnbox",
803 reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
804 impl<'a, A, R> FnOnce<A> for Box<FnBox<A, Output = R> + Send + 'a> {
807 extern "rust-call" fn call_once(self, args: A) -> R {
812 #[unstable(feature = "coerce_unsized", issue = "27732")]
// Enables unsizing coercions through the box, e.g. `Box<[T; N]> -> Box<[T]>`
// and `Box<Concrete> -> Box<Trait>`.
813 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
815 #[stable(feature = "box_slice_clone", since = "1.3.0")]
816 impl<T: Clone> Clone for Box<[T]> {
817 fn clone(&self) -> Self {
// Clone element-by-element into fresh storage, tracking progress in
// `BoxBuilder` so a panicking `clone()` doesn't leak or double-drop.
818 let mut new = BoxBuilder {
819 data: RawVec::with_capacity(self.len()),
823 let mut target = new.data.ptr();
825 for item in self.iter() {
827 ptr::write(target, item.clone());
828 target = target.offset(1);
// All elements written: hand the storage to a `Box<[T]>`, defusing the
// builder's panic-cleanup `Drop`.
834 return unsafe { new.into_box() };
836 // Helper type for responding to panics correctly.
// NOTE(review): the fields (at least `data` and a `len` counter, both
// used below) are elided in this excerpt.
837 struct BoxBuilder<T> {
842 impl<T> BoxBuilder<T> {
843 unsafe fn into_box(self) -> Box<[T]> {
// `ptr::read` moves `data` out without running `self`'s destructor path.
844 let raw = ptr::read(&self.data);
// If the builder is dropped before `into_box` (i.e. a clone panicked),
// drop the `len` elements that were already written.
850 impl<T> Drop for BoxBuilder<T> {
852 let mut data = self.data.ptr();
853 let max = unsafe { data.offset(self.len as isize) };
858 data = data.offset(1);
// Borrow/AsRef family: a `Box<T>` can be borrowed as a plain `&T`/`&mut T`.
// NOTE(review): bodies are elided in this excerpt — presumably each returns
// `&**self` / `&mut **self`; confirm against the original file.
866 #[stable(feature = "box_borrow", since = "1.1.0")]
867 impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
868 fn borrow(&self) -> &T {
873 #[stable(feature = "box_borrow", since = "1.1.0")]
874 impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
875 fn borrow_mut(&mut self) -> &mut T {
880 #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
881 impl<T: ?Sized> AsRef<T> for Box<T> {
882 fn as_ref(&self) -> &T {
887 #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
888 impl<T: ?Sized> AsMut<T> for Box<T> {
889 fn as_mut(&mut self) -> &mut T {
894 #[unstable(feature = "generator_trait", issue = "43122")]
895 impl<T> Generator for Box<T>
896 where T: Generator + ?Sized
898 type Yield = T::Yield;
899 type Return = T::Return;
900 fn resume(&mut self) -> GeneratorState<Self::Yield, Self::Return> {