1 // Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 #![unstable(feature = "allocator_api",
12 reason = "the precise API and guarantees it provides may be tweaked \
13 slightly, especially to possibly take into account the \
14 types being stored to make room for a future \
15 tracing garbage collector",
18 use core::intrinsics::{min_align_of_val, size_of_val};
19 use core::mem::{self, ManuallyDrop};
// FFI entry points into the compiler-selected global allocator. Each carries
// `#[rustc_allocator_nounwind]` because unwinding across these boundaries is UB.
// NOTE(review): the enclosing `extern "Rust" {` header and several argument
// lines fall in gaps of this chunk — the declarations below are fragments;
// confirm full signatures against the original liballoc/heap.rs.
// The `err` out-pointers are raw bytes the allocator writes an `AllocErr`
// through on failure.
30 #[rustc_allocator_nounwind]
31 fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
33 #[rustc_allocator_nounwind]
// Diverging OOM handler: consumes an `AllocErr` passed as raw bytes.
34 fn __rust_oom(err: *const u8) -> !;
35 #[rustc_allocator_nounwind]
36 fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
37 #[rustc_allocator_nounwind]
// Takes a `*const Layout` as raw bytes; min/max out-params presumably follow
// on the missing lines — TODO confirm.
38 fn __rust_usable_size(layout: *const u8,
41 #[rustc_allocator_nounwind]
42 fn __rust_realloc(ptr: *mut u8,
47 err: *mut u8) -> *mut u8;
48 #[rustc_allocator_nounwind]
49 fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;
50 #[rustc_allocator_nounwind]
// "excess" variants also report the usable size actually allocated
// (out-param lines missing from this view).
51 fn __rust_alloc_excess(size: usize,
54 err: *mut u8) -> *mut u8;
55 #[rustc_allocator_nounwind]
56 fn __rust_realloc_excess(ptr: *mut u8,
62 err: *mut u8) -> *mut u8;
63 #[rustc_allocator_nounwind]
// In-place resize hooks return a `u8` flag; presumably nonzero == success —
// the call sites' checks are not visible here, so verify.
64 fn __rust_grow_in_place(ptr: *mut u8,
68 new_align: usize) -> u8;
69 #[rustc_allocator_nounwind]
70 fn __rust_shrink_in_place(ptr: *mut u8,
74 new_align: usize) -> u8;
77 #[derive(Copy, Clone, Default, Debug)]
// NOTE(review): the `pub struct Heap;` definition itself falls in a gap of
// this chunk; the derive above attaches to it. Its `Alloc` impl below forwards
// every operation to the `__rust_*` allocator entry points.
80 unsafe impl Alloc for Heap {
// Allocate a block described by `layout` via `__rust_alloc`.
// NOTE(review): the `layout.align()` argument line and the null-check /
// `Ok(ptr)` branch are missing from this chunk — the visible `Err` arm is
// presumably guarded by `if ptr.is_null()`; confirm against the original.
82 unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
// `ManuallyDrop` around `mem::uninitialized` reserves space the FFI call can
// fill through the out-pointer without ever dropping an uninitialized value.
// (Today this would be `MaybeUninit`.)
83 let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
84 let ptr = __rust_alloc(layout.size(),
86 &mut *err as *mut AllocErr as *mut u8);
// Failure path: the allocator initialized `err`; move it out exactly once.
88 Err(ManuallyDrop::into_inner(err))
// Diverges by handing the error to the registered OOM hook as raw bytes;
// `__rust_oom` is declared `-> !` and never returns.
96 fn oom(&mut self, err: AllocErr) -> ! {
98 __rust_oom(&err as *const AllocErr as *const u8)
// Free `ptr`; per the `Alloc` contract the caller must supply the same
// size/align it allocated with — both are forwarded to `__rust_dealloc`.
103 unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
104 __rust_dealloc(ptr, layout.size(), layout.align())
// Query the (min, max) usable size for `layout`, passing the Layout as an
// opaque byte pointer. NOTE(review): the out-param arguments and the lines
// building the returned tuple are missing from this chunk — TODO confirm.
108 fn usable_size(&self, layout: &Layout) -> (usize, usize) {
112 __rust_usable_size(layout as *const Layout as *const u8,
// Reallocate `ptr` to a new layout via `__rust_realloc`, using the same
// error-out-pointer pattern as `alloc`. NOTE(review): the old/new layout
// argument lines and the null-check / `Ok(ptr)` branch are missing from this
// chunk; confirm against the original.
120 unsafe fn realloc(&mut self,
124 -> Result<*mut u8, AllocErr>
// Uninitialized error slot the FFI call may fill on failure (see `alloc`).
126 let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
127 let ptr = __rust_realloc(ptr,
132 &mut *err as *mut AllocErr as *mut u8);
134 Err(ManuallyDrop::into_inner(err))
// Same as `alloc` but the returned memory is zero-initialized by
// `__rust_alloc_zeroed`. NOTE(review): the `layout.align()` argument and the
// success branch are missing from this chunk.
142 unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
143 let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
144 let ptr = __rust_alloc_zeroed(layout.size(),
146 &mut *err as *mut AllocErr as *mut u8);
148 Err(ManuallyDrop::into_inner(err))
// Allocate and also report the usable ("excess") size actually granted.
// NOTE(review): the declaration/assignment of `size` (an out-param written by
// `__rust_alloc_excess`) and the null check fall in gaps of this chunk —
// `Ok(Excess(ptr, size))` below relies on them; confirm against the original.
155 unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
156 let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
158 let ptr = __rust_alloc_excess(layout.size(),
161 &mut *err as *mut AllocErr as *mut u8);
163 Err(ManuallyDrop::into_inner(err))
165 Ok(Excess(ptr, size))
// Reallocate and report the usable size granted, mirroring `alloc_excess`.
// NOTE(review): the `layout` parameter, the `size` out-param wiring, and the
// null-check branch are missing from this chunk; confirm against the original.
170 unsafe fn realloc_excess(&mut self,
173 new_layout: Layout) -> Result<Excess, AllocErr> {
174 let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
176 let ptr = __rust_realloc_excess(ptr,
182 &mut *err as *mut AllocErr as *mut u8);
184 Err(ManuallyDrop::into_inner(err))
186 Ok(Excess(ptr, size))
// Try to grow the allocation in place (no data move). The debug asserts
// encode the trait precondition: the new layout must not shrink and must keep
// the same alignment. NOTE(review): `ret` is a `u8` flag from
// `__rust_grow_in_place`; the success check (presumably `ret != 0` ->
// `Ok(())`) is missing from this chunk — only the failure arm is visible.
191 unsafe fn grow_in_place(&mut self,
195 -> Result<(), CannotReallocInPlace>
197 debug_assert!(new_layout.size() >= layout.size());
198 debug_assert!(new_layout.align() == layout.align());
199 let ret = __rust_grow_in_place(ptr,
207 Err(CannotReallocInPlace)
// Mirror of `grow_in_place` for shrinking: new size must be <= old, alignment
// unchanged. NOTE(review): as with `grow_in_place`, the success check on the
// `u8` return flag is missing from this chunk — only the failure arm shows.
212 unsafe fn shrink_in_place(&mut self,
215 new_layout: Layout) -> Result<(), CannotReallocInPlace> {
216 debug_assert!(new_layout.size() <= layout.size());
217 debug_assert!(new_layout.align() == layout.align());
218 let ret = __rust_shrink_in_place(ptr,
226 Err(CannotReallocInPlace)
231 /// An arbitrary non-null address to represent zero-size allocations.
233 /// This preserves the non-null invariant for types like `Box<T>`. The address
234 /// may overlap with non-zero-size memory allocations.
// Deprecated: a fixed sentinel ignores alignment; `Unique::empty()` /
// `Shared::empty()` produce a properly aligned dangling pointer instead.
235 #[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
236 #[unstable(feature = "heap_api", issue = "27700")]
237 pub const EMPTY: *mut () = 1 as *mut ();
239 /// The allocator for unique pointers.
240 // This function must not unwind. If it does, MIR trans will fail.
// Lang item invoked by `box` expressions / `Box::new` lowering.
242 #[lang = "exchange_malloc"]
244 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
// NOTE(review): the zero-size fast path (lines before this one) is missing
// from this chunk — presumably it returns a non-null dangling pointer for
// `size == 0`; confirm against the original.
// Safety: the compiler guarantees `size`/`align` form a valid layout here.
248 let layout = Layout::from_size_align_unchecked(size, align);
// On allocation failure the closure body (missing from this view) presumably
// calls `Heap.oom(err)`, which diverges.
249 Heap.alloc(layout).unwrap_or_else(|err| {
// Lang item the compiler calls to free a `Box<T>`'s backing allocation after
// the value has been dropped. Reads size/align from the (possibly unsized)
// value behind `ptr` before deallocating.
255 #[cfg_attr(not(test), lang = "box_free")]
257 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
258 let size = size_of_val(&*ptr);
259 let align = min_align_of_val(&*ptr);
260 // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
// NOTE(review): the `if size != 0 {` guard implied by the comment above falls
// in a gap of this chunk; the two lines below are presumably inside it.
262 let layout = Layout::from_size_align_unchecked(size, align);
263 Heap.dealloc(ptr as *mut u8, layout);
// NOTE(review): fragments of the `#[cfg(test)] mod tests` block — the module
// header, `#[test]`/`#[bench]` attributes, and several body lines are missing
// from this chunk.
270 use self::test::Bencher;
272 use heap::{Heap, Alloc, Layout};
// Checks that `alloc_zeroed` memory actually reads back as zero bytes
// (the verification loop between alloc and dealloc is not visible here).
275 fn allocate_zeroed() {
277 let layout = Layout::from_size_align(1024, 1).unwrap();
278 let ptr = Heap.alloc_zeroed(layout.clone())
279 .unwrap_or_else(|e| Heap.oom(e));
281 let end = ptr.offset(layout.size() as isize);
287 Heap.dealloc(ptr, layout);
// Benchmark: cost of boxing a small value through exchange_malloc/box_free.
292 fn alloc_owned_small(b: &mut Bencher) {
294 let _: Box<_> = box 10;